Mirror of https://github.com/AvengeMedia/DankMaterialShell.git (synced 2026-01-24 13:32:50 -05:00)
Refactor pre-commit hooks to use prek (#976)
* ci: change to prek for pre-commit
* refactor: fix shellcheck warnings for the scripts
* chore: unify whitespace formatting
* nix: add prek to dev shell
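This change drops the hand-rolled pre-commit hook script in favor of prek-managed hooks (see the `.pre-commit-config.yaml` files below). A minimal sketch of the contributor workflow; only `prek install` appears in this change, and the `run` invocation assumes prek mirrors pre-commit's CLI:

```bash
# One-time setup per clone (from the updated CONTRIBUTING.md)
prek install

# Assumption: prek mirrors pre-commit's CLI, so the full hook suite
# can be run against the whole tree before pushing
prek run --all-files
```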
.editorconfig (new file, 8 lines)
@@ -0,0 +1,8 @@
[*.sh]
# like -i=4
indent_style = space
indent_size = 4
[*.nix]
# like -i=4
indent_style = space
indent_size = 4
@@ -1,69 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

HOOK_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$HOOK_DIR/.." && pwd)"

cd "$REPO_ROOT"

# =============================================================================
# Go CI checks (when core/ files are staged)
# =============================================================================
STAGED_CORE_FILES=$(git diff --cached --name-only --diff-filter=ACMR | grep '^core/' || true)

if [[ -n "$STAGED_CORE_FILES" ]]; then
    echo "Go files staged in core/, running CI checks..."
    cd "$REPO_ROOT/core"

    # Format check
    echo " Checking gofmt..."
    UNFORMATTED=$(gofmt -s -l . 2>/dev/null || true)
    if [[ -n "$UNFORMATTED" ]]; then
        echo "The following files are not formatted:"
        echo "$UNFORMATTED"
        echo ""
        echo "Run: cd core && gofmt -s -w ."
        exit 1
    fi

    # golangci-lint
    if command -v golangci-lint &>/dev/null; then
        echo " Running golangci-lint..."
        golangci-lint run ./...
    else
        echo " Warning: golangci-lint not installed, skipping lint"
        echo " Install: go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest"
    fi

    # Tests
    echo " Running tests..."
    if ! go test ./... >/dev/null 2>&1; then
        echo "Tests failed! Run 'go test ./...' for details."
        exit 1
    fi

    # Build checks
    echo " Building..."
    mkdir -p bin
    go build -buildvcs=false -o bin/dms ./cmd/dms
    go build -buildvcs=false -o bin/dms-distro -tags distro_binary ./cmd/dms
    go build -buildvcs=false -o bin/dankinstall ./cmd/dankinstall

    echo "All Go CI checks passed!"
    cd "$REPO_ROOT"
fi

# =============================================================================
# i18n sync check (DISABLED for now)
# =============================================================================
# if [[ -n "${POEDITOR_API_TOKEN:-}" ]] && [[ -n "${POEDITOR_PROJECT_ID:-}" ]]; then
#     if command -v python3 &>/dev/null; then
#         if ! python3 scripts/i18nsync.py check &>/dev/null; then
#             echo "Translations out of sync"
#             echo "Run: python3 scripts/i18nsync.py sync"
#             exit 1
#         fi
#     fi
# fi

exit 0
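The Go checks this removed hook performed now run through the pre-commit configs added in this commit. For reference, a rough manual equivalent using only commands taken from the deleted script:

```bash
cd core
gofmt -s -l .            # list unformatted files; fix with: gofmt -s -w .
golangci-lint run ./...  # lint (requires golangci-lint on PATH)
go test ./...            # unit tests
go build -buildvcs=false -o bin/dms ./cmd/dms   # build check
```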
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 4 changes)
@@ -6,7 +6,7 @@ labels: "bug"
assignees: ""
---

<!-- If your issue is related to ICONS
- Purple and black checkerboards are QT's way of signalling an icon doesn't exist
- FIX: Configure a QT6 or Icon Pack in DMS Settings that has the icon you want
- Follow the [THEMING](https://danklinux.com/docs/dankmaterialshell/icon-theming) section to ensure your QT environment variable is configured correctly for themes.
@@ -62,4 +62,4 @@ Paste error messages or logs here

## Screenshots/Recordings

<!-- If applicable, add screenshots or screen recordings -->
.github/ISSUE_TEMPLATE/feature_request.md (vendored, 2 changes)
@@ -30,4 +30,4 @@ Is this feature specific to one compositor?

## Alternatives/Existing Solutions

<!-- Include any similar/pre-existing products that solve this problem -->
.github/ISSUE_TEMPLATE/support_request.md (vendored, 2 changes)
@@ -37,4 +37,4 @@ assignees: ""

## Screenshots/Recordings

<!-- If applicable, add screenshots or screen recordings -->
.github/workflows/go-ci.yml (vendored, 14 changes)
@@ -33,20 +33,6 @@ jobs:
        with:
          go-version-file: ./core/go.mod

      - name: Format check
        run: |
          if [ "$(gofmt -s -l . | wc -l)" -gt 0 ]; then
            echo "The following files are not formatted:"
            gofmt -s -l .
            exit 1
          fi

      - name: Run golangci-lint
        uses: golangci/golangci-lint-action@v9
        with:
          version: v2.6
          working-directory: core

      - name: Test
        run: go test -v ./...
.github/workflows/prek.yml (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
name: Pre-commit Checks

on:
  push:
  pull_request:
    branches: [master, main]
jobs:
  pre-commit-check:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: run pre-commit hooks
        uses: j178/prek-action@v1
.github/workflows/release.yml (vendored, 2 changes)
@@ -272,7 +272,7 @@ jobs:
          # Create QML source package (exclude build artifacts and git files)
          # Copy root LICENSE and CONTRIBUTING.md to quickshell/ for packaging
          cp LICENSE CONTRIBUTING.md quickshell/

          # Copy root assets directory to quickshell for systemd service and desktop file
          cp -r assets quickshell/
44
.github/workflows/run-copr.yml
vendored
44
.github/workflows/run-copr.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
|
||||
- name: Determine version
|
||||
id: version
|
||||
run: |
|
||||
@@ -40,31 +40,31 @@ jobs:
|
||||
echo "version=$VERSION" >> $GITHUB_OUTPUT
|
||||
echo "release=$RELEASE" >> $GITHUB_OUTPUT
|
||||
echo "✅ Building DMS hotfix version: $VERSION-$RELEASE"
|
||||
|
||||
|
||||
- name: Setup build environment
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y rpm wget curl jq gzip
|
||||
mkdir -p ~/rpmbuild/{BUILD,BUILDROOT,RPMS,SOURCES,SPECS,SRPMS}
|
||||
echo "✅ RPM build environment ready"
|
||||
|
||||
|
||||
- name: Download release assets
|
||||
run: |
|
||||
VERSION="${{ steps.version.outputs.version }}"
|
||||
cd ~/rpmbuild/SOURCES
|
||||
|
||||
|
||||
echo "📦 Downloading DMS QML source for v${VERSION}..."
|
||||
|
||||
|
||||
# Download DMS QML source
|
||||
wget "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-qml.tar.gz" || {
|
||||
echo "❌ Failed to download dms-qml.tar.gz for v${VERSION}"
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
echo "✅ Source downloaded"
|
||||
echo "Note: dms-cli binary will be downloaded during build based on target architecture"
|
||||
ls -lh
|
||||
|
||||
|
||||
- name: Generate stable spec file
|
||||
run: |
|
||||
VERSION="${{ steps.version.outputs.version }}"
|
||||
@@ -211,38 +211,38 @@ jobs:
|
||||
echo ""
|
||||
echo "=== Spec file preview ==="
|
||||
head -40 ~/rpmbuild/SPECS/dms.spec
|
||||
|
||||
|
||||
- name: Build SRPM
|
||||
id: build
|
||||
run: |
|
||||
cd ~/rpmbuild/SPECS
|
||||
|
||||
|
||||
echo "🔨 Building SRPM..."
|
||||
rpmbuild -bs dms.spec
|
||||
|
||||
|
||||
SRPM=$(ls ~/rpmbuild/SRPMS/*.src.rpm | tail -n 1)
|
||||
SRPM_NAME=$(basename "$SRPM")
|
||||
|
||||
|
||||
echo "srpm_path=$SRPM" >> $GITHUB_OUTPUT
|
||||
echo "srpm_name=$SRPM_NAME" >> $GITHUB_OUTPUT
|
||||
|
||||
|
||||
echo "✅ SRPM built: $SRPM_NAME"
|
||||
echo ""
|
||||
echo "=== SRPM Info ==="
|
||||
rpm -qpi "$SRPM"
|
||||
|
||||
|
||||
- name: Upload SRPM artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: dms-stable-srpm-${{ steps.version.outputs.version }}
|
||||
path: ${{ steps.build.outputs.srpm_path }}
|
||||
retention-days: 90
|
||||
|
||||
|
||||
- name: Install Copr CLI
|
||||
run: |
|
||||
sudo apt-get install -y python3-pip
|
||||
pip3 install copr-cli
|
||||
|
||||
|
||||
mkdir -p ~/.config
|
||||
cat > ~/.config/copr << EOF
|
||||
[copr-cli]
|
||||
@@ -252,30 +252,30 @@ jobs:
|
||||
copr_url = https://copr.fedorainfracloud.org
|
||||
EOF
|
||||
chmod 600 ~/.config/copr
|
||||
|
||||
|
||||
echo "✅ Copr CLI configured"
|
||||
|
||||
|
||||
- name: Upload to Copr
|
||||
run: |
|
||||
SRPM="${{ steps.build.outputs.srpm_path }}"
|
||||
VERSION="${{ steps.version.outputs.version }}"
|
||||
|
||||
|
||||
echo "🚀 Uploading SRPM to avengemedia/dms..."
|
||||
echo " SRPM: $(basename $SRPM)"
|
||||
echo " Version: $VERSION"
|
||||
|
||||
|
||||
BUILD_OUTPUT=$(copr-cli build avengemedia/dms "$SRPM" --nowait 2>&1)
|
||||
echo "$BUILD_OUTPUT"
|
||||
|
||||
|
||||
BUILD_ID=$(echo "$BUILD_OUTPUT" | grep -oP 'Build was added to.*\K[0-9]+' || echo "unknown")
|
||||
|
||||
|
||||
if [ "$BUILD_ID" != "unknown" ]; then
|
||||
echo "✅ Build submitted successfully!"
|
||||
echo "🔗 https://copr.fedorainfracloud.org/coprs/avengemedia/dms/build/$BUILD_ID/"
|
||||
else
|
||||
echo "⚠️ Could not extract build ID, but upload may have succeeded"
|
||||
fi
|
||||
|
||||
|
||||
- name: Build summary
|
||||
if: always()
|
||||
run: |
|
||||
|
||||
40
.github/workflows/run-obs.yml
vendored
40
.github/workflows/run-obs.yml
vendored
@@ -21,34 +21,34 @@ jobs:
|
||||
check-updates:
|
||||
name: Check for updates
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
|
||||
outputs:
|
||||
has_updates: ${{ steps.check.outputs.has_updates }}
|
||||
packages: ${{ steps.check.outputs.packages }}
|
||||
version: ${{ steps.check.outputs.version }}
|
||||
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
- name: Install OSC
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y osc
|
||||
|
||||
|
||||
mkdir -p ~/.config/osc
|
||||
cat > ~/.config/osc/oscrc << EOF
|
||||
[general]
|
||||
apiurl = https://api.opensuse.org
|
||||
|
||||
|
||||
[https://api.opensuse.org]
|
||||
user = ${{ secrets.OBS_USERNAME }}
|
||||
pass = ${{ secrets.OBS_PASSWORD }}
|
||||
EOF
|
||||
chmod 600 ~/.config/osc/oscrc
|
||||
|
||||
|
||||
- name: Check for updates
|
||||
id: check
|
||||
run: |
|
||||
@@ -116,13 +116,13 @@ jobs:
|
||||
if: |
|
||||
github.event_name == 'workflow_dispatch' ||
|
||||
needs.check-updates.outputs.has_updates == 'true'
|
||||
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
- name: Determine packages to update
|
||||
id: packages
|
||||
run: |
|
||||
@@ -140,7 +140,7 @@ jobs:
|
||||
else
|
||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
|
||||
- name: Update dms-git spec version
|
||||
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
|
||||
run: |
|
||||
@@ -148,13 +148,13 @@ jobs:
|
||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
||||
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
|
||||
|
||||
|
||||
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
|
||||
echo "📦 Updating dms-git.spec to version: $NEW_VERSION"
|
||||
|
||||
|
||||
# Update version in spec
|
||||
sed -i "s/^Version:.*/Version: $NEW_VERSION/" distro/opensuse/dms-git.spec
|
||||
|
||||
|
||||
# Add changelog entry
|
||||
DATE_STR=$(date "+%a %b %d %Y")
|
||||
CHANGELOG_ENTRY="* $DATE_STR Avenge Media <AvengeMedia.US@gmail.com> - ${NEW_VERSION}-1\n- Git snapshot (commit $COMMIT_COUNT: $COMMIT_HASH)"
|
||||
@@ -163,12 +163,12 @@ jobs:
|
||||
- name: Update Debian dms-git changelog version
|
||||
if: contains(steps.packages.outputs.packages, 'dms-git') || steps.packages.outputs.packages == 'all'
|
||||
run: |
|
||||
# Get commit info for dms-git versioning
|
||||
# Get commit info for dms-git versioning
|
||||
COMMIT_HASH=$(git rev-parse --short=8 HEAD)
|
||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
||||
BASE_VERSION=$(grep -oP '^Version:\s+\K[0-9.]+' distro/opensuse/dms.spec | head -1 || echo "0.6.2")
|
||||
|
||||
# Debian version format: 0.6.2+git2256.9162e314
|
||||
# Debian version format: 0.6.2+git2256.9162e314
|
||||
NEW_VERSION="${BASE_VERSION}+git${COMMIT_COUNT}.${COMMIT_HASH}"
|
||||
echo "📦 Updating Debian dms-git changelog to version: $NEW_VERSION"
|
||||
|
||||
@@ -182,7 +182,7 @@ jobs:
|
||||
echo "Current Debian version: $CURRENT_VERSION"
|
||||
echo "New version: $NEW_VERSION"
|
||||
|
||||
# Only update if version changed
|
||||
# Only update if version changed
|
||||
if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
|
||||
# Create new changelog entry at top
|
||||
TEMP_CHANGELOG=$(mktemp)
|
||||
@@ -211,10 +211,10 @@ jobs:
|
||||
VERSION="${{ steps.packages.outputs.version }}"
|
||||
VERSION_NO_V="${VERSION#v}"
|
||||
echo "Updating packaging to version $VERSION_NO_V"
|
||||
|
||||
|
||||
# Update openSUSE dms spec (stable only)
|
||||
sed -i "s/^Version:.*/Version: $VERSION_NO_V/" distro/opensuse/dms.spec
|
||||
|
||||
|
||||
# Update Debian _service files
|
||||
for service in distro/debian/*/_service; do
|
||||
if [[ -f "$service" ]]; then
|
||||
@@ -250,18 +250,18 @@ jobs:
|
||||
run: |
|
||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
||||
MESSAGE="Automated update from GitHub Actions"
|
||||
|
||||
|
||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
||||
MESSAGE="Update to ${{ steps.packages.outputs.version }}"
|
||||
fi
|
||||
|
||||
|
||||
if [[ "$PACKAGES" == "all" ]]; then
|
||||
bash distro/scripts/obs-upload.sh dms "$MESSAGE"
|
||||
bash distro/scripts/obs-upload.sh dms-git "Automated git update"
|
||||
else
|
||||
bash distro/scripts/obs-upload.sh "$PACKAGES" "$MESSAGE"
|
||||
fi
|
||||
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### OBS Package Update Complete" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
22
.github/workflows/run-ppa.yml
vendored
22
.github/workflows/run-ppa.yml
vendored
@@ -84,12 +84,12 @@ jobs:
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
|
||||
- name: Set up Go
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '1.24'
|
||||
cache: false
|
||||
cache: false
|
||||
|
||||
- name: Install build dependencies
|
||||
run: |
|
||||
@@ -102,7 +102,7 @@ jobs:
|
||||
build-essential \
|
||||
fakeroot \
|
||||
dpkg-dev
|
||||
|
||||
|
||||
- name: Configure GPG
|
||||
env:
|
||||
GPG_KEY: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
@@ -110,7 +110,7 @@ jobs:
|
||||
echo "$GPG_KEY" | gpg --import
|
||||
GPG_KEY_ID=$(gpg --list-secret-keys --keyid-format LONG | grep sec | awk '{print $2}' | cut -d'/' -f2)
|
||||
echo "DEBSIGN_KEYID=$GPG_KEY_ID" >> $GITHUB_ENV
|
||||
|
||||
|
||||
- name: Determine packages to upload
|
||||
id: packages
|
||||
run: |
|
||||
@@ -123,19 +123,19 @@ jobs:
|
||||
else
|
||||
echo "packages=${{ needs.check-updates.outputs.packages }}" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
|
||||
- name: Upload to PPA
|
||||
env:
|
||||
REBUILD_RELEASE: ${{ github.event.inputs.rebuild_release }}
|
||||
run: |
|
||||
PACKAGES="${{ steps.packages.outputs.packages }}"
|
||||
|
||||
|
||||
# Export to ensure it's available to subprocesses
|
||||
if [ -n "$REBUILD_RELEASE" ]; then
|
||||
export REBUILD_RELEASE
|
||||
echo "✓ Using rebuild release number: ppa$REBUILD_RELEASE"
|
||||
fi
|
||||
|
||||
|
||||
if [[ "$PACKAGES" == "all" ]]; then
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "Uploading dms to PPA..."
|
||||
@@ -144,13 +144,13 @@ jobs:
|
||||
fi
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
REBUILD_RELEASE="$REBUILD_RELEASE" bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms" dms questing
|
||||
|
||||
|
||||
echo ""
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "Uploading dms-git to PPA..."
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
REBUILD_RELEASE="$REBUILD_RELEASE" bash distro/scripts/ppa-upload.sh "distro/ubuntu/dms-git" dms-git questing
|
||||
|
||||
|
||||
echo ""
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "Uploading dms-greeter to PPA..."
|
||||
@@ -163,7 +163,7 @@ jobs:
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
REBUILD_RELEASE="$REBUILD_RELEASE" bash distro/scripts/ppa-upload.sh "distro/ubuntu/$PACKAGES" "$PPA_NAME" questing
|
||||
fi
|
||||
|
||||
|
||||
- name: Summary
|
||||
run: |
|
||||
echo "### PPA Package Upload Complete" >> $GITHUB_STEP_SUMMARY
|
||||
@@ -186,7 +186,7 @@ jobs:
|
||||
elif [[ "$PACKAGES" == "dms-greeter" ]]; then
|
||||
echo "- **PPA**: https://launchpad.net/~avengemedia/+archive/ubuntu/danklinux/+packages" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
|
||||
if [[ -n "${{ steps.packages.outputs.version }}" ]]; then
|
||||
echo "- **Version**: ${{ steps.packages.outputs.version }}" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
.gitignore (vendored, 2 changes)
@@ -104,7 +104,7 @@ go.work.sum

bin/

# Extracted source trees in Ubuntu package directories
distro/ubuntu/*/dms-git-repo/
distro/ubuntu/*/DankMaterialShell-*/
distro/ubuntu/danklinux/*/dsearch-*/
.pre-commit-config.yaml (new file, 12 lines)
@@ -0,0 +1,12 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
      - id: trailing-whitespace
      - id: check-yaml
      - id: end-of-file-fixer
  - repo: https://github.com/shellcheck-py/shellcheck-py
    rev: v0.10.0.1
    hooks:
      - id: shellcheck
        args: [-e, SC2164, -e, SC2001, -e, SC2012, -e, SC2317]
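The shellcheck hook excludes four checks (SC2164, SC2001, SC2012, SC2317) repo-wide. The same exclusions apply when running shellcheck by hand; the script path below is only a placeholder:

```bash
# -e excludes the listed ShellCheck codes, matching the hook's args
shellcheck -e SC2164 -e SC2001 -e SC2012 -e SC2317 path/to/script.sh
```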
@@ -6,10 +6,10 @@ To contribute fork this repository, make your changes, and open a pull request.

## Setup

-Enable pre-commit hooks to catch CI failures before pushing:
+Install [prek](https://prek.j178.dev/) then activate pre-commit hooks:

```bash
-git config core.hooksPath .githooks
+prek install
```

### Nix Development Shell
@@ -14,4 +14,4 @@ RestartSec=1.23
TimeoutStopSec=10

[Install]
WantedBy=graphical-session.target
core/.pre-commit-config.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
repos:
  - repo: https://github.com/golangci/golangci-lint
    rev: v2.6.2
    hooks:
      - id: golangci-lint-full
      - id: golangci-lint-fmt
      - id: golangci-lint-config-verify
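These hook ids come from golangci-lint's own pre-commit integration. A rough manual equivalent, assuming a golangci-lint v2 binary on PATH (the hook-to-command mapping here is an assumption, not stated in this change):

```bash
cd core
golangci-lint run ./...       # lint, roughly what golangci-lint-full runs
golangci-lint fmt             # formatters, roughly golangci-lint-fmt
golangci-lint config verify   # validate the config, roughly golangci-lint-config-verify
```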
@@ -139,4 +139,4 @@ Most packages available in standard repos. Minimal building required.
**Gentoo**
Uses Portage with GURU overlay. Automatically configures USE flags. Variable success depending on system configuration.

See installer output for distribution-specific details during installation.
@@ -8,7 +8,7 @@
|
||||
<rect x="0" y="29" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="20" y="29" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="0" y="37" width="24" height="8" fill="#CCBEFF"/>
|
||||
|
||||
|
||||
<!-- A -->
|
||||
<rect x="36" y="5" width="20" height="8" fill="#CCBEFF"/>
|
||||
<rect x="32" y="13" width="8" height="8" fill="#CCBEFF"/>
|
||||
@@ -18,7 +18,7 @@
|
||||
<rect x="52" y="29" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="32" y="37" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="52" y="37" width="8" height="8" fill="#CCBEFF"/>
|
||||
|
||||
|
||||
<!-- N -->
|
||||
<rect x="64" y="5" width="12" height="8" fill="#CCBEFF"/>
|
||||
<rect x="92" y="5" width="8" height="8" fill="#CCBEFF"/>
|
||||
@@ -32,7 +32,7 @@
|
||||
<rect x="92" y="29" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="64" y="37" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="84" y="37" width="16" height="8" fill="#CCBEFF"/>
|
||||
|
||||
|
||||
<!-- K -->
|
||||
<rect x="104" y="5" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="124" y="5" width="8" height="8" fill="#CCBEFF"/>
|
||||
@@ -43,4 +43,4 @@
|
||||
<rect x="120" y="29" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="104" y="37" width="8" height="8" fill="#CCBEFF"/>
|
||||
<rect x="124" y="37" width="8" height="8" fill="#CCBEFF"/>
|
||||
</svg>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 2.3 KiB After Width: | Height: | Size: 2.3 KiB |
@@ -17,8 +17,8 @@ func getThemedASCII() string {
|
||||
logo := `
|
||||
██████╗ █████╗ ███╗ ██╗██╗ ██╗
|
||||
██╔══██╗██╔══██╗████╗ ██║██║ ██╔╝
|
||||
██║ ██║███████║██╔██╗ ██║█████╔╝
|
||||
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
|
||||
██║ ██║███████║██╔██╗ ██║█████╔╝
|
||||
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
|
||||
██████╔╝██║ ██║██║ ╚████║██║ ██╗
|
||||
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝`
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/bin/sh
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
@@ -9,8 +9,8 @@ NC='\033[0m' # No Color
|
||||
|
||||
# Check for root privileges
|
||||
if [ "$(id -u)" == "0" ]; then
|
||||
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
|
||||
exit 1
|
||||
printf "%bError: This script must not be run as root%b\n" "$RED" "$NC"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if running on Linux
|
||||
@@ -22,17 +22,17 @@ fi
|
||||
# Detect architecture
|
||||
ARCH=$(uname -m)
|
||||
case "$ARCH" in
|
||||
x86_64)
|
||||
ARCH="amd64"
|
||||
;;
|
||||
aarch64)
|
||||
ARCH="arm64"
|
||||
;;
|
||||
*)
|
||||
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
|
||||
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
|
||||
exit 1
|
||||
;;
|
||||
x86_64)
|
||||
ARCH="amd64"
|
||||
;;
|
||||
aarch64)
|
||||
ARCH="arm64"
|
||||
;;
|
||||
*)
|
||||
printf "%bError: Unsupported architecture: %s%b\n" "$RED" "$ARCH" "$NC"
|
||||
printf "This installer only supports x86_64 (amd64) and aarch64 (arm64) architectures\n"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
# Get the latest release version
|
||||
@@ -55,7 +55,7 @@ curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LAT
|
||||
curl -L "https://github.com/AvengeMedia/DankMaterialShell/releases/download/$LATEST_VERSION/dankinstall-$ARCH.gz.sha256" -o "expected.sha256"
|
||||
|
||||
# Get the expected checksum
|
||||
EXPECTED_CHECKSUM=$(cat expected.sha256 | awk '{print $1}')
|
||||
EXPECTED_CHECKSUM=$(awk '{print $1}' expected.sha256)
|
||||
|
||||
# Calculate actual checksum
|
||||
printf "%bVerifying checksum...%b\n" "$GREEN" "$NC"
|
||||
@@ -67,7 +67,7 @@ if [ "$EXPECTED_CHECKSUM" != "$ACTUAL_CHECKSUM" ]; then
|
||||
printf "Expected: %s\n" "$EXPECTED_CHECKSUM"
|
||||
printf "Got: %s\n" "$ACTUAL_CHECKSUM"
|
||||
printf "The downloaded file may be corrupted or tampered with\n"
|
||||
cd - > /dev/null
|
||||
cd - >/dev/null
|
||||
rm -rf "$TEMP_DIR"
|
||||
exit 1
|
||||
fi
|
||||
@@ -82,5 +82,5 @@ printf "%bRunning installer...%b\n" "$GREEN" "$NC"
|
||||
./installer
|
||||
|
||||
# Cleanup
|
||||
cd - > /dev/null
|
||||
rm -rf "$TEMP_DIR"
|
||||
cd - >/dev/null
|
||||
rm -rf "$TEMP_DIR"
|
||||
|
||||
@@ -192,4 +192,4 @@ binds {
|
||||
// === System Controls ===
|
||||
Mod+Escape allow-inhibiting=false { toggle-keyboard-shortcuts-inhibit; }
|
||||
Mod+Shift+P { power-off-monitors; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,4 +33,4 @@ recent-windows {
|
||||
active-color "#124a73"
|
||||
urgent-color "#ffb4ab"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,7 +73,7 @@
|
||||
</description>
|
||||
<arg name="workspace" type="new_id" interface="ext_workspace_handle_v1"/>
|
||||
</event>
|
||||
|
||||
|
||||
<request name="commit">
|
||||
<description summary="all requests about the workspaces have been sent">
|
||||
The client must send this request after it has finished sending other
|
||||
@@ -242,7 +242,7 @@
|
||||
- a list of states, conveyed to the client with the state event
|
||||
- and optionally a set of coordinates, conveyed to the client with the
|
||||
coordinates event
|
||||
|
||||
|
||||
The client may request that the compositor activate or deactivate the workspace.
|
||||
|
||||
Each workspace can belong to only a single workspace group.
|
||||
|
||||
@@ -6,8 +6,8 @@ func (m Model) renderBanner() string {
|
||||
logo := `
|
||||
██████╗ █████╗ ███╗ ██╗██╗ ██╗
|
||||
██╔══██╗██╔══██╗████╗ ██║██║ ██╔╝
|
||||
██║ ██║███████║██╔██╗ ██║█████╔╝
|
||||
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
|
||||
██║ ██║███████║██╔██╗ ██║█████╔╝
|
||||
██║ ██║██╔══██║██║╚██╗██║██╔═██╗
|
||||
██████╔╝██║ ██║██║ ╚████║██║ ██╗
|
||||
╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝ `
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
#!/bin/sh
|
||||
#!/usr/bin/env sh
|
||||
|
||||
# Runs go generate for each directory, but in parallel. Any arguments are appended to the
|
||||
# go generate command.
|
||||
# Usage: $ ./generatep [go generate arguments]
|
||||
# Print all generate commands: $ ./generatep -x
|
||||
|
||||
cd ./wayland
|
||||
find . -type f -name '*.go' -exec dirname {} \; | sort -u | parallel -j 0 go generate $1 {}/.
|
||||
cd ./wayland || exit 1
|
||||
find . -type f -name '*.go' -exec dirname {} \; | sort -u | parallel -j 0 go generate "$1" {}/.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Include files that are normally excluded by .gitignore
|
||||
# These are needed for the build process on Launchpad
|
||||
# These are needed for the build process on Launchpad
|
||||
tar-ignore = !dms-distropkg-amd64.gz
|
||||
tar-ignore = !dms-source.tar.gz
|
||||
|
||||
@@ -104,19 +104,19 @@ if [ -x /usr/sbin/semanage ] && [ -x /usr/sbin/restorecon ]; then
|
||||
# Greeter launcher binary
|
||||
semanage fcontext -a -t bin_t '%{_bindir}/dms-greeter' >/dev/null 2>&1 || true
|
||||
restorecon %{_bindir}/dms-greeter >/dev/null 2>&1 || true
|
||||
|
||||
|
||||
# Greeter home directory
|
||||
semanage fcontext -a -t user_home_dir_t '%{_sharedstatedir}/greeter(/.*)?' >/dev/null 2>&1 || true
|
||||
restorecon -R %{_sharedstatedir}/greeter >/dev/null 2>&1 || true
|
||||
|
||||
|
||||
# Cache directory for greeter data
|
||||
semanage fcontext -a -t cache_home_t '%{_localstatedir}/cache/dms-greeter(/.*)?' >/dev/null 2>&1 || true
|
||||
restorecon -R %{_localstatedir}/cache/dms-greeter >/dev/null 2>&1 || true
|
||||
|
||||
|
||||
# Shared data directory
|
||||
semanage fcontext -a -t usr_t '%{_datadir}/quickshell/dms-greeter(/.*)?' >/dev/null 2>&1 || true
|
||||
restorecon -R %{_datadir}/quickshell/dms-greeter >/dev/null 2>&1 || true
|
||||
|
||||
|
||||
# PAM configuration
|
||||
restorecon %{_sysconfdir}/pam.d/greetd >/dev/null 2>&1 || true
|
||||
fi
|
||||
|
||||
@@ -134,4 +134,4 @@ pkill -USR1 -x dms >/dev/null 2>&1 || :
|
||||
%{_datadir}/fish/vendor_completions.d/dms.fish
|
||||
|
||||
%changelog
|
||||
{{{ git_repo_changelog }}}
|
||||
{{{ git_repo_changelog }}}
|
||||
|
||||
@@ -45,7 +45,7 @@ rm -rf "$TEMP_DIR"
|
||||
echo "Generating spec file..."
|
||||
CHANGELOG_DATE="$(date '+%a %b %d %Y')"
|
||||
|
||||
cat > ~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
|
||||
cat >~/rpmbuild/SPECS/dms.spec <<'SPECEOF'
|
||||
# Spec for DMS stable releases - Built locally
|
||||
|
||||
%global debug_package %{nil}
|
||||
@@ -187,7 +187,7 @@ echo "Building SRPM..."
|
||||
cd ~/rpmbuild/SPECS
|
||||
rpmbuild -bs dms.spec
|
||||
|
||||
SRPM=$(ls ~/rpmbuild/SRPMS/dms-${VERSION}-*.src.rpm | tail -n 1)
|
||||
SRPM=$(ls ~/rpmbuild/SRPMS/dms-"${VERSION}"-*.src.rpm | tail -n 1)
|
||||
if [ ! -f "$SRPM" ]; then
|
||||
echo "Error: SRPM not found!"
|
||||
exit 1
|
||||
@@ -196,7 +196,7 @@ fi
|
||||
echo "SRPM built successfully: $SRPM"
|
||||
|
||||
# Check if copr-cli is installed
|
||||
if ! command -v copr-cli &> /dev/null; then
|
||||
if ! command -v copr-cli &>/dev/null; then
|
||||
echo ""
|
||||
echo "copr-cli is not installed. Install it with:"
|
||||
echo " pip install copr-cli"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
# Unified OBS status checker for dms packages
|
||||
# Checks all platforms (Debian, OpenSUSE) and architectures (x86_64, aarch64)
|
||||
# Only pulls logs if build failed
|
||||
@@ -35,81 +35,81 @@ cd "$OBS_BASE" || {
|
||||
|
||||
for pkg in "${PACKAGES[@]}"; do
|
||||
case "$pkg" in
|
||||
dms)
|
||||
PROJECT="$OBS_BASE_PROJECT:dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="$OBS_BASE_PROJECT:dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$pkg'"
|
||||
continue
|
||||
;;
|
||||
dms)
|
||||
PROJECT="$OBS_BASE_PROJECT:dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="$OBS_BASE_PROJECT:dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$pkg'"
|
||||
continue
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "=========================================="
|
||||
echo "=== $pkg ==="
|
||||
echo "=========================================="
|
||||
|
||||
# Checkout if needed
|
||||
if [[ ! -d "$PROJECT/$pkg" ]]; then
|
||||
osc co "$PROJECT/$pkg" 2>&1 | tail -1
|
||||
fi
|
||||
|
||||
cd "$PROJECT/$pkg"
|
||||
|
||||
ALL_RESULTS=$(osc results 2>&1)
|
||||
|
||||
# Check each repository and architecture
|
||||
FAILED_BUILDS=()
|
||||
for repo in "${REPOS[@]}"; do
|
||||
for arch in "${ARCHES[@]}"; do
|
||||
STATUS=$(echo "$ALL_RESULTS" | grep "$repo.*$arch" | awk '{print $NF}' | head -1)
|
||||
|
||||
if [[ -n "$STATUS" ]]; then
|
||||
# Color code status
|
||||
case "$STATUS" in
|
||||
(
|
||||
|
||||
echo "=========================================="
|
||||
echo "=== $pkg ==="
|
||||
echo "=========================================="
|
||||
|
||||
# Checkout if needed
|
||||
if [[ ! -d "$PROJECT/$pkg" ]]; then
|
||||
osc co "$PROJECT/$pkg" 2>&1 | tail -1
|
||||
fi
|
||||
|
||||
cd "$PROJECT/$pkg"
|
||||
|
||||
ALL_RESULTS=$(osc results 2>&1)
|
||||
|
||||
# Check each repository and architecture
|
||||
FAILED_BUILDS=()
|
||||
for repo in "${REPOS[@]}"; do
|
||||
for arch in "${ARCHES[@]}"; do
|
||||
STATUS=$(echo "$ALL_RESULTS" | grep "$repo.*$arch" | awk '{print $NF}' | head -1)
|
||||
|
||||
if [[ -n "$STATUS" ]]; then
|
||||
# Color code status
|
||||
case "$STATUS" in
|
||||
succeeded)
|
||||
COLOR="\033[0;32m" # Green
|
||||
COLOR="\033[0;32m" # Green
|
||||
SYMBOL="✅"
|
||||
;;
|
||||
failed)
|
||||
COLOR="\033[0;31m" # Red
|
||||
COLOR="\033[0;31m" # Red
|
||||
SYMBOL="❌"
|
||||
FAILED_BUILDS+=("$repo $arch")
|
||||
;;
|
||||
unresolvable)
|
||||
COLOR="\033[0;33m" # Yellow
|
||||
COLOR="\033[0;33m" # Yellow
|
||||
SYMBOL="⚠️"
|
||||
;;
|
||||
*)
|
||||
COLOR="\033[0;37m" # White
|
||||
COLOR="\033[0;37m" # White
|
||||
SYMBOL="⏳"
|
||||
;;
|
||||
esac
|
||||
echo -e " $SYMBOL $repo $arch: ${COLOR}$STATUS\033[0m"
|
||||
fi
|
||||
esac
|
||||
echo -e " $SYMBOL $repo $arch: ${COLOR}$STATUS\033[0m"
|
||||
fi
|
||||
done
|
||||
done
|
||||
done
|
||||
|
||||
# Pull logs for failed builds
|
||||
if [[ ${#FAILED_BUILDS[@]} -gt 0 ]]; then
|
||||
echo ""
|
||||
echo " 📋 Fetching logs for failed builds..."
|
||||
for build in "${FAILED_BUILDS[@]}"; do
|
||||
read -r repo arch <<< "$build"
|
||||
|
||||
# Pull logs for failed builds
|
||||
if [[ ${#FAILED_BUILDS[@]} -gt 0 ]]; then
|
||||
echo ""
|
||||
echo " ────────────────────────────────────────────"
|
||||
echo " Build log: $repo $arch"
|
||||
echo " ────────────────────────────────────────────"
|
||||
osc remotebuildlog "$PROJECT" "$pkg" "$repo" "$arch" 2>&1 | tail -100
|
||||
done
|
||||
fi
|
||||
|
||||
echo ""
|
||||
cd - > /dev/null
|
||||
echo " 📋 Fetching logs for failed builds..."
|
||||
for build in "${FAILED_BUILDS[@]}"; do
|
||||
read -r repo arch <<<"$build"
|
||||
echo ""
|
||||
echo " ────────────────────────────────────────────"
|
||||
echo " Build log: $repo $arch"
|
||||
echo " ────────────────────────────────────────────"
|
||||
osc remotebuildlog "$PROJECT" "$pkg" "$repo" "$arch" 2>&1 | tail -100
|
||||
done
|
||||
fi
|
||||
|
||||
echo ""
|
||||
)
|
||||
done
|
||||
|
||||
echo "=========================================="
|
||||
echo "Status check complete!"
|
||||
|
||||
|
||||
@@ -17,21 +17,21 @@ MESSAGE=""
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
debian)
|
||||
UPLOAD_DEBIAN=true
|
||||
UPLOAD_OPENSUSE=false
|
||||
;;
|
||||
opensuse)
|
||||
UPLOAD_DEBIAN=false
|
||||
UPLOAD_OPENSUSE=true
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$PACKAGE" ]]; then
|
||||
PACKAGE="$arg"
|
||||
elif [[ -z "$MESSAGE" ]]; then
|
||||
MESSAGE="$arg"
|
||||
fi
|
||||
;;
|
||||
debian)
|
||||
UPLOAD_DEBIAN=true
|
||||
UPLOAD_OPENSUSE=false
|
||||
;;
|
||||
opensuse)
|
||||
UPLOAD_DEBIAN=false
|
||||
UPLOAD_OPENSUSE=true
|
||||
;;
|
||||
*)
|
||||
if [[ -z "$PACKAGE" ]]; then
|
||||
PACKAGE="$arg"
|
||||
elif [[ -z "$MESSAGE" ]]; then
|
||||
MESSAGE="$arg"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
@@ -46,17 +46,17 @@ if [[ -z "$PACKAGE" ]]; then
|
||||
echo " 2. dms-git - Nightly DMS"
|
||||
echo " a. all"
|
||||
echo ""
|
||||
read -p "Select package (1-${#AVAILABLE_PACKAGES[@]}, a): " selection
|
||||
|
||||
read -r -p "Select package (1-${#AVAILABLE_PACKAGES[@]}, a): " selection
|
||||
|
||||
if [[ "$selection" == "a" ]] || [[ "$selection" == "all" ]]; then
|
||||
PACKAGE="all"
|
||||
elif [[ "$selection" =~ ^[0-9]+$ ]] && [[ "$selection" -ge 1 ]] && [[ "$selection" -le ${#AVAILABLE_PACKAGES[@]} ]]; then
|
||||
PACKAGE="${AVAILABLE_PACKAGES[$((selection-1))]}"
|
||||
PACKAGE="${AVAILABLE_PACKAGES[$((selection - 1))]}"
|
||||
else
|
||||
echo "Error: Invalid selection"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
fi
|
||||
|
||||
if [[ -z "$MESSAGE" ]]; then
|
||||
@@ -107,7 +107,7 @@ if [[ "$PACKAGE" == "all" ]]; then
|
||||
echo "⚠️ Skipping $pkg (not found in distro/debian/)"
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
if [[ ${#FAILED[@]} -eq 0 ]]; then
|
||||
echo "✅ All packages uploaded successfully!"
|
||||
exit 0
|
||||
@@ -124,16 +124,16 @@ if [[ ! -d "distro/debian/$PACKAGE" ]]; then
|
||||
fi
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
PROJECT="dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$PACKAGE'"
|
||||
exit 1
|
||||
;;
|
||||
dms)
|
||||
PROJECT="dms"
|
||||
;;
|
||||
dms-git)
|
||||
PROJECT="dms-git"
|
||||
;;
|
||||
*)
|
||||
echo "Error: Unknown package '$PACKAGE'"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
OBS_PROJECT="${OBS_BASE_PROJECT}:${PROJECT}"
|
||||
@@ -216,8 +216,8 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]];
|
||||
# However, we need to check if we are also updating Debian, or if this script is expected to continue.
|
||||
# If this is OpenSUSE only run, we can exit.
|
||||
if [[ "$UPLOAD_DEBIAN" == false ]]; then
|
||||
echo "✅ No changes needed for OpenSUSE (not manual). Exiting."
|
||||
exit 0
|
||||
echo "✅ No changes needed for OpenSUSE (not manual). Exiting."
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
@@ -235,7 +235,7 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f
|
||||
echo " - OpenSUSE-only upload: creating source tarball"
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
trap 'rm -rf $TEMP_DIR' EXIT
|
||||
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]] && grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
@@ -244,8 +244,8 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null ||
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
@@ -265,16 +265,16 @@ if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ "$UPLOAD_DEBIAN" == false ]] && [[ -f
|
||||
cd "$OBS_TARBALL_DIR"
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
;;
|
||||
*)
|
||||
EXPECTED_DIR=$(basename "$SOURCE_DIR")
|
||||
;;
|
||||
dms)
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
;;
|
||||
*)
|
||||
EXPECTED_DIR=$(basename "$SOURCE_DIR")
|
||||
;;
|
||||
esac
|
||||
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
@@ -295,12 +295,12 @@ fi
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; then
|
||||
# Use CHANGELOG_VERSION already set above, or get it if not set
|
||||
if [[ -z "$CHANGELOG_VERSION" ]]; then
|
||||
CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" distro/debian/$PACKAGE/debian/changelog 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "0.1.11")
|
||||
CHANGELOG_VERSION=$(grep -m1 "^$PACKAGE" distro/debian/"$PACKAGE"/debian/changelog 2>/dev/null | sed 's/.*(\([^)]*\)).*/\1/' || echo "0.1.11")
|
||||
fi
|
||||
|
||||
|
||||
# Determine source format
|
||||
SOURCE_FORMAT=$(cat "distro/debian/$PACKAGE/debian/source/format" 2>/dev/null || echo "3.0 (quilt)")
|
||||
|
||||
|
||||
# For native format, remove any Debian revision (-N) from version
|
||||
# Native format cannot have revisions, so strip them if present
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]] && [[ "$CHANGELOG_VERSION" == *"-"* ]]; then
|
||||
@@ -308,26 +308,26 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
CHANGELOG_VERSION=$(echo "$CHANGELOG_VERSION" | sed 's/-[0-9]*$//')
|
||||
echo " Warning: Removed Debian revision from version for native format: $CHANGELOG_VERSION"
|
||||
fi
|
||||
|
||||
|
||||
if [[ "$SOURCE_FORMAT" == *"native"* ]]; then
|
||||
echo " - Native format detected: creating combined tarball"
|
||||
|
||||
VERSION="$CHANGELOG_VERSION"
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
trap "rm -rf $TEMP_DIR" EXIT
|
||||
trap 'rm -rf $TEMP_DIR' EXIT
|
||||
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
|
||||
SOURCE_DIR=""
|
||||
|
||||
|
||||
if [[ -f "distro/debian/$PACKAGE/_service" ]]; then
|
||||
if grep -q "tar_scm" "distro/debian/$PACKAGE/_service"; then
|
||||
GIT_URL=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "url" | sed 's/.*<param name="url">\(.*\)<\/param>.*/\1/')
|
||||
GIT_REVISION=$(grep -A 5 'name="tar_scm"' "distro/debian/$PACKAGE/_service" | grep "revision" | sed 's/.*<param name="revision">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
|
||||
if [[ -n "$GIT_URL" ]]; then
|
||||
echo " Cloning git source from: $GIT_URL (revision: ${GIT_REVISION:-master})"
|
||||
SOURCE_DIR="$TEMP_DIR/dms-git-source"
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null || \
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
if git clone --depth 1 --branch "${GIT_REVISION:-master}" "$GIT_URL" "$SOURCE_DIR" 2>/dev/null ||
|
||||
git clone --depth 1 "$GIT_URL" "$SOURCE_DIR" 2>/dev/null; then
|
||||
cd "$SOURCE_DIR"
|
||||
if [[ -n "$GIT_REVISION" ]]; then
|
||||
git checkout "$GIT_REVISION" 2>/dev/null || true
|
||||
@@ -341,19 +341,19 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
fi
|
||||
fi
|
||||
elif grep -q "download_url" "distro/debian/$PACKAGE/_service" && [[ "$PACKAGE" != "dms-git" ]]; then
|
||||
ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" | \
|
||||
grep '<param name="path">' | \
|
||||
ALL_PATHS=$(grep -A 5 '<service name="download_url">' "distro/debian/$PACKAGE/_service" |
|
||||
grep '<param name="path">' |
|
||||
sed 's/.*<param name="path">\(.*\)<\/param>.*/\1/')
|
||||
|
||||
|
||||
SOURCE_PATH=""
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" && \
|
||||
! echo "$path" | grep -qE "(distropkg|binary)"; then
|
||||
if echo "$path" | grep -qE "(source|archive|\.tar\.(gz|xz|bz2))" &&
|
||||
! echo "$path" | grep -qE "(distropkg|binary)"; then
|
||||
SOURCE_PATH="$path"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
if [[ -z "$SOURCE_PATH" ]]; then
|
||||
for path in $ALL_PATHS; do
|
||||
if echo "$path" | grep -qE "\.tar\.(gz|xz|bz2)$"; then
|
||||
@@ -362,12 +362,12 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
|
||||
if [[ -n "$SOURCE_PATH" ]]; then
|
||||
SOURCE_BLOCK=$(awk -v target="$SOURCE_PATH" '
|
||||
/<service name="download_url">/ { in_block=1; block="" }
|
||||
in_block { block=block"\n"$0 }
|
||||
/<\/service>/ {
|
||||
/<\/service>/ {
|
||||
if (in_block && block ~ target) {
|
||||
print block
|
||||
exit
|
||||
@@ -375,18 +375,18 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
in_block=0
|
||||
}
|
||||
' "distro/debian/$PACKAGE/_service")
|
||||
|
||||
|
||||
URL_PROTOCOL=$(echo "$SOURCE_BLOCK" | grep "protocol" | sed 's/.*<param name="protocol">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_HOST=$(echo "$SOURCE_BLOCK" | grep "host" | sed 's/.*<param name="host">\(.*\)<\/param>.*/\1/' | head -1)
|
||||
URL_PATH="$SOURCE_PATH"
|
||||
fi
|
||||
|
||||
|
||||
if [[ -n "$URL_PROTOCOL" && -n "$URL_HOST" && -n "$URL_PATH" ]]; then
|
||||
SOURCE_URL="${URL_PROTOCOL}://${URL_HOST}${URL_PATH}"
|
||||
echo " Downloading source from: $SOURCE_URL"
|
||||
|
||||
if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null || \
|
||||
curl -L -f -s -o "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null; then
|
||||
|
||||
if wget -q -O "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null ||
|
||||
curl -L -f -s -o "$TEMP_DIR/source-archive" "$SOURCE_URL" 2>/dev/null; then
|
||||
cd "$TEMP_DIR"
|
||||
if [[ "$SOURCE_URL" == *.tar.xz ]]; then
|
||||
tar -xJf source-archive
|
||||
@@ -414,7 +414,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
|
||||
echo "Error: Could not determine or obtain source for $PACKAGE"
|
||||
echo "SOURCE_DIR: $SOURCE_DIR"
|
||||
@@ -424,15 +424,15 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
echo " Found source directory: $SOURCE_DIR"
|
||||
|
||||
# Vendor Go dependencies for dms-git
|
||||
# Vendor Go dependencies for dms-git
|
||||
if [[ "$PACKAGE" == "dms-git" ]] && [[ -d "$SOURCE_DIR/core" ]]; then
|
||||
echo " - Vendoring Go dependencies for offline OBS build..."
|
||||
cd "$SOURCE_DIR/core"
|
||||
|
||||
if ! command -v go &> /dev/null; then
|
||||
if ! command -v go &>/dev/null; then
|
||||
echo "ERROR: Go not found. Install Go to vendor dependencies."
|
||||
echo " Install: sudo apt-get install golang-go (Debian/Ubuntu)"
|
||||
echo " or: sudo dnf install golang (Fedora)"
|
||||
@@ -454,7 +454,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
# Create OpenSUSE-compatible source tarballs BEFORE adding debian/ directory
|
||||
if [[ "$UPLOAD_OPENSUSE" == true ]] && [[ -f "distro/opensuse/$PACKAGE.spec" ]]; then
|
||||
echo " - Creating OpenSUSE-compatible source tarballs"
|
||||
|
||||
|
||||
SOURCE0=$(grep "^Source0:" "distro/opensuse/$PACKAGE.spec" | awk '{print $2}' | head -1)
|
||||
if [[ -z "$SOURCE0" && "$PACKAGE" == "dms-git" ]]; then
|
||||
SOURCE0="dms-git-source.tar.gz"
|
||||
@@ -463,68 +463,68 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
if [[ -n "$SOURCE0" ]]; then
|
||||
OBS_TARBALL_DIR=$(mktemp -d -t obs-tarball-XXXXXX)
|
||||
cd "$OBS_TARBALL_DIR"
|
||||
|
||||
|
||||
case "$PACKAGE" in
|
||||
dms)
|
||||
if [[ -n "$CHANGELOG_VERSION" ]]; then
|
||||
DMS_VERSION="$CHANGELOG_VERSION"
|
||||
else
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
fi
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
*)
|
||||
DIR_NAME=$(basename "$SOURCE_DIR")
|
||||
echo " Creating $SOURCE0 (directory: $DIR_NAME)"
|
||||
cp -r "$SOURCE_DIR" "$DIR_NAME"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
fi
|
||||
rm -rf "$DIR_NAME"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
dms)
|
||||
if [[ -n "$CHANGELOG_VERSION" ]]; then
|
||||
DMS_VERSION="$CHANGELOG_VERSION"
|
||||
else
|
||||
DMS_VERSION=$(grep "^Version:" "$REPO_ROOT/distro/opensuse/$PACKAGE.spec" | sed 's/^Version:[[:space:]]*//' | head -1)
|
||||
fi
|
||||
EXPECTED_DIR="DankMaterialShell-${DMS_VERSION}"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
dms-git)
|
||||
EXPECTED_DIR="dms-git-source"
|
||||
echo " Creating $SOURCE0 (directory: $EXPECTED_DIR)"
|
||||
cp -r "$SOURCE_DIR" "$EXPECTED_DIR"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$EXPECTED_DIR"
|
||||
fi
|
||||
rm -rf "$EXPECTED_DIR"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
*)
|
||||
DIR_NAME=$(basename "$SOURCE_DIR")
|
||||
echo " Creating $SOURCE0 (directory: $DIR_NAME)"
|
||||
cp -r "$SOURCE_DIR" "$DIR_NAME"
|
||||
if [[ "$SOURCE0" == *.tar.xz ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cJf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
elif [[ "$SOURCE0" == *.tar.bz2 ]]; then
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -cjf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
else
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$SOURCE0" "$DIR_NAME"
|
||||
fi
|
||||
rm -rf "$DIR_NAME"
|
||||
echo " Created $SOURCE0 ($(stat -c%s "$WORK_DIR/$SOURCE0" 2>/dev/null || echo 0) bytes)"
|
||||
;;
|
||||
esac
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$OBS_TARBALL_DIR"
|
||||
echo " - OpenSUSE source tarballs created"
|
||||
fi
|
||||
|
||||
|
||||
cp "distro/opensuse/$PACKAGE.spec" "$WORK_DIR/"
|
||||
fi
|
||||
|
||||
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
echo " Copying debian/ directory into source"
|
||||
cp -r "distro/debian/$PACKAGE/debian" "$SOURCE_DIR/"
|
||||
|
||||
|
||||
# For dms, rename directory to match what debian/rules expects
|
||||
# debian/rules uses UPSTREAM_VERSION which is the full version from changelog
|
||||
if [[ "$PACKAGE" == "dms" ]]; then
|
||||
@@ -542,15 +542,15 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
rm -f "$WORK_DIR/$COMBINED_TARBALL"
|
||||
|
||||
|
||||
echo " Creating combined tarball: $COMBINED_TARBALL"
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
TARBALL_BASE=$(basename "$SOURCE_DIR")
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
|
||||
if [[ "$PACKAGE" == "dms" ]]; then
|
||||
TARBALL_DIR=$(tar -tzf "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null | head -1 | cut -d'/' -f1)
|
||||
EXPECTED_TARBALL_DIR="DankMaterialShell-${VERSION}"
|
||||
@@ -563,10 +563,10 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
|
||||
TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
|
||||
|
||||
|
||||
# Extract Build-Depends from debian/control using awk for proper multi-line parsing
|
||||
if [[ -f "$REPO_ROOT/distro/debian/$PACKAGE/debian/control" ]]; then
|
||||
BUILD_DEPS=$(awk '
|
||||
@@ -591,8 +591,8 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ -d "distro/debian/$PACKAGE/debian" ]]; t
|
||||
else
|
||||
BUILD_DEPS="debhelper-compat (= 13)"
|
||||
fi
|
||||
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
|
||||
cat >"$WORK_DIR/$PACKAGE.dsc" <<EOF
|
||||
Format: 3.0 (native)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
@@ -603,7 +603,7 @@ Build-Depends: $BUILD_DEPS
|
||||
Files:
|
||||
$TARBALL_MD5 $TARBALL_SIZE $COMBINED_TARBALL
|
||||
EOF
|
||||
|
||||
|
||||
echo " - Generated $PACKAGE.dsc for native format"
|
||||
fi
|
||||
else
|
||||
@@ -613,12 +613,12 @@ EOF
|
||||
else
|
||||
VERSION="${CHANGELOG_VERSION}-1"
|
||||
fi
|
||||
|
||||
|
||||
echo " - Quilt format detected: creating debian.tar.gz"
|
||||
tar -czf "$WORK_DIR/debian.tar.gz" -C "distro/debian/$PACKAGE" debian/
|
||||
|
||||
|
||||
echo " - Generating $PACKAGE.dsc for quilt format"
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
cat >"$WORK_DIR/$PACKAGE.dsc" <<EOF
|
||||
Format: 3.0 (quilt)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
@@ -671,7 +671,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
if [[ -n "$OLD_DSC_VERSION" ]] && [[ "$OLD_DSC_BASE" == "$CHANGELOG_BASE" ]]; then
|
||||
if [[ "$IS_MANUAL" == true ]]; then
|
||||
echo "==> Detected rebuild of same base version $CHANGELOG_BASE, incrementing version"
|
||||
|
||||
|
||||
# If REBUILD_RELEASE is set, use that number directly
|
||||
if [[ -n "${REBUILD_RELEASE:-}" ]]; then
|
||||
if [[ "$CHANGELOG_VERSION" =~ ^([0-9.]+)\+git([0-9]+)(\.[a-f0-9]+)?$ ]]; then
|
||||
@@ -739,18 +739,18 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
echo " Warning: Could not parse version format, appending ppa1: $CHANGELOG_VERSION -> $NEW_VERSION"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
if [[ -z "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR" ]] || [[ ! -d "$SOURCE_DIR/debian" ]]; then
|
||||
echo " Error: Source directory with debian/ not found for version increment"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
SOURCE_CHANGELOG="$SOURCE_DIR/debian/changelog"
|
||||
if [[ ! -f "$SOURCE_CHANGELOG" ]]; then
|
||||
echo " Error: Changelog not found in source directory: $SOURCE_CHANGELOG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
REPO_CHANGELOG="$REPO_ROOT/distro/debian/$PACKAGE/debian/changelog"
|
||||
TEMP_CHANGELOG=$(mktemp)
|
||||
{
|
||||
@@ -763,24 +763,24 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
if [[ -f "$REPO_CHANGELOG" ]]; then
|
||||
OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1)
|
||||
if [[ -n "$OLD_ENTRY_START" ]]; then
|
||||
tail -n +$OLD_ENTRY_START "$REPO_CHANGELOG"
|
||||
tail -n +"$OLD_ENTRY_START" "$REPO_CHANGELOG"
|
||||
fi
|
||||
fi
|
||||
} > "$TEMP_CHANGELOG"
|
||||
} >"$TEMP_CHANGELOG"
|
||||
cp "$TEMP_CHANGELOG" "$SOURCE_CHANGELOG"
|
||||
rm -f "$TEMP_CHANGELOG"
|
||||
|
||||
|
||||
CHANGELOG_VERSION="$NEW_VERSION"
|
||||
VERSION="$NEW_VERSION"
|
||||
COMBINED_TARBALL="${PACKAGE}_${VERSION}.tar.gz"
|
||||
|
||||
|
||||
for old_tarball in "${PACKAGE}"_*.tar.gz; do
|
||||
if [[ -f "$old_tarball" ]] && [[ "$old_tarball" != "${PACKAGE}_${NEW_VERSION}.tar.gz" ]]; then
|
||||
echo " Removing old tarball from OBS: $old_tarball"
|
||||
osc rm -f "$old_tarball" 2>/dev/null || rm -f "$old_tarball"
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
if [[ "$PACKAGE" == "dms" ]] && [[ -f "$WORK_DIR/dms-source.tar.gz" ]]; then
|
||||
echo " Recreating dms-source.tar.gz with new directory name for incremented version"
|
||||
EXPECTED_SOURCE_DIR="DankMaterialShell-${NEW_VERSION}"
|
||||
@@ -810,7 +810,7 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
cd "$REPO_ROOT"
|
||||
rm -rf "$TEMP_SOURCE_DIR"
|
||||
fi
|
||||
|
||||
|
||||
echo " Recreating tarball with new version: $COMBINED_TARBALL"
|
||||
if [[ -n "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR" ]] && [[ -d "$SOURCE_DIR/debian" ]]; then
|
||||
if [[ "$PACKAGE" == "dms" ]]; then
|
||||
@@ -848,10 +848,10 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
if [[ -f "$REPO_CHANGELOG" ]]; then
|
||||
OLD_ENTRY_START=$(grep -n "^$PACKAGE (" "$REPO_CHANGELOG" | sed -n '2p' | cut -d: -f1)
|
||||
if [[ -n "$OLD_ENTRY_START" ]]; then
|
||||
tail -n +$OLD_ENTRY_START "$REPO_CHANGELOG"
|
||||
tail -n +"$OLD_ENTRY_START" "$REPO_CHANGELOG"
|
||||
fi
|
||||
fi
|
||||
} > "$TEMP_CHANGELOG"
|
||||
} >"$TEMP_CHANGELOG"
|
||||
cp "$TEMP_CHANGELOG" "$EXPECTED_DIR/debian/changelog"
|
||||
rm -f "$TEMP_CHANGELOG"
|
||||
fi
|
||||
@@ -867,19 +867,19 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
rm -f "$WORK_DIR/$COMBINED_TARBALL"
|
||||
|
||||
|
||||
echo " Creating combined tarball: $COMBINED_TARBALL"
|
||||
cd "$(dirname "$SOURCE_DIR")"
|
||||
TARBALL_BASE=$(basename "$SOURCE_DIR")
|
||||
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf "$WORK_DIR/$COMBINED_TARBALL" "$TARBALL_BASE"
|
||||
cd "$REPO_ROOT"
|
||||
fi
|
||||
|
||||
|
||||
TARBALL_SIZE=$(stat -c%s "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null || stat -f%z "$WORK_DIR/$COMBINED_TARBALL" 2>/dev/null)
|
||||
TARBALL_MD5=$(md5sum "$WORK_DIR/$COMBINED_TARBALL" | cut -d' ' -f1)
|
||||
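The `--sort`, `--mtime`, `--owner`, and `--group` flags pin tar's metadata so the archive is byte-for-byte reproducible; without them OBS would see a "changed" tarball on every run even when the sources are identical. A small sketch of that property (directory name is hypothetical):

```bash
src=DankMaterialShell-0.5.2   # hypothetical source directory
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf a.tar.gz "$src"
tar --sort=name --mtime='2000-01-01 00:00:00' --owner=0 --group=0 -czf b.tar.gz "$src"
md5sum a.tar.gz b.tar.gz      # identical sums -> no spurious re-upload
```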
|
||||
|
||||
# Extract Build-Depends from debian/control using awk for proper multi-line parsing
|
||||
if [[ -f "$REPO_ROOT/distro/debian/$PACKAGE/debian/control" ]]; then
|
||||
BUILD_DEPS=$(awk '
|
||||
@@ -904,8 +904,8 @@ if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$SOURCE_FORMAT" == *"native"* ]] && [[
|
||||
else
|
||||
BUILD_DEPS="debhelper-compat (= 13)"
|
||||
fi
|
||||
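The awk body that fills `BUILD_DEPS` is elided by this hunk. A hedged sketch of the multi-line parse it refers to: debian/control folds long fields onto indented continuation lines, so a plain grep would truncate them.

```bash
# Assumed reconstruction, not the script's exact awk program.
BUILD_DEPS=$(awk '
    /^Build-Depends:/          { sub(/^Build-Depends:[[:space:]]*/, ""); deps = $0; in_field = 1; next }
    in_field && /^[[:space:]]/ { gsub(/^[[:space:]]+/, ""); deps = deps " " $0; next }
    in_field                   { in_field = 0 }
    END                        { print deps }
' "$REPO_ROOT/distro/debian/$PACKAGE/debian/control")
```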
|
||||
cat > "$WORK_DIR/$PACKAGE.dsc" << EOF
|
||||
|
||||
cat >"$WORK_DIR/$PACKAGE.dsc" <<EOF
|
||||
Format: 3.0 (native)
|
||||
Source: $PACKAGE
|
||||
Binary: $PACKAGE
|
||||
@@ -954,11 +954,11 @@ ls -la 2>&1 | head -20
|
||||
echo "==> Staging changes"
|
||||
echo "Files to upload:"
|
||||
if [[ "$UPLOAD_DEBIAN" == true ]] && [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
ls -lh *.tar.gz *.tar.xz *.tar *.spec *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
ls -lh ./*.tar.gz ./*.tar.xz ./*.tar ./*.spec ./*.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
elif [[ "$UPLOAD_DEBIAN" == true ]]; then
|
||||
ls -lh *.tar.gz *.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
ls -lh ./*.tar.gz ./*.dsc _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
elif [[ "$UPLOAD_OPENSUSE" == true ]]; then
|
||||
ls -lh *.tar.gz *.tar.xz *.tar *.spec _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
ls -lh ./*.tar.gz ./*.tar.xz ./*.tar ./*.spec _service 2>/dev/null | awk '{print " " $9 " (" $5 ")"}'
|
||||
fi
|
||||
echo ""
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/usr/bin/env bash
|
||||
# Generic source package builder for DMS PPA packages
|
||||
# Usage: ./create-source.sh <package-dir> [ubuntu-series]
|
||||
#
|
||||
@@ -54,7 +54,7 @@ PACKAGE_PARENT=$(dirname "$PACKAGE_DIR")
|
||||
|
||||
# Create temporary working directory (like OBS)
|
||||
TEMP_WORK_DIR=$(mktemp -d -t ppa_build_work_XXXXXX)
|
||||
trap "rm -rf '$TEMP_WORK_DIR'" EXIT
|
||||
trap 'rm -rf "$TEMP_WORK_DIR"' EXIT
|
||||
|
||||
info "Building source package for: $PACKAGE_NAME"
|
||||
info "Package directory: $PACKAGE_DIR"
|
||||
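The trap rewrite a few lines above is the shellcheck SC2064 fix: with double quotes the path is baked into the trap string when the trap is defined, with single quotes `$TEMP_WORK_DIR` is expanded only when the EXIT trap fires, and a path containing a single quote no longer breaks the command. A minimal illustration:

```bash
tmp=$(mktemp -d)
trap 'rm -rf "$tmp"' EXIT    # resolved when the trap fires; later changes to $tmp are honored
# vs. trap "rm -rf '$tmp'" EXIT, which freezes the current value into the trap string
```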
@@ -79,7 +79,7 @@ done
|
||||
|
||||
# Verify GPG key is set up
|
||||
info "Checking GPG key setup..."
|
||||
if ! gpg --list-secret-keys &> /dev/null; then
|
||||
if ! gpg --list-secret-keys &>/dev/null; then
|
||||
error "No GPG secret keys found. Please set up GPG first!"
|
||||
error "See GPG_SETUP.md for instructions"
|
||||
exit 1
|
||||
@@ -88,7 +88,7 @@ fi
|
||||
success "GPG key found"
|
||||
|
||||
# Check if debuild is installed
|
||||
if ! command -v debuild &> /dev/null; then
|
||||
if ! command -v debuild &>/dev/null; then
|
||||
error "debuild not found. Install devscripts:"
|
||||
error " sudo dnf install devscripts"
|
||||
exit 1
|
||||
@@ -137,7 +137,7 @@ cd "$WORK_PACKAGE_DIR"
|
||||
get_latest_tag() {
|
||||
local repo="$1"
|
||||
# Try GitHub API first (faster)
|
||||
if command -v curl &> /dev/null; then
|
||||
if command -v curl &>/dev/null; then
|
||||
LATEST_TAG=$(curl -s "https://api.github.com/repos/$repo/releases/latest" 2>/dev/null | grep '"tag_name":' | sed 's/.*"tag_name": "\(.*\)".*/\1/' | head -1)
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
echo "$LATEST_TAG" | sed 's/^v//'
|
||||
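`get_latest_tag` prefers the GitHub API because it is a single HTTPS round trip. A hedged sketch of the same lookup with a fallback for when the API is unavailable or rate-limited; the fallback path is an assumption, not part of the script:

```bash
get_latest_tag_sketch() {
    local repo="$1" tag
    tag=$(curl -fsSL "https://api.github.com/repos/${repo}/releases/latest" 2>/dev/null |
        grep '"tag_name":' | sed 's/.*"tag_name": *"\(.*\)".*/\1/' | head -1)
    if [ -z "$tag" ]; then
        # Assumed fallback: list remote tags and take the highest version
        tag=$(git ls-remote --tags --refs "https://github.com/${repo}.git" |
            sed 's|.*refs/tags/||' | sort -V | tail -1)
    fi
    echo "${tag#v}"
}
```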
@@ -175,69 +175,69 @@ fi
|
||||
|
||||
# Special handling for known packages
|
||||
case "$PACKAGE_NAME" in
|
||||
dms-git)
|
||||
IS_GIT_PACKAGE=true
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
SOURCE_DIR="dms-git-repo"
|
||||
;;
|
||||
dms)
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
info "Downloading pre-built binaries and source for dms..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
dms-git)
|
||||
IS_GIT_PACKAGE=true
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
SOURCE_DIR="dms-git-repo"
|
||||
;;
|
||||
dms)
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
info "Downloading pre-built binaries and source for dms..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
# Download amd64 binary (will be included in source package)
|
||||
if [ ! -f "dms-distropkg-amd64.gz" ]; then
|
||||
info "Downloading dms binary for amd64..."
|
||||
if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-distropkg-amd64.gz"; then
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dms-distropkg-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
# Download amd64 binary (will be included in source package)
|
||||
if [ ! -f "dms-distropkg-amd64.gz" ]; then
|
||||
info "Downloading dms binary for amd64..."
|
||||
if wget -O dms-distropkg-amd64.gz "https://github.com/AvengeMedia/DankMaterialShell/releases/download/v${VERSION}/dms-distropkg-amd64.gz"; then
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dms-distropkg-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# Download source tarball for QML files
|
||||
if [ ! -f "dms-source.tar.gz" ]; then
|
||||
info "Downloading dms source for QML files..."
|
||||
if wget -O dms-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
|
||||
success "source tarball downloaded"
|
||||
else
|
||||
error "Failed to download dms-source.tar.gz"
|
||||
exit 1
|
||||
fi
|
||||
# Download source tarball for QML files
|
||||
if [ ! -f "dms-source.tar.gz" ]; then
|
||||
info "Downloading dms source for QML files..."
|
||||
if wget -O dms-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
|
||||
success "source tarball downloaded"
|
||||
else
|
||||
error "Failed to download dms-source.tar.gz"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
dms-greeter)
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
info "Downloading source for dms-greeter..."
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
fi
|
||||
;;
|
||||
dms-greeter)
|
||||
GIT_REPO="AvengeMedia/DankMaterialShell"
|
||||
info "Downloading source for dms-greeter..."
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
if [ ! -f "dms-greeter-source.tar.gz" ]; then
|
||||
info "Downloading dms-greeter source..."
|
||||
if wget -O dms-greeter-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
|
||||
success "source tarball downloaded"
|
||||
else
|
||||
error "Failed to download dms-greeter-source.tar.gz"
|
||||
exit 1
|
||||
fi
|
||||
if [ ! -f "dms-greeter-source.tar.gz" ]; then
|
||||
info "Downloading dms-greeter source..."
|
||||
if wget -O dms-greeter-source.tar.gz "https://github.com/AvengeMedia/DankMaterialShell/archive/refs/tags/v${VERSION}.tar.gz"; then
|
||||
success "source tarball downloaded"
|
||||
else
|
||||
error "Failed to download dms-greeter-source.tar.gz"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
danksearch)
|
||||
# danksearch uses pre-built binary from releases
|
||||
GIT_REPO="AvengeMedia/danksearch"
|
||||
;;
|
||||
dgop)
|
||||
# dgop uses pre-built binary from releases
|
||||
GIT_REPO="AvengeMedia/dgop"
|
||||
;;
|
||||
fi
|
||||
;;
|
||||
danksearch)
|
||||
# danksearch uses pre-built binary from releases
|
||||
GIT_REPO="AvengeMedia/danksearch"
|
||||
;;
|
||||
dgop)
|
||||
# dgop uses pre-built binary from releases
|
||||
GIT_REPO="AvengeMedia/dgop"
|
||||
;;
|
||||
esac
|
||||
|
||||
# Handle git packages
|
||||
if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
info "Detected git package: $PACKAGE_NAME"
|
||||
|
||||
|
||||
# Determine source directory name
|
||||
if [ -z "$SOURCE_DIR" ]; then
|
||||
# Default: use package name without -git suffix + -source or -repo
|
||||
@@ -252,7 +252,7 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
SOURCE_DIR="${BASE_NAME}-source"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
# Always clone fresh source to get latest commit info
|
||||
info "Cloning $GIT_REPO from GitHub (getting latest commit info)..."
|
||||
TEMP_CLONE=$(mktemp -d)
|
||||
@@ -260,7 +260,7 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
# Get git commit info from fresh clone
|
||||
GIT_COMMIT_HASH=$(cd "$TEMP_CLONE" && git rev-parse --short HEAD)
|
||||
GIT_COMMIT_COUNT=$(cd "$TEMP_CLONE" && git rev-list --count HEAD)
|
||||
|
||||
|
||||
# Get upstream version from latest git tag (e.g., 0.2.1)
|
||||
# Sort all tags by version and get the latest one (not just the one reachable from HEAD)
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git tag -l "v*" | sed 's/^v//' | sort -V | tail -1)
|
||||
@@ -272,36 +272,36 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
# Last resort: use git describe
|
||||
UPSTREAM_VERSION=$(cd "$TEMP_CLONE" && git describe --tags --abbrev=0 2>/dev/null | sed 's/^v//' || echo "0.0.1")
|
||||
fi
|
||||
|
||||
|
||||
# Verify we got valid commit info
|
||||
if [ -z "$GIT_COMMIT_COUNT" ] || [ "$GIT_COMMIT_COUNT" = "0" ]; then
|
||||
error "Failed to get commit count from $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
if [ -z "$GIT_COMMIT_HASH" ]; then
|
||||
error "Failed to get commit hash from $GIT_REPO"
|
||||
rm -rf "$TEMP_CLONE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
success "Got commit info: $GIT_COMMIT_COUNT ($GIT_COMMIT_HASH), upstream: $UPSTREAM_VERSION"
|
||||
|
||||
|
||||
# Update changelog with git commit info
|
||||
info "Updating changelog with git commit info..."
|
||||
# Format: 0.2.1+git705.fdbb86appa1
|
||||
# Check if we're rebuilding the same commit (increment PPA number if so)
|
||||
BASE_VERSION="${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}"
|
||||
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
|
||||
|
||||
|
||||
# Use REBUILD_RELEASE if provided, otherwise auto-increment
|
||||
if [[ -n "${REBUILD_RELEASE:-}" ]]; then
|
||||
PPA_NUM=$REBUILD_RELEASE
|
||||
info "Using REBUILD_RELEASE=$REBUILD_RELEASE for PPA number"
|
||||
else
|
||||
PPA_NUM=1
|
||||
|
||||
|
||||
# If current version matches the base version, increment PPA number
|
||||
# Escape special regex characters in BASE_VERSION for pattern matching
|
||||
ESCAPED_BASE=$(echo "$BASE_VERSION" | sed 's/\./\\./g' | sed 's/+/\\+/g')
|
||||
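`ESCAPED_BASE` exists because the base version contains `.` and `+`, both regex metacharacters; unescaped they would match unrelated versions. A hedged sketch of the increment branch that this hunk cuts off:

```bash
# Assumed continuation - the real branch follows in the part of the diff not shown here.
if [[ "$CURRENT_VERSION" =~ ppa([0-9]+)$ ]] && [[ "$CURRENT_VERSION" == "${BASE_VERSION}ppa"* ]]; then
    PPA_NUM=$(( BASH_REMATCH[1] + 1 ))   # same commit rebuilt: bump the ppa suffix
fi
```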
@@ -318,36 +318,36 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
info "New commit or first build, using PPA number $PPA_NUM"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
NEW_VERSION="${BASE_VERSION}ppa${PPA_NUM}"
|
||||
|
||||
|
||||
# Use sed to update changelog (non-interactive, faster)
|
||||
# Get current changelog content - find the next package header line (starts with package name)
|
||||
# Skip the first entry entirely by finding the second occurrence of the package name at start of line
|
||||
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
|
||||
if [ -n "$OLD_ENTRY_START" ]; then
|
||||
# Found second entry, use everything from there
|
||||
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog)
|
||||
CHANGELOG_CONTENT=$(tail -n +"$OLD_ENTRY_START" debian/changelog)
|
||||
else
|
||||
# No second entry found, changelog will only have new entry
|
||||
CHANGELOG_CONTENT=""
|
||||
fi
|
||||
|
||||
|
||||
# Create new changelog entry with proper format
|
||||
CHANGELOG_ENTRY="${SOURCE_NAME} (${NEW_VERSION}) ${UBUNTU_SERIES}; urgency=medium
|
||||
|
||||
* Git snapshot (commit ${GIT_COMMIT_COUNT}: ${GIT_COMMIT_HASH})
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
|
||||
|
||||
# Write new changelog (new entry, blank line, then old entries)
|
||||
echo "$CHANGELOG_ENTRY" > debian/changelog
|
||||
echo "$CHANGELOG_ENTRY" >debian/changelog
|
||||
if [ -n "$CHANGELOG_CONTENT" ]; then
|
||||
echo "" >> debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >> debian/changelog
|
||||
echo "" >>debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >>debian/changelog
|
||||
fi
|
||||
success "Version updated to $NEW_VERSION"
|
||||
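Condensed, the changelog update above is a splice: write the new entry, drop the old top entry, and keep everything from the second package header down. A compact restatement of the same steps (this variant goes through a temp file rather than a shell variable):

```bash
second_entry=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
{
    printf '%s\n' "$CHANGELOG_ENTRY"
    if [ -n "$second_entry" ]; then
        printf '\n'
        tail -n +"$second_entry" debian/changelog
    fi
} >debian/changelog.new && mv debian/changelog.new debian/changelog
```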
|
||||
|
||||
# Now clone to source directory (without .git for inclusion in package)
|
||||
rm -rf "$SOURCE_DIR"
|
||||
cp -r "$TEMP_CLONE" "$SOURCE_DIR"
|
||||
@@ -355,8 +355,8 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
# Save version info for dms-git build process
|
||||
if [ "$PACKAGE_NAME" = "dms-git" ]; then
|
||||
info "Saving version info to .dms-version for build process..."
|
||||
echo "VERSION=${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}" > "$SOURCE_DIR/.dms-version"
|
||||
echo "COMMIT=${GIT_COMMIT_HASH}" >> "$SOURCE_DIR/.dms-version"
|
||||
echo "VERSION=${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}" >"$SOURCE_DIR/.dms-version"
|
||||
echo "COMMIT=${GIT_COMMIT_HASH}" >>"$SOURCE_DIR/.dms-version"
|
||||
success "Version info saved: ${UPSTREAM_VERSION}+git${GIT_COMMIT_COUNT}.${GIT_COMMIT_HASH}"
|
||||
|
||||
# Vendor Go dependencies (Launchpad has no internet access)
|
||||
@@ -397,7 +397,7 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
/^\[source\.crates-io\]/ { printing=1 }
|
||||
printing { print }
|
||||
/^directory = "vendor"$/ { exit }
|
||||
' > .cargo/config.toml
|
||||
' >.cargo/config.toml
|
||||
|
||||
# Verify vendor directory was created
|
||||
if [ ! -d "vendor" ]; then
|
||||
@@ -428,7 +428,6 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
success "Source prepared for packaging"
|
||||
else
|
||||
error "Failed to clone $GIT_REPO"
|
||||
@@ -439,15 +438,15 @@ if [ "$IS_GIT_PACKAGE" = true ] && [ -n "$GIT_REPO" ]; then
|
||||
elif [ -n "$GIT_REPO" ]; then
|
||||
info "Detected stable package: $PACKAGE_NAME"
|
||||
info "Fetching latest tag from $GIT_REPO..."
|
||||
|
||||
|
||||
LATEST_TAG=$(get_latest_tag "$GIT_REPO")
|
||||
if [ -n "$LATEST_TAG" ]; then
|
||||
# Check source format - native packages can't use dashes
|
||||
SOURCE_FORMAT=$(cat debian/source/format 2>/dev/null | head -1 || echo "3.0 (quilt)")
|
||||
SOURCE_FORMAT=$(head -1 debian/source/format 2>/dev/null || echo "3.0 (quilt)")
|
||||
|
||||
# Get current version to check if we need to increment PPA number
|
||||
CURRENT_VERSION=$(dpkg-parsechangelog -S Version 2>/dev/null || echo "")
|
||||
|
||||
|
||||
# Use REBUILD_RELEASE if provided, otherwise auto-increment
|
||||
if [[ -n "${REBUILD_RELEASE:-}" ]]; then
|
||||
PPA_NUM=$REBUILD_RELEASE
|
||||
@@ -498,11 +497,11 @@ elif [ -n "$GIT_REPO" ]; then
|
||||
# Get current changelog content - find the next package header line
|
||||
OLD_ENTRY_START=$(grep -n "^${SOURCE_NAME} (" debian/changelog | sed -n '2p' | cut -d: -f1)
|
||||
if [ -n "$OLD_ENTRY_START" ]; then
|
||||
CHANGELOG_CONTENT=$(tail -n +$OLD_ENTRY_START debian/changelog)
|
||||
CHANGELOG_CONTENT=$(tail -n +"$OLD_ENTRY_START" debian/changelog)
|
||||
else
|
||||
CHANGELOG_CONTENT=""
|
||||
fi
|
||||
|
||||
|
||||
# Create appropriate changelog message
|
||||
if [ "$PPA_NUM" -gt 1 ]; then
|
||||
CHANGELOG_MSG="Rebuild for packaging fixes (ppa${PPA_NUM})"
|
||||
@@ -515,10 +514,10 @@ elif [ -n "$GIT_REPO" ]; then
|
||||
* ${CHANGELOG_MSG}
|
||||
|
||||
-- Avenge Media <AvengeMedia.US@gmail.com> $(date -R)"
|
||||
echo "$CHANGELOG_ENTRY" > debian/changelog
|
||||
echo "$CHANGELOG_ENTRY" >debian/changelog
|
||||
if [ -n "$CHANGELOG_CONTENT" ]; then
|
||||
echo "" >> debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >> debian/changelog
|
||||
echo "" >>debian/changelog
|
||||
echo "$CHANGELOG_CONTENT" >>debian/changelog
|
||||
fi
|
||||
success "Version updated to $NEW_VERSION"
|
||||
else
|
||||
@@ -532,47 +531,47 @@ fi
|
||||
# Handle packages that need pre-built binaries downloaded
|
||||
cd "$PACKAGE_DIR"
|
||||
case "$PACKAGE_NAME" in
|
||||
danksearch)
|
||||
info "Downloading pre-built binaries for danksearch..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
danksearch)
|
||||
info "Downloading pre-built binaries for danksearch..."
|
||||
# Get version from changelog (remove ppa suffix for both quilt and native formats)
|
||||
# Native: 0.5.2ppa1 -> 0.5.2, Quilt: 0.5.2-1ppa1 -> 0.5.2
|
||||
VERSION=$(dpkg-parsechangelog -S Version | sed 's/-[^-]*$//' | sed 's/ppa[0-9]*$//')
|
||||
|
||||
# Download both amd64 and arm64 binaries (will be included in source package)
|
||||
# Launchpad can't download during build, so we include both architectures
|
||||
if [ ! -f "dsearch-amd64" ]; then
|
||||
info "Downloading dsearch binary for amd64..."
|
||||
if wget -O dsearch-amd64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-amd64.gz"; then
|
||||
gunzip dsearch-amd64.gz
|
||||
chmod +x dsearch-amd64
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
# Download both amd64 and arm64 binaries (will be included in source package)
|
||||
# Launchpad can't download during build, so we include both architectures
|
||||
if [ ! -f "dsearch-amd64" ]; then
|
||||
info "Downloading dsearch binary for amd64..."
|
||||
if wget -O dsearch-amd64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-amd64.gz"; then
|
||||
gunzip dsearch-amd64.gz
|
||||
chmod +x dsearch-amd64
|
||||
success "amd64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-amd64.gz"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ ! -f "dsearch-arm64" ]; then
|
||||
info "Downloading dsearch binary for arm64..."
|
||||
if wget -O dsearch-arm64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-arm64.gz"; then
|
||||
gunzip dsearch-arm64.gz
|
||||
chmod +x dsearch-arm64
|
||||
success "arm64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-arm64.gz"
|
||||
exit 1
|
||||
fi
|
||||
if [ ! -f "dsearch-arm64" ]; then
|
||||
info "Downloading dsearch binary for arm64..."
|
||||
if wget -O dsearch-arm64.gz "https://github.com/AvengeMedia/danksearch/releases/download/v${VERSION}/dsearch-linux-arm64.gz"; then
|
||||
gunzip dsearch-arm64.gz
|
||||
chmod +x dsearch-arm64
|
||||
success "arm64 binary downloaded"
|
||||
else
|
||||
error "Failed to download dsearch-arm64.gz"
|
||||
exit 1
|
||||
fi
|
||||
;;
|
||||
dgop)
|
||||
# dgop binary should already be committed in the repo
|
||||
if [ ! -f "dgop" ]; then
|
||||
warn "dgop binary not found - should be committed to repo"
|
||||
fi
|
||||
;;
|
||||
fi
|
||||
;;
|
||||
dgop)
|
||||
# dgop binary should already be committed in the repo
|
||||
if [ ! -f "dgop" ]; then
|
||||
warn "dgop binary not found - should be committed to repo"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
cd - > /dev/null
|
||||
cd - >/dev/null
|
||||
|
||||
# Check if this version already exists on PPA (only in CI environment)
|
||||
if command -v rmadison >/dev/null 2>&1; then
|
||||
@@ -586,10 +585,10 @@ if command -v rmadison >/dev/null 2>&1; then
|
||||
cd "$PACKAGE_DIR"
|
||||
# Still clean up extracted sources
|
||||
case "$PACKAGE_NAME" in
|
||||
dms-git)
|
||||
rm -rf DankMaterialShell-*
|
||||
success "Cleaned up DankMaterialShell-*/ directory"
|
||||
;;
|
||||
dms-git)
|
||||
rm -rf DankMaterialShell-*
|
||||
success "Cleaned up DankMaterialShell-*/ directory"
|
||||
;;
|
||||
esac
|
||||
exit 0
|
||||
fi
|
||||
@@ -621,11 +620,11 @@ if yes | DEBIAN_FRONTEND=noninteractive debuild -S $DEBUILD_SOURCE_FLAG -d; then
|
||||
|
||||
# Copy build artifacts back to parent directory
|
||||
info "Copying build artifacts to $PACKAGE_PARENT..."
|
||||
cp -v "$TEMP_WORK_DIR"/${SOURCE_NAME}_${CHANGELOG_VERSION}* "$PACKAGE_PARENT/" 2>/dev/null || true
|
||||
cp -v "$TEMP_WORK_DIR"/"${SOURCE_NAME}"_"${CHANGELOG_VERSION}"* "$PACKAGE_PARENT/" 2>/dev/null || true
|
||||
|
||||
# List generated files
|
||||
info "Generated files in $PACKAGE_PARENT:"
|
||||
ls -lh "$PACKAGE_PARENT"/${SOURCE_NAME}_${CHANGELOG_VERSION}* 2>/dev/null || true
|
||||
ls -lh "$PACKAGE_PARENT"/"${SOURCE_NAME}"_"${CHANGELOG_VERSION}"* 2>/dev/null || true
|
||||
|
||||
# Show what to do next
|
||||
echo
|
||||
|
||||
@@ -12,7 +12,7 @@ RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
@@ -58,23 +58,18 @@ CHANGES_FILE=$(realpath "$CHANGES_FILE")
|
||||
info "Uploading to PPA: ppa:avengemedia/$PPA_NAME"
|
||||
info "Changes file: $CHANGES_FILE"
|
||||
|
||||
# Check if dput or lftp is installed
|
||||
UPLOAD_METHOD=""
|
||||
if command -v dput &> /dev/null; then
|
||||
UPLOAD_METHOD="dput"
|
||||
elif command -v lftp &> /dev/null; then
|
||||
UPLOAD_METHOD="lftp"
|
||||
warn "dput not found, using lftp as fallback"
|
||||
# Check if dput is installed
|
||||
if command -v dput &>/dev/null; then
|
||||
info "dput found"
|
||||
else
|
||||
error "Neither dput nor lftp found. Install one with:"
|
||||
error " sudo dnf install dput-ng # Preferred but broken on Fedora"
|
||||
error " sudo dnf install lftp # Alternative upload method"
|
||||
error "dput not found. Install with:"
|
||||
error " sudo dnf install dput-ng"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if ~/.dput.cf exists
|
||||
if [ ! -f "$HOME/.dput.cf" ]; then
|
||||
error "~/.dput.cf not found!"
|
||||
error "$HOME/.dput.cf not found!"
|
||||
echo
|
||||
info "Create it from template:"
|
||||
echo " cp $(dirname "$0")/../dput.cf.template ~/.dput.cf"
|
||||
@@ -160,7 +155,7 @@ elif [ "$UPLOAD_METHOD" = "lftp" ]; then
|
||||
# Use lftp to upload to Launchpad PPA
|
||||
CHANGES_DIR=$(dirname "$CHANGES_FILE")
|
||||
CHANGES_BASENAME=$(basename "$CHANGES_FILE")
|
||||
|
||||
|
||||
# Extract files to upload from .changes file
|
||||
FILES_TO_UPLOAD=("$CHANGES_BASENAME")
|
||||
while IFS= read -r line; do
|
||||
@@ -168,14 +163,14 @@ elif [ "$UPLOAD_METHOD" = "lftp" ]; then
|
||||
FILES_TO_UPLOAD+=("${BASH_REMATCH[1]}")
|
||||
fi
|
||||
done < "$CHANGES_FILE"
|
||||
|
||||
|
||||
# Build lftp command to upload all files
|
||||
LFTP_COMMANDS="set ftp:ssl-allow no; open ftp://ppa.launchpad.net; user anonymous ''; cd ~avengemedia/ubuntu/$PPA_NAME/;"
|
||||
for file in "${FILES_TO_UPLOAD[@]}"; do
|
||||
LFTP_COMMANDS="$LFTP_COMMANDS put '$CHANGES_DIR/$file';"
|
||||
done
|
||||
LFTP_COMMANDS="$LFTP_COMMANDS bye"
|
||||
|
||||
|
||||
if echo "$LFTP_COMMANDS" | lftp; then
|
||||
UPLOAD_SUCCESS=true
|
||||
fi
|
||||
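The upload list is derived from the `Files:` section of the .changes file, where each indented line ends in the file name. A hedged awk equivalent of that loop (same idea, different tool):

```bash
# Field layout per line: md5 size section priority filename
mapfile -t upload_files < <(awk '
    /^Files:/                  { in_files = 1; next }
    in_files && /^[[:space:]]/ { print $NF; next }
    in_files                   { in_files = 0 }
' "$CHANGES_FILE")
printf 'would upload: %s\n' "${upload_files[@]}"
```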
|
||||
@@ -12,7 +12,7 @@ RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
BLUE='\033[0;34m'
|
||||
NC='\033[0m'
|
||||
NC='\033[0m'
|
||||
|
||||
info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||
success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||
@@ -98,24 +98,20 @@ info "Step 2: Uploading to PPA..."
|
||||
# Check if using lftp (for all PPAs) or dput
|
||||
if [ "$PPA_NAME" = "danklinux" ] || [ "$PPA_NAME" = "dms" ] || [ "$PPA_NAME" = "dms-git" ]; then
|
||||
warn "Using lftp for upload"
|
||||
|
||||
# Extract version from changes file
|
||||
VERSION=$(grep "^Version:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
SOURCE_NAME=$(grep "^Source:" "$CHANGES_FILE" | awk '{print $2}')
|
||||
|
||||
|
||||
# Find all files to upload
|
||||
BUILD_DIR=$(dirname "$CHANGES_FILE")
|
||||
CHANGES_BASENAME=$(basename "$CHANGES_FILE")
|
||||
DSC_FILE="${CHANGES_BASENAME/_source.changes/.dsc}"
|
||||
TARBALL="${CHANGES_BASENAME/_source.changes/.tar.xz}"
|
||||
BUILDINFO="${CHANGES_BASENAME/_source.changes/_source.buildinfo}"
|
||||
|
||||
|
||||
# Check all files exist
|
||||
MISSING_FILES=()
|
||||
[ ! -f "$BUILD_DIR/$DSC_FILE" ] && MISSING_FILES+=("$DSC_FILE")
|
||||
[ ! -f "$BUILD_DIR/$TARBALL" ] && MISSING_FILES+=("$TARBALL")
|
||||
[ ! -f "$BUILD_DIR/$BUILDINFO" ] && MISSING_FILES+=("$BUILDINFO")
|
||||
|
||||
|
||||
if [ ${#MISSING_FILES[@]} -gt 0 ]; then
|
||||
error "Missing required files:"
|
||||
for file in "${MISSING_FILES[@]}"; do
|
||||
@@ -123,17 +119,17 @@ if [ "$PPA_NAME" = "danklinux" ] || [ "$PPA_NAME" = "dms" ] || [ "$PPA_NAME" = "
|
||||
done
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
info "Uploading files:"
|
||||
info " - $CHANGES_BASENAME"
|
||||
info " - $DSC_FILE"
|
||||
info " - $TARBALL"
|
||||
info " - $BUILDINFO"
|
||||
echo
|
||||
|
||||
|
||||
# lftp build dir change
|
||||
LFTP_SCRIPT=$(mktemp)
|
||||
cat > "$LFTP_SCRIPT" <<EOF
|
||||
cat >"$LFTP_SCRIPT" <<EOF
|
||||
cd ~avengemedia/ubuntu/$PPA_NAME/
|
||||
lcd $BUILD_DIR
|
||||
mput $CHANGES_BASENAME
|
||||
@@ -142,8 +138,8 @@ mput $TARBALL
|
||||
mput $BUILDINFO
|
||||
bye
|
||||
EOF
|
||||
|
||||
if lftp -d ftp://anonymous:@ppa.launchpad.net < "$LFTP_SCRIPT"; then
|
||||
|
||||
if lftp -d ftp://anonymous:@ppa.launchpad.net <"$LFTP_SCRIPT"; then
|
||||
success "Upload successful!"
|
||||
rm -f "$LFTP_SCRIPT"
|
||||
else
|
||||
@@ -197,41 +193,41 @@ if [ "$KEEP_BUILDS" = "false" ]; then
|
||||
|
||||
# Clean up downloaded binaries in package directory
|
||||
case "$PACKAGE_NAME" in
|
||||
danksearch)
|
||||
if [ -f "$PACKAGE_DIR/dsearch-amd64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-amd64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dsearch-arm64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-arm64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms)
|
||||
# Remove downloaded binaries and source
|
||||
if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dms-source.tar.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-source.tar.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms-git)
|
||||
# Remove git source directory binary
|
||||
if [ -d "$PACKAGE_DIR/dms-git-repo" ]; then
|
||||
rm -rf "$PACKAGE_DIR/dms-git-repo"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms-greeter)
|
||||
# Remove downloaded source
|
||||
if [ -f "$PACKAGE_DIR/dms-greeter-source.tar.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-greeter-source.tar.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
danksearch)
|
||||
if [ -f "$PACKAGE_DIR/dsearch-amd64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-amd64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dsearch-arm64" ]; then
|
||||
rm -f "$PACKAGE_DIR/dsearch-arm64"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms)
|
||||
# Remove downloaded binaries and source
|
||||
if [ -f "$PACKAGE_DIR/dms-distropkg-amd64.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-distropkg-amd64.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
if [ -f "$PACKAGE_DIR/dms-source.tar.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-source.tar.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms-git)
|
||||
# Remove git source directory binary
|
||||
if [ -d "$PACKAGE_DIR/dms-git-repo" ]; then
|
||||
rm -rf "$PACKAGE_DIR/dms-git-repo"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
dms-greeter)
|
||||
# Remove downloaded source
|
||||
if [ -f "$PACKAGE_DIR/dms-greeter-source.tar.gz" ]; then
|
||||
rm -f "$PACKAGE_DIR/dms-greeter-source.tar.gz"
|
||||
REMOVED=$((REMOVED + 1))
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ $REMOVED -gt 0 ]; then
|
||||
@@ -246,4 +242,3 @@ fi
|
||||
|
||||
echo
|
||||
success "Done!"
|
||||
|
||||
|
||||
@@ -7,21 +7,21 @@ case "$1" in
|
||||
if ! getent group greeter >/dev/null; then
|
||||
addgroup --system greeter
|
||||
fi
|
||||
|
||||
|
||||
if ! getent passwd greeter >/dev/null; then
|
||||
adduser --system --ingroup greeter --home /var/lib/greeter \
|
||||
--shell /bin/bash --gecos "System Greeter" greeter
|
||||
fi
|
||||
|
||||
|
||||
if [ -d /var/cache/dms-greeter ]; then
|
||||
chown -R greeter:greeter /var/cache/dms-greeter 2>/dev/null || true
|
||||
fi
|
||||
|
||||
|
||||
if [ -d /var/lib/greeter ]; then
|
||||
chown -R greeter:greeter /var/lib/greeter 2>/dev/null || true
|
||||
fi
|
||||
|
||||
# Check and set graphical.target as default
|
||||
|
||||
# Check and set graphical.target as default
|
||||
CURRENT_TARGET=$(systemctl get-default 2>/dev/null || echo "unknown")
|
||||
if [ "$CURRENT_TARGET" != "graphical.target" ]; then
|
||||
systemctl set-default graphical.target >/dev/null 2>&1 || true
|
||||
@@ -29,10 +29,10 @@ case "$1" in
|
||||
else
|
||||
TARGET_STATUS="Already graphical.target ✓"
|
||||
fi
|
||||
|
||||
|
||||
GREETD_CONFIG="/etc/greetd/config.toml"
|
||||
CONFIG_STATUS="Not modified (already configured)"
|
||||
|
||||
|
||||
# Check if niri or hyprland exists
|
||||
COMPOSITOR="niri"
|
||||
if ! command -v niri >/dev/null 2>&1; then
|
||||
@@ -40,7 +40,7 @@ case "$1" in
|
||||
COMPOSITOR="hyprland"
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
# If config doesn't exist, create a default one
|
||||
if [ ! -f "$GREETD_CONFIG" ]; then
|
||||
mkdir -p /etc/greetd
|
||||
@@ -58,13 +58,13 @@ GREETD_EOF
|
||||
# Backup existing config
|
||||
BACKUP_FILE="${GREETD_CONFIG}.backup-$(date +%Y%m%d-%H%M%S)"
|
||||
cp "$GREETD_CONFIG" "$BACKUP_FILE" 2>/dev/null || true
|
||||
|
||||
|
||||
# Update command in default_session section
|
||||
sed -i "/^\[default_session\]/,/^\[/ s|^command =.*|command = \"/usr/bin/dms-greeter --command $COMPOSITOR\"|" "$GREETD_CONFIG"
|
||||
sed -i '/^\[default_session\]/,/^\[/ s|^user =.*|user = "greeter"|' "$GREETD_CONFIG"
|
||||
CONFIG_STATUS="Updated existing config (backed up) with $COMPOSITOR ✓"
|
||||
fi
|
||||
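The `/^\[default_session\]/,/^\[/` address range confines both substitutions to the `[default_session]` table, so other greetd sections are left alone. A harmless way to preview the lines the range will touch before running the in-place edit (assumes the stock config path):

```bash
sed -n '/^\[default_session\]/,/^\[/ { /^command =/p; /^user =/p; }' /etc/greetd/config.toml
```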
|
||||
|
||||
# Only show banner on initial install
|
||||
if [ -z "$2" ]; then
|
||||
cat << 'EOF'
|
||||
|
||||
@@ -14,7 +14,7 @@ BASE_VERSION := $(shell echo $(UPSTREAM_VERSION) | sed 's/ppa[0-9]*$$//' | sed '
|
||||
override_dh_auto_build:
|
||||
# All files are included in source package
|
||||
test -f dms-greeter-source.tar.gz || (echo "ERROR: dms-greeter-source.tar.gz not found!" && exit 1)
|
||||
|
||||
|
||||
# Extract source tarball
|
||||
tar -xzf dms-greeter-source.tar.gz
|
||||
# Find the extracted directory
|
||||
@@ -27,22 +27,22 @@ override_dh_auto_install:
|
||||
# Install greeter files to shared data location
|
||||
mkdir -p debian/dms-greeter/usr/share/quickshell/dms-greeter
|
||||
cp -r DankMaterialShell-$(BASE_VERSION)/quickshell/* debian/dms-greeter/usr/share/quickshell/dms-greeter/
|
||||
|
||||
|
||||
# Install launcher script
|
||||
install -Dm755 DankMaterialShell-$(BASE_VERSION)/quickshell/Modules/Greetd/assets/dms-greeter \
|
||||
debian/dms-greeter/usr/bin/dms-greeter
|
||||
|
||||
|
||||
# Install documentation
|
||||
install -Dm644 DankMaterialShell-$(BASE_VERSION)/quickshell/Modules/Greetd/README.md \
|
||||
debian/dms-greeter/usr/share/doc/dms-greeter/README.md
|
||||
|
||||
|
||||
# Install LICENSE file
|
||||
install -Dm644 DankMaterialShell-$(BASE_VERSION)/LICENSE \
|
||||
debian/dms-greeter/usr/share/doc/dms-greeter/LICENSE
|
||||
|
||||
|
||||
# Create cache directory structure (will be created by postinst)
|
||||
mkdir -p debian/dms-greeter/var/cache/dms-greeter
|
||||
|
||||
|
||||
# Remove build and development files
|
||||
rm -rf debian/dms-greeter/usr/share/quickshell/dms-greeter/core
|
||||
rm -rf debian/dms-greeter/usr/share/quickshell/dms-greeter/distro
|
||||
|
||||
@@ -77,7 +77,7 @@ There are example themes you can start from:
|
||||
|
||||
- [Cyberpunk Electric](theme_cyberpunk_electric.json) - Neon green and magenta cyberpunk aesthetic
|
||||
- [Hotline Miami](theme_hotline_miami.json) - Retro 80s inspired hot pink and blue
|
||||
- [Miami Vice](theme_miami_vice.json) - Classic teal and pink vice aesthetic
|
||||
- [Miami Vice](theme_miami_vice.json) - Classic teal and pink vice aesthetic
|
||||
- [Synthwave Electric](theme_synthwave_electric.json) - Electric purple and cyan synthwave vibes
|
||||
|
||||
### Color Definitions
|
||||
@@ -87,7 +87,7 @@ There are example themes you can start from:
|
||||
- `primaryText` - Text color that contrasts well with primary background
|
||||
- `primaryContainer` - Darker/lighter variant of primary for containers
|
||||
|
||||
**Secondary Colors**
|
||||
**Secondary Colors**
|
||||
- `secondary` - Supporting accent color for variety and hierarchy
|
||||
- `surfaceTint` - Tint color applied to surfaces, usually derived from primary
|
||||
|
||||
@@ -115,7 +115,7 @@ While the core colors above are required, you can also customize these optional
|
||||
```json
|
||||
{
|
||||
"error": "#f44336",
|
||||
"warning": "#ff9800",
|
||||
"warning": "#ff9800",
|
||||
"info": "#2196f3"
|
||||
}
|
||||
```
|
||||
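Before pointing the shell at a custom theme, it can help to confirm the file is valid JSON and carries the keys described above. A hedged sketch using jq; the path and the key list are illustrative, not the full schema:

```bash
theme="$HOME/.config/DankMaterialShell/custom-theme.json"   # hypothetical location
jq -e . "$theme" >/dev/null || echo "not valid JSON"
for key in primaryText primaryContainer secondary surfaceTint; do
    jq -e --arg k "$key" 'has($k)' "$theme" >/dev/null || echo "missing: $key"
done
```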
@@ -158,4 +158,4 @@ You can also edit `~/.config/DankMaterialShell/settings.json` manually
|
||||
|
||||
### Reactivity
|
||||
|
||||
Editing the custom theme file will auto-update the shell if it's the current theme.
|
||||
Editing the custom theme file will auto-update the shell if it's the current theme.
|
||||
|
||||
16
docs/IPC.md
@@ -22,7 +22,7 @@ Audio system control and information.
|
||||
- Returns: Confirmation message
|
||||
|
||||
**`decrement <step>`**
|
||||
- Decrease output volume by step amount
|
||||
- Decrease output volume by step amount
|
||||
- Parameters: `step` - Volume decrease amount (default: 5)
|
||||
- Returns: Confirmation message
|
||||
|
||||
@@ -72,7 +72,7 @@ Display brightness control for internal and external displays.
|
||||
**`decrement <step> [device]`**
|
||||
- Decrease brightness by step amount
|
||||
- Parameters:
|
||||
- `step` - Brightness decrease amount
|
||||
- `step` - Brightness decrease amount
|
||||
- `device` - Optional device name (empty string for default)
|
||||
- Returns: Confirmation with new brightness level
|
||||
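A hedged usage sketch for the brightness target; the `dms ipc call <target> <function> [args...]` invocation shown here is an assumption about the front end, and `DP-2` is just an example device name:

```bash
dms ipc call brightness decrement 10 ""      # step 10 on the default device
dms ipc call brightness decrement 10 "DP-2"  # step 10 on a specific external display
```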
|
||||
@@ -105,7 +105,7 @@ Night mode (gamma/color temperature) control.
|
||||
- Enable night mode
|
||||
- Returns: Confirmation message
|
||||
|
||||
**`disable`**
|
||||
**`disable`**
|
||||
- Disable night mode
|
||||
- Returns: Confirmation message
|
||||
|
||||
@@ -163,7 +163,7 @@ Media player control via MPRIS interface.
|
||||
- Returns: Nothing
|
||||
|
||||
**`pause`**
|
||||
- Pause playback on active player
|
||||
- Pause playback on active player
|
||||
- Returns: Nothing
|
||||
|
||||
**`playPause`**
|
||||
@@ -272,7 +272,7 @@ Wallpaper management and retrieval with support for per-monitor configurations.
|
||||
|
||||
**`setFor <screenName> <path>`**
|
||||
- Set wallpaper for specific monitor (automatically enables per-monitor mode)
|
||||
- Parameters:
|
||||
- Parameters:
|
||||
- `screenName` - Monitor name (e.g., "DP-2", "eDP-1")
|
||||
- `path` - Absolute or relative path to image file
|
||||
- Returns: Success confirmation with monitor and path info
|
||||
@@ -365,7 +365,7 @@ Theme mode control (light/dark mode switching).
|
||||
- Returns: "light"
|
||||
|
||||
**`dark`**
|
||||
- Switch to dark theme mode
|
||||
- Switch to dark theme mode
|
||||
- Returns: "dark"
|
||||
|
||||
**`getMode`**
|
||||
@@ -445,7 +445,7 @@ Clipboard history modal control.
|
||||
|
||||
**Functions:**
|
||||
- `open` - Show clipboard history
|
||||
- `close` - Hide clipboard history
|
||||
- `close` - Hide clipboard history
|
||||
- `toggle` - Toggle clipboard history visibility
|
||||
|
||||
### Target: `notifications`
|
||||
@@ -685,4 +685,4 @@ Most IPC functions return string messages indicating:
|
||||
- Status information for query functions
|
||||
- Empty/void return for simple action functions
|
||||
|
||||
Functions that return void (like media controls) execute the action but don't provide feedback. Check the application state through other means if needed.
|
||||
Functions that return void (like media controls) execute the action but don't provide feedback. Check the application state through other means if needed.
|
||||
|
||||
@@ -39,4 +39,4 @@
|
||||
"warning": "#99CC00",
|
||||
"info": "#00B899"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,4 +39,4 @@
|
||||
"warning": "#B3B300",
|
||||
"info": "#00B359"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,4 +39,4 @@
|
||||
"warning": "#CC9900",
|
||||
"info": "#0099CC"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,4 +39,4 @@
|
||||
"warning": "#CC9900",
|
||||
"info": "#0066CC"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -171,11 +171,13 @@
|
||||
delve
|
||||
go-tools
|
||||
gnumake
|
||||
prek
|
||||
]
|
||||
++ devQmlPkgs;
|
||||
|
||||
shellHook = ''
|
||||
touch quickshell/.qmlls.ini 2>/dev/null
|
||||
if [ ! -f .git/hooks/pre-commit ]; then prek install; fi
|
||||
'';
|
||||
|
||||
QML2_IMPORT_PATH = mkQmlImportPath pkgs devQmlPkgs;
|
||||
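With prek added to the dev shell, the hooks can also be exercised by hand. These subcommands assume prek mirrors the pre-commit CLI, which is how the shellHook above uses it:

```bash
prek install           # what the shellHook runs when .git/hooks/pre-commit is missing
prek run --all-files   # lint every tracked file, not just the staged ones
```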
|
||||
@@ -1 +1 @@
|
||||
Spicy Miso
|
||||
Spicy Miso
|
||||
|
||||
@@ -542,4 +542,4 @@ function getCatppuccinVariantNames() {
|
||||
|
||||
function getThemeCategories() {
|
||||
return ThemeCategories
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1304,4 +1304,4 @@ function byLengthAsc(a, b, selector) {
|
||||
}
|
||||
function byStartAsc(a, b) {
|
||||
return a.start - b.start;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,13 +3,13 @@
|
||||
// May not be necessary if that's possible tbh.
|
||||
function markdownToHtml(text) {
|
||||
if (!text) return "";
|
||||
|
||||
|
||||
// Store code blocks and inline code to protect them from further processing
|
||||
const codeBlocks = [];
|
||||
const inlineCode = [];
|
||||
let blockIndex = 0;
|
||||
let inlineIndex = 0;
|
||||
|
||||
|
||||
// First, extract and replace code blocks with placeholders
|
||||
let html = text.replace(/```([\s\S]*?)```/g, (match, code) => {
|
||||
// Trim leading and trailing blank lines only
|
||||
@@ -21,7 +21,7 @@ function markdownToHtml(text) {
|
||||
codeBlocks.push(`<pre><code>${escapedCode}</code></pre>`);
|
||||
return `\x00CODEBLOCK${blockIndex++}\x00`;
|
||||
});
|
||||
|
||||
|
||||
// Extract and replace inline code
|
||||
html = html.replace(/`([^`]+)`/g, (match, code) => {
|
||||
// Escape HTML entities in code
|
||||
@@ -31,18 +31,18 @@ function markdownToHtml(text) {
|
||||
inlineCode.push(`<code>${escapedCode}</code>`);
|
||||
return `\x00INLINECODE${inlineIndex++}\x00`;
|
||||
});
|
||||
|
||||
|
||||
// Now process everything else
|
||||
// Escape HTML entities (but not in code blocks)
|
||||
html = html.replace(/&/g, '&amp;')
|
||||
.replace(/</g, '&lt;')
|
||||
.replace(/>/g, '&gt;');
|
||||
|
||||
|
||||
// Headers
|
||||
html = html.replace(/^### (.*?)$/gm, '<h3>$1</h3>');
|
||||
html = html.replace(/^## (.*?)$/gm, '<h2>$1</h2>');
|
||||
html = html.replace(/^# (.*?)$/gm, '<h1>$1</h1>');
|
||||
|
||||
|
||||
// Bold and italic (order matters!)
|
||||
html = html.replace(/\*\*\*(.*?)\*\*\*/g, '<b><i>$1</i></b>');
|
||||
html = html.replace(/\*\*(.*?)\*\*/g, '<b>$1</b>');
|
||||
@@ -50,15 +50,15 @@ function markdownToHtml(text) {
|
||||
html = html.replace(/___(.*?)___/g, '<b><i>$1</i></b>');
|
||||
html = html.replace(/__(.*?)__/g, '<b>$1</b>');
|
||||
html = html.replace(/_(.*?)_/g, '<i>$1</i>');
|
||||
|
||||
|
||||
// Links
|
||||
html = html.replace(/\[([^\]]+)\]\(([^)]+)\)/g, '<a href="$2">$1</a>');
|
||||
|
||||
|
||||
// Lists
|
||||
html = html.replace(/^\* (.*?)$/gm, '<li>$1</li>');
|
||||
html = html.replace(/^- (.*?)$/gm, '<li>$1</li>');
|
||||
html = html.replace(/^\d+\. (.*?)$/gm, '<li>$1</li>');
|
||||
|
||||
|
||||
// Wrap consecutive list items in ul/ol tags
|
||||
html = html.replace(/(<li>[\s\S]*?<\/li>\s*)+/g, function(match) {
|
||||
return '<ul>' + match + '</ul>';
|
||||
@@ -71,36 +71,36 @@ function markdownToHtml(text) {
|
||||
html = html.replace(/\x00CODEBLOCK(\d+)\x00/g, (match, index) => {
|
||||
return codeBlocks[parseInt(index)];
|
||||
});
|
||||
|
||||
|
||||
html = html.replace(/\x00INLINECODE(\d+)\x00/g, (match, index) => {
|
||||
return inlineCode[parseInt(index)];
|
||||
});
|
||||
|
||||
|
||||
// Line breaks (after code blocks are restored)
|
||||
html = html.replace(/\n\n/g, '</p><p>');
|
||||
html = html.replace(/\n/g, '<br/>');
|
||||
|
||||
|
||||
// Wrap in paragraph tags if not already wrapped
|
||||
if (!html.startsWith('<')) {
|
||||
html = '<p>' + html + '</p>';
|
||||
}
|
||||
|
||||
|
||||
// Clean up the final HTML
|
||||
// Remove <br/> tags immediately before block elements
|
||||
html = html.replace(/<br\/>\s*<pre>/g, '<pre>');
|
||||
html = html.replace(/<br\/>\s*<ul>/g, '<ul>');
|
||||
html = html.replace(/<br\/>\s*(<h[1-6]>)/g, '$1');
|
||||
|
||||
|
||||
// Remove empty paragraphs
|
||||
html = html.replace(/<p>\s*<\/p>/g, '');
|
||||
html = html.replace(/<p>\s*<br\/>\s*<\/p>/g, '');
|
||||
|
||||
|
||||
// Remove excessive line breaks
|
||||
html = html.replace(/(<br\/>){3,}/g, '<br/><br/>'); // Max 2 consecutive line breaks
|
||||
html = html.replace(/(<\/p>)\s*(<p>)/g, '$1$2'); // Remove whitespace between paragraphs
|
||||
|
||||
|
||||
// Remove leading/trailing whitespace
|
||||
html = html.trim();
|
||||
|
||||
|
||||
return html;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,18 +18,18 @@ PanelWindow {
|
||||
property var parentModal: null
|
||||
property real menuPositionX: 0
|
||||
property real menuPositionY: 0
|
||||
|
||||
|
||||
readonly property real shadowBuffer: 5
|
||||
|
||||
|
||||
screen: parentModal?.effectiveScreen
|
||||
|
||||
function show(x, y, app, fromKeyboard) {
|
||||
fromKeyboard = fromKeyboard || false;
|
||||
menuContent.currentApp = app;
|
||||
|
||||
|
||||
let screenX = x;
|
||||
let screenY = y;
|
||||
|
||||
|
||||
if (parentModal) {
|
||||
if (fromKeyboard) {
|
||||
screenX = x + parentModal.alignedX;
|
||||
@@ -39,14 +39,14 @@ PanelWindow {
|
||||
screenY = y + (parentModal.alignedY - shadowBuffer);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
menuPositionX = screenX;
|
||||
menuPositionY = screenY;
|
||||
|
||||
|
||||
menuContent.selectedMenuIndex = fromKeyboard ? 0 : -1;
|
||||
menuContent.keyboardNavigation = true;
|
||||
visible = true;
|
||||
|
||||
|
||||
if (parentHandler) {
|
||||
parentHandler.enabled = false;
|
||||
}
|
||||
|
||||
@@ -143,7 +143,7 @@ Item {
|
||||
|
||||
implicitWidth: Math.max(180, menuColumn.implicitWidth + Theme.spacingS * 2)
|
||||
implicitHeight: menuColumn.implicitHeight + Theme.spacingS * 2
|
||||
|
||||
|
||||
width: implicitWidth
|
||||
height: implicitHeight
|
||||
|
||||
|
||||
@@ -14,20 +14,20 @@ Popup {
|
||||
function show(x, y, app, fromKeyboard) {
|
||||
fromKeyboard = fromKeyboard || false;
|
||||
menuContent.currentApp = app;
|
||||
|
||||
|
||||
root.x = x + 4;
|
||||
root.y = y + 4;
|
||||
|
||||
|
||||
menuContent.selectedMenuIndex = fromKeyboard ? 0 : -1;
|
||||
menuContent.keyboardNavigation = true;
|
||||
|
||||
|
||||
if (parentHandler) {
|
||||
parentHandler.enabled = false;
|
||||
}
|
||||
|
||||
|
||||
open();
|
||||
}
|
||||
|
||||
|
||||
onOpened: {
|
||||
Qt.callLater(() => {
|
||||
menuContent.keyboardHandler.forceActiveFocus();
|
||||
|
||||
@@ -117,4 +117,4 @@ Rectangle {
|
||||
easing.type: Theme.standardEasing
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -88,4 +88,4 @@ Item {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -286,4 +286,4 @@ Item {
|
||||
ColorAnimation { duration: Theme.shortDuration }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -237,4 +237,4 @@ Row {
|
||||
onClicked: root.clearAll()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,4 +49,4 @@ Rectangle {
|
||||
cursorShape: Qt.PointingHandCursor
|
||||
onPressed: root.pressed()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,4 +49,4 @@ Row {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,4 +43,4 @@ StyledText {
|
||||
default: return Theme.surfaceText
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -161,7 +161,7 @@ Rectangle {
|
||||
|
||||
const pins = SettingsData.bluetoothDevicePins || {}
|
||||
const pinnedAddr = pins["preferredDevice"]
|
||||
|
||||
|
||||
let devices = [...BluetoothService.adapter.devices.values.filter(dev => dev && (dev.paired || dev.trusted))]
|
||||
devices.sort((a, b) => {
|
||||
// Pinned device first
|
||||
@@ -337,13 +337,13 @@ Rectangle {
|
||||
onClicked: {
|
||||
const pins = JSON.parse(JSON.stringify(SettingsData.bluetoothDevicePins || {}))
|
||||
const isCurrentlyPinned = pins["preferredDevice"] === modelData.address
|
||||
|
||||
|
||||
if (isCurrentlyPinned) {
|
||||
delete pins["preferredDevice"]
|
||||
} else {
|
||||
pins["preferredDevice"] = modelData.address
|
||||
}
|
||||
|
||||
|
||||
SettingsData.set("bluetoothDevicePins", pins)
|
||||
}
|
||||
}
|
||||
@@ -642,4 +642,4 @@ Rectangle {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -163,4 +163,4 @@ Rectangle {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -45,4 +45,4 @@ CompoundPill {
|
||||
onToggled: {
|
||||
expandClicked()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,4 +30,4 @@ CompoundPill {
|
||||
colorPickerModal.show()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,4 +67,4 @@ Rectangle {
|
||||
onSliderValueChanged: root.sliderValueChanged(newValue / 100.0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -167,4 +167,4 @@ Rectangle {
|
||||
if (ev.key === Qt.Key_Space || ev.key === Qt.Key_Return) { root.toggled(); ev.accepted = true }
|
||||
else if (ev.key === Qt.Key_Right) { root.expandClicked(); ev.accepted = true }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,4 +26,4 @@ Rectangle {
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,4 +75,4 @@ CompoundPill {
|
||||
onToggled: {
|
||||
expandClicked()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -97,4 +97,4 @@ Rectangle {
|
||||
easing.type: Theme.standardEasing
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -77,4 +77,4 @@ Rectangle {
|
||||
easing.type: Theme.standardEasing
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,4 +118,4 @@ Rectangle {
|
||||
easing.type: Theme.standardEasing
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,4 +42,4 @@ function calculateRowsAndWidgets(controlCenterColumn, expandedSection, expandedW
|
||||
}
|
||||
|
||||
return { rows: rows, expandedRowIndex: expandedRow }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,4 +22,4 @@ function toggleSection(root, section) {
|
||||
} else {
|
||||
root.expandedSection = section
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -87,4 +87,4 @@ function resetToDefault() {
|
||||
|
||||
function clearAll() {
|
||||
SettingsData.set("controlCenterWidgets", [])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,4 +54,4 @@ QtObject {
|
||||
axisOrientationChanged()
|
||||
changed()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,4 +22,4 @@ BasePill {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,4 +60,4 @@ BasePill {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,4 +21,4 @@ BasePill {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -444,4 +444,4 @@ Rectangle {
|
||||
id: systemClock
|
||||
precision: SystemClock.Hours
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,4 +19,4 @@ Rectangle {
|
||||
anchors.fill: parent
|
||||
anchors.margins: card.pad
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -110,4 +110,4 @@ Card {
|
||||
id: systemClock
|
||||
precision: SettingsData.showSeconds ? SystemClock.Seconds : SystemClock.Minutes
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -215,4 +215,4 @@ Card {
|
||||
onClicked: root.clicked()
|
||||
visible: activePlayer
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -175,4 +175,4 @@ Card {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,4 +86,4 @@ Card {
|
||||
cursorShape: Qt.PointingHandCursor
|
||||
onClicked: root.clicked()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,4 +73,4 @@ Item {
|
||||
onClicked: root.switchToMediaTab()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1093,7 +1093,7 @@ Item {
|
||||
command: {
|
||||
var paths = [
|
||||
"/usr/share/wayland-sessions",
|
||||
"/usr/share/xsessions",
|
||||
"/usr/share/xsessions",
|
||||
"/usr/local/share/wayland-sessions",
|
||||
"/usr/local/share/xsessions"
|
||||
]
|
||||
@@ -1110,7 +1110,7 @@ Item {
|
||||
}
|
||||
})
|
||||
}
|
||||
// 1. Explicit system/user paths
|
||||
// 1. Explicit system/user paths
|
||||
var explicitFind = "find " + paths.join(" ") + " -maxdepth 1 -name '*.desktop' -type f -follow 2>/dev/null"
|
||||
// 2. Scan all /home user directories for local session files
|
||||
var homeScan = "find /home -maxdepth 5 \\( -path '*/wayland-sessions/*.desktop' -o -path '*/xsessions/*.desktop' \\) -type f -follow 2>/dev/null"
|
||||
|
||||
@@ -45,7 +45,7 @@ setfacl -m u:greeter:x ~ ~/.config ~/.local ~/.cache ~/.local/state
|
||||
|
||||
# Set group ownership on config directories
|
||||
sudo chgrp -R greeter ~/.config/DankMaterialShell
|
||||
sudo chgrp -R greeter ~/.local/state/DankMaterialShell
|
||||
sudo chgrp -R greeter ~/.local/state/DankMaterialShell
|
||||
sudo chgrp -R greeter ~/.cache/DankMaterialShell
|
||||
sudo chmod -R g+rX ~/.config/DankMaterialShell ~/.cache/DankMaterialShell ~/.cache/quickshell
|
||||
|
||||
|
||||
@@ -20,4 +20,4 @@ gestures {
|
||||
|
||||
layout {
|
||||
background-color "#000000"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,4 +5,4 @@ export QT_QPA_PLATFORM=wayland
|
||||
export QT_WAYLAND_DISABLE_WINDOWDECORATION=1
|
||||
export EGL_PLATFORM=gbm
|
||||
|
||||
exec niri -c /etc/greetd/dms-niri.kdl
|
||||
exec niri -c /etc/greetd/dms-niri.kdl
|
||||
|
||||
@@ -616,4 +616,4 @@ Column {
|
||||
autoSaveToSession()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -241,4 +241,4 @@ FocusScope {
|
||||
}
|
||||
return defaultValue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,7 +73,7 @@ Singleton {
|
||||
}
|
||||
|
||||
const profileValue = BatteryService.isPluggedIn
|
||||
? SettingsData.acProfileName
|
||||
? SettingsData.acProfileName
|
||||
: SettingsData.batteryProfileName;
|
||||
|
||||
if (profileValue !== "") {
|
||||
|
||||
@@ -984,4 +984,4 @@ Singleton {
|
||||
"bssid": network.bssid
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -429,4 +429,4 @@ Singleton {
|
||||
id: mkdirProcess
|
||||
command: ["mkdir", "-p", root.baseDir, root.filesDir]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,7 +47,7 @@ Singleton {
|
||||
if (!node || !node.ready) {
|
||||
continue
|
||||
}
|
||||
|
||||
|
||||
if (node.properties && node.properties["media.class"] === "Stream/Input/Video") {
|
||||
if (node.properties["stream.is-live"] === "true") {
|
||||
return true
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
layer-rule {
|
||||
match namespace="dms:blurwallpaper"
|
||||
place-within-backdrop true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ layout(std140, binding = 0) uniform buf {
|
||||
float centerY; // Y coordinate of disc center (0.0 to 1.0)
|
||||
float smoothness; // Edge smoothness (0.0 to 1.0, 0=sharp, 1=very smooth)
|
||||
float aspectRatio; // Width / Height of the screen
|
||||
|
||||
|
||||
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
|
||||
float imageWidth1;
|
||||
float imageHeight1;
|
||||
@@ -97,30 +97,30 @@ void main() {
|
||||
// This makes distances circular instead of elliptical
|
||||
vec2 adjustedUV = vec2(uv.x * ubuf.aspectRatio, uv.y);
|
||||
vec2 adjustedCenter = vec2(ubuf.centerX * ubuf.aspectRatio, ubuf.centerY);
|
||||
|
||||
|
||||
// Calculate distance in aspect-corrected space
|
||||
float dist = distance(adjustedUV, adjustedCenter);
|
||||
|
||||
|
||||
// Calculate the maximum possible distance (corner to corner)
|
||||
// This ensures the disc can cover the entire screen
|
||||
float maxDistX = max(ubuf.centerX * ubuf.aspectRatio,
|
||||
float maxDistX = max(ubuf.centerX * ubuf.aspectRatio,
|
||||
(1.0 - ubuf.centerX) * ubuf.aspectRatio);
|
||||
float maxDistY = max(ubuf.centerY, 1.0 - ubuf.centerY);
|
||||
float maxDist = length(vec2(maxDistX, maxDistY));
|
||||
|
||||
|
||||
// Scale progress to cover the maximum distance
|
||||
// Add extra range for smoothness to ensure complete coverage
|
||||
// Adjust smoothness for aspect ratio to maintain consistent visual appearance
|
||||
float adjustedSmoothness = mappedSmoothness * max(1.0, ubuf.aspectRatio);
|
||||
float radius = ubuf.progress * (maxDist + adjustedSmoothness);
|
||||
|
||||
|
||||
// Use smoothstep for a smooth edge transition
|
||||
float factor = smoothstep(radius - adjustedSmoothness, radius + adjustedSmoothness, dist);
|
||||
|
||||
|
||||
// Mix the textures (factor = 0 inside disc, 1 outside)
|
||||
fragColor = mix(color2, color1, factor);
|
||||
|
||||
|
||||
if (ubuf.progress <= 0.0) fragColor = color1;
|
||||
|
||||
fragColor *= ubuf.qt_Opacity;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ layout(std140, binding = 0) uniform buf {
|
||||
mat4 qt_Matrix;
|
||||
float qt_Opacity;
|
||||
float progress;
|
||||
|
||||
|
||||
// Fill mode parameters
|
||||
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
|
||||
float imageWidth1; // Width of source1 image
|
||||
@@ -81,11 +81,11 @@ vec4 sampleWithFillMode(sampler2D tex, vec2 uv, float imgWidth, float imgHeight)
|
||||
|
||||
void main() {
|
||||
vec2 uv = qt_TexCoord0;
|
||||
|
||||
|
||||
// Sample textures with fill mode
|
||||
vec4 color1 = sampleWithFillMode(source1, uv, ubuf.imageWidth1, ubuf.imageHeight1);
|
||||
vec4 color2 = sampleWithFillMode(source2, uv, ubuf.imageWidth2, ubuf.imageHeight2);
|
||||
|
||||
|
||||
// Mix the two textures based on progress value
|
||||
fragColor = mix(color1, color2, ubuf.progress) * ubuf.qt_Opacity;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -104,7 +104,7 @@ void main() {
|
||||
float radius = p * maxDist;
|
||||
|
||||
// squash factor for the "eye" slit
|
||||
float squash = mix(0.2, 1.0, p);
|
||||
float squash = mix(0.2, 1.0, p);
|
||||
q.y /= squash;
|
||||
|
||||
float dist = length(q);
|
||||
|
||||
@@ -15,7 +15,7 @@ layout(std140, binding = 0) uniform buf {
|
||||
float angle; // Angle of stripes in degrees (default 30.0)
|
||||
float smoothness; // Edge smoothness (0.0 to 1.0, 0=sharp, 1=very smooth)
|
||||
float aspectRatio; // Width / Height of the screen
|
||||
|
||||
|
||||
float fillMode; // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
|
||||
float imageWidth1;
|
||||
float imageHeight1;
|
||||
@@ -84,58 +84,58 @@ vec4 sampleWithFillMode(sampler2D tex, vec2 uv, float imgWidth, float imgHeight)

void main() {
    vec2 uv = qt_TexCoord0;

    // Sample textures with fill mode
    vec4 color1 = sampleWithFillMode(source1, uv, ubuf.imageWidth1, ubuf.imageHeight1);
    vec4 color2 = sampleWithFillMode(source2, uv, ubuf.imageWidth2, ubuf.imageHeight2);

    // Map smoothness from 0.0-1.0 to 0.001-0.3 range
    // Using a non-linear mapping for better control at low values
    float mappedSmoothness = mix(0.001, 0.3, ubuf.smoothness * ubuf.smoothness);

    // Use values directly without forcing defaults
    float stripes = (ubuf.stripeCount > 0.0) ? ubuf.stripeCount : 12.0;
    float angleRad = radians(ubuf.angle);
    float edgeSmooth = mappedSmoothness;

    // Create a coordinate system for stripes based on angle
    // At 0°: vertical stripes (divide by x)
    // At 45°: diagonal stripes
    // At 90°: horizontal stripes (divide by y)

    // Transform coordinates based on angle
    float cosA = cos(angleRad);
    float sinA = sin(angleRad);

    // Project the UV position onto the stripe direction
    // This gives us the position along the stripe direction
    float stripeCoord = uv.x * cosA + uv.y * sinA;

    // Perpendicular coordinate (for edge movement)
    float perpCoord = -uv.x * sinA + uv.y * cosA;

    // Calculate the range of perpCoord based on angle
    // This determines how far edges need to travel to fully cover the screen
    float minPerp = min(min(0.0 * -sinA + 0.0 * cosA, 1.0 * -sinA + 0.0 * cosA),
                        min(0.0 * -sinA + 1.0 * cosA, 1.0 * -sinA + 1.0 * cosA));
    float maxPerp = max(max(0.0 * -sinA + 0.0 * cosA, 1.0 * -sinA + 0.0 * cosA),
                        max(0.0 * -sinA + 1.0 * cosA, 1.0 * -sinA + 1.0 * cosA));

    // Determine which stripe we're in
    float stripePos = stripeCoord * stripes;
    int stripeIndex = int(floor(stripePos));

    // Determine if this is an odd or even stripe
    bool isOddStripe = mod(float(stripeIndex), 2.0) != 0.0;

    // Calculate the progress for this specific stripe with wave delay
    // Use absolute stripe position for consistent delay across all stripes
    float normalizedStripePos = clamp(stripePos / stripes, 0.0, 1.0);

    // Increased delay and better distribution
    float maxDelay = 0.1;
    float stripeDelay = normalizedStripePos * maxDelay;

    // Better progress mapping that uses the full 0.0-1.0 range
    // Map progress so that:
    // - First stripe starts at progress = 0.0

@@ -151,13 +151,13 @@ void main() {
        float activeEnd = stripeDelay + (1.0 - maxDelay);
        stripeProgress = (ubuf.progress - activeStart) / (activeEnd - activeStart);
    }

    // Use gentler easing curve
    stripeProgress = stripeProgress * stripeProgress * (3.0 - 2.0 * stripeProgress); // Smootherstep instead of smoothstep

    // Use the perpendicular coordinate for edge comparison
    float yPos = perpCoord;

    // Calculate edge position for this stripe
    // Use the actual perpendicular coordinate range for this angle
    float perpRange = maxPerp - minPerp;

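For reference, the polynomial t * t * (3.0 - 2.0 * t) applied above is the same cubic Hermite that GLSL's built-in smoothstep uses; Ken Perlin's "smootherstep" is the quintic variant, which additionally has zero second derivative at the endpoints. Side by side, purely for comparison:

    float easeSmoothstep(float t)   { return t * t * (3.0 - 2.0 * t); }                     // cubic, same as smoothstep(0, 1, t)
    float easeSmootherstep(float t) { return t * t * t * (t * (t * 6.0 - 15.0) + 10.0); }   // Perlin's quintic
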
@@ -170,7 +170,7 @@ void main() {
        // Even stripes: edge moves from min to max
        edgePosition = minPerp - margin + stripeProgress * (perpRange + margin * 2.0);
    }

    // Determine which wallpaper to show based on rotated position
    float mask;
    if (isOddStripe) {

@@ -180,10 +180,10 @@ void main() {
        // Even stripes reveal new wallpaper from top
        mask = 1.0 - smoothstep(edgePosition - edgeSmooth, edgePosition + edgeSmooth, yPos);
    }

    // Mix the wallpapers
    fragColor = mix(color1, color2, mask);

    // Force exact values at start and end to prevent any bleed-through
    if (ubuf.progress <= 0.0) {
        fragColor = color1; // Only show old wallpaper at start

@@ -195,11 +195,11 @@ void main() {
        float shadowStrength = 1.0 - smoothstep(0.0, edgeSmooth * 2.5, edgeDist);
        shadowStrength *= 0.2 * (1.0 - abs(stripeProgress - 0.5) * 2.0);
        fragColor.rgb *= (1.0 - shadowStrength);

        // Add slight vignette during transition for dramatic effect
        float vignette = 1.0 - ubuf.progress * 0.1 * (1.0 - abs(stripeProgress - 0.5) * 2.0);
        fragColor.rgb *= vignette;
    }

    fragColor *= ubuf.qt_Opacity;
}

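The projection math in the stripes shader can be read as a rotation of the UV frame by the stripe angle: one axis runs along the stripes, the other across them, and projecting the unit square's four corners onto the cross axis gives the distance an edge must sweep to cover the screen. A compact restatement of the same expressions (helper names are illustrative, not from the repository):

    // Rotate the UV frame: x runs along the stripes, y across them.
    vec2 stripeFrame(vec2 uv, float angleRad) {
        float c = cos(angleRad), s = sin(angleRad);
        return vec2(uv.x * c + uv.y * s,     // position along the stripe direction
                    -uv.x * s + uv.y * c);   // position perpendicular to the stripes
    }

    // Range of the perpendicular coordinate over the unit square, i.e. how far
    // an edge must travel at this angle (same values as minPerp/maxPerp above).
    vec2 perpRangeForAngle(float angleRad) {
        float c = cos(angleRad), s = sin(angleRad);
        vec4 corners = vec4(0.0, -s, c, c - s);   // (0,0), (1,0), (0,1), (1,1) projected
        float lo = min(min(corners.x, corners.y), min(corners.z, corners.w));
        float hi = max(max(corners.x, corners.y), max(corners.z, corners.w));
        return vec2(lo, hi);
    }
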
@@ -13,7 +13,7 @@ layout(std140, binding = 0) uniform buf {
    float progress;     // Transition progress (0.0 to 1.0)
    float direction;    // 0=left, 1=right, 2=up, 3=down
    float smoothness;   // Edge smoothness (0.0 to 1.0, 0=sharp, 1=very smooth)

    float fillMode;     // 0=stretch, 1=fit, 2=crop, 3=tile, 4=tileV, 5=tileH, 6=pad
    float imageWidth1;
    float imageHeight1;

@@ -82,22 +82,22 @@ vec4 sampleWithFillMode(sampler2D tex, vec2 uv, float imgWidth, float imgHeight)

void main() {
    vec2 uv = qt_TexCoord0;

    // Sample textures with fill mode
    vec4 color1 = sampleWithFillMode(source1, uv, ubuf.imageWidth1, ubuf.imageHeight1);
    vec4 color2 = sampleWithFillMode(source2, uv, ubuf.imageWidth2, ubuf.imageHeight2);

    // Map smoothness from 0.0-1.0 to 0.001-0.5 range
    // Using a non-linear mapping for better control
    float mappedSmoothness = mix(0.001, 0.5, ubuf.smoothness * ubuf.smoothness);

    float edge = 0.0;
    float factor = 0.0;

    // Extend the progress range to account for smoothness
    // This ensures the transition completes fully at the edges
    float extendedProgress = ubuf.progress * (1.0 + 2.0 * mappedSmoothness) - mappedSmoothness;

    // Calculate edge position based on direction
    // As progress goes from 0 to 1, we reveal source2 (new wallpaper)
    if (ubuf.direction < 0.5) {

@@ -105,7 +105,7 @@ void main() {
        edge = 1.0 - extendedProgress;
        factor = smoothstep(edge - mappedSmoothness, edge + mappedSmoothness, uv.x);
        fragColor = mix(color1, color2, factor);
    }
    else if (ubuf.direction < 1.5) {
        // Wipe from left to right (new image enters from left)
        edge = extendedProgress;

@@ -124,6 +124,6 @@ void main() {
        factor = smoothstep(edge - mappedSmoothness, edge + mappedSmoothness, uv.y);
        fragColor = mix(color2, color1, factor);
    }

    fragColor *= ubuf.qt_Opacity;
}

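The extendedProgress trick above exists because the smoothstep band has half-width mappedSmoothness: unless the edge starts and finishes that far outside the 0..1 UV range, the first and last frames would still show a partially blended strip. A minimal sketch of the same idea for a single axis (names are illustrative, not the shader's own):

    // Reveal amount of the new image along one axis; m is the edge half-width.
    float revealAmount(float coord, float progress, float m) {
        float extended = progress * (1.0 + 2.0 * m) - m;   // sweeps from -m to 1 + m
        return 1.0 - smoothstep(extended - m, extended + m, coord);
    }
    // usage sketch: fragColor = mix(oldColor, newColor, revealAmount(uv.x, progress, m));

At progress 0 the band sits entirely below coord 0 (everything is the old image), at progress 1 entirely above coord 1 (everything is the new image), so no residue survives at either end regardless of m.
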
@@ -210,4 +210,4 @@ Flow {
            }
        }
    }
}

@@ -76,4 +76,4 @@ Rectangle {
        font.weight: Font.Bold
        color: Theme.surfaceVariantText
    }
}