Compare commits

No commits in common. "next" and "v11.0.0" have entirely different histories.

204 changed files with 11639 additions and 35837 deletions

@ -1,26 +0,0 @@
# Use the jguer/yay-builder image as a parent image with archlinux
FROM docker.io/jguer/yay-builder
# Install extra packages (pacman-contrib and fish)
RUN sudo pacman -Syu --noconfirm pacman-contrib fish git-delta openssh bat go
# Set passwordless sudo for the docker user
RUN echo "docker ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/docker
# Create a non-root user and switch to it
USER docker
# Install xgotext
RUN go install github.com/leonelquinteros/gotext/cli/xgotext@latest
# Add /app/bin to the PATH
ENV PATH="/app/bin:$PATH"
# add /home/docker/go/bin to the PATH
ENV PATH="/home/docker/go/bin:$PATH"
# Set the working directory
WORKDIR /workspace
# Command to run when starting the container
CMD ["bash"]

@ -1,14 +0,0 @@
{
"name": "Existing Dockerfile",
"build": {
"context": "..",
"dockerfile": "../.devcontainer/Dockerfile"
},
"customizations": {
"vscode": {
"extensions": [
"golang.go"
]
}
}
}

.github/FUNDING.yml

@@ -1 +1,6 @@
github: [Jguer]
+custom:
+  [
+    "https://www.blockchain.com/btc/payment_request?address=bc1q703ukdwfpfh24hqzusckt4r6dn88eqve5d0847&message=yay!",
+    "https://coinrequest.io/request/kOSc4hkqYzeJoQv",
+  ]

@@ -32,11 +32,6 @@ Example: `yay v8.1139.r0.g9ac4ab6 - libalpm v11.0.1` -->
Include the FULL output of any relevant commands/configs
The current yay config can be printed with `yay -Pg`
Paste services are only needed for excessive output (>500 lines)
-Use --debug to add pacman and yay debug logs
-or add the following key to your ~/.config/yay/config.json to only get yay debug logs
-{
-  "debug": true
-}
-->
```sh

View File

@ -1,15 +0,0 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
version: 2
updates:
- package-ecosystem: "gomod" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "weekly"
groups:
go-all:
patterns:
- '*'

.github/stale.yml

@@ -7,8 +7,6 @@ exemptLabels:
  - "Status: In Progress"
  - "Status: Confirmed"
  - "Status: Approved"
-  - "Status: Triage"
-  - "Type: Bug"
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable

@ -1,143 +1,40 @@
name: Builder Image name: Builder image
on: on:
schedule: schedule:
- cron: "0 3 * * 1" # Every Monday at 3 AM - cron: "0 3 * * 1"
push: push:
paths: paths:
- "ci.Dockerfile" - "ci.Dockerfile"
- ".github/workflows/builder-image.yml" - "**/builder-image.yml"
env:
REGISTRY_IMAGE: jguer/yay-builder
jobs: jobs:
build: build:
name: Push builder image to Docker Hub
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm/v7
- linux/arm64
steps: steps:
- name: Checkout repository - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v2
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx - name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3 uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
- name: Login to Docker Hub uses: docker/login-action@v1
uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker Hub
- name: Login to GitHub Container Registry uses: docker/build-push-action@v2
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.REGISTRY_IMAGE }}
ghcr.io/${{ env.REGISTRY_IMAGE }}
tags: |
type=raw,value=latest
type=sha,format=long
- name: Build and push by digest
id: build
uses: docker/build-push-action@v5
with:
context: .
file: ci.Dockerfile
platforms: ${{ matrix.platform }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
echo -n "$digest" > "/tmp/digests/$(echo "${{ matrix.platform }}" | tr '/' '_')"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digest-${{ matrix.platform == 'linux/amd64' && 'amd64' || matrix.platform == 'linux/arm/v7' && 'armv7' || 'arm64' }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
needs: [build]
runs-on: ubuntu-latest
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
pattern: digest-*
merge-multiple: true
path: /tmp/digests
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.REGISTRY_IMAGE }}
ghcr.io/${{ env.REGISTRY_IMAGE }}
tags: |
type=raw,value=latest
type=sha,format=short
- name: Create and push manifest list
env: env:
DOCKER_CLI_EXPERIMENTAL: enabled DOCKER_BUILDKIT: 0
run: | COMPOSE_DOCKER_CLI_BUILD: 0
# Extract Docker Hub tags with:
DH_TAGS=$(echo '${{ steps.meta.outputs.tags }}' | grep -v "^ghcr.io" | xargs -I {} echo "-t {}") platforms: linux/amd64, linux/arm/v6,linux/arm/v7,linux/arm64
file: ci.Dockerfile
# Extract GitHub Container Registry tags push: true
GHCR_TAGS=$(echo '${{ steps.meta.outputs.tags }}' | grep "^ghcr.io" | xargs -I {} echo "-t {}") tags: jguer/yay-builder:latest
secrets: |
# Create a manifest list using the image digests from /tmp/digests/* DOCKER_BUILDKIT=0
DIGESTS=$(for file in /tmp/digests/*; do COMPOSE_DOCKER_CLI_BUILD=0
echo -n "${{ env.REGISTRY_IMAGE }}@$(cat $file) " cache-from: type=registry,ref=jguer/yay-builder:latest
done) cache-to: type=inline
# Create the manifest list for Docker Hub
docker buildx imagetools create $DH_TAGS $DIGESTS
# Create the manifest list for GitHub Container Registry
docker buildx imagetools create $GHCR_TAGS $DIGESTS
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:latest

@ -1,5 +1,4 @@
name: Build Release name: Build Release
on: on:
push: push:
tags: tags:
@ -9,36 +8,36 @@ jobs:
build-releases: build-releases:
strategy: strategy:
matrix: matrix:
arch: ["linux/amd64 x86_64", "linux/arm/v7 armv7h", "linux/arm64 aarch64"] arch:
[
"linux/amd64 x86_64",
"linux/arm/v6 armv6h",
"linux/arm/v7 armv7h",
"linux/arm64 aarch64",
]
name: Build ${{ matrix.arch }} name: Build ${{ matrix.arch }}
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v4 uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} platforms: all
password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
with:
version: latest
- name: Read info - name: Read info
id: tags id: tags
shell: bash
run: | run: |
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\/v/}
echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT echo ::set-output name=TAG::${GITHUB_REF/refs\/tags\//}
arch="${{ matrix.arch }}" arch="${{ matrix.arch }}"
echo "PLATFORM=${arch%% *}" >> $GITHUB_OUTPUT echo ::set-output name=PLATFORM::${arch%% *}
echo "ARCH=${arch##* }" >> $GITHUB_OUTPUT echo ::set-output name=ARCH::${arch##* }
- name: Build ${{ matrix.arch }} release - name: Build ${{ matrix.arch }} release
run: | run: |
mkdir artifacts mkdir artifacts
@ -49,45 +48,88 @@ jobs:
-t yay:${{ steps.tags.outputs.arch }} . --load -t yay:${{ steps.tags.outputs.arch }} . --load
make docker-release ARCH=${{ steps.tags.outputs.arch }} VERSION=${{ steps.tags.outputs.version }} PREFIX="/usr" make docker-release ARCH=${{ steps.tags.outputs.arch }} VERSION=${{ steps.tags.outputs.version }} PREFIX="/usr"
mv *.tar.gz artifacts mv *.tar.gz artifacts
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v4
with: with:
name: yay_${{ steps.tags.outputs.arch }} name: yay_${{ steps.tags.outputs.arch }}
path: artifacts path: artifacts
create_release: create_release:
name: Create release from this build name: Create release from this build
needs: [build-releases] needs: [build-releases]
runs-on: ubuntu-latest runs-on: ubuntu-latest
permissions:
contents: write
steps: steps:
- name: Checkout - name: Checkout
uses: actions/checkout@v4 uses: actions/checkout@v2
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Read info - name: Read info
id: tags id: tags
shell: bash
run: | run: |
echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_OUTPUT echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\/v/}
echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT echo ::set-output name=TAG::${GITHUB_REF/refs\/tags\//}
- uses: actions/download-artifact@v2
- uses: actions/download-artifact@v4
with: with:
pattern: yay_* name: yay_x86_64
merge-multiple: true - uses: actions/download-artifact@v2
with:
name: yay_armv7h
- uses: actions/download-artifact@v2
with:
name: yay_armv6h
- uses: actions/download-artifact@v2
with:
name: yay_aarch64
- name: Create Release - name: Create Release
id: create_release
uses: actions/create-release@master
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: | with:
gh release create ${{ steps.tags.outputs.tag }} \ tag_name: ${{ steps.tags.outputs.tag }}
--title "${{ steps.tags.outputs.tag }}" \ release_name: ${{ steps.tags.outputs.tag }}
--generate-notes \ draft: false
./yay_${{ steps.tags.outputs.version }}_*.tar.gz prerelease: false
- name: Upload x86_64 asset
id: upload-release-asset-x86_64
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yay_${{ steps.tags.outputs.version }}_x86_64.tar.gz
asset_name: yay_${{ steps.tags.outputs.version }}_x86_64.tar.gz
asset_content_type: application/tar+gzip
- name: Upload armv7h asset
id: upload-release-asset-armv7h
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yay_${{ steps.tags.outputs.version }}_armv7h.tar.gz
asset_name: yay_${{ steps.tags.outputs.version }}_armv7h.tar.gz
asset_content_type: application/tar+gzip
- name: Upload armv6h asset
id: upload-release-asset-armv6h
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yay_${{ steps.tags.outputs.version }}_armv6h.tar.gz
asset_name: yay_${{ steps.tags.outputs.version }}_armv6h.tar.gz
asset_content_type: application/tar+gzip
- name: Upload aarch64 asset
id: upload-release-asset-aarch64
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./yay_${{ steps.tags.outputs.version }}_aarch64.tar.gz
asset_name: yay_${{ steps.tags.outputs.version }}_aarch64.tar.gz
asset_content_type: application/tar+gzip
- name: Release Notary Action - name: Release Notary Action
uses: docker://aevea/release-notary:latest uses: docker://aevea/release-notary:0.9.3
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/tag.yml

@ -0,0 +1,21 @@
name: Tag
on:
push:
paths-ignore:
- ".github/**"
- "README.md"
- ".gitignore"
branches:
- master
jobs:
tag:
name: Tag release
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: mathieudutour/github-tag-action@v4.5
id: tag_version
with:
github_token: ${{ secrets.PAT }}
tag_prefix: "v"

@ -1,39 +0,0 @@
name: Test against pacman-git
on:
pull_request:
paths-ignore:
- "doc/**"
- "**/*.po"
- "README.md"
- ".gitignore"
jobs:
build:
name: Lint and test yay (-git)
runs-on: ubuntu-latest
container:
image: ghcr.io/jguer/yay-builder:latest
steps:
- uses: actions/checkout@v4
- uses: actions/cache@v3
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
- uses: actions/cache@v3
with:
path: /home/runner/work/yay/yay/pacman-git
key: ${{ runner.os }}-pacman-${{ hashFiles('/home/runner/work/yay/yay/pacman-git/PKGBUILD') }}
restore-keys: |
${{ runner.os }}-pacman-
- name: checkout pacman-git
run: |
git -C ./pacman-git pull || git clone https://aur.archlinux.org/pacman-git
useradd github
echo 'github ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
chmod -R 777 pacman-git
su github -c 'cd pacman-git; yes | makepkg -i --nocheck'
- name: Run Build and Tests with pacman-git
run: |
make test

@@ -1,5 +1,13 @@
-name: Test against pacman
+name: Build
+# This workflow is triggered on pushes to the repository.
on:
+  push:
+    paths-ignore:
+      - "doc/**"
+      - "README.md"
+      - ".gitignore"
+    branches-ignore:
+      - "master"
  pull_request:
jobs:
@@ -7,38 +15,27 @@
    name: Lint and test yay
    runs-on: ubuntu-latest
    container:
-      image: ghcr.io/jguer/yay-builder:latest
+      image: jguer/yay-builder:latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/cache@v3
+      - name: Checkout
+        uses: actions/checkout@v2
+      - uses: actions/cache@v1
        with:
          path: ~/go/pkg/mod
          key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
          restore-keys: |
            ${{ runner.os }}-go-
      - name: Lint
-        env:
-          GOFLAGS: -buildvcs=false -tags=next
-        run: /app/bin/golangci-lint run -v ./...
+        run: /app/bin/golangci-lint run ./...
      - name: Run Build and Tests
        run: make test
+      - name: checkout pacman-git
-      - name: Run Integration Tests
-        continue-on-error: true
        run: |
-          useradd -m yay &&
-          chown -R yay:yay . &&
-          cp -r ~/go/ /home/yay/go/ &&
-          chown -R yay:yay /home/yay/go/ &&
-          su yay -c "make test-integration"
+          pacman -Sy --noconfirm sudo base-devel
+          git clone https://aur.archlinux.org/pacman-git
+          useradd github
+          chmod -R 777 pacman-git
+          echo 'github ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers
+          su github -c 'cd pacman-git; yes | makepkg -si --nocheck'
-      - name: Build yay Artifact
-        env:
-          GOFLAGS: -buildvcs=false -tags=next
-        run: make
-      - name: Upload yay Artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: yay
-          path: ./yay
-          if-no-files-found: error
-          overwrite: true
+      - name: Run Build and Tests with pacman-git
+        run: make test

.gitignore

@@ -6,7 +6,6 @@
# Folders
_obj
_test
-.vscode
*.cgo1.go
*.cgo2.c
@@ -28,7 +27,3 @@ qemu-*
*.pot
*.po~
*.pprof
-node_modules/
-xgotext
-.devcontainer/

@ -1,31 +1,4 @@
version: "2" linters-settings:
run:
go: "1.20"
linters:
default: none
enable:
- bodyclose
- dogsled
- dupl
- errcheck
- errorlint
- gochecknoinits
- gocritic
- goprintffuncname
- gosec
- govet
- ineffassign
- lll
- misspell
- nakedret
- noctx
- nolintlint
- staticcheck
- unconvert
- unparam
- unused
- whitespace
settings:
dupl: dupl:
threshold: 100 threshold: 100
funlen: funlen:
@ -43,52 +16,75 @@ linters:
- style - style
gocyclo: gocyclo:
min-complexity: 15 min-complexity: 15
goimports:
local-prefixes: github.com/Jguer/yay/v11
gomnd:
settings:
mnd:
# don't include the "operation" and "assign"
checks: argument,case,condition,return
govet:
check-shadowing: true
lll: lll:
line-length: 140 line-length: 140
maligned:
suggest-new: true
misspell: misspell:
locale: US locale: US
nolintlint:
require-explanation: false linters:
require-specific: false # please, do not use `enable-all`: it's deprecated and will be removed soon.
allow-unused: false # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint
exclusions: disable-all: true
generated: lax enable:
presets: - bodyclose
- comments - deadcode
- common-false-positives - depguard
- legacy
- std-error-handling
rules:
- linters:
- dupl - dupl
- errcheck - errcheck
- errorlint
- gochecknoinits - gochecknoinits
- gocritic - gocritic
- godot
- govet
- lll
- revive
- staticcheck
- wsl
path: (.+)_test.go
- path: (.+)\.go$
text: G204
paths:
- third_party$
- builtin$
- examples$
formatters:
enable:
- gofmt - gofmt
- goimports - goimports
settings: - goprintffuncname
goimports: - gosec
local-prefixes: - gosimple
- github.com/Jguer/yay/v12 - govet
exclusions: - ineffassign
generated: lax - lll
paths: - misspell
- third_party$ - nakedret
- builtin$ - prealloc
- examples$ - revive
- rowserrcheck
- staticcheck
- structcheck
- stylecheck
- typecheck
- unconvert
- unparam
- unused
- varcheck
- whitespace
- wsl
- godot
run:
issues:
exclude-rules:
- path: _test\.go
linters:
- lll
- revive
- wsl
- govet
- godot
- errcheck
- stylecheck
- dupl
- gocritic
- gochecknoinits
exclude:
- G204

@@ -1,29 +1,31 @@
default_stages: [commit]
repos:
-  - repo: https://github.com/dnephin/pre-commit-golang
-    rev: v0.5.1
+  - repo: git://github.com/dnephin/pre-commit-golang
+    rev: v0.3.5
    hooks:
      - id: go-fmt
+      - id: go-imports
+        args: [-local=github.com/Jguer/yay/v11/]
      - id: golangci-lint
      - id: go-unit-tests
      - id: go-build
  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v4.0.0-alpha.8 # Use the sha or tag you want to point at
+    rev: v2.2.1 # Use the sha or tag you want to point at
    hooks:
      - id: prettier
  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0 # Use the ref you want to point at
+    rev: v3.4.0 # Use the ref you want to point at
    hooks:
      - id: trailing-whitespace
      - id: check-json
      - id: check-yaml
      - id: check-added-large-files
-  - repo: https://github.com/commitizen-tools/commitizen
-    rev: v3.15.0
+  - repo: https://github.com/Woile/commitizen
+    rev: v2.17.6
    hooks:
      - id: commitizen
        stages: [commit-msg]

@ -1,7 +0,0 @@
{
"go.lintTool": "golangci-lint",
"gopls": {
"formatting.gofumpt": true,
"formatting.local": "github.com/Jguer/yay/v12"
}
}

@@ -1,5 +1,5 @@
-FROM ghcr.io/jguer/yay-builder:latest
-LABEL maintainer="Jguer,docker@jguer.space"
+FROM jguer/yay-builder:latest
+LABEL maintainer="Jguer,joaogg3 at google mail"
ARG VERSION
ARG PREFIX

@@ -10,7 +10,7 @@ GO ?= go
PKGNAME := yay
PREFIX := /usr/local
-MAJORVERSION := 12
+MAJORVERSION := 11
MINORVERSION := 0
PATCHVERSION := 0
VERSION ?= ${MAJORVERSION}.${MINORVERSION}.${PATCHVERSION}
@@ -18,15 +18,14 @@ VERSION ?= ${MAJORVERSION}.${MINORVERSION}.${PATCHVERSION}
LOCALEDIR := po
SYSTEMLOCALEPATH := $(PREFIX)/share/locale/
-# ls -1 po | sed -e 's/\.po$//' | paste -sd " "
-LANGS := ca cs de en es eu fr_FR he id it_IT ja ko pl_PL pt_BR pt ru_RU ru sv tr uk zh_CN zh_TW
+LANGS := pt pt_BR en es eu fr_FR ja pl_PL ru_RU zh_CN ko
POTFILE := default.pot
POFILES := $(addprefix $(LOCALEDIR)/,$(addsuffix .po,$(LANGS)))
MOFILES := $(POFILES:.po=.mo)
FLAGS ?= -trimpath -mod=readonly -modcacherw
EXTRA_FLAGS ?= -buildmode=pie
-LDFLAGS := -X "main.yayVersion=${VERSION}" -X "main.localePath=${SYSTEMLOCALEPATH}" -linkmode=external -compressdwarf=false
+LDFLAGS := -X "main.yayVersion=${VERSION}" -X "main.localePath=${SYSTEMLOCALEPATH}" -linkmode=external
RELEASE_DIR := ${PKGNAME}_${VERSION}_${ARCH}
PACKAGE := $(RELEASE_DIR).tar.gz
@@ -52,10 +51,6 @@ test_lint: test lint
test:
    $(GO) test -race -covermode=atomic $(FLAGS) ./...
-.PHONY: test-integration
-test-integration:
-    $(GO) test -tags=integration $(FLAGS) ./...
.PHONY: build
build: $(BIN)
@@ -69,7 +64,7 @@ docker-release-all:
    make docker-release-aarch64 ARCH=aarch64
docker-release:
-    docker create --name yay-$(ARCH) yay:${ARCH} /bin/sh
+    docker create --name yay-$(ARCH) yay:${ARCH}
    docker cp yay-$(ARCH):/app/${PACKAGE} $(PACKAGE)
    docker container rm yay-$(ARCH)
@@ -82,7 +77,9 @@ docker-build:
.PHONY: lint
lint:
-    GOFLAGS="$(FLAGS)" golangci-lint run ./...
+    $(GO) vet $(FLAGS) ./...
+    @test -z "$$(gofmt -l $(SOURCES))" || (echo "Files need to be linted. Use make fmt" && false)
+    golangci-lint run ./...
.PHONY: fmt
fmt:
@@ -123,9 +120,8 @@ $(PACKAGE): $(BIN) $(RELEASE_DIR) ${MOFILES}
locale:
    xgotext -in . -out po
-    mv po/default.pot po/en.po
    for lang in ${LANGS}; do \
-        test -f po/$$lang.po || msginit --no-translator -l po/$$lang.po -i po/${POTFILE} -o po/$$lang.po; \
+        test -f po/$$lang.po || msginit -l po/$$lang.po -i po/${POTFILE} -o po/$$lang.po \
        msgmerge -U po/$$lang.po po/${POTFILE}; \
        touch po/$$lang.po; \
    done

README.md

@@ -19,8 +19,6 @@ Yet Another Yogurt - An AUR Helper Written in Go
- Narrow search (`yay linux header` will first search `linux` and then narrow on `header`)
- Find matching package providers during search and allow selection
- Remove make dependencies at the end of the build process
-- Build local PKGBUILDs with AUR dependencies
-- Un/Vote for packages

[![asciicast](https://asciinema.org/a/399431.svg)](https://asciinema.org/a/399431)
@@ -30,47 +28,32 @@ Yet Another Yogurt - An AUR Helper Written in Go
If you are migrating from another AUR helper, you can simply install Yay with that helper.

-> [!WARNING]
-> We are using `sudo` in these examples, you can switch that out for a different privilege escalation tool.

### Source

The initial installation of Yay can be done by cloning the PKGBUILD and
building with makepkg:

-We make sure we have the `base-devel` package group installed.
+Before you begin, make sure you have the `base-devel` package group installed.

```sh
-sudo pacman -S --needed git base-devel
+pacman -S --needed git base-devel
git clone https://aur.archlinux.org/yay.git
cd yay
makepkg -si
```

-If you want to do all of this at once, we can chain the commands like so:
-```sh
-sudo pacman -S --needed git base-devel && git clone https://aur.archlinux.org/yay.git && cd yay && makepkg -si
-```

### Binary

If you do not want to compile yay yourself you can use the builds generated by
GitHub Actions.

```sh
-sudo pacman -S --needed git base-devel
+pacman -S --needed git base-devel
git clone https://aur.archlinux.org/yay-bin.git
cd yay-bin
makepkg -si
```

-If you want to do all of this at once, we can chain the commands like so:
-```sh
-sudo pacman -S --needed git base-devel && git clone https://aur.archlinux.org/yay-bin.git && cd yay-bin && makepkg -si
-```

### Other distributions

If you're using Manjaro or [another distribution that packages `yay`](https://repology.org/project/yay/versions)
@@ -79,8 +62,8 @@ you can simply install yay using pacman (as root):

```sh
pacman -S --needed git base-devel yay
```

-> [!WARNING]
-> distributions sometimes lag updating yay on their repositories.
+⚠️ distributions sometimes lag updating yay on their repositories.

## First Use
@@ -98,64 +81,87 @@ pacman -S --needed git base-devel yay

## Examples of Custom Operations

| Command | Description |
| ------- | ----------- |
| `yay` | Alias to `yay -Syu`. |
| `yay <Search Term>` | Present package-installation selection menu. |
-| `yay -Bi <dir>` | Install dependencies and build a local PKGBUILD. |
+| `yay -Y --combinedupgrade --save` | Make combined upgrade the default mode. |
-| `yay -G <AUR Package>` | Download PKGBUILD from ABS or AUR. (yay v12.0+) |
-| `yay -Gp <AUR Package>` | Print to stdout PKGBUILD from ABS or AUR. |
| `yay -Ps` | Print system statistics. |
+| `yay -Yc` | Clean unneeded dependencies. |
+| `yay -G <AUR Package>` | Download PKGBUILD from ABS or AUR. |
+| `yay -Gp <AUR Package>` | Print to stdout PKGBUILD from ABS or AUR. |
+| `yay -Y --gendb` | Generate development package database used for devel update. |
| `yay -Syu --devel` | Perform system upgrade, but also check for development package updates. |
| `yay -Syu --timeupdate` | Perform system upgrade and use PKGBUILD modification time (not version number) to determine update. |
-| `yay -Wu <AUR Package>` | Unvote for package (Requires setting `AUR_USERNAME` and `AUR_PASSWORD` environment variables) (yay v11.3+) |
-| `yay -Wv <AUR Package>` | Vote for package (Requires setting `AUR_USERNAME` and `AUR_PASSWORD` environment variables). (yay v11.3+) |
-| `yay -Y --combinedupgrade --save` | Make combined upgrade the default mode. |
-| `yay -Y --gendb` | Generate development package database used for devel update. |
-| `yay -Yc` | Clean unneeded dependencies. |
## Frequently Asked Questions

-- **Yay does not display colored output. How do I fix it?**
+- Yay does not display colored output. How do I fix it?

  Make sure you have the `Color` option in your `/etc/pacman.conf`
  (see issue [#123](https://github.com/Jguer/yay/issues/123)).

-- **Sometimes diffs are printed to the terminal, and other times they are paged via less. How do I fix this?**
-  Yay uses `git diff` to display diffs, which by default tells less not to
-  page if the output can fit into one terminal length. This behavior can be
-  overridden by exporting your own flags (`export LESS=SRX`).
-- **Yay is not asking me to edit PKGBUILDS, and I don't like the diff menu! What can I do?**
-  `yay --editmenu --diffmenu=false --save`
-- **How can I tell Yay to act only on AUR packages, or only on repo packages?**
-  `yay -{OPERATION} --aur`
-  `yay -{OPERATION} --repo`
-- **A `Flagged Out Of Date AUR Packages` message is displayed. Why doesn't Yay update them?**
-  This message does not mean that updated AUR packages are available. It means
-  the packages have been flagged out of date on the AUR, but
-  their maintainers have not yet updated the `PKGBUILD`s
-  (see [outdated AUR packages](https://wiki.archlinux.org/index.php/Arch_User_Repository#Foo_in_the_AUR_is_outdated.3B_what_should_I_do.3F)).
-- **Yay doesn't install dependencies added to a PKGBUILD during installation.**
-  Yay resolves all dependencies ahead of time. You are free to edit the
-  PKGBUILD in any way, but any problems you cause are your own and should not be
-  reported unless they can be reproduced with the original PKGBUILD.
-- **I know my `-git` package has updates but yay doesn't offer to update it**
-  Yay uses a hash cache for development packages. Normally it is updated at the end of the package install with the message `Found git repo`.
-  If you transition between aur helpers and did not install the devel package using yay at some point, it is possible it never got added to the cache. `yay -Y --gendb` will fix the current version of every devel package and start checking from there.
-- **I want to help out!**
-  Check [CONTRIBUTING.md](./CONTRIBUTING.md) for more information.
+- Yay is not prompting to skip packages during system upgrade.
+  The default behavior was changed after
+  [v8.918](https://github.com/Jguer/yay/releases/tag/v8.918)
+  (see [3bdb534](https://github.com/Jguer/yay/commit/3bdb5343218d99d40f8a449b887348611f6bdbfc)
+  and issue [#554](https://github.com/Jguer/yay/issues/554)).
+  To restore the package-skip behavior use `--combinedupgrade` (make
+  it permanent by appending `--save`). Note: skipping packages will leave your
+  system in a
+  [partially-upgraded state](https://wiki.archlinux.org/index.php/System_maintenance#Partial_upgrades_are_unsupported).
+- Sometimes diffs are printed to the terminal, and other times they are paged via less. How do I fix this?
+  Yay uses `git diff` to display diffs, which by default tells less not to
+  page if the output can fit into one terminal length. This behavior can be
+  overridden by exporting your own flags (`export LESS=SRX`).
+- Yay is not asking me to edit PKGBUILDS, and I don't like the diff menu! What can I do?
+  `yay --editmenu --nodiffmenu --save`
+- How can I tell Yay to act only on AUR packages, or only on repo packages?
+  `yay -{OPERATION} --aur`
+  `yay -{OPERATION} --repo`
+- An `Out Of Date AUR Packages` message is displayed. Why doesn't Yay update them?
+  This message does not mean that updated AUR packages are available. It means
+  the packages have been flagged out of date on the AUR, but
+  their maintainers have not yet updated the `PKGBUILD`s
+  (see [outdated AUR packages](https://wiki.archlinux.org/index.php/Arch_User_Repository#Foo_in_the_AUR_is_outdated.3B_what_should_I_do.3F)).
+- Yay doesn't install dependencies added to a PKGBUILD during installation.
+  Yay resolves all dependencies ahead of time. You are free to edit the
+  PKGBUILD in any way, but any problems you cause are your own and should not be
+  reported unless they can be reproduced with the original PKGBUILD.
+- I know my `-git` package has updates but yay doesn't offer to update it
+  Yay uses an hash cache for development packages. Normally it is updated at the end of the package install with the message `Found git repo`.
+  If you transition between aur helpers and did not install the devel package using yay at some point, it is possible it never got added to the cache. `yay -Y --gendb` will fix the current version of every devel package and start checking from there.
+- I want to help out!
+  Check [CONTRIBUTING.md](./CONTRIBUTING.md) for more information.
+- What settings do you use?
+  ```sh
+  yay -Y --devel --combinedupgrade --batchinstall --save
+  ```
+  Pacman conf options:
+  ```conf
+  UseSyslog
+  Color
+  CheckSpace
+  VerbosePkgLists
+  ```

## Support
@@ -172,14 +178,14 @@ tools.

## Images

-<p align="center">
-  <img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay.png" width="42%">
-  <img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay-s.png" width="42%">
+<p float="left">
+  <img src="https://rawcdn.githack.com/Jguer/jguer.github.io/77647f396cb7156fd32e30970dbeaf6d6dc7f983/yay/yay.png" width="42%"/>
+  <img src="https://rawcdn.githack.com/Jguer/jguer.github.io/77647f396cb7156fd32e30970dbeaf6d6dc7f983/yay/yay-s.png" width="42%"/>
</p>
-<p align="center">
-  <img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay-y.png" width="42%">
-  <img src="https://raw.githubusercontent.com/Jguer/jguer.github.io/refs/heads/master/yay/yay-ps.png" width="42%">
+<p float="left">
+  <img src="https://rawcdn.githack.com/Jguer/jguer.github.io/77647f396cb7156fd32e30970dbeaf6d6dc7f983/yay/yay-y.png" width="42%"/>
+  <img src="https://rawcdn.githack.com/Jguer/jguer.github.io/77647f396cb7156fd32e30970dbeaf6d6dc7f983/yay/yay-ps.png" width="42%"/>
</p>

### Other AUR helpers/tools

@ -1,13 +0,0 @@
# Security Policy
Thank you for helping keep yay secure!
## Supported Versions
We only provide security updates and support for the latest released version of yay. Please ensure you are using the most up-to-date version before reporting vulnerabilities.
## Reporting a Vulnerability
If you discover a security vulnerability, please email us at [security@jguer.space](mailto:security@jguer.space). We will respond as quickly as possible and coordinate a fix.
We appreciate responsible disclosure and your help in making this project safe for everyone.

aur_source.go

@ -0,0 +1,121 @@
package main
import (
"context"
"fmt"
"path/filepath"
"runtime"
"sync"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v11/pkg/dep"
"github.com/Jguer/yay/v11/pkg/multierror"
"github.com/Jguer/yay/v11/pkg/settings/exe"
"github.com/Jguer/yay/v11/pkg/stringset"
"github.com/Jguer/yay/v11/pkg/text"
)
type ErrDownloadSource struct {
inner error
pkgName string
errOut string
}
func (e ErrDownloadSource) Error() string {
return fmt.Sprintln(gotext.Get("error downloading sources: %s", text.Cyan(e.pkgName)),
"\n\t context:", e.inner.Error(), "\n\t", e.errOut)
}
func (e *ErrDownloadSource) Unwrap() error {
return e.inner
}
func downloadPKGBUILDSource(ctx context.Context, cmdBuilder exe.ICmdBuilder, dest,
base string, incompatible stringset.StringSet) (err error) {
dir := filepath.Join(dest, base)
args := []string{"--verifysource", "-Ccf"}
if incompatible.Get(base) {
args = append(args, "--ignorearch")
}
err = cmdBuilder.Show(
cmdBuilder.BuildMakepkgCmd(ctx, dir, args...))
if err != nil {
return ErrDownloadSource{inner: err, pkgName: base, errOut: ""}
}
return nil
}
func downloadPKGBUILDSourceWorker(ctx context.Context, wg *sync.WaitGroup, dest string,
cBase <-chan string, valOut chan<- string, errOut chan<- error,
cmdBuilder exe.ICmdBuilder, incompatible stringset.StringSet) {
for base := range cBase {
err := downloadPKGBUILDSource(ctx, cmdBuilder, dest, base, incompatible)
if err != nil {
errOut <- ErrDownloadSource{inner: err, pkgName: base, errOut: ""}
} else {
valOut <- base
}
}
wg.Done()
}
func downloadPKGBUILDSourceFanout(ctx context.Context, cmdBuilder exe.ICmdBuilder, dest string,
bases []dep.Base, incompatible stringset.StringSet) error {
if len(bases) == 1 {
return downloadPKGBUILDSource(ctx, cmdBuilder, dest, bases[0].Pkgbase(), incompatible)
}
var (
numOfWorkers = runtime.NumCPU()
wg = &sync.WaitGroup{}
c = make(chan string)
fanInChanValues = make(chan string)
fanInChanErrors = make(chan error)
)
go func() {
for _, base := range bases {
c <- base.Pkgbase()
}
close(c)
}()
// Launch Workers
wg.Add(numOfWorkers)
for s := 0; s < numOfWorkers; s++ {
go downloadPKGBUILDSourceWorker(ctx, wg, dest, c,
fanInChanValues, fanInChanErrors, cmdBuilder, incompatible)
}
go func() {
wg.Wait()
close(fanInChanValues)
close(fanInChanErrors)
}()
returnErr := multierror.MultiError{}
receiver:
for {
select {
case _, ok := <-fanInChanValues:
if !ok {
break receiver
}
case err, ok := <-fanInChanErrors:
if !ok {
break receiver
}
returnErr.Add(err)
}
}
return returnErr.Return()
}
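
The fan-out/fan-in used by `downloadPKGBUILDSourceFanout` above is easier to see in isolation. Below is a minimal, self-contained sketch of the same shape (illustrative only, not part of the diff; `process` is a placeholder for `downloadPKGBUILDSource`):

```go
package main

import (
	"fmt"
	"runtime"
	"sync"
)

// process stands in for downloadPKGBUILDSource in this sketch.
func process(base string) error {
	fmt.Println("verifying sources for", base)
	return nil
}

func main() {
	bases := []string{"yay", "yay-bin", "yay-git"}

	in := make(chan string)   // work queue (fan-out)
	vals := make(chan string) // successful bases (fan-in)
	errs := make(chan error)  // failures (fan-in)

	// Feed the queue and close it so the workers' range loops terminate.
	go func() {
		for _, b := range bases {
			in <- b
		}
		close(in)
	}()

	// Fan out: one worker per CPU, mirroring numOfWorkers = runtime.NumCPU().
	var wg sync.WaitGroup
	for i := 0; i < runtime.NumCPU(); i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for b := range in {
				if err := process(b); err != nil {
					errs <- err
				} else {
					vals <- b
				}
			}
		}()
	}

	// Close both result channels once every worker has finished.
	go func() {
		wg.Wait()
		close(vals)
		close(errs)
	}()

	// Fan in: drain both channels until each is closed.
	for vals != nil || errs != nil {
		select {
		case v, ok := <-vals:
			if !ok {
				vals = nil
				continue
			}
			fmt.Println("done:", v)
		case err, ok := <-errs:
			if !ok {
				errs = nil
				continue
			}
			fmt.Println("error:", err)
		}
	}
}
```

The diff above drains with a labeled `break receiver` loop and aggregates failures into a `multierror.MultiError`; the nil-channel idiom here is just a compact equivalent for the sketch.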

aur_source_test.go

@ -0,0 +1,154 @@
package main
import (
"context"
"os/exec"
"sync/atomic"
"testing"
"github.com/stretchr/testify/assert"
"github.com/Jguer/aur"
"github.com/Jguer/yay/v11/pkg/dep"
"github.com/Jguer/yay/v11/pkg/multierror"
"github.com/Jguer/yay/v11/pkg/settings/exe"
"github.com/Jguer/yay/v11/pkg/stringset"
)
type TestMakepkgBuilder struct {
exe.ICmdBuilder
parentBuilder *exe.CmdBuilder
test *testing.T
passes uint32
want string
wantDir string
showError error
}
func (z *TestMakepkgBuilder) BuildMakepkgCmd(ctx context.Context, dir string, extraArgs ...string) *exec.Cmd {
cmd := z.parentBuilder.BuildMakepkgCmd(ctx, dir, extraArgs...)
if z.want != "" {
assert.Contains(z.test, cmd.String(), z.want)
}
if z.wantDir != "" {
assert.Equal(z.test, z.wantDir, cmd.Dir)
}
atomic.AddUint32(&z.passes, 1)
return cmd
}
func (z *TestMakepkgBuilder) Show(cmd *exec.Cmd) error {
return z.showError
}
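
`TestMakepkgBuilder` only has to implement the two methods the tests exercise because it embeds the `exe.ICmdBuilder` interface: the embedded (nil) interface satisfies the rest of the method set at compile time. A tiny illustrative sketch of that embedding trick, with made-up names unrelated to the yay codebase:

```go
package main

import "fmt"

// Runner is a stand-in interface with more methods than the test cares about.
type Runner interface {
	Run(cmd string) error
	Capture(cmd string) (string, error)
}

// fakeRunner embeds the interface: Capture is "provided" by the nil embedded
// value (it would panic if called), while Run is overridden below.
type fakeRunner struct {
	Runner
	ran []string // records every command passed to Run
}

func (f *fakeRunner) Run(cmd string) error {
	f.ran = append(f.ran, cmd)
	return nil
}

func main() {
	f := &fakeRunner{}
	var r Runner = f // compiles: the embedded interface fills in Capture
	_ = r.Run("makepkg --verifysource")
	fmt.Println(f.ran) // [makepkg --verifysource]
}
```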
// GIVEN 1 package
// WHEN downloadPKGBUILDSource is called
// THEN 1 call should be made to makepkg with the specified parameters and dir
func Test_downloadPKGBUILDSource(t *testing.T) {
t.Parallel()
cmdBuilder := &TestMakepkgBuilder{
parentBuilder: &exe.CmdBuilder{MakepkgConfPath: "/etc/not.conf", MakepkgFlags: []string{"--nocheck"}, MakepkgBin: "makepkg"},
test: t,
want: "makepkg --nocheck --config /etc/not.conf --verifysource -Ccf",
wantDir: "/tmp/yay-bin",
}
err := downloadPKGBUILDSource(context.TODO(), cmdBuilder, "/tmp", "yay-bin", stringset.Make())
assert.NoError(t, err)
assert.Equal(t, 1, int(cmdBuilder.passes))
}
// GIVEN 1 package
// WHEN downloadPKGBUILDSource is called
// THEN 1 call should be made to makepkg which should return error
func Test_downloadPKGBUILDSourceError(t *testing.T) {
t.Parallel()
cmdBuilder := &TestMakepkgBuilder{
parentBuilder: &exe.CmdBuilder{MakepkgConfPath: "/etc/not.conf", MakepkgFlags: []string{"--nocheck"}, MakepkgBin: "makepkg"},
test: t,
want: "makepkg --nocheck --config /etc/not.conf --verifysource -Ccf",
wantDir: "/tmp/yay-bin",
showError: &exec.ExitError{},
}
err := downloadPKGBUILDSource(context.TODO(), cmdBuilder, "/tmp", "yay-bin", stringset.Make())
assert.Error(t, err)
assert.EqualError(t, err, "error downloading sources: \x1b[36myay-bin\x1b[0m \n\t context: <nil> \n\t \n")
}
// GIVEN 5 packages
// WHEN downloadPKGBUILDSourceFanout is called
// THEN 5 calls should be made to makepkg
func Test_downloadPKGBUILDSourceFanout(t *testing.T) {
t.Parallel()
cmdBuilder := &TestMakepkgBuilder{
parentBuilder: &exe.CmdBuilder{
MakepkgConfPath: "/etc/not.conf",
MakepkgFlags: []string{"--nocheck"}, MakepkgBin: "makepkg",
},
test: t,
}
bases := []dep.Base{
{&aur.Pkg{PackageBase: "yay"}},
{&aur.Pkg{PackageBase: "yay-bin"}},
{&aur.Pkg{PackageBase: "yay-git"}},
{&aur.Pkg{PackageBase: "yay-v11"}},
{&aur.Pkg{PackageBase: "yay-v12"}},
}
err := downloadPKGBUILDSourceFanout(context.TODO(), cmdBuilder, "/tmp", bases, stringset.Make())
assert.NoError(t, err)
assert.Equal(t, 5, int(cmdBuilder.passes))
}
// GIVEN 1 package
// WHEN downloadPKGBUILDSourceFanout is called
// THEN 1 calls should be made to makepkg without concurrency
func Test_downloadPKGBUILDSourceFanoutNoCC(t *testing.T) {
t.Parallel()
cmdBuilder := &TestMakepkgBuilder{
parentBuilder: &exe.CmdBuilder{
MakepkgConfPath: "/etc/not.conf",
MakepkgFlags: []string{"--nocheck"}, MakepkgBin: "makepkg",
},
test: t,
}
bases := []dep.Base{
{&aur.Pkg{PackageBase: "yay"}},
}
err := downloadPKGBUILDSourceFanout(context.TODO(), cmdBuilder, "/tmp", bases, stringset.Make())
assert.NoError(t, err)
assert.Equal(t, 1, int(cmdBuilder.passes))
}
// GIVEN 5 packages
// WHEN downloadPKGBUILDSourceFanout is called
// THEN 5 calls should be made to makepkg
func Test_downloadPKGBUILDSourceFanoutError(t *testing.T) {
t.Parallel()
cmdBuilder := &TestMakepkgBuilder{
parentBuilder: &exe.CmdBuilder{
MakepkgConfPath: "/etc/not.conf",
MakepkgFlags: []string{"--nocheck"}, MakepkgBin: "makepkg",
},
test: t,
showError: &exec.ExitError{},
}
bases := []dep.Base{
{&aur.Pkg{PackageBase: "yay"}},
{&aur.Pkg{PackageBase: "yay-bin"}},
{&aur.Pkg{PackageBase: "yay-git"}},
{&aur.Pkg{PackageBase: "yay-v11"}},
{&aur.Pkg{PackageBase: "yay-v12"}},
}
err := downloadPKGBUILDSourceFanout(context.TODO(), cmdBuilder, "/tmp", bases, stringset.Make())
assert.Error(t, err)
assert.Equal(t, 5, int(cmdBuilder.passes))
assert.Len(t, err.(*multierror.MultiError).Errors, 5)
}

@@ -1,15 +1,11 @@
-FROM docker.io/ljmf00/archlinux:devel
-LABEL maintainer="Jguer,docker@jguer.space"
+FROM lopsided/archlinux:latest

ENV GO111MODULE=on
WORKDIR /app

-RUN sed -i '/^\[community\]/,/^\[/ s/^/#/' /etc/pacman.conf

COPY go.mod .

-RUN pacman-key --init && pacman -Sy && pacman -S --overwrite=* --noconfirm archlinux-keyring && \
-    pacman -Su --overwrite=* --needed --noconfirm pacman doxygen meson asciidoc go git gcc make sudo base-devel && \
+RUN pacman -Syu --overwrite=* --needed --noconfirm go fakeroot binutils gcc make git gettext && \
    rm -rfv /var/cache/pacman/* /var/lib/pacman/sync/* && \
-    curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s v2.1.5 && \
+    curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s v1.42.0 && \
    go mod download

clean.go

@ -2,75 +2,68 @@ package main
import ( import (
"context" "context"
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"github.com/Jguer/aur"
mapset "github.com/deckarep/golang-set/v2"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v12/pkg/runtime" "github.com/Jguer/yay/v11/pkg/dep"
"github.com/Jguer/yay/v12/pkg/settings" "github.com/Jguer/yay/v11/pkg/query"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/parser" "github.com/Jguer/yay/v11/pkg/settings/parser"
"github.com/Jguer/yay/v11/pkg/stringset"
"github.com/Jguer/yay/v11/pkg/text"
) )
// CleanDependencies removes all dangling dependencies in system. // CleanDependencies removes all dangling dependencies in system.
func cleanDependencies(ctx context.Context, cfg *settings.Configuration, func cleanDependencies(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor, removeOptional bool) error {
cmdBuilder exe.ICmdBuilder, cmdArgs *parser.Arguments, dbExecutor db.Executor,
removeOptional bool,
) error {
hanging := hangingPackages(removeOptional, dbExecutor) hanging := hangingPackages(removeOptional, dbExecutor)
if len(hanging) != 0 { if len(hanging) != 0 {
return cleanRemove(ctx, cfg, cmdBuilder, cmdArgs, hanging) return cleanRemove(ctx, cmdArgs, hanging)
} }
return nil return nil
} }
// CleanRemove sends a full removal command to pacman with the pkgName slice. // CleanRemove sends a full removal command to pacman with the pkgName slice.
func cleanRemove(ctx context.Context, cfg *settings.Configuration, func cleanRemove(ctx context.Context, cmdArgs *parser.Arguments, pkgNames []string) error {
cmdBuilder exe.ICmdBuilder, cmdArgs *parser.Arguments, pkgNames []string,
) error {
if len(pkgNames) == 0 { if len(pkgNames) == 0 {
return nil return nil
} }
arguments := cmdArgs.CopyGlobal() arguments := cmdArgs.CopyGlobal()
if err := arguments.AddArg("R", "s", "u"); err != nil { _ = arguments.AddArg("R")
return err
}
arguments.AddTarget(pkgNames...) arguments.AddTarget(pkgNames...)
return cmdBuilder.Show( return config.Runtime.CmdBuilder.Show(
cmdBuilder.BuildPacmanCmd(ctx, config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
arguments, cfg.Mode, settings.NoConfirm)) arguments, config.Runtime.Mode, settings.NoConfirm))
} }
func syncClean(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error { func syncClean(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
keepInstalled := false keepInstalled := false
keepCurrent := false keepCurrent := false
_, removeAll, _ := cmdArgs.GetArg("c", "clean") _, removeAll, _ := cmdArgs.GetArg("c", "clean")
for _, v := range run.PacmanConf.CleanMethod { for _, v := range config.Runtime.PacmanConf.CleanMethod {
switch v { if v == "KeepInstalled" {
case "KeepInstalled":
keepInstalled = true keepInstalled = true
case "KeepCurrent": } else if v == "KeepCurrent" {
keepCurrent = true keepCurrent = true
} }
} }
if run.Cfg.Mode.AtLeastRepo() { if config.Runtime.Mode.AtLeastRepo() {
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, if err := config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)); err != nil { cmdArgs, config.Runtime.Mode, settings.NoConfirm)); err != nil {
return err return err
} }
} }
if !run.Cfg.Mode.AtLeastAUR() { if !config.Runtime.Mode.AtLeastAUR() {
return nil return nil
} }
@ -81,10 +74,10 @@ func syncClean(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Argume
question = gotext.Get("Do you want to remove all other AUR packages from cache?") question = gotext.Get("Do you want to remove all other AUR packages from cache?")
} }
run.Logger.Println(gotext.Get("\nBuild directory:"), run.Cfg.BuildDir) fmt.Println(gotext.Get("\nBuild directory:"), config.BuildDir)
if run.Logger.ContinueTask(question, true, settings.NoConfirm) { if text.ContinueTask(question, true, settings.NoConfirm) {
if err := cleanAUR(ctx, run, keepInstalled, keepCurrent, removeAll, dbExecutor); err != nil { if err := cleanAUR(ctx, keepInstalled, keepCurrent, removeAll, dbExecutor); err != nil {
return err return err
} }
} }
@ -93,24 +86,22 @@ func syncClean(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Argume
return nil return nil
} }
if run.Logger.ContinueTask(gotext.Get("Do you want to remove ALL untracked AUR files?"), true, settings.NoConfirm) { if text.ContinueTask(gotext.Get("Do you want to remove ALL untracked AUR files?"), true, settings.NoConfirm) {
return cleanUntracked(ctx, run) return cleanUntracked(ctx)
} }
return nil return nil
} }
func cleanAUR(ctx context.Context, run *runtime.Runtime, func cleanAUR(ctx context.Context, keepInstalled, keepCurrent, removeAll bool, dbExecutor db.Executor) error {
keepInstalled, keepCurrent, removeAll bool, dbExecutor db.Executor, fmt.Println(gotext.Get("removing AUR packages from cache..."))
) error {
run.Logger.Println(gotext.Get("removing AUR packages from cache..."))
installedBases := mapset.NewThreadUnsafeSet[string]() installedBases := make(stringset.StringSet)
inAURBases := mapset.NewThreadUnsafeSet[string]() inAURBases := make(stringset.StringSet)
remotePackages := dbExecutor.InstalledRemotePackages() remotePackages, _ := query.GetRemotePackages(dbExecutor)
files, err := os.ReadDir(run.Cfg.BuildDir) files, err := os.ReadDir(config.BuildDir)
if err != nil { if err != nil {
return err return err
} }
@ -130,23 +121,21 @@ func cleanAUR(ctx context.Context, run *runtime.Runtime,
// Querying the AUR is slow and needs internet so don't do it if we // Querying the AUR is slow and needs internet so don't do it if we
// don't need to. // don't need to.
if keepCurrent { if keepCurrent {
info, errInfo := run.AURClient.Get(ctx, &aur.Query{ info, errInfo := query.AURInfo(ctx, config.Runtime.AURClient, cachedPackages, &query.AURWarnings{}, config.RequestSplitN)
Needles: cachedPackages,
})
if errInfo != nil { if errInfo != nil {
return errInfo return errInfo
} }
for i := range info { for _, pkg := range info {
inAURBases.Add(info[i].PackageBase) inAURBases.Set(pkg.PackageBase)
} }
} }
for _, pkg := range remotePackages { for _, pkg := range remotePackages {
if pkg.Base() != "" { if pkg.Base() != "" {
installedBases.Add(pkg.Base()) installedBases.Set(pkg.Base())
} else { } else {
installedBases.Add(pkg.Name()) installedBases.Set(pkg.Name())
} }
} }
@ -156,29 +145,28 @@ func cleanAUR(ctx context.Context, run *runtime.Runtime,
} }
if !removeAll { if !removeAll {
if keepInstalled && installedBases.Contains(file.Name()) { if keepInstalled && installedBases.Get(file.Name()) {
continue continue
} }
if keepCurrent && inAURBases.Contains(file.Name()) { if keepCurrent && inAURBases.Get(file.Name()) {
continue continue
} }
} }
dir := filepath.Join(run.Cfg.BuildDir, file.Name()) err = os.RemoveAll(filepath.Join(config.BuildDir, file.Name()))
run.Logger.Debugln("removing", dir) if err != nil {
if err = os.RemoveAll(dir); err != nil { return nil
run.Logger.Warnln(gotext.Get("Unable to remove %s: %s", dir, err))
} }
} }
return nil return nil
} }
func cleanUntracked(ctx context.Context, run *runtime.Runtime) error { func cleanUntracked(ctx context.Context) error {
run.Logger.Println(gotext.Get("removing untracked AUR files from cache...")) fmt.Println(gotext.Get("removing untracked AUR files from cache..."))
files, err := os.ReadDir(run.Cfg.BuildDir) files, err := os.ReadDir(config.BuildDir)
if err != nil { if err != nil {
return err return err
} }
@ -188,11 +176,11 @@ func cleanUntracked(ctx context.Context, run *runtime.Runtime) error {
continue continue
} }
dir := filepath.Join(run.Cfg.BuildDir, file.Name()) dir := filepath.Join(config.BuildDir, file.Name())
run.Logger.Debugln("cleaning", dir)
if isGitRepository(dir) { if isGitRepository(dir) {
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildGitCmd(ctx, dir, "clean", "-fx")); err != nil { if err := config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildGitCmd(ctx, dir, "clean", "-fx")); err != nil {
run.Logger.Warnln(gotext.Get("Unable to clean:"), dir) text.Warnln(gotext.Get("Unable to clean:"), dir)
return err return err
} }
} }
@ -205,3 +193,40 @@ func isGitRepository(dir string) bool {
_, err := os.Stat(filepath.Join(dir, ".git")) _, err := os.Stat(filepath.Join(dir, ".git"))
return !os.IsNotExist(err) return !os.IsNotExist(err)
} }
func cleanAfter(ctx context.Context, bases []dep.Base) {
fmt.Println(gotext.Get("removing untracked AUR files from cache..."))
for i, base := range bases {
dir := filepath.Join(config.BuildDir, base.Pkgbase())
if !isGitRepository(dir) {
continue
}
text.OperationInfoln(gotext.Get("Cleaning (%d/%d): %s", i+1, len(bases), text.Cyan(dir)))
_, stderr, err := config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(
ctx, dir, "reset", "--hard", "HEAD"))
if err != nil {
text.Errorln(gotext.Get("error resetting %s: %s", base.String(), stderr))
}
if err := config.Runtime.CmdBuilder.Show(
config.Runtime.CmdBuilder.BuildGitCmd(
ctx, "clean", "-fx", "--exclude='*.pkg.*'")); err != nil {
fmt.Fprintln(os.Stderr, err)
}
}
}
func cleanBuilds(bases []dep.Base) {
for i, base := range bases {
dir := filepath.Join(config.BuildDir, base.Pkgbase())
text.OperationInfoln(gotext.Get("Deleting (%d/%d): %s", i+1, len(bases), text.Cyan(dir)))
if err := os.RemoveAll(dir); err != nil {
fmt.Fprintln(os.Stderr, err)
}
}
}

@ -1,116 +0,0 @@
//go:build !integration
// +build !integration
package main
import (
"context"
"fmt"
"os/exec"
"strings"
"testing"
"github.com/Jguer/go-alpm/v2"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/Jguer/yay/v12/pkg/db/mock"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/settings/parser"
)
func TestCleanHanging(t *testing.T) {
pacmanBin := t.TempDir() + "/pacman"
t.Parallel()
testCases := []struct {
name string
args []string
wantShow []string
}{
{
name: "clean",
args: []string{"Y", "c"},
wantShow: []string{"pacman", "-R", "-s", "-u", "--config", "/etc/pacman.conf", "--", "lsp-plugins"},
},
{
name: "clean double",
args: []string{"Y", "c", "c"},
wantShow: []string{"pacman", "-R", "-s", "-u", "--config", "/etc/pacman.conf", "--", "lsp-plugins", "linux-headers"},
},
}
dbExc := &mock.DBExecutor{
PackageOptionalDependsFn: func(i alpm.IPackage) []alpm.Depend {
if i.Name() == "linux" {
return []alpm.Depend{
{
Name: "linux-headers",
},
}
}
return []alpm.Depend{}
},
PackageProvidesFn: func(p alpm.IPackage) []alpm.Depend { return []alpm.Depend{} },
PackageDependsFn: func(p alpm.IPackage) []alpm.Depend { return []alpm.Depend{} },
LocalPackagesFn: func() []mock.IPackage {
return []mock.IPackage{
&mock.Package{
PReason: alpm.PkgReasonExplicit,
PName: "linux",
},
&mock.Package{
PReason: alpm.PkgReasonDepend,
PName: "lsp-plugins",
},
&mock.Package{
PReason: alpm.PkgReasonDepend,
PName: "linux-headers",
},
}
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
mockRunner := &exe.MockRunner{
CaptureFn: func(cmd *exec.Cmd) (stdout string, stderr string, err error) {
return "", "", nil
},
ShowFn: func(cmd *exec.Cmd) error { return nil },
}
cmdBuilder := &exe.CmdBuilder{
SudoBin: "su",
PacmanBin: pacmanBin,
PacmanConfigPath: "/etc/pacman.conf",
GitBin: "git",
Runner: mockRunner,
SudoLoopEnabled: false,
}
run := &runtime.Runtime{CmdBuilder: cmdBuilder, Cfg: &settings.Configuration{}}
cmdArgs := parser.MakeArguments()
cmdArgs.AddArg(tc.args...)
err := handleCmd(context.Background(),
run, cmdArgs, dbExc,
)
require.NoError(t, err)
for i, call := range mockRunner.ShowCalls {
show := call.Args[0].(*exec.Cmd).String()
show = strings.ReplaceAll(show, pacmanBin, "pacman")
// options are in a different order on different systems and on CI root user is used
assert.Subset(t, strings.Split(show, " "),
strings.Split(tc.wantShow[i], " "),
fmt.Sprintf("%d - %s", i, show))
}
})
}
}

cmd.go

@ -3,31 +3,29 @@ package main
import ( import (
"bufio" "bufio"
"context" "context"
"errors"
"fmt" "fmt"
"net/http" "net/http"
"os"
"strings" "strings"
alpm "github.com/Jguer/go-alpm/v2" alpm "github.com/Jguer/go-alpm/v2"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/completion" "github.com/Jguer/yay/v11/pkg/completion"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v12/pkg/download" "github.com/Jguer/yay/v11/pkg/download"
"github.com/Jguer/yay/v12/pkg/intrange" "github.com/Jguer/yay/v11/pkg/intrange"
"github.com/Jguer/yay/v12/pkg/news" "github.com/Jguer/yay/v11/pkg/news"
"github.com/Jguer/yay/v12/pkg/query" "github.com/Jguer/yay/v11/pkg/query"
"github.com/Jguer/yay/v12/pkg/runtime" "github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings" "github.com/Jguer/yay/v11/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/text"
"github.com/Jguer/yay/v12/pkg/settings/parser" "github.com/Jguer/yay/v11/pkg/upgrade"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/vcs"
"github.com/Jguer/yay/v12/pkg/upgrade"
"github.com/Jguer/yay/v12/pkg/vcs"
) )
func usage(logger *text.Logger) { func usage() {
logger.Println(`Usage: fmt.Println(`Usage:
yay yay
yay <operation> [...] yay <operation> [...]
yay <package(s)> yay <package(s)>
@ -44,17 +42,15 @@ operations:
yay {-U --upgrade} [options] <file(s)> yay {-U --upgrade} [options] <file(s)>
New operations: New operations:
yay {-B --build} [options] [dir]
yay {-G --getpkgbuild} [options] [package(s)]
yay {-P --show} [options]
yay {-W --web} [options] [package(s)]
yay {-Y --yay} [options] [package(s)] yay {-Y --yay} [options] [package(s)]
yay {-P --show} [options]
yay {-G --getpkgbuild} [options] [package(s)]
If no operation is specified 'yay -Syu' will be performed If no arguments are provided 'yay -Syu' will be performed
If no operation is specified and targets are provided -Y will be assumed If no operation is provided -Y will be assumed
New options: New options:
-N --repo Assume targets are from the repositories --repo Assume targets are from the repositories
-a --aur Assume targets are from the AUR -a --aur Assume targets are from the AUR
Permanent configuration options: Permanent configuration options:
@ -62,7 +58,6 @@ Permanent configuration options:
config file when used config file when used
--aururl <url> Set an alternative AUR URL --aururl <url> Set an alternative AUR URL
--aurrpcurl <url> Set an alternative URL for the AUR /rpc endpoint
--builddir <dir> Directory used to download and run PKGBUILDS --builddir <dir> Directory used to download and run PKGBUILDS
--editor <file> Editor to use when editing PKGBUILDs --editor <file> Editor to use when editing PKGBUILDs
--editorflags <flags> Pass arguments to editor --editorflags <flags> Pass arguments to editor
@ -92,19 +87,22 @@ Permanent configuration options:
--cleanmenu Give the option to clean build PKGBUILDS --cleanmenu Give the option to clean build PKGBUILDS
--diffmenu Give the option to show diffs for build files --diffmenu Give the option to show diffs for build files
--editmenu Give the option to edit/view PKGBUILDS --editmenu Give the option to edit/view PKGBUILDS
--upgrademenu Show a detailed list of updates with the option to skip any
--nocleanmenu Don't clean build PKGBUILDS
--nodiffmenu Don't show diffs for build files
--noeditmenu Don't edit/view PKGBUILDS
--noupgrademenu Don't show the upgrade menu
--askremovemake Ask to remove makedepends after install --askremovemake Ask to remove makedepends after install
--askyesremovemake Ask to remove makedepends after install("Y" as default)
--removemake Remove makedepends after install --removemake Remove makedepends after install
--noremovemake Don't remove makedepends after install --noremovemake Don't remove makedepends after install
--cleanafter Remove package sources after successful install --cleanafter Remove package sources after successful install
--keepsrc Keep pkg/ and src/ after building packages --nocleanafter Do not remove package sources after successful build
--bottomup Shows AUR's packages first and then repository's --bottomup Shows AUR's packages first and then repository's
--topdown Shows repository's packages first and then AUR's --topdown Shows repository's packages first and then AUR's
--singlelineresults List each search result on its own line
--doublelineresults List each search result on two lines, like pacman
--devel Check development packages during sysupgrade --devel Check development packages during sysupgrade
--nodevel Do not check development packages
--rebuild Always build target packages --rebuild Always build target packages
--rebuildall Always build all AUR packages --rebuildall Always build all AUR packages
--norebuild Skip package build if in cache and up to date --norebuild Skip package build if in cache and up to date
@ -113,14 +111,23 @@ Permanent configuration options:
--noredownload Skip pkgbuild download if in cache and up to date --noredownload Skip pkgbuild download if in cache and up to date
--redownloadall Always download pkgbuilds of all AUR packages --redownloadall Always download pkgbuilds of all AUR packages
--provides Look for matching providers when searching for packages --provides Look for matching providers when searching for packages
--noprovides Just look for packages by pkgname
--pgpfetch Prompt to import PGP keys from PKGBUILDs --pgpfetch Prompt to import PGP keys from PKGBUILDs
--nopgpfetch Don't prompt to import PGP keys
--useask Automatically resolve conflicts using pacman's ask flag --useask Automatically resolve conflicts using pacman's ask flag
--nouseask Confirm conflicts manually during the install
--combinedupgrade Refresh then perform the repo and AUR upgrade together
--nocombinedupgrade Perform the repo upgrade and AUR upgrade separately
--batchinstall Build multiple AUR packages then install them together
--nobatchinstall Build and install each AUR package one by one
--sudo <file> sudo command to use --sudo <file> sudo command to use
--sudoflags <flags> Pass arguments to sudo --sudoflags <flags> Pass arguments to sudo
--sudoloop Loop sudo calls in the background to avoid timeout --sudoloop Loop sudo calls in the background to avoid timeout
--nosudoloop Do not loop sudo calls in the background
--timeupdate Check packages' AUR page for changes during sysupgrade --timeupdate Check packages' AUR page for changes during sysupgrade
--notimeupdate Do not check packages' AUR page for changes
show specific options: show specific options:
-c --complete Used for completions -c --complete Used for completions
@ -130,7 +137,7 @@ show specific options:
-w --news Print arch news -w --news Print arch news
yay specific options: yay specific options:
-c --clean Remove unneeded dependencies (-cc to ignore optdepends) -c --clean Remove unneeded dependencies
--gendb Generates development package DB used for updating --gendb Generates development package DB used for updating
getpkgbuild specific options: getpkgbuild specific options:
@ -138,52 +145,47 @@ getpkgbuild specific options:
-p --print Print pkgbuild of packages`) -p --print Print pkgbuild of packages`)
} }
func handleCmd(ctx context.Context, run *runtime.Runtime, func handleCmd(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
cmdArgs *parser.Arguments, dbExecutor db.Executor,
) error {
if cmdArgs.ExistsArg("h", "help") { if cmdArgs.ExistsArg("h", "help") {
return handleHelp(ctx, run, cmdArgs) return handleHelp(ctx, cmdArgs)
} }
if run.Cfg.SudoLoop && cmdArgs.NeedRoot(run.Cfg.Mode) { if config.SudoLoop && cmdArgs.NeedRoot(config.Runtime.Mode) {
run.CmdBuilder.SudoLoop() config.Runtime.CmdBuilder.SudoLoop()
} }
switch cmdArgs.Op { switch cmdArgs.Op {
case "V", "version": case "V", "version":
handleVersion(run.Logger) handleVersion()
return nil return nil
case "D", "database": case "D", "database":
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
case "F", "files": case "F", "files":
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
case "Q", "query": case "Q", "query":
return handleQuery(ctx, run, cmdArgs, dbExecutor) return handleQuery(ctx, cmdArgs, dbExecutor)
case "R", "remove": case "R", "remove":
return handleRemove(ctx, run, cmdArgs, run.VCSStore) return handleRemove(ctx, cmdArgs, config.Runtime.VCSStore)
case "S", "sync": case "S", "sync":
return handleSync(ctx, run, cmdArgs, dbExecutor) return handleSync(ctx, cmdArgs, dbExecutor)
case "T", "deptest": case "T", "deptest":
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
case "U", "upgrade": case "U", "upgrade":
return handleUpgrade(ctx, run, cmdArgs) return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
case "B", "build": cmdArgs, config.Runtime.Mode, settings.NoConfirm))
return handleBuild(ctx, run, dbExecutor, cmdArgs)
case "G", "getpkgbuild": case "G", "getpkgbuild":
return handleGetpkgbuild(ctx, run, cmdArgs, dbExecutor) return handleGetpkgbuild(ctx, cmdArgs, dbExecutor)
case "P", "show": case "P", "show":
return handlePrint(ctx, run, cmdArgs, dbExecutor) return handlePrint(ctx, cmdArgs, dbExecutor)
case "Y", "yay": case "Y", "--yay":
return handleYay(ctx, run, cmdArgs, run.CmdBuilder, return handleYay(ctx, cmdArgs, dbExecutor)
dbExecutor, run.QueryBuilder)
case "W", "web":
return handleWeb(ctx, run, cmdArgs)
} }
return errors.New(gotext.Get("unhandled operation")) return fmt.Errorf(gotext.Get("unhandled operation"))
} }
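As a rough sketch of how this dispatcher is driven, assuming a runtime, database executor, and context prepared elsewhere (the four-argument handleCmd form from the left-hand column is used here):

cmdArgs := parser.MakeArguments()
cmdArgs.AddArg("S", "y", "u") // behaves like `yay -Syu`
if err := handleCmd(context.Background(), run, cmdArgs, dbExecutor); err != nil {
	fmt.Fprintln(os.Stderr, err)
}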
// getFilter returns filter function which can keep packages which were only // getFilter returns filter function which can keep packages which were only
@ -194,35 +196,34 @@ func getFilter(cmdArgs *parser.Arguments) (upgrade.Filter, error) {
switch { switch {
case deps && explicit: case deps && explicit:
return nil, errors.New(gotext.Get("invalid option: '--deps' and '--explicit' may not be used together")) return nil, fmt.Errorf(gotext.Get("invalid option: '--deps' and '--explicit' may not be used together"))
case deps: case deps:
return func(pkg *upgrade.Upgrade) bool { return func(pkg upgrade.Upgrade) bool {
return pkg.Reason == alpm.PkgReasonDepend return pkg.Reason == alpm.PkgReasonDepend
}, nil }, nil
case explicit: case explicit:
return func(pkg *upgrade.Upgrade) bool { return func(pkg upgrade.Upgrade) bool {
return pkg.Reason == alpm.PkgReasonExplicit return pkg.Reason == alpm.PkgReasonExplicit
}, nil }, nil
} }
return func(pkg *upgrade.Upgrade) bool { return func(pkg upgrade.Upgrade) bool {
return true return true
}, nil }, nil
} }
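A sketch of how the returned filter is then applied to an upgrade list; the upgrades slice is assumed to have been collected beforehand, and the by-value upgrade.Upgrade signature from the right-hand column is used:

filter, err := getFilter(cmdArgs)
if err != nil {
	return err
}
kept := make([]upgrade.Upgrade, 0, len(upgrades))
for _, up := range upgrades {
	if filter(up) { // keeps only deps, only explicit, or everything
		kept = append(kept, up)
	}
}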
func handleQuery(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error { func handleQuery(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
if cmdArgs.ExistsArg("u", "upgrades") { if cmdArgs.ExistsArg("u", "upgrades") {
filter, err := getFilter(cmdArgs) filter, err := getFilter(cmdArgs)
if err != nil { if err != nil {
return err return err
} }
return printUpdateList(ctx, run, cmdArgs, dbExecutor, return printUpdateList(ctx, cmdArgs, dbExecutor, cmdArgs.ExistsDouble("u", "sysupgrade"), filter)
cmdArgs.ExistsDouble("u", "sysupgrade"), filter)
} }
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, if err := config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)); err != nil { cmdArgs, config.Runtime.Mode, settings.NoConfirm)); err != nil {
if str := err.Error(); strings.Contains(str, "exit status") { if str := err.Error(); strings.Contains(str, "exit status") {
// yay -Qdt should not output anything in case of error // yay -Qdt should not output anything in case of error
return fmt.Errorf("") return fmt.Errorf("")
@ -234,200 +235,255 @@ func handleQuery(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Argu
return nil return nil
} }
func handleHelp(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments) error { func handleHelp(ctx context.Context, cmdArgs *parser.Arguments) error {
usage(run.Logger)
switch cmdArgs.Op { switch cmdArgs.Op {
case "Y", "yay", "G", "getpkgbuild", "P", "show", "W", "web", "B", "build": case "Y", "yay", "G", "getpkgbuild", "P", "show":
usage()
return nil return nil
} }
run.Logger.Println("\npacman operation specific options:") return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, cmdArgs, config.Runtime.Mode, settings.NoConfirm))
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
} }
func handleVersion(logger *text.Logger) { func handleVersion() {
logger.Printf("yay v%s - libalpm v%s\n", yayVersion, alpm.Version()) fmt.Printf("yay v%s - libalpm v%s\n", yayVersion, alpm.Version())
} }
func handlePrint(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error { func handlePrint(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
switch { switch {
case cmdArgs.ExistsArg("d", "defaultconfig"): case cmdArgs.ExistsArg("d", "defaultconfig"):
tmpConfig := settings.DefaultConfig(yayVersion) tmpConfig := settings.DefaultConfig()
run.Logger.Printf("%v", tmpConfig) fmt.Printf("%v", tmpConfig)
return nil return nil
case cmdArgs.ExistsArg("g", "currentconfig"): case cmdArgs.ExistsArg("g", "currentconfig"):
run.Logger.Printf("%v", run.Cfg) fmt.Printf("%v", config)
return nil return nil
case cmdArgs.ExistsArg("n", "numberupgrades"):
filter, err := getFilter(cmdArgs)
if err != nil {
return err
}
return printNumberOfUpdates(ctx, dbExecutor, cmdArgs.ExistsDouble("u", "sysupgrade"), filter)
case cmdArgs.ExistsArg("w", "news"): case cmdArgs.ExistsArg("w", "news"):
double := cmdArgs.ExistsDouble("w", "news") double := cmdArgs.ExistsDouble("w", "news")
quiet := cmdArgs.ExistsArg("q", "quiet") quiet := cmdArgs.ExistsArg("q", "quiet")
return news.PrintNewsFeed(ctx, run.HTTPClient, run.Logger, return news.PrintNewsFeed(ctx, config.Runtime.HTTPClient, dbExecutor.LastBuildTime(), config.SortMode, double, quiet)
dbExecutor.LastBuildTime(), run.Cfg.BottomUp, double, quiet) case cmdArgs.ExistsDouble("c", "complete"):
return completion.Show(ctx, config.Runtime.HTTPClient, dbExecutor,
config.AURURL, config.Runtime.CompletionPath, config.CompletionInterval, true)
case cmdArgs.ExistsArg("c", "complete"): case cmdArgs.ExistsArg("c", "complete"):
return completion.Show(ctx, run.HTTPClient, dbExecutor, return completion.Show(ctx, config.Runtime.HTTPClient, dbExecutor,
run.Cfg.AURURL, run.Cfg.CompletionPath, run.Cfg.CompletionInterval, cmdArgs.ExistsDouble("c", "complete")) config.AURURL, config.Runtime.CompletionPath, config.CompletionInterval, false)
case cmdArgs.ExistsArg("s", "stats"): case cmdArgs.ExistsArg("s", "stats"):
return localStatistics(ctx, run, dbExecutor) return localStatistics(ctx, dbExecutor)
} }
return nil return nil
} }
func handleYay(ctx context.Context, run *runtime.Runtime, func handleYay(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
cmdArgs *parser.Arguments, cmdBuilder exe.ICmdBuilder,
dbExecutor db.Executor, queryBuilder query.Builder,
) error {
switch { switch {
case cmdArgs.ExistsArg("gendb"): case cmdArgs.ExistsArg("gendb"):
return createDevelDB(ctx, run, dbExecutor) return createDevelDB(ctx, config, dbExecutor)
case cmdArgs.ExistsDouble("c"): case cmdArgs.ExistsDouble("c"):
return cleanDependencies(ctx, run.Cfg, cmdBuilder, cmdArgs, dbExecutor, true) return cleanDependencies(ctx, cmdArgs, dbExecutor, true)
case cmdArgs.ExistsArg("c", "clean"): case cmdArgs.ExistsArg("c", "clean"):
return cleanDependencies(ctx, run.Cfg, cmdBuilder, cmdArgs, dbExecutor, false) return cleanDependencies(ctx, cmdArgs, dbExecutor, false)
case len(cmdArgs.Targets) > 0: case len(cmdArgs.Targets) > 0:
return displayNumberMenu(ctx, run, cmdArgs.Targets, dbExecutor, queryBuilder, cmdArgs) return handleYogurt(ctx, cmdArgs, dbExecutor)
} }
return nil return nil
} }
func handleWeb(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments) error { func handleGetpkgbuild(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor download.DBSearcher) error {
switch {
case cmdArgs.ExistsArg("v", "vote"):
return handlePackageVote(ctx, cmdArgs.Targets, run.AURClient, run.Logger,
run.VoteClient, true)
case cmdArgs.ExistsArg("u", "unvote"):
return handlePackageVote(ctx, cmdArgs.Targets, run.AURClient, run.Logger,
run.VoteClient, false)
}
return nil
}
func handleGetpkgbuild(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor download.DBSearcher) error {
if cmdArgs.ExistsArg("p", "print") { if cmdArgs.ExistsArg("p", "print") {
return printPkgbuilds(dbExecutor, run.AURClient, return printPkgbuilds(dbExecutor, config.Runtime.HTTPClient, cmdArgs.Targets, config.Runtime.Mode, config.AURURL)
run.HTTPClient, run.Logger, cmdArgs.Targets, run.Cfg.Mode, run.Cfg.AURURL)
} }
return getPkgbuilds(ctx, dbExecutor, run.AURClient, run, return getPkgbuilds(ctx, dbExecutor, config, cmdArgs.Targets, cmdArgs.ExistsArg("f", "force"))
cmdArgs.Targets, cmdArgs.ExistsArg("f", "force"))
} }
func handleUpgrade(ctx context.Context, func handleYogurt(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
run *runtime.Runtime, cmdArgs *parser.Arguments, config.SearchMode = numberMenu
) error { return displayNumberMenu(ctx, cmdArgs.Targets, dbExecutor, cmdArgs)
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm))
} }
// -B* options func handleSync(ctx context.Context, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
func handleBuild(ctx context.Context,
run *runtime.Runtime, dbExecutor db.Executor, cmdArgs *parser.Arguments,
) error {
if cmdArgs.ExistsArg("i", "install") {
return installLocalPKGBUILD(ctx, run, cmdArgs, dbExecutor)
}
return nil
}
func handleSync(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
targets := cmdArgs.Targets targets := cmdArgs.Targets
switch { switch {
case cmdArgs.ExistsArg("s", "search"): case cmdArgs.ExistsArg("s", "search"):
return syncSearch(ctx, targets, dbExecutor, run.QueryBuilder, !cmdArgs.ExistsArg("q", "quiet")) if cmdArgs.ExistsArg("q", "quiet") {
config.SearchMode = minimal
} else {
config.SearchMode = detailed
}
return syncSearch(ctx, targets, config.Runtime.AURClient, dbExecutor)
case cmdArgs.ExistsArg("p", "print", "print-format"): case cmdArgs.ExistsArg("p", "print", "print-format"):
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
case cmdArgs.ExistsArg("c", "clean"): case cmdArgs.ExistsArg("c", "clean"):
return syncClean(ctx, run, cmdArgs, dbExecutor) return syncClean(ctx, cmdArgs, dbExecutor)
case cmdArgs.ExistsArg("l", "list"): case cmdArgs.ExistsArg("l", "list"):
return syncList(ctx, run, run.HTTPClient, cmdArgs, dbExecutor) return syncList(ctx, config.Runtime.HTTPClient, cmdArgs, dbExecutor)
case cmdArgs.ExistsArg("g", "groups"): case cmdArgs.ExistsArg("g", "groups"):
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
case cmdArgs.ExistsArg("i", "info"): case cmdArgs.ExistsArg("i", "info"):
return syncInfo(ctx, run, cmdArgs, targets, dbExecutor) return syncInfo(ctx, cmdArgs, targets, dbExecutor)
case cmdArgs.ExistsArg("u", "sysupgrade") || len(cmdArgs.Targets) > 0: case cmdArgs.ExistsArg("u", "sysupgrade"):
return syncInstall(ctx, run, cmdArgs, dbExecutor) return install(ctx, cmdArgs, dbExecutor, false)
case len(cmdArgs.Targets) > 0:
return install(ctx, cmdArgs, dbExecutor, false)
case cmdArgs.ExistsArg("y", "refresh"): case cmdArgs.ExistsArg("y", "refresh"):
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
} }
return nil return nil
} }
func handleRemove(ctx context.Context, run *runtime.Runtime, cmdArgs *parser.Arguments, localCache vcs.Store) error { func handleRemove(ctx context.Context, cmdArgs *parser.Arguments, localCache *vcs.InfoStore) error {
err := run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, err := config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
if err == nil { if err == nil {
localCache.RemovePackages(cmdArgs.Targets) localCache.RemovePackage(cmdArgs.Targets)
} }
return err return err
} }
// NumberMenu presents a CLI for selecting packages to install. // NumberMenu presents a CLI for selecting packages to install.
func displayNumberMenu(ctx context.Context, run *runtime.Runtime, pkgS []string, dbExecutor db.Executor, func displayNumberMenu(ctx context.Context, pkgS []string, dbExecutor db.Executor, cmdArgs *parser.Arguments) error {
queryBuilder query.Builder, cmdArgs *parser.Arguments, var (
) error { aurErr error
queryBuilder.Execute(ctx, dbExecutor, pkgS) aq aurQuery
pq repoQuery
lenaq, lenpq int
)
if err := queryBuilder.Results(dbExecutor, query.NumberMenu); err != nil { pkgS = query.RemoveInvalidTargets(pkgS, config.Runtime.Mode)
return err
if config.Runtime.Mode.AtLeastAUR() {
aq, aurErr = narrowSearch(ctx, config.Runtime.AURClient, pkgS, true)
lenaq = len(aq)
} }
if queryBuilder.Len() == 0 { if config.Runtime.Mode.AtLeastRepo() {
// no results were found pq = queryRepo(pkgS, dbExecutor)
return nil lenpq = len(pq)
} }
run.Logger.Infoln(gotext.Get("Packages to install (eg: 1 2 3, 1-3 or ^4)")) if aurErr != nil {
text.Errorln(gotext.Get("Error during AUR search: %s\n", aurErr))
text.Warnln(gotext.Get("Showing repo packages only"))
}
numberBuf, err := run.Logger.GetInput("", false) if lenpq == 0 && lenaq == 0 {
return fmt.Errorf(gotext.Get("no packages match search"))
}
switch config.SortMode {
case settings.TopDown:
if config.Runtime.Mode.AtLeastRepo() {
pq.printSearch(dbExecutor)
}
if config.Runtime.Mode.AtLeastAUR() {
aq.printSearch(lenpq+1, dbExecutor)
}
case settings.BottomUp:
if config.Runtime.Mode.AtLeastAUR() {
aq.printSearch(lenpq+1, dbExecutor)
}
if config.Runtime.Mode.AtLeastRepo() {
pq.printSearch(dbExecutor)
}
default:
return fmt.Errorf(gotext.Get("invalid sort mode. Fix with yay -Y --bottomup --save"))
}
text.Infoln(gotext.Get("Packages to install (eg: 1 2 3, 1-3 or ^4)"))
text.Info()
reader := bufio.NewReader(os.Stdin)
numberBuf, overflow, err := reader.ReadLine()
if err != nil { if err != nil {
return err return err
} }
include, exclude, _, otherExclude := intrange.ParseNumberMenu(numberBuf) if overflow {
return fmt.Errorf(gotext.Get("input too long"))
targets, err := queryBuilder.GetTargets(include, exclude, otherExclude)
if err != nil {
return err
} }
// modify the arguments to pass for the install include, exclude, _, otherExclude := intrange.ParseNumberMenu(string(numberBuf))
cmdArgs.Targets = targets arguments := cmdArgs.CopyGlobal()
if len(cmdArgs.Targets) == 0 { isInclude := len(exclude) == 0 && len(otherExclude) == 0
run.Logger.Println(gotext.Get(" there is nothing to do"))
for i, pkg := range pq {
var target int
switch config.SortMode {
case settings.TopDown:
target = i + 1
case settings.BottomUp:
target = len(pq) - i
default:
return fmt.Errorf(gotext.Get("invalid sort mode. Fix with yay -Y --bottomup --save"))
}
if (isInclude && include.Get(target)) || (!isInclude && !exclude.Get(target)) {
arguments.AddTarget(pkg.DB().Name() + "/" + pkg.Name())
}
}
for i := range aq {
var target int
switch config.SortMode {
case settings.TopDown:
target = i + 1 + len(pq)
case settings.BottomUp:
target = len(aq) - i + len(pq)
default:
return fmt.Errorf(gotext.Get("invalid sort mode. Fix with yay -Y --bottomup --save"))
}
if (isInclude && include.Get(target)) || (!isInclude && !exclude.Get(target)) {
arguments.AddTarget("aur/" + aq[i].Name)
}
}
if len(arguments.Targets) == 0 {
fmt.Println(gotext.Get(" there is nothing to do"))
return nil return nil
} }
return syncInstall(ctx, run, cmdArgs, dbExecutor) return install(ctx, arguments, dbExecutor, true)
} }
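The number-menu grammar mentioned above ("1 2 3", "1-3" or "^4") is handled by intrange.ParseNumberMenu; a small sketch of the same selection test used in the right-hand column:

// "1-3" keeps results 1 to 3; "^4" would instead keep everything except 4.
include, exclude, _, otherExclude := intrange.ParseNumberMenu("1-3")
isInclude := len(exclude) == 0 && len(otherExclude) == 0
for target := 1; target <= 5; target++ {
	if (isInclude && include.Get(target)) || (!isInclude && !exclude.Get(target)) {
		fmt.Println("selected result", target) // prints 1, 2 and 3
	}
}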
func syncList(ctx context.Context, run *runtime.Runtime, func syncList(ctx context.Context, httpClient *http.Client, cmdArgs *parser.Arguments, dbExecutor db.Executor) error {
httpClient *http.Client, cmdArgs *parser.Arguments, dbExecutor db.Executor,
) error {
aur := false aur := false
for i := len(cmdArgs.Targets) - 1; i >= 0; i-- { for i := len(cmdArgs.Targets) - 1; i >= 0; i-- {
if cmdArgs.Targets[i] == "aur" && run.Cfg.Mode.AtLeastAUR() { if cmdArgs.Targets[i] == "aur" && config.Runtime.Mode.AtLeastAUR() {
cmdArgs.Targets = append(cmdArgs.Targets[:i], cmdArgs.Targets[i+1:]...) cmdArgs.Targets = append(cmdArgs.Targets[:i], cmdArgs.Targets[i+1:]...)
aur = true aur = true
} }
} }
if run.Cfg.Mode.AtLeastAUR() && (len(cmdArgs.Targets) == 0 || aur) { if config.Runtime.Mode.AtLeastAUR() && (len(cmdArgs.Targets) == 0 || aur) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, run.Cfg.AURURL+"/packages.gz", http.NoBody) req, err := http.NewRequestWithContext(ctx, "GET", config.AURURL+"/packages.gz", nil)
if err != nil { if err != nil {
return err return err
} }
@ -445,22 +501,22 @@ func syncList(ctx context.Context, run *runtime.Runtime,
for scanner.Scan() { for scanner.Scan() {
name := scanner.Text() name := scanner.Text()
if cmdArgs.ExistsArg("q", "quiet") { if cmdArgs.ExistsArg("q", "quiet") {
run.Logger.Println(name) fmt.Println(name)
} else { } else {
run.Logger.Printf("%s %s %s", text.Magenta("aur"), text.Bold(name), text.Bold(text.Green(gotext.Get("unknown-version")))) fmt.Printf("%s %s %s", text.Magenta("aur"), text.Bold(name), text.Bold(text.Green(gotext.Get("unknown-version"))))
if dbExecutor.LocalPackage(name) != nil { if dbExecutor.LocalPackage(name) != nil {
run.Logger.Print(text.Bold(text.Blue(gotext.Get(" [Installed]")))) fmt.Print(text.Bold(text.Blue(gotext.Get(" [Installed]"))))
} }
run.Logger.Println() fmt.Println()
} }
} }
} }
if run.Cfg.Mode.AtLeastRepo() && (len(cmdArgs.Targets) != 0 || !aur) { if config.Runtime.Mode.AtLeastRepo() && (len(cmdArgs.Targets) != 0 || !aur) {
return run.CmdBuilder.Show(run.CmdBuilder.BuildPacmanCmd(ctx, return config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildPacmanCmd(ctx,
cmdArgs, run.Cfg.Mode, settings.NoConfirm)) cmdArgs, config.Runtime.Mode, settings.NoConfirm))
} }
return nil return nil


@ -1,140 +0,0 @@
//go:build !integration
// +build !integration
package main
import (
"context"
"fmt"
"io"
"os"
"os/exec"
"strings"
"testing"
"github.com/Jguer/aur"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/Jguer/yay/v12/pkg/db/mock"
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
"github.com/Jguer/yay/v12/pkg/query"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text"
"github.com/Jguer/yay/v12/pkg/vcs"
)
func TestYogurtMenuAURDB(t *testing.T) {
t.Skip("skip until Operation service is an interface")
t.Parallel()
makepkgBin := t.TempDir() + "/makepkg"
pacmanBin := t.TempDir() + "/pacman"
gitBin := t.TempDir() + "/git"
f, err := os.OpenFile(makepkgBin, os.O_RDONLY|os.O_CREATE, 0o755)
require.NoError(t, err)
require.NoError(t, f.Close())
f, err = os.OpenFile(pacmanBin, os.O_RDONLY|os.O_CREATE, 0o755)
require.NoError(t, err)
require.NoError(t, f.Close())
f, err = os.OpenFile(gitBin, os.O_RDONLY|os.O_CREATE, 0o755)
require.NoError(t, err)
require.NoError(t, f.Close())
captureOverride := func(cmd *exec.Cmd) (stdout string, stderr string, err error) {
return "", "", nil
}
showOverride := func(cmd *exec.Cmd) error {
return nil
}
mockRunner := &exe.MockRunner{CaptureFn: captureOverride, ShowFn: showOverride}
cmdBuilder := &exe.CmdBuilder{
MakepkgBin: makepkgBin,
SudoBin: "su",
PacmanBin: pacmanBin,
PacmanConfigPath: "/etc/pacman.conf",
GitBin: "git",
Runner: mockRunner,
SudoLoopEnabled: false,
}
cmdArgs := parser.MakeArguments()
cmdArgs.AddArg("Y")
cmdArgs.AddTarget("yay")
db := &mock.DBExecutor{
AlpmArchitecturesFn: func() ([]string, error) {
return []string{"x86_64"}, nil
},
RefreshHandleFn: func() error {
return nil
},
ReposFn: func() []string {
return []string{"aur"}
},
SyncPackagesFn: func(s ...string) []mock.IPackage {
return []mock.IPackage{
&mock.Package{
PName: "yay",
PBase: "yay",
PVersion: "10.0.0",
PDB: mock.NewDB("aur"),
},
}
},
LocalPackageFn: func(s string) mock.IPackage {
return nil
},
}
aurCache := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{
{
Name: "yay",
PackageBase: "yay",
Version: "10.0.0",
},
}, nil
},
}
logger := text.NewLogger(io.Discard, os.Stderr, strings.NewReader("1\n"), true, "test")
run := &runtime.Runtime{
Cfg: &settings.Configuration{
RemoveMake: "no",
},
Logger: logger,
CmdBuilder: cmdBuilder,
VCSStore: &vcs.Mock{},
QueryBuilder: query.NewSourceQueryBuilder(aurCache, logger, "votes", parser.ModeAny, "name",
true, false, true),
AURClient: aurCache,
}
err = handleCmd(context.Background(), run, cmdArgs, db)
require.NoError(t, err)
wantCapture := []string{}
wantShow := []string{
"pacman -S -y --config /etc/pacman.conf --",
"pacman -S -y -u --config /etc/pacman.conf --",
}
require.Len(t, mockRunner.ShowCalls, len(wantShow))
require.Len(t, mockRunner.CaptureCalls, len(wantCapture))
for i, call := range mockRunner.ShowCalls {
show := call.Args[0].(*exec.Cmd).String()
show = strings.ReplaceAll(show, makepkgBin, "makepkg")
show = strings.ReplaceAll(show, pacmanBin, "pacman")
show = strings.ReplaceAll(show, gitBin, "pacman")
// options are in a different order on different systems, and on CI the root user is used
assert.Subset(t, strings.Split(show, " "), strings.Split(wantShow[i], " "), fmt.Sprintf("%d - %s", i, show))
}
}
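The same pattern can stub both the AUR response and the interactive answer; the package fields and the simulated input below are illustrative only:

aurCache := &mockaur.MockAUR{
	GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
		return []aur.Pkg{{Name: "yay-bin", PackageBase: "yay-bin", Version: "12.0.0"}}, nil
	},
}
// The reader simulates a user typing "1" at the number menu.
logger := text.NewLogger(io.Discard, os.Stderr, strings.NewReader("1\n"), true, "test")
queryBuilder := query.NewSourceQueryBuilder(aurCache, logger, "votes", parser.ModeAny, "name",
	true, false, true)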


@ -51,7 +51,7 @@ _pacman_repo_list() {
_yay() { _yay() {
compopt -o default compopt -o default
local common core cur database files prev query remove sync upgrade o local common core cur database files prev query remove sync upgrade o
local yays show getpkgbuild web local yays show getpkgbuild
local cur prev words cword local cur prev words cword
_init_completion || return _init_completion || return
@ -61,29 +61,28 @@ _yay() {
search unrequired upgrades' 'c e g i k l m n o p s t u') search unrequired upgrades' 'c e g i k l m n o p s t u')
remove=('cascade dbonly nodeps assume-installed nosave print recursive unneeded' 'c n p s u') remove=('cascade dbonly nodeps assume-installed nosave print recursive unneeded' 'c n p s u')
sync=('asdeps asexplicit clean dbonly downloadonly overwrite groups ignore ignoregroup sync=('asdeps asexplicit clean dbonly downloadonly overwrite groups ignore ignoregroup
info list needed nodeps assume-installed print refresh recursive search sysupgrade aur repo' info list needed nodeps assume-installed print refresh recursive search sysupgrade'
'c g i l p s u w y a N') 'c g i l p s u w y')
upgrade=('asdeps asexplicit overwrite needed nodeps assume-installed print recursive' 'p') upgrade=('asdeps asexplicit overwrite needed nodeps assume-installed print recursive' 'p')
core=('database files help query remove sync upgrade version' 'D F Q R S U V h') core=('database files help query remove sync upgrade version' 'D F Q R S U V h')
##yay stuff ##yay stuff
common=('arch cachedir color config confirm dbpath debug gpgdir help hookdir logfile common=('arch cachedir color config confirm dbpath debug gpgdir help hookdir logfile
noconfirm noprogressbar noscriptlet quiet root verbose noconfirm noprogressbar noscriptlet quiet root verbose
makepkg pacman git gpg gpgflags config requestsplitn sudoloop makepkg pacman git gpg gpgflags config requestsplitn sudoloop nosudoloop
redownload noredownload redownloadall rebuild rebuildall rebuildtree norebuild sortby redownload noredownload redownloadall rebuild rebuildall rebuildtree norebuild
singlelineresults doublelineresults answerclean answerdiff answeredit answerupgrade noanswerclean noanswerdiff sortby answerclean answerdiff answeredit answerupgrade noanswerclean noanswerdiff
noansweredit noanswerupgrade cleanmenu diffmenu editmenu cleanafter keepsrc noansweredit noanswerupgrade cleanmenu diffmenu editmenu upgrademenu cleanafter nocleanafter
provides pgpfetch nocleanmenu nodiffmenu noupgrademenu provides noprovides pgpfetch nopgpfetch
useask combinedupgrade aur repo makepkgconf useask nouseask combinedupgrade nocombinedupgrade aur repo makepkgconf
nomakepkgconf askremovemake askyesremovemake removemake noremovemake completioninterval aururl aurrpcurl nomakepkgconf askremovemake removemake noremovemake completioninterval aururl
searchby batchinstall' searchby batchinstall nobatchinstall'
'b d h q r v') 'b d h q r v')
yays=('clean gendb' 'c') yays=('clean gendb' 'c')
show=('complete defaultconfig currentconfig stats news' 'c d g s w') show=('complete defaultconfig currentconfig stats news' 'c d g s w')
getpkgbuild=('force print' 'f p') getpkgbuild=('force print' 'f p')
web=('vote unvote' 'v u')
for o in 'D database' 'F files' 'Q query' 'R remove' 'S sync' 'U upgrade' 'Y yays' 'P show' 'G getpkgbuild' 'W web'; do for o in 'D database' 'F files' 'Q query' 'R remove' 'S sync' 'U upgrade' 'Y yays' 'P show' 'G getpkgbuild'; do
_arch_incomp "$o" && break _arch_incomp "$o" && break
done done
@ -120,9 +119,6 @@ _yay() {
G) G)
_yay_pkg _yay_pkg
;; ;;
W)
_yay_pkg
;;
esac esac
fi fi
true true
@ -130,7 +126,7 @@ _yay() {
_pacman_file() { _pacman_file() {
compopt -o filenames compopt -o filenames
_filedir 'pkg.*' _filedir 'pkg.tar*'
} }
complete -F _yay yay complete -F _yay yay


@ -8,7 +8,6 @@ set -l progname yay
set -l listall "(yay -Pc)" set -l listall "(yay -Pc)"
set -l listpacman "(__fish_print_packages)" set -l listpacman "(__fish_print_packages)"
set -l yayspecific '__fish_contains_opt -s Y yay' set -l yayspecific '__fish_contains_opt -s Y yay'
set -l webspecific '__fish_contains_opt -s W web'
set -l show '__fish_contains_opt -s P show' set -l show '__fish_contains_opt -s P show'
set -l getpkgbuild '__fish_contains_opt -s G getpkgbuild' set -l getpkgbuild '__fish_contains_opt -s G getpkgbuild'
@ -17,7 +16,7 @@ set -l listinstalled "(pacman -Q | string replace ' ' \t)"
set -l listrepos "(__fish_print_pacman_repos)" set -l listrepos "(__fish_print_pacman_repos)"
set -l listgroups "(pacman -Sg)\t'Package Group'" set -l listgroups "(pacman -Sg)\t'Package Group'"
set -l noopt 'not __fish_contains_opt -s S -s D -s Q -s R -s U -s T -s F -s Y -s W -s P -s G database query sync remove upgrade deptest files show getpkgbuild web yay' set -l noopt 'not __fish_contains_opt -s S -s D -s Q -s R -s U -s T -s F database query sync remove upgrade deptest files'
set -l database '__fish_contains_opt -s D database' set -l database '__fish_contains_opt -s D database'
set -l query '__fish_contains_opt -s Q query' set -l query '__fish_contains_opt -s Q query'
set -l remove '__fish_contains_opt -s R remove' set -l remove '__fish_contains_opt -s R remove'
@ -157,16 +156,10 @@ complete -c $progname -n "$upgrade" -xa '(__fish_complete_suffix pkg.tar.zst; __
complete -c $progname -s Y -f -l yay -n "$noopt" -d 'Yay specific operations' complete -c $progname -s Y -f -l yay -n "$noopt" -d 'Yay specific operations'
complete -c $progname -s P -f -l show -n "$noopt" -d 'Print information' complete -c $progname -s P -f -l show -n "$noopt" -d 'Print information'
complete -c $progname -s G -f -l getpkgbuild -n "$noopt" -d 'Get PKGBUILD from ABS or AUR' complete -c $progname -s G -f -l getpkgbuild -n "$noopt" -d 'Get PKGBUILD from ABS or AUR'
complete -c $progname -s W -f -l web -n "$noopt" -d 'Web operations'
# Web options
complete -c $progname -n "$webspecific" -s v -l vote -d 'Vote for AUR packages' -f
complete -c $progname -n "$webspecific" -s u -l unvote -d 'Unvote for AUR packages' -f
complete -c $progname -n "$webspecific" -xa "$listall"
# New options # New options
complete -c $progname -n "not $noopt" -s a -l aur -d 'Assume targets are from the AUR' -f complete -c $progname -n "not $noopt" -l repo -d 'Assume targets are from the AUR' -f
complete -c $progname -n "not $noopt" -s N -l repo -d 'Assume targets are from the repositories' -f complete -c $progname -n "not $noopt" -s a -l aur -d 'Assume targets are from the repositories' -f
# Yay options # Yay options
complete -c $progname -n "$yayspecific" -s c -l clean -d 'Remove unneeded dependencies' -f complete -c $progname -n "$yayspecific" -s c -l clean -d 'Remove unneeded dependencies' -f
@ -189,7 +182,6 @@ complete -c $progname -n "$getpkgbuild" -s p -l print -d 'Print pkgbuild of pack
# Permanent configuration settings # Permanent configuration settings
complete -c $progname -n "not $noopt" -l save -d 'Save current arguments to yay permanent configuration' -f complete -c $progname -n "not $noopt" -l save -d 'Save current arguments to yay permanent configuration' -f
complete -c $progname -n "not $noopt" -l aururl -d 'Set an alternative AUR URL' -f complete -c $progname -n "not $noopt" -l aururl -d 'Set an alternative AUR URL' -f
complete -c $progname -n "not $noopt" -l aurrpcurl -d 'Set an alternative URL for the AUR /rpc endpoint' -f
complete -c $progname -n "not $noopt" -l builddir -d 'Directory to use for Building AUR Packages' -r complete -c $progname -n "not $noopt" -l builddir -d 'Directory to use for Building AUR Packages' -r
complete -c $progname -n "not $noopt" -l editor -d 'Editor to use' -f complete -c $progname -n "not $noopt" -l editor -d 'Editor to use' -f
complete -c $progname -n "not $noopt" -l editorflags -d 'Editor flags to use' -f complete -c $progname -n "not $noopt" -l editorflags -d 'Editor flags to use' -f
@ -216,26 +208,35 @@ complete -c $progname -n "not $noopt" -l noanswerupgrade -d 'Unset the answer fo
complete -c $progname -n "not $noopt" -l cleanmenu -d 'Give the option to clean build PKGBUILDS' -f complete -c $progname -n "not $noopt" -l cleanmenu -d 'Give the option to clean build PKGBUILDS' -f
complete -c $progname -n "not $noopt" -l diffmenu -d 'Give the option to show diffs for build files' -f complete -c $progname -n "not $noopt" -l diffmenu -d 'Give the option to show diffs for build files' -f
complete -c $progname -n "not $noopt" -l editmenu -d 'Give the option to edit/view PKGBUILDS' -f complete -c $progname -n "not $noopt" -l editmenu -d 'Give the option to edit/view PKGBUILDS' -f
complete -c $progname -n "not $noopt" -l upgrademenu -d 'Show a detailed list of updates with the option to skip any' -f
complete -c $progname -n "not $noopt" -l nocleanmenu -d 'Do not clean build PKGBUILDS' -f
complete -c $progname -n "not $noopt" -l nodiffmenu -d 'Do not show diffs for build files' -f
complete -c $progname -n "not $noopt" -l noeditmenu -d 'Do not edit/view PKGBUILDS' -f
complete -c $progname -n "not $noopt" -l noupgrademenu -d 'Do not show the upgrade menu' -f
complete -c $progname -n "not $noopt" -l askremovemake -d 'Ask to remove make deps after install' -f complete -c $progname -n "not $noopt" -l askremovemake -d 'Ask to remove make deps after install' -f
complete -c $progname -n "not $noopt" -l askyesremovemake -d 'Ask to remove make deps after install(with "Y" as default)' -f
complete -c $progname -n "not $noopt" -l removemake -d 'Remove make deps after install' -f complete -c $progname -n "not $noopt" -l removemake -d 'Remove make deps after install' -f
complete -c $progname -n "not $noopt" -l noremovemake -d 'Do not remove make deps after install' -f complete -c $progname -n "not $noopt" -l noremovemake -d 'Do not remove make deps after install' -f
complete -c $progname -n "not $noopt" -l topdown -d 'Shows repository packages first and then aur' -f complete -c $progname -n "not $noopt" -l topdown -d 'Shows repository packages first and then aur' -f
complete -c $progname -n "not $noopt" -l bottomup -d 'Shows aur packages first and then repository' -f complete -c $progname -n "not $noopt" -l bottomup -d 'Shows aur packages first and then repository' -f
complete -c $progname -n "not $noopt" -l singlelineresults -d 'List each search result on its own line' -f
complete -c $progname -n "not $noopt" -l doublelineresults -d 'List each search result on two lines, like pacman' -f
complete -c $progname -n "not $noopt" -l devel -d 'Check -git/-svn/-hg development version' -f complete -c $progname -n "not $noopt" -l devel -d 'Check -git/-svn/-hg development version' -f
complete -c $progname -n "not $noopt" -l nodevel -d 'Disable development version checking' -f
complete -c $progname -n "not $noopt" -l cleanafter -d 'Clean package sources after successful build' -f complete -c $progname -n "not $noopt" -l cleanafter -d 'Clean package sources after successful build' -f
complete -c $progname -n "not $noopt" -l keepsrc -d 'Keep pkg/ and src/ after building packages' -f complete -c $progname -n "not $noopt" -l nocleanafter -d 'Disable package sources cleaning' -f
complete -c $progname -n "not $noopt" -l timeupdate -d 'Check package modification date and version' -f complete -c $progname -n "not $noopt" -l timeupdate -d 'Check package modification date and version' -f
complete -c $progname -n "not $noopt" -l notimeupdate -d 'Check only package version change' -f
complete -c $progname -n "not $noopt" -l redownload -d 'Redownload PKGBUILD of package even if up-to-date' -f complete -c $progname -n "not $noopt" -l redownload -d 'Redownload PKGBUILD of package even if up-to-date' -f
complete -c $progname -n "not $noopt" -l redownloadall -d 'Redownload PKGBUILD of package and deps even if up-to-date' -f complete -c $progname -n "not $noopt" -l redownloadall -d 'Redownload PKGBUILD of package and deps even if up-to-date' -f
complete -c $progname -n "not $noopt" -l noredownload -d 'Do not redownload up-to-date PKGBUILDs' -f complete -c $progname -n "not $noopt" -l noredownload -d 'Do not redownload up-to-date PKGBUILDs' -f
complete -c $progname -n "not $noopt" -l provides -d 'Look for matching providers when searching for packages' -f complete -c $progname -n "not $noopt" -l provides -d 'Look for matching providers when searching for packages' -f
complete -c $progname -n "not $noopt" -l noprovides -d 'Just look for packages by pkgname' -f
complete -c $progname -n "not $noopt" -l pgpfetch -d 'Prompt to import PGP keys from PKGBUILDs' -f complete -c $progname -n "not $noopt" -l pgpfetch -d 'Prompt to import PGP keys from PKGBUILDs' -f
complete -c $progname -n "not $noopt" -l nopgpfetch -d 'Do not prompt to import PGP keys' -f
complete -c $progname -n "not $noopt" -l useask -d 'Automatically resolve conflicts using pacmans ask flag' -f complete -c $progname -n "not $noopt" -l useask -d 'Automatically resolve conflicts using pacmans ask flag' -f
complete -c $progname -n "not $noopt" -l nouseask -d 'Confirm conflicts manually during the install' -f
complete -c $progname -n "not $noopt" -l combinedupgrade -d 'Refresh then perform the repo and AUR upgrade together' -f complete -c $progname -n "not $noopt" -l combinedupgrade -d 'Refresh then perform the repo and AUR upgrade together' -f
complete -c $progname -n "not $noopt" -l nocombinedupgrade -d 'Perform the repo upgrade and AUR upgrade separately' -f
complete -c $progname -n "not $noopt" -l batchinstall -d 'Build multiple AUR packages then install them together' -f complete -c $progname -n "not $noopt" -l batchinstall -d 'Build multiple AUR packages then install them together' -f
complete -c $progname -n "not $noopt" -l nobatchinstall -d 'Build and install each AUR package one by one' -f
complete -c $progname -n "not $noopt" -l rebuild -d 'Always build target packages' -f complete -c $progname -n "not $noopt" -l rebuild -d 'Always build target packages' -f
complete -c $progname -n "not $noopt" -l rebuildall -d 'Always build all AUR packages' -f complete -c $progname -n "not $noopt" -l rebuildall -d 'Always build all AUR packages' -f
complete -c $progname -n "not $noopt" -l rebuildtree -d 'Always build all AUR packages even if installed' -f complete -c $progname -n "not $noopt" -l rebuildtree -d 'Always build all AUR packages even if installed' -f
@ -243,3 +244,4 @@ complete -c $progname -n "not $noopt" -l norebuild -d 'Skip package build if in
complete -c $progname -n "not $noopt" -l mflags -d 'Pass the following options to makepkg' -f complete -c $progname -n "not $noopt" -l mflags -d 'Pass the following options to makepkg' -f
complete -c $progname -n "not $noopt" -l gpgflags -d 'Pass the following options to gpg' -f complete -c $progname -n "not $noopt" -l gpgflags -d 'Pass the following options to gpg' -f
complete -c $progname -n "not $noopt" -l sudoloop -d 'Loop sudo calls in the background to avoid timeout' -f complete -c $progname -n "not $noopt" -l sudoloop -d 'Loop sudo calls in the background to avoid timeout' -f
complete -c $progname -n "not $noopt" -l nosudoloop -d 'Do not loop sudo calls in the background' -f


@ -1,5 +1,5 @@
#compdef yay #compdef yay
# vim:tabstop=2 shiftwidth=2 filetype=zsh # vim:fdm=marker foldlevel=0 tabstop=2 shiftwidth=2 filetype=zsh
typeset -A opt_args typeset -A opt_args
setopt extendedglob setopt extendedglob
@ -16,17 +16,15 @@ _pacman_opts_commands=(
{-T,--deptest}'[Check if dependencies are installed]' {-T,--deptest}'[Check if dependencies are installed]'
{-U,--upgrade}'[Upgrade a package]' {-U,--upgrade}'[Upgrade a package]'
{-Y,--yay}'[Yay specific options]' {-Y,--yay}'[Yay specific options]'
{-W,--web}'[web options]'
{-V,--version}'[Display version and exit]' {-V,--version}'[Display version and exit]'
'(-h --help)'{-h,--help}'[Display usage]' '(-h --help)'{-h,--help}'[Display usage]'
) )
# options for passing to _arguments: options common to all commands # options for passing to _arguments: options common to all commands
_pacman_opts_common=( _pacman_opts_common=(
{-N,--repo}'[Assume targets are from the repositories]' '--repo[Assume targets are from the repositories]'
{-a,--aur}'[Assume targets are from the AUR]' {-a,--aur}'[Assume targets are from the AUR]'
'--aururl[Set an alternative AUR URL]:url' '--aururl[Set an alternative AUR URL]:url'
'--aurrpcurl[Set an alternative URL for the AUR /rpc endpoint]:url'
'--arch[Set an alternate architecture]' '--arch[Set an alternate architecture]'
{-b,--dbpath}'[Alternate database location]:database_location:_files -/' {-b,--dbpath}'[Alternate database location]:database_location:_files -/'
'--color[colorize the output]:color options:(always never auto)' '--color[colorize the output]:color options:(always never auto)'
@ -70,36 +68,46 @@ _pacman_opts_common=(
'--cleanmenu[Give the option to clean build PKGBUILDS]' '--cleanmenu[Give the option to clean build PKGBUILDS]'
'--diffmenu[Give the option to show diffs for build files]' '--diffmenu[Give the option to show diffs for build files]'
'--editmenu[Give the option to edit/view PKGBUILDS]' '--editmenu[Give the option to edit/view PKGBUILDS]'
'--upgrademenu[Show a detailed list of updates with the option to skip any]'
"--nocleanmenu[Don't clean build PKGBUILDS]"
"--nodiffmenu[Don't show diffs for build files]"
"--noeditmenu[Don't edit/view PKGBUILDS]"
"--noupgrademenu[Don't show the upgrade menu]"
"--askremovemake[Ask to remove makedepends after install]" "--askremovemake[Ask to remove makedepends after install]"
"--askyesremovemake[Ask to remove makedepends after install(with "Y" as default)]"
"--removemake[Remove makedepends after install]" "--removemake[Remove makedepends after install]"
"--noremovemake[Don't remove makedepends after install]" "--noremovemake[Don't remove makedepends after install]"
'--bottomup[Show AUR packages first]' '--bottomup[Show AUR packages first]'
'--topdown[Show repository packages first]' '--topdown[Show repository packages first]'
'--singlelineresults[List each search result on its own line]'
'--doublelineresults[List each search result on two lines, like pacman]'
'--devel[Check -git/-svn/-hg development version]' '--devel[Check -git/-svn/-hg development version]'
'--nodevel[Disable development version checking]'
'--cleanafter[Clean package sources after successful build]' '--cleanafter[Clean package sources after successful build]'
'--keepsrc[Keep pkg/ and src/ after building packages]' '--nocleanafter[Disable package sources cleaning after successful build]'
'--timeupdate[Check packages modification date and version]' '--timeupdate[Check packages modification date and version]'
'--notimeupdate[Check only package version change]'
'--redownload[Always download pkgbuilds of targets]' '--redownload[Always download pkgbuilds of targets]'
'--redownloadall[Always download pkgbuilds of all AUR packages]' '--redownloadall[Always download pkgbuilds of all AUR packages]'
'--noredownload[Skip pkgbuild download if in cache and up to date]' '--noredownload[Skip pkgbuild download if in cache and up to date]'
'--rebuild[Always build target packages]' '--rebuild[Always build target packages]'
'--rebuildall[Always build all AUR packages]' '--rebuildall[Always build all AUR packages]'
'--provides[Look for matching providers when searching for packages]' '--provides[Look for matching providers when searching for packages]'
'--noprovides[Just look for packages by pkgname]'
'--pgpfetch[Prompt to import PGP keys from PKGBUILDs]' '--pgpfetch[Prompt to import PGP keys from PKGBUILDs]'
"--nopgpfetch[Don't prompt to import PGP keys]"
"--useask[Automatically resolve conflicts using pacman's ask flag]" "--useask[Automatically resolve conflicts using pacman's ask flag]"
'--nouseask[Confirm conflicts manually during the install]'
'--combinedupgrade[Refresh then perform the repo and AUR upgrade together]' '--combinedupgrade[Refresh then perform the repo and AUR upgrade together]'
'--nocombinedupgrade[Perform the repo upgrade and AUR upgrade separately]'
'--rebuildtree[Always build all AUR packages even if installed]' '--rebuildtree[Always build all AUR packages even if installed]'
'--norebuild[Skip package build if in cache and up to date]' '--norebuild[Skip package build if in cache and up to date]'
'--mflags[Pass arguments to makepkg]:mflags' '--mflags[Pass arguments to makepkg]:mflags'
'--gpgflags[Pass arguments to gpg]:gpgflags' '--gpgflags[Pass arguments to gpg]:gpgflags'
'--sudoloop[Loop sudo calls in the background to avoid timeout]' '--sudoloop[Loop sudo calls in the background to avoid timeout]'
'--nosudoloop[Do not loop sudo calls in the background]'
'--searchby[Search for packages using a specified field]' '--searchby[Search for packages using a specified field]'
'--sortby[Sort AUR results by a specific field during search]' '--sortby[Sort AUR results by a specific field during search]'
'--batchinstall[Build multiple AUR packages then install them together]' '--batchinstall[Build multiple AUR packages then install them together]'
'--nobatchinstall[Build and install each AUR package one by one]'
) )
# options for passing to _arguments: options for --upgrade commands # options for passing to _arguments: options for --upgrade commands
@ -155,12 +163,6 @@ _pacman_opts_getpkgbuild_modifiers=(
{-p,--print}'[Print PKGBUILDs]:package:_pacman_completions_all_packages' {-p,--print}'[Print PKGBUILDs]:package:_pacman_completions_all_packages'
) )
# -W
_pacman_opts_web_modifiers=(
{-u,--unvote}'[Unvote AUR package]:package:_pacman_completions_all_packages'
{-v,--vote}'[Vote AUR package]:package:_pacman_completions_all_packages'
)
# -P # -P
_pacman_opts_print_modifiers=( _pacman_opts_print_modifiers=(
{-c,--complete}'[Used for completions]' {-c,--complete}'[Used for completions]'
@ -500,19 +502,16 @@ _pacman_zsh_comp() {
"$_pacman_opts_query_modifiers[@]" \ "$_pacman_opts_query_modifiers[@]" \
'*:package file:_files -g "*.pkg.tar*~*.sig(.,@)"' '*:package file:_files -g "*.pkg.tar*~*.sig(.,@)"'
;; ;;
T*)
_pacman_action_deptest
;;
Q*) Q*)
_pacman_action_query _pacman_action_query
;; ;;
P*) P*)
_arguments -s : \ _arguments -s : \
'-P' \
"$_pacman_opts_print_modifiers[@]" "$_pacman_opts_print_modifiers[@]"
;; ;;
W*)
_arguments -s : \
'-W' \
"$_pacman_opts_web_modifiers[@]"
;;
R*) R*)
_pacman_action_remove _pacman_action_remove
;; ;;
@ -543,7 +542,10 @@ _pacman_zsh_comp() {
_pacman_action_sync _pacman_action_sync
;; ;;
T*) T*)
_pacman_action_deptest _arguments -s : \
'-T' \
"$_pacman_opts_common[@]" \
":packages:_pacman_all_packages"
;; ;;
U*) U*)
_pacman_action_upgrade _pacman_action_upgrade
@ -553,12 +555,10 @@ _pacman_zsh_comp() {
;; ;;
Y*) Y*)
_arguments -s : \ _arguments -s : \
'-Y' \
"$_pacman_opts_yay_modifiers[@]" "$_pacman_opts_yay_modifiers[@]"
;; ;;
G*) G*)
_arguments -s : \ _arguments -s : \
'-G' \
"$_pacman_opts_getpkgbuild_modifiers[@]" "$_pacman_opts_getpkgbuild_modifiers[@]"
;; ;;

config.go

@ -0,0 +1,114 @@
package main
import (
"bufio"
"fmt"
"os"
"os/exec"
"strings"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v11/pkg/text"
)
// Verbosity settings for search.
const (
numberMenu = iota
detailed
minimal
)
var yayVersion = "11.0.0"
var localePath = "/usr/share/locale"
// config holds the current config values for yay.
var config *settings.Configuration
// Editor returns the preferred system editor.
func editor() (editor string, args []string) {
switch {
case config.Editor != "":
editor, err := exec.LookPath(config.Editor)
if err != nil {
fmt.Fprintln(os.Stderr, err)
} else {
return editor, strings.Fields(config.EditorFlags)
}
fallthrough
case os.Getenv("EDITOR") != "":
if editorArgs := strings.Fields(os.Getenv("EDITOR")); len(editorArgs) != 0 {
editor, err := exec.LookPath(editorArgs[0])
if err != nil {
fmt.Fprintln(os.Stderr, err)
} else {
return editor, editorArgs[1:]
}
}
fallthrough
case os.Getenv("VISUAL") != "":
if editorArgs := strings.Fields(os.Getenv("VISUAL")); len(editorArgs) != 0 {
editor, err := exec.LookPath(editorArgs[0])
if err != nil {
fmt.Fprintln(os.Stderr, err)
} else {
return editor, editorArgs[1:]
}
}
fallthrough
default:
fmt.Fprintln(os.Stderr)
text.Errorln(gotext.Get("%s is not set", text.Bold(text.Cyan("$EDITOR"))))
text.Warnln(gotext.Get("Add %s or %s to your environment variables", text.Bold(text.Cyan("$EDITOR")), text.Bold(text.Cyan("$VISUAL"))))
for {
text.Infoln(gotext.Get("Edit PKGBUILD with?"))
editorInput, err := getInput("")
if err != nil {
fmt.Fprintln(os.Stderr, err)
continue
}
editorArgs := strings.Fields(editorInput)
if len(editorArgs) == 0 {
continue
}
editor, err := exec.LookPath(editorArgs[0])
if err != nil {
fmt.Fprintln(os.Stderr, err)
continue
}
return editor, editorArgs[1:]
}
}
}
func getInput(defaultValue string) (string, error) {
text.Info()
if defaultValue != "" || settings.NoConfirm {
fmt.Println(defaultValue)
return defaultValue, nil
}
reader := bufio.NewReader(os.Stdin)
buf, overflow, err := reader.ReadLine()
if err != nil {
return "", err
}
if overflow {
return "", fmt.Errorf(gotext.Get("input too long"))
}
return string(buf), nil
}
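A sketch of how a caller might use the resolution above (config.Editor, then $EDITOR, then $VISUAL, then an interactive prompt) to open a PKGBUILD; the path is only an example:

editorBin, editorArgs := editor()
editorArgs = append(editorArgs, "/home/user/.cache/yay/somepkg/PKGBUILD")
cmd := exec.Command(editorBin, editorArgs...)
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
if err := cmd.Run(); err != nil {
	fmt.Fprintln(os.Stderr, err)
}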

diff.go

@ -0,0 +1,145 @@
package main
import (
"context"
"fmt"
"path/filepath"
"strings"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v11/pkg/dep"
"github.com/Jguer/yay/v11/pkg/multierror"
"github.com/Jguer/yay/v11/pkg/text"
)
const gitDiffRefName = "AUR_SEEN"
func showPkgbuildDiffs(ctx context.Context, bases []dep.Base, cloned map[string]bool) error {
var errMulti multierror.MultiError
for _, base := range bases {
pkg := base.Pkgbase()
dir := filepath.Join(config.BuildDir, pkg)
start, err := getLastSeenHash(ctx, config.BuildDir, pkg)
if err != nil {
errMulti.Add(err)
continue
}
if cloned[pkg] {
start = gitEmptyTree
} else {
hasDiff, err := gitHasDiff(ctx, config.BuildDir, pkg)
if err != nil {
errMulti.Add(err)
continue
}
if !hasDiff {
text.Warnln(gotext.Get("%s: No changes -- skipping", text.Cyan(base.String())))
continue
}
}
args := []string{
"diff",
start + "..HEAD@{upstream}", "--src-prefix",
dir + "/", "--dst-prefix", dir + "/", "--", ".", ":(exclude).SRCINFO",
}
if text.UseColor {
args = append(args, "--color=always")
} else {
args = append(args, "--color=never")
}
_ = config.Runtime.CmdBuilder.Show(config.Runtime.CmdBuilder.BuildGitCmd(ctx, dir, args...))
}
return errMulti.Return()
}
// Check whether or not a diff exists between the last reviewed diff and
// HEAD@{upstream}.
func gitHasDiff(ctx context.Context, path, name string) (bool, error) {
if gitHasLastSeenRef(ctx, path, name) {
stdout, stderr, err := config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(ctx, filepath.Join(path, name), "rev-parse", gitDiffRefName, "HEAD@{upstream}"))
if err != nil {
return false, fmt.Errorf("%s%s", stderr, err)
}
lines := strings.Split(stdout, "\n")
lastseen := lines[0]
upstream := lines[1]
return lastseen != upstream, nil
}
// If YAY_DIFF_REVIEW does not exist, we have never reviewed a diff for this package
// and should display it.
return true, nil
}
// Returns whether or not we have reviewed a diff yet. It checks for the existence of
// YAY_DIFF_REVIEW in the git ref-list.
func gitHasLastSeenRef(ctx context.Context, path, name string) bool {
_, _, err := config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(ctx,
filepath.Join(path, name), "rev-parse", "--quiet", "--verify", gitDiffRefName))
return err == nil
}
// Returns the last reviewed hash. If YAY_DIFF_REVIEW exists it will return this hash.
// If it does not, it will return the empty tree, as no diff has been reviewed yet.
func getLastSeenHash(ctx context.Context, path, name string) (string, error) {
if gitHasLastSeenRef(ctx, path, name) {
stdout, stderr, err := config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(ctx,
filepath.Join(path, name), "rev-parse", gitDiffRefName))
if err != nil {
return "", fmt.Errorf("%s %s", stderr, err)
}
lines := strings.Split(stdout, "\n")
return lines[0], nil
}
return gitEmptyTree, nil
}
// Update the AUR_SEEN ref to HEAD. We use this ref to determine which diffs were
// reviewed by the user.
func gitUpdateSeenRef(ctx context.Context, path, name string) error {
_, stderr, err := config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(ctx,
filepath.Join(path, name), "update-ref", gitDiffRefName, "HEAD"))
if err != nil {
return fmt.Errorf("%s %s", stderr, err)
}
return nil
}
func gitMerge(ctx context.Context, path, name string) error {
_, stderr, err := config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(ctx,
filepath.Join(path, name), "reset", "--hard", "HEAD"))
if err != nil {
return fmt.Errorf(gotext.Get("error resetting %s: %s", name, stderr))
}
_, stderr, err = config.Runtime.CmdBuilder.Capture(
config.Runtime.CmdBuilder.BuildGitCmd(ctx,
filepath.Join(path, name), "merge", "--no-edit", "--ff"))
if err != nil {
return fmt.Errorf(gotext.Get("error merging %s: %s", name, stderr))
}
return nil
}
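A minimal sketch, assuming only the helpers defined above (the wrapper name reviewCycle is illustrative and not part of diff.go), of how a single package directory moves through the review flow: check for unreviewed changes, show them, then record HEAD as the last seen state via the AUR_SEEN ref.
func reviewCycle(ctx context.Context, buildDir, pkg string) error {
	hasDiff, err := gitHasDiff(ctx, buildDir, pkg)
	if err != nil {
		return err
	}
	if hasDiff {
		// showPkgbuildDiffs would be invoked here for the matching dep.Base.
	}
	// Mark the current HEAD as reviewed so the next run can skip it.
	return gitUpdateSeenRef(ctx, buildDir, pkg)
}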

168
doc/yay.8
View File

@ -1,4 +1,4 @@
.TH "YAY" "8" "2019\-10\-21" "Yay v12.0+" "Yay Manual" .TH "YAY" "8" "2019\-10\-21" "Yay v9.4+" "Yay Manual"
.nh .nh
.ad l .ad l
.SH NAME .SH NAME
@ -19,15 +19,10 @@ This manpage only covers options unique to Yay. For other options see
\fBpacman(8)\fR. \fBpacman(8)\fR.
.SH YAY OPERATIONS .SH YAY OPERATIONS
.TP .TP
.B \-Y, \-\-yay .B \-Y, \-\-yay
Perform yay specific operations. This is the default if no other operation is Perform yay specific operations. This is the default if no other operation is
selected and targets are defined. selected.
.TP
.B \-B, \-\-build
Build a PKGBUILD in a given directory.
.TP .TP
.B \-P, \-\-show .B \-P, \-\-show
@ -35,16 +30,13 @@ Perform yay specific print operations.
.TP .TP
.B \-G, \-\-getpkgbuild .B \-G, \-\-getpkgbuild
Downloads PKGBUILD from ABS or AUR. The ABS can only be used for Arch Linux repositories. Downloads PKGBUILD from ABS or AUR. The ABS can only be used for Arch Linux
repositories
.TP
.B \-W, \-\-web
Web related operations such as voting for AUR packages.
.RE .RE
If no operation is specified 'yay \-Syu' will be performed If no arguments are provided 'yay \-Syu' will be performed.
If no operation is specified and targets are provided \-Y will be assumed If no operation is selected \-Y will be assumed.
.SH EXTENDED PACMAN OPERATIONS .SH EXTENDED PACMAN OPERATIONS
.TP .TP
@ -63,7 +55,7 @@ Yay will also remove cached data about devel packages.
.SH NEW OPTIONS .SH NEW OPTIONS
.TP .TP
.B \-N, \-\-repo .B \-\-repo
Assume all targets are from the repositories. Additionally, actions such as Assume all targets are from the repositories. Additionally, actions such as
sysupgrade will only act on repository packages. sysupgrade will only act on repository packages.
@ -82,10 +74,6 @@ packages.
Displays a list of packages matching the search terms and prompts the user on Displays a list of packages matching the search terms and prompts the user on
which packages to install (yogurt mode). which packages to install (yogurt mode).
The first search term is used to query the different sources and
the following search terms are used to narrow the search results
through exact matching.
.TP .TP
.B \-\-gendb .B \-\-gendb
Generate development package database. Tracks the latest commit for each Generate development package database. Tracks the latest commit for each
@ -97,16 +85,16 @@ used when migrating to Yay from another AUR helper.
.B \-c, \-\-clean .B \-c, \-\-clean
Remove unneeded dependencies. Remove unneeded dependencies.
.TP .SH SHOW OPTIONS (APPLY TO \-P AND \-\-SHOW)
.B \-cc
Remove unneeded dependencies, including packages optionally required by any other package.
.SH SHOW OPTIONS (APPLY TO \-P AND \-\-show)
.TP .TP
.B \-c, \-\-complete .B \-c, \-\-complete
Print a list of all AUR and repo packages. This allows shell completion Print a list of all AUR and repo packages. This allows shell completion
and is not intended to be used directly by the user. and is not intended to be used directly by the user.
.TP
.B \-f, \-\-fish
When running \-\-complete, adjust the output for the fish shell.
.TP .TP
.B \-d, \-\-defaultconfig .B \-d, \-\-defaultconfig
Print default yay configuration. Print default yay configuration.
@ -115,12 +103,20 @@ Print default yay configuration.
.B \-g, \-\-currentconfig .B \-g, \-\-currentconfig
Print current yay configuration. Print current yay configuration.
.TP
.B \-n, \-\-numberupgrades
Deprecated, use \fByay -Qu\fR and \fBwc -l\fR instead\%.
.TP .TP
.B \-s, \-\-stats .B \-s, \-\-stats
Displays information about installed packages and system health. If there are Displays information about installed packages and system health. If there are
orphaned, or out\-of\-date packages, or packages that no longer exist on the orphaned, or out\-of\-date packages, or packages that no longer exist on the
AUR; warnings will be displayed. AUR; warnings will be displayed.
.TP
.B \-u, \-\-upgrades
Deprecated, use \fByay -Qu\fR instead\%.
.TP .TP
.B \-w, \-\-news .B \-w, \-\-news
Print new news from the Archlinux homepage. News is considered new if it is Print new news from the Archlinux homepage. News is considered new if it is
@ -131,12 +127,7 @@ available news.
.B \-q, \-\-quiet .B \-q, \-\-quiet
Only show titles when printing news. Only show titles when printing news.
.SH BUILD OPTIONS (APPLY TO \-B AND \-\-build) .SH GETPKGBUILD OPTIONS (APPLY TO \-G AND \-\-GETPKGBUILD)
.TP
.B \-i, \-\-install
Build and install a PKGBUILD in a given directory
.SH GETPKGBUILD OPTIONS (APPLY TO \-G AND \-\-getpkgbuild)
.TP .TP
.B \-f, \-\-force .B \-f, \-\-force
Force download for ABS packages that already exist in the current directory. This Force download for ABS packages that already exist in the current directory. This
@ -146,20 +137,6 @@ ensures directories are not accidentally overwritten.
.B \-p, \-\-print .B \-p, \-\-print
Prints the PKGBUILD of the given packages to stdout. Prints the PKGBUILD of the given packages to stdout.
.SH WEB OPTIONS (APPLY TO \-W AND \-\-web)
.TP
Web related operations such as voting for AUR packages.
Requires setting AUR_USERNAME and AUR_PASSWORD environment variables.
.TP
.B \-u, \-\-unvote
Remove vote from AUR package(s)
.TP
.B \-v, \-\-vote
Vote for AUR package(s)
.SH PERMANENT CONFIGURATION SETTINGS .SH PERMANENT CONFIGURATION SETTINGS
.TP .TP
.B \-\-save .B \-\-save
@ -169,11 +146,8 @@ file.
.TP .TP
.B \-\-aururl .B \-\-aururl
Set an alternative AUR URL. Set an alternative AUR URL. This is mostly useful for users in China who wish
to use https://aur.tuna.tsinghua.edu.cn/.
.TP
.B \-\-aurrpcurl
Set an alternative URL for the AUR /rpc endpoint.
.TP .TP
.B \-\-builddir <dir> .B \-\-builddir <dir>
@ -182,8 +156,8 @@ the AUR cache when deciding if Yay should skip builds.
.TP .TP
.B \-\-editor <command> .B \-\-editor <command>
Editor to use when editing PKGBUILDs. If this is not set the \fBVISUAL\fR Editor to use when editing PKGBUILDs. If this is not set the \fBEDITOR\fR
environment variable will be checked, followed by \fBEDITOR\fR. If none of environment variable will be checked, followed by \fBVISUAL\fR. If none of
these are set Yay will prompt the user for an editor. these are set Yay will prompt the user for an editor.
.TP .TP
@ -249,7 +223,7 @@ cache to never be refreshed.
Sort AUR results by a specific field during search. Sort AUR results by a specific field during search.
.TP .TP
.B \-\-searchby <name|name-desc|maintainer|depends|checkdepends|makedepends|optdepends|provides|conflicts|replaces|groups|keywords|comaintainers> .B \-\-searchby <name|name-desc|maintainer|depends|checkdepends|makedepends|optdepends>
Search for AUR packages by querying the specified field. Search for AUR packages by querying the specified field.
.TP .TP
@ -297,9 +271,6 @@ Unset the answer for the upgrade menu.
Show the clean menu. This menu gives you the chance to fully delete the Show the clean menu. This menu gives you the chance to fully delete the
downloaded build files from Yay's cache before redownloading a fresh copy. downloaded build files from Yay's cache before redownloading a fresh copy.
If 'cleanmenu' is enabled in the configuration file, you can temporarily disable it by
using '--cleanmenu=false' on the command line
.TP .TP
.B \-\-diffmenu .B \-\-diffmenu
Show the diff menu. This menu gives you the option to view diffs from Show the diff menu. This menu gives you the option to view diffs from
@ -318,12 +289,35 @@ before building.
recommended to edit pkgbuild variables unless you know what you are doing. recommended to edit pkgbuild variables unless you know what you are doing.
.TP .TP
.B \-\-askremovemake .B \-\-upgrademenu
Ask to remove makedepends after installing packages. Show a detailed list of updates in a similar format to VerbosePkgLists.
Upgrades can also be skipped using numbers, number ranges or repo names.
Additionally ^ can be used to invert the selection.
\fBWarning\fR: It is not recommended to skip updates from the repositories as
this can lead to partial upgrades. This feature is intended to easily skip AUR
updates on the fly that may be broken or have a long compile time. Ultimately
it is up to the user what upgrades they skip.
.TP .TP
.B \-\-askyesremovemake .B \-\-nocleanmenu
Ask to remove makedepends after installing packages (with "Y" as default). Do not show the clean menu.
.TP
.B \-\-nodiffmenu
Do not show the diff menu.
.TP
.B \-\-noeditmenu
Do not show the edit menu.
.TP
.B \-\-noupgrademenu
Do not show the upgrade menu.
.TP
.B \-\-askremovemake
Ask to remove makedepends after installing packages.
.TP .TP
.B \-\-removemake .B \-\-removemake
@ -341,16 +335,6 @@ Display repository packages first and then AUR packages.
.B \-\-bottomup .B \-\-bottomup
Show AUR packages first and then repository packages. Show AUR packages first and then repository packages.
.TP
.B \-\-singlelineresults
Override pacman's usual double-line search result format and list each result
on its own line.
.TP
.B \-\-doublelineresults
Follow pacman's double-line search result format and list each result using
two lines.
.TP .TP
.B \-\-devel .B \-\-devel
During sysupgrade also check AUR development packages for updates. Currently During sysupgrade also check AUR development packages for updates. Currently
@ -363,8 +347,9 @@ checked almost instantly and not require the original pkgbuild to be downloaded.
The slower pacaur-like devel checks can be implemented manually by piping The slower pacaur-like devel checks can be implemented manually by piping
a list of packages into yay (see \fBexamples\fR). a list of packages into yay (see \fBexamples\fR).
If 'devel' is enabled in the configuration file, you can temporarily disable it by .TP
using '--devel=false' on the command line .B \-\-nodevel
Do not check for development packages updates during sysupgrade.
.TP .TP
.B \-\-cleanafter .B \-\-cleanafter
@ -375,8 +360,8 @@ This allows VCS packages to easily pull an update
instead of having to reclone the entire repo. instead of having to reclone the entire repo.
.TP .TP
.B \-\-keepsrc .B \-\-nocleanafter
Keep pkg/ and src/ after building packages Do not remove package sources after successful Install.
.TP .TP
.B \-\-timeupdate .B \-\-timeupdate
@ -384,8 +369,8 @@ During sysupgrade also compare the build time of installed packages against
the last modification time of each package's AUR page. the last modification time of each package's AUR page.
.TP .TP
.B \-\-separatesources .B \-\-notimeupdate
Separate query results by source, AUR and sync Do not consider build times during sysupgrade.
.TP .TP
.B \-\-redownload .B \-\-redownload
@ -407,11 +392,23 @@ Look for matching providers when searching for AUR packages. When multiple
providers are found a menu will appear prompting you to pick one. This providers are found a menu will appear prompting you to pick one. This
increases dependency resolve time although this should not be noticeable. increases dependency resolve time although this should not be noticeable.
.TP
.B \-\-noprovides
Do not look for matching providers when searching for AUR packages.
Yay will never show its provider menu but Pacman will still show its
provider menu for repo packages.
.TP .TP
.B \-\-pgpfetch .B \-\-pgpfetch
Prompt to import unknown PGP keys from the \fBvalidpgpkeys\fR field of each Prompt to import unknown PGP keys from the \fBvalidpgpkeys\fR field of each
PKGBUILD. PKGBUILD.
.TP
.B \-\-nopgpfetch
Do not prompt to import unknown PGP keys. This is likely to cause a build
failure unless using options such as \fB\-\-skippgpcheck\fR or a customized
gpg config\%.
.TP .TP
.B \-\-useask .B \-\-useask
Use pacman's --ask flag to automatically confirm package conflicts. Yay lists Use pacman's --ask flag to automatically confirm package conflicts. Yay lists
@ -419,6 +416,11 @@ conflicts ahead of time. It is possible that Yay does not detect
a conflict, causing a package to be removed without the user's confirmation. a conflict, causing a package to be removed without the user's confirmation.
However, this is very unlikely. However, this is very unlikely.
.TP
.B \-\-nouseask
Manually resolve package conflicts during the install. Packages which do not
conflict will not need to be confirmed manually.
.TP .TP
.B \-\-combinedupgrade .B \-\-combinedupgrade
During sysupgrade, Yay will first perform a refresh, then show During sysupgrade, Yay will first perform a refresh, then show
If Yay exits for any reason after the refresh without upgrading, it is then If Yay exits for any reason after the refresh without upgrading, it is then
the user's responsibility to either resolve the reason Yay exited or run the user's responsibility to either resolve the reason Yay exited or run
a sysupgrade through pacman directly. a sysupgrade through pacman directly.
.TP
.B \-\-nocombinedupgrade
During sysupgrade, Pacman \-Syu will be called, then the AUR upgrade will
start. This means the upgrade menu and pkgbuild review will be performed
after the sysupgrade has finished.
.TP .TP
.B \-\-batchinstall .B \-\-batchinstall
When building and installing AUR packages instead of installing each package When building and installing AUR packages instead of installing each package
@ -437,6 +445,10 @@ after building, queue each package for install. Then once either all packages
are built or a package in the build queue is needed as a dependency to build are built or a package in the build queue is needed as a dependency to build
another package, install all the packages in the install queue. another package, install all the packages in the install queue.
.TP
.B \-\-nobatchinstall
Always install AUR packages immediately after building them.
.TP .TP
.B \-\-rebuild .B \-\-rebuild
Always build target packages even when a copy is available in cache. Always build target packages even when a copy is available in cache.
@ -490,6 +502,10 @@ separated list that is quoted by the shell.
Loop sudo calls in the background to prevent sudo from timing out during long Loop sudo calls in the background to prevent sudo from timing out during long
builds. builds.
.TP
.B \-\-nosudoloop
Do not loop sudo calls in the background.
.SH EXAMPLES .SH EXAMPLES
.TP .TP
yay \fIfoo\fR yay \fIfoo\fR
@ -594,6 +610,6 @@ See the arch wiki at https://wiki.archlinux.org/index.php/Arch_User_Repository f
Please report bugs to our GitHub page https://github.com/Jguer/yay Please report bugs to our GitHub page https://github.com/Jguer/yay
.SH AUTHORS .SH AUTHORS
Jguer <joguer@proton.me> Jguer <joaogg3@gmail.com>
.br .br
Morgan <morganamilo@archlinux.org> Morgan <morganamilo@archlinux.org>

View File

@ -1,9 +0,0 @@
package main
import (
"errors"
"github.com/leonelquinteros/gotext"
)
var ErrPackagesNotFound = errors.New(gotext.Get("could not find all required packages"))

39
get.go
View File

@ -7,27 +7,27 @@ import (
"os" "os"
"strings" "strings"
"github.com/Jguer/aur"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/download" "github.com/Jguer/yay/v11/pkg/download"
"github.com/Jguer/yay/v12/pkg/runtime" "github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/parser" "github.com/Jguer/yay/v11/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/text"
) )
// yay -Gp. // yay -Gp.
func printPkgbuilds(dbExecutor download.DBSearcher, aurClient aur.QueryClient, func printPkgbuilds(dbExecutor download.DBSearcher, httpClient *http.Client, targets []string,
httpClient *http.Client, logger *text.Logger, targets []string, mode parser.TargetMode, aurURL string) error {
mode parser.TargetMode, aurURL string, pkgbuilds, err := download.PKGBUILDs(dbExecutor, httpClient, targets, aurURL, mode)
) error {
pkgbuilds, err := download.PKGBUILDs(dbExecutor, aurClient, httpClient, logger, targets, aurURL, mode)
if err != nil { if err != nil {
logger.Errorln(err) text.Errorln(err)
} }
if len(pkgbuilds) != 0 {
for target, pkgbuild := range pkgbuilds { for target, pkgbuild := range pkgbuilds {
logger.Printf("\n\n# %s\n\n%s", target, string(pkgbuild)) fmt.Printf("\n\n# %s\n\n", target)
fmt.Print(string(pkgbuild))
}
} }
if len(pkgbuilds) != len(targets) { if len(pkgbuilds) != len(targets) {
@ -39,7 +39,7 @@ func printPkgbuilds(dbExecutor download.DBSearcher, aurClient aur.QueryClient,
} }
} }
logger.Warnln(gotext.Get("Unable to find the following packages:"), " ", strings.Join(missing, ", ")) text.Warnln(gotext.Get("Unable to find the following packages:"), strings.Join(missing, ", "))
return fmt.Errorf("") return fmt.Errorf("")
} }
@ -48,18 +48,17 @@ func printPkgbuilds(dbExecutor download.DBSearcher, aurClient aur.QueryClient,
} }
// yay -G. // yay -G.
func getPkgbuilds(ctx context.Context, dbExecutor download.DBSearcher, aurClient aur.QueryClient, func getPkgbuilds(ctx context.Context, dbExecutor download.DBSearcher,
run *runtime.Runtime, targets []string, force bool, config *settings.Configuration, targets []string, force bool) error {
) error {
wd, err := os.Getwd() wd, err := os.Getwd()
if err != nil { if err != nil {
return err return err
} }
cloned, errD := download.PKGBUILDRepos(ctx, dbExecutor, aurClient, cloned, errD := download.PKGBUILDRepos(ctx, dbExecutor,
run.CmdBuilder, run.Logger, targets, run.Cfg.Mode, run.Cfg.AURURL, wd, force) config.Runtime.CmdBuilder, targets, config.Runtime.Mode, config.AURURL, wd, force)
if errD != nil { if errD != nil {
run.Logger.Errorln(errD) text.Errorln(errD)
} }
if len(targets) != len(cloned) { if len(targets) != len(cloned) {
@ -71,7 +70,7 @@ func getPkgbuilds(ctx context.Context, dbExecutor download.DBSearcher, aurClient
} }
} }
run.Logger.Warnln(gotext.Get("Unable to find the following packages:"), " ", strings.Join(missing, ", ")) text.Warnln(gotext.Get("Unable to find the following packages:"), strings.Join(missing, ", "))
err = fmt.Errorf("") err = fmt.Errorf("")
} }

34
go.mod
View File

@ -1,35 +1,25 @@
module github.com/Jguer/yay/v12 module github.com/Jguer/yay/v11
require ( require (
github.com/Jguer/aur v1.2.3 github.com/Jguer/aur v1.0.0
github.com/Jguer/go-alpm/v2 v2.2.2 github.com/Jguer/go-alpm/v2 v2.0.6
github.com/Jguer/votar v1.0.0
github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5 github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5
github.com/Morganamilo/go-srcinfo v1.0.0 github.com/Morganamilo/go-srcinfo v1.0.0
github.com/adrg/strutil v0.3.1
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible github.com/bradleyjkemp/cupaloy v2.3.0+incompatible
github.com/deckarep/golang-set/v2 v2.8.0 github.com/leonelquinteros/gotext v1.5.0
github.com/hashicorp/go-multierror v1.1.1 github.com/pkg/errors v0.9.1
github.com/leonelquinteros/gotext v1.7.2 github.com/stretchr/testify v1.7.0
github.com/stretchr/testify v1.10.0 golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac
golang.org/x/net v0.41.0 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211
golang.org/x/sys v0.33.0 golang.org/x/text v0.3.7 // indirect
golang.org/x/term v0.32.0 gopkg.in/h2non/gock.v1 v1.0.15
gopkg.in/h2non/gock.v1 v1.1.2
) )
require ( require (
github.com/davecgh/go-spew v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/itchyny/gojq v0.12.17 // indirect
github.com/itchyny/timefmt-go v0.1.6 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/ohler55/ojg v1.26.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
) )
go 1.23.5 go 1.17
toolchain go1.24.0

85
go.sum
View File

@ -1,68 +1,57 @@
github.com/Jguer/aur v1.2.3 h1:D+OGgLxnAnZnw88DsRvnRQsn0Poxsy9ng7pBcsA0krM= github.com/Jguer/aur v1.0.0 h1:/1oCUucvFGA5kq1vA50UstQoj5hkRQuAKYp/pyBgOz0=
github.com/Jguer/aur v1.2.3/go.mod h1:Dahvb6L1yr0rR7svyYSDwaRJoQMeyvJblwJ3QH/7CUs= github.com/Jguer/aur v1.0.0/go.mod h1:1/SQjhWahmk2xKcmAm6XO1zGqK8HgYw3xlJM6a7845E=
github.com/Jguer/go-alpm/v2 v2.2.2 h1:sPwUoZp1X5Tw6K6Ba1lWvVJfcgVNEGVcxARLBttZnC0= github.com/Jguer/go-alpm/v2 v2.0.6 h1:VLavW5qFk/TJRHT3Gg7ClDukU4MNWdfkWM9yLxVkYyQ=
github.com/Jguer/go-alpm/v2 v2.2.2/go.mod h1:lfe8gSe83F/KERaQvEfrSqQ4n+8bES+ZIyKWR/gm3MI= github.com/Jguer/go-alpm/v2 v2.0.6/go.mod h1:zU4iKCtNkDARfj5BrKJXYAQ5nIjtZbySfa0paboSmTQ=
github.com/Jguer/votar v1.0.0 h1:drPYpV5Py5BeAQS8xezmT6uCEfLzotNjLf5yfmlHKTg=
github.com/Jguer/votar v1.0.0/go.mod h1:rc6vgVlTqNjI4nAnPbDTbdxw/N7kXkbB8BcUDjeFbYQ=
github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5 h1:TMscPjkb1ThXN32LuFY5bEYIcXZx3YlwzhS1GxNpn/c= github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5 h1:TMscPjkb1ThXN32LuFY5bEYIcXZx3YlwzhS1GxNpn/c=
github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5/go.mod h1:Hk55m330jNiwxRodIlMCvw5iEyoRUCIY64W1p9D+tHc= github.com/Morganamilo/go-pacmanconf v0.0.0-20210502114700-cff030e927a5/go.mod h1:Hk55m330jNiwxRodIlMCvw5iEyoRUCIY64W1p9D+tHc=
github.com/Morganamilo/go-srcinfo v1.0.0 h1:Wh4nEF+HJWo+29hnxM18Q2hi+DUf0GejS13+Wg+dzmI= github.com/Morganamilo/go-srcinfo v1.0.0 h1:Wh4nEF+HJWo+29hnxM18Q2hi+DUf0GejS13+Wg+dzmI=
github.com/Morganamilo/go-srcinfo v1.0.0/go.mod h1:MP6VGY1NNpVUmYIEgoM9acix95KQqIRyqQ0hCLsyYUY= github.com/Morganamilo/go-srcinfo v1.0.0/go.mod h1:MP6VGY1NNpVUmYIEgoM9acix95KQqIRyqQ0hCLsyYUY=
github.com/adrg/strutil v0.3.1 h1:OLvSS7CSJO8lBii4YmBt8jiK9QOtB9CzCzwl4Ic/Fz4=
github.com/adrg/strutil v0.3.1/go.mod h1:8h90y18QLrs11IBffcGX3NW/GFBXCMcNg4M7H6MspPA=
github.com/alexflint/go-arg v1.4.3/go.mod h1:3PZ/wp/8HuqRZMUUgu7I+e1qcpUbvmS258mRXkFH4IA=
github.com/alexflint/go-scalar v1.1.0/go.mod h1:LoFvNMqS1CPrMVltza4LvnGKhaSpc3oyLEBUZVhhS2o=
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible h1:UafIjBvWQmS9i/xRg+CamMrnLTKNzo+bdmT/oH34c2Y= github.com/bradleyjkemp/cupaloy v2.3.0+incompatible h1:UafIjBvWQmS9i/xRg+CamMrnLTKNzo+bdmT/oH34c2Y=
github.com/bradleyjkemp/cupaloy v2.3.0+incompatible/go.mod h1:Au1Xw1sgaJ5iSFktEhYsS0dbQiS1B0/XMXl+42y9Ilk= github.com/bradleyjkemp/cupaloy v2.3.0+incompatible/go.mod h1:Au1Xw1sgaJ5iSFktEhYsS0dbQiS1B0/XMXl+42y9Ilk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/deckarep/golang-set/v2 v2.8.0 h1:swm0rlPCmdWn9mESxKOjWk8hXSqoxOp+ZlfuyaAdFlQ=
github.com/deckarep/golang-set/v2 v2.8.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw= github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542 h1:2VTzZjLZBgl62/EtslCrtky5vbi9dd7HrQPQIx6wqiw=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI= github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/leonelquinteros/gotext v1.5.0 h1:ODY7LzLpZWWSJdAHnzhreOr6cwLXTAmc914FOauSkBM=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/leonelquinteros/gotext v1.5.0/go.mod h1:OCiUVHuhP9LGFBQ1oAmdtNCHJCiHiQA8lf4nAifHkr0=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/itchyny/gojq v0.12.17 h1:8av8eGduDb5+rvEdaOO+zQUjA04MS0m3Ps8HiD+fceg=
github.com/itchyny/gojq v0.12.17/go.mod h1:WBrEMkgAfAGO1LUcGOckBl5O726KPp+OlkKug0I/FEY=
github.com/itchyny/timefmt-go v0.1.6 h1:ia3s54iciXDdzWzwaVKXZPbiXzxxnv1SPGFfM/myJ5Q=
github.com/itchyny/timefmt-go v0.1.6/go.mod h1:RRDZYC5s9ErkjQvTvvU7keJjxUYzIISJGxm9/mAERQg=
github.com/leonelquinteros/gotext v1.7.2 h1:bDPndU8nt+/kRo1m4l/1OXiiy2v7Z7dfPQ9+YP7G1Mc=
github.com/leonelquinteros/gotext v1.7.2/go.mod h1:9/haCkm5P7Jay1sxKDGJ5WIg4zkz8oZKw4ekNpALob8=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32 h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=
github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms= github.com/nbio/st v0.0.0-20140626010706-e9e8d9816f32/go.mod h1:9wM+0iRr9ahx58uYLpLIr5fm8diHn0JbqRycJi6w0Ms=
github.com/ohler55/ojg v1.26.1 h1:J5TaLmVEuvnpVH7JMdT1QdbpJU545Yp6cKiCO4aQILc= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/ohler55/ojg v1.26.1/go.mod h1:gQhDVpQLqrmnd2eqGAvJtn+NfKoYJbe/A4Sj3/Vro4o= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA=
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac h1:oN6lz7iLW/YC7un8pq+9bOLyXrprv2+DKfkJY+2LJJw=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b h1:9zKuko04nR4gjZ4+DNjHqRlAJqbJETHwiNKDqTfOjfE=
golang.org/x/term v0.0.0-20210615171337-6886f2dfbf5b/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20200221224223-e1da425f72fd/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/h2non/gock.v1 v1.1.2 h1:jBbHXgGBK/AoPVfJh5x4r/WxIrElvbLel8TCZkkZJoY= gopkg.in/h2non/gock.v1 v1.0.15 h1:SzLqcIlb/fDfg7UvukMpNcWsu7sI5tWwL+KCATZqks0=
gopkg.in/h2non/gock.v1 v1.1.2/go.mod h1:n7UGz/ckNChHiK05rDoiC4MYSunEC/lyaUm2WWaDva0= gopkg.in/h2non/gock.v1 v1.0.15/go.mod h1:sX4zAkdYX1TRGJ2JY156cFspQn4yRWn6p9EMdODlynE=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

1121
install.go Normal file

File diff suppressed because it is too large

View File

@ -1,108 +0,0 @@
// Experimental code for install local with dependency refactoring
// Not at feature parity with install.go
package main
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/Jguer/yay/v12/pkg/db"
"github.com/Jguer/yay/v12/pkg/dep"
"github.com/Jguer/yay/v12/pkg/multierror"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/sync"
gosrc "github.com/Morganamilo/go-srcinfo"
"github.com/leonelquinteros/gotext"
)
var ErrNoBuildFiles = errors.New(gotext.Get("cannot find PKGBUILD and .SRCINFO in directory"))
func srcinfoExists(ctx context.Context,
cmdBuilder exe.ICmdBuilder, targetDir string,
) error {
srcInfoDir := filepath.Join(targetDir, ".SRCINFO")
pkgbuildDir := filepath.Join(targetDir, "PKGBUILD")
if _, err := os.Stat(srcInfoDir); err == nil {
if _, err := os.Stat(pkgbuildDir); err == nil {
return nil
}
}
if _, err := os.Stat(pkgbuildDir); err == nil {
// run makepkg to generate .SRCINFO
srcinfo, stderr, err := cmdBuilder.Capture(cmdBuilder.BuildMakepkgCmd(ctx, targetDir, "--printsrcinfo"))
if err != nil {
return fmt.Errorf("unable to generate .SRCINFO: %w - %s", err, stderr)
}
if srcinfo == "" {
return fmt.Errorf("generated .SRCINFO is empty, check your PKGBUILD for errors")
}
if err := os.WriteFile(srcInfoDir, []byte(srcinfo), 0o600); err != nil {
return fmt.Errorf("unable to write .SRCINFO: %w", err)
}
return nil
}
return fmt.Errorf("%w: %s", ErrNoBuildFiles, targetDir)
}
func installLocalPKGBUILD(
ctx context.Context,
run *runtime.Runtime,
cmdArgs *parser.Arguments,
dbExecutor db.Executor,
) error {
aurCache := run.AURClient
noCheck := strings.Contains(run.Cfg.MFlags, "--nocheck")
if len(cmdArgs.Targets) < 1 {
return errors.New(gotext.Get("no target directories specified"))
}
srcInfos := map[string]*gosrc.Srcinfo{}
for _, targetDir := range cmdArgs.Targets {
if err := srcinfoExists(ctx, run.CmdBuilder, targetDir); err != nil {
return err
}
pkgbuild, err := gosrc.ParseFile(filepath.Join(targetDir, ".SRCINFO"))
if err != nil {
return fmt.Errorf("%s: %w", gotext.Get("failed to parse .SRCINFO"), err)
}
srcInfos[targetDir] = pkgbuild
}
grapher := dep.NewGrapher(dbExecutor, aurCache, false, settings.NoConfirm,
cmdArgs.ExistsDouble("d", "nodeps"), noCheck, cmdArgs.ExistsArg("needed"),
run.Logger.Child("grapher"))
graph, err := grapher.GraphFromSrcInfos(ctx, nil, srcInfos)
if err != nil {
return err
}
opService := sync.NewOperationService(ctx, dbExecutor, run)
multiErr := &multierror.MultiError{}
targets := graph.TopoSortedLayerMap(func(name string, ii *dep.InstallInfo) error {
if ii.Source == dep.Missing {
multiErr.Add(fmt.Errorf("%w: %s %s", ErrPackagesNotFound, name, ii.Version))
}
return nil
})
if err := multiErr.Return(); err != nil {
return err
}
return opService.Run(ctx, run, cmdArgs, targets, []string{})
}
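A brief sketch (illustrative only; the helper name ensureSrcinfo and the assumption that a runtime.Runtime value is in scope are not from this file) of the .SRCINFO bootstrap performed above before parsing:
func ensureSrcinfo(ctx context.Context, run *runtime.Runtime, dir string) (*gosrc.Srcinfo, error) {
	// Generates .SRCINFO via makepkg --printsrcinfo when only a PKGBUILD exists.
	if err := srcinfoExists(ctx, run.CmdBuilder, dir); err != nil {
		return nil, err
	}
	return gosrc.ParseFile(filepath.Join(dir, ".SRCINFO"))
}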

File diff suppressed because it is too large

150
main.go
View File

@ -2,24 +2,19 @@ package main // import "github.com/Jguer/yay"
import ( import (
"context" "context"
"errors" "fmt"
"os" "os"
"os/exec" "os/exec"
"runtime/debug"
"strings"
pacmanconf "github.com/Morganamilo/go-pacmanconf"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"golang.org/x/term"
"github.com/Jguer/yay/v12/pkg/db/ialpm" "github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v12/pkg/runtime" "github.com/Jguer/yay/v11/pkg/db/ialpm"
"github.com/Jguer/yay/v12/pkg/settings" "github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/parser" "github.com/Jguer/yay/v11/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/text"
)
var (
yayVersion = "12.0.4" // To be set by compiler.
localePath = "/usr/share/locale" // To be set by compiler.
) )
func initGotext() { func initGotext() {
@ -28,12 +23,7 @@ func initGotext() {
} }
if lc := os.Getenv("LANGUAGE"); lc != "" { if lc := os.Getenv("LANGUAGE"); lc != "" {
// Split LANGUAGE by ':' and prioritize the first locale gotext.Configure(localePath, lc, "yay")
// This should eventually be handled by gotext itself
locales := strings.Split(lc, ":")
if len(locales) > 0 && locales[0] != "" {
gotext.Configure(localePath, locales[0], "yay")
}
} else if lc := os.Getenv("LC_ALL"); lc != "" { } else if lc := os.Getenv("LC_ALL"); lc != "" {
gotext.Configure(localePath, lc, "yay") gotext.Configure(localePath, lc, "yay")
} else if lc := os.Getenv("LC_MESSAGES"); lc != "" { } else if lc := os.Getenv("LC_MESSAGES"); lc != "" {
@ -43,36 +33,79 @@ func initGotext() {
} }
} }
func initAlpm(cmdArgs *parser.Arguments, pacmanConfigPath string) (*pacmanconf.Config, bool, error) {
root := "/"
if value, _, exists := cmdArgs.GetArg("root", "r"); exists {
root = value
}
pacmanConf, stderr, err := pacmanconf.PacmanConf("--config", pacmanConfigPath, "--root", root)
if err != nil {
cmdErr := err
if stderr != "" {
cmdErr = fmt.Errorf("%s\n%s", err, stderr)
}
return nil, false, cmdErr
}
if dbPath, _, exists := cmdArgs.GetArg("dbpath", "b"); exists {
pacmanConf.DBPath = dbPath
}
if arch := cmdArgs.GetArgs("arch"); arch != nil {
pacmanConf.Architecture = append(pacmanConf.Architecture, arch...)
}
if ignoreArray := cmdArgs.GetArgs("ignore"); ignoreArray != nil {
pacmanConf.IgnorePkg = append(pacmanConf.IgnorePkg, ignoreArray...)
}
if ignoreGroupsArray := cmdArgs.GetArgs("ignoregroup"); ignoreGroupsArray != nil {
pacmanConf.IgnoreGroup = append(pacmanConf.IgnoreGroup, ignoreGroupsArray...)
}
if cacheArray := cmdArgs.GetArgs("cachedir"); cacheArray != nil {
pacmanConf.CacheDir = cacheArray
}
if gpgDir, _, exists := cmdArgs.GetArg("gpgdir"); exists {
pacmanConf.GPGDir = gpgDir
}
useColor := pacmanConf.Color && term.IsTerminal(int(os.Stdout.Fd()))
switch value, _, _ := cmdArgs.GetArg("color"); value {
case "always":
useColor = true
case "auto":
useColor = term.IsTerminal(int(os.Stdout.Fd()))
case "never":
useColor = false
}
return pacmanConf, useColor, nil
}
func main() { func main() {
fallbackLog := text.NewLogger(os.Stdout, os.Stderr, os.Stdin, false, "fallback")
var ( var (
err error err error
ctx = context.Background() ctx = context.Background()
ret = 0 ret = 0
) )
defer func() { defer func() { os.Exit(ret) }()
if rec := recover(); rec != nil {
fallbackLog.Errorln("Panic occurred:", rec)
fallbackLog.Errorln("Stack trace:", string(debug.Stack()))
ret = 1
}
os.Exit(ret)
}()
initGotext() initGotext()
if os.Geteuid() == 0 { if os.Geteuid() == 0 {
fallbackLog.Warnln(gotext.Get("Avoid running yay as root/sudo.")) text.Warnln(gotext.Get("Avoid running yay as root/sudo."))
} }
configPath := settings.GetConfigPath() config, err = settings.NewConfig(yayVersion)
// Parse config
cfg, err := settings.NewConfig(fallbackLog, configPath, yayVersion)
if err != nil { if err != nil {
if str := err.Error(); str != "" { if str := err.Error(); str != "" {
fallbackLog.Errorln(str) text.Errorln(str)
} }
ret = 1 ret = 1
@ -80,17 +113,11 @@ func main() {
return return
} }
if errS := cfg.RunMigrations(fallbackLog,
settings.DefaultMigrations(), configPath, yayVersion); errS != nil {
fallbackLog.Errorln(errS)
}
cmdArgs := parser.MakeArguments() cmdArgs := parser.MakeArguments()
// Parse command line if err = config.ParseCommandLine(cmdArgs); err != nil {
if err = cfg.ParseCommandLine(cmdArgs); err != nil {
if str := err.Error(); str != "" { if str := err.Error(); str != "" {
fallbackLog.Errorln(str) text.Errorln(str)
} }
ret = 1 ret = 1
@ -98,17 +125,18 @@ func main() {
return return
} }
if cfg.SaveConfig { if config.Runtime.SaveConfig {
if errS := cfg.Save(configPath, yayVersion); errS != nil { if errS := config.Save(config.Runtime.ConfigPath); errS != nil {
fallbackLog.Errorln(errS) text.Errorln(errS)
} }
} }
// Build run var useColor bool
run, err := runtime.NewRuntime(cfg, cmdArgs, yayVersion)
config.Runtime.PacmanConf, useColor, err = initAlpm(cmdArgs, config.PacmanConf)
if err != nil { if err != nil {
if str := err.Error(); str != "" { if str := err.Error(); str != "" {
fallbackLog.Errorln(str) text.Errorln(str)
} }
ret = 1 ret = 1
@ -116,10 +144,14 @@ func main() {
return return
} }
dbExecutor, err := ialpm.NewExecutor(run.PacmanConf, run.Logger.Child("db")) config.Runtime.CmdBuilder.SetPacmanDBPath(config.Runtime.PacmanConf.DBPath)
text.UseColor = useColor
dbExecutor, err := ialpm.NewExecutor(config.Runtime.PacmanConf)
if err != nil { if err != nil {
if str := err.Error(); str != "" { if str := err.Error(); str != "" {
fallbackLog.Errorln(str) text.Errorln(str)
} }
ret = 1 ret = 1
@ -127,22 +159,14 @@ func main() {
return return
} }
defer func() { defer dbExecutor.Cleanup()
if rec := recover(); rec != nil {
fallbackLog.Errorln("Panic occurred in DB operation:", rec)
fallbackLog.Errorln("Stack trace:", string(debug.Stack()))
}
dbExecutor.Cleanup() if err = handleCmd(ctx, cmdArgs, db.Executor(dbExecutor)); err != nil {
}()
if err = handleCmd(ctx, run, cmdArgs, dbExecutor); err != nil {
if str := err.Error(); str != "" { if str := err.Error(); str != "" {
fallbackLog.Errorln(str) text.Errorln(str)
} }
exitError := &exec.ExitError{} if exitError, ok := err.(*exec.ExitError); ok {
if errors.As(err, &exitError) {
// mirror pacman exit code when applicable // mirror pacman exit code when applicable
ret = exitError.ExitCode() ret = exitError.ExitCode()
return return
@ -150,5 +174,7 @@ func main() {
// fallback // fallback
ret = 1 ret = 1
return
} }
} }

51
main_test.go Normal file
View File

@ -0,0 +1,51 @@
package main
import (
"testing"
"github.com/Morganamilo/go-pacmanconf"
"github.com/stretchr/testify/assert"
"github.com/Jguer/yay/v11/pkg/settings/parser"
)
func TestPacmanConf(t *testing.T) {
t.Parallel()
expectedPacmanConf := &pacmanconf.Config{
RootDir: "/",
DBPath: "//var/lib/pacman/",
CacheDir: []string{"/cachedir/", "/another/"},
HookDir: []string{"/hookdir/"},
GPGDir: "/gpgdir/",
LogFile: "/logfile",
HoldPkg: []string(nil),
IgnorePkg: []string{"ignore", "this", "package"},
IgnoreGroup: []string{"ignore", "this", "group"},
Architecture: []string{"8086"},
XferCommand: "",
NoUpgrade: []string{"noupgrade"},
NoExtract: []string{"noextract"},
CleanMethod: []string{"KeepInstalled"},
SigLevel: []string{"PackageOptional", "PackageTrustedOnly", "DatabaseOptional", "DatabaseTrustedOnly"},
LocalFileSigLevel: []string(nil),
RemoteFileSigLevel: []string(nil),
UseSyslog: false,
Color: false,
UseDelta: 0,
TotalDownload: false,
CheckSpace: true,
VerbosePkgLists: true,
DisableDownloadTimeout: false,
Repos: []pacmanconf.Repository{
{Name: "repo1", Servers: []string{"repo1"}, SigLevel: []string(nil), Usage: []string{"All"}},
{Name: "repo2", Servers: []string{"repo2"}, SigLevel: []string(nil), Usage: []string{"All"}},
},
}
pacmanConf, color, err := initAlpm(parser.MakeArguments(), "testdata/pacman.conf")
assert.Nil(t, err)
assert.NotNil(t, pacmanConf)
assert.Equal(t, color, false)
assert.EqualValues(t, expectedPacmanConf, pacmanConf)
}

View File

@ -1,82 +0,0 @@
package main
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"github.com/Jguer/yay/v12/pkg/db/ialpm"
"github.com/Jguer/yay/v12/pkg/dep"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text"
"github.com/Jguer/aur/metadata"
"github.com/leonelquinteros/gotext"
)
func handleCmd(logger *text.Logger) error {
cfg, err := settings.NewConfig(logger, settings.GetConfigPath(), "")
if err != nil {
return err
}
cmdArgs := parser.MakeArguments()
if errP := cfg.ParseCommandLine(cmdArgs); errP != nil {
return errP
}
run, err := runtime.NewRuntime(cfg, cmdArgs, "1.0.0")
if err != nil {
return err
}
dbExecutor, err := ialpm.NewExecutor(run.PacmanConf, logger)
if err != nil {
return err
}
aurCache, err := metadata.New(
metadata.WithCacheFilePath(
filepath.Join(cfg.BuildDir, "aur.json")))
if err != nil {
return fmt.Errorf("%s: %w", gotext.Get("failed to retrieve aur Cache"), err)
}
grapher := dep.NewGrapher(dbExecutor, aurCache, true, settings.NoConfirm,
cmdArgs.ExistsDouble("d", "nodeps"), false, false,
run.Logger.Child("grapher"))
return graphPackage(context.Background(), grapher, cmdArgs.Targets)
}
func main() {
fallbackLog := text.NewLogger(os.Stdout, os.Stderr, os.Stdin, false, "fallback")
if err := handleCmd(fallbackLog); err != nil {
fallbackLog.Errorln(err)
os.Exit(1)
}
}
func graphPackage(
ctx context.Context,
grapher *dep.Grapher,
targets []string,
) error {
if len(targets) != 1 {
return errors.New(gotext.Get("only one target is allowed"))
}
graph, err := grapher.GraphFromAUR(ctx, nil, []string{targets[0]})
if err != nil {
return err
}
fmt.Fprintln(os.Stdout, graph.String())
fmt.Fprintln(os.Stdout, "\nlayers map\n", graph.TopoSortedLayerMap(nil))
return nil
}
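Illustrative only: assuming the grapher built in handleCmd and a logger are in scope, graphing a single AUR target (the package name is chosen for the example) and printing its topologically sorted layers reduces to:
if err := graphPackage(context.Background(), grapher, []string{"yay-bin"}); err != nil {
	fallbackLog.Errorln(err)
}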

View File

@ -13,21 +13,16 @@ import (
"strings" "strings"
"time" "time"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/db"
) )
type PkgSynchronizer interface { type PkgSynchronizer interface {
SyncPackages(...string) []db.IPackage SyncPackages(...string) []db.IPackage
} }
type httpRequestDoer interface {
Do(req *http.Request) (*http.Response, error)
}
// Show provides completion info for shells. // Show provides completion info for shells.
func Show(ctx context.Context, httpClient httpRequestDoer, func Show(ctx context.Context, httpClient *http.Client,
dbExecutor PkgSynchronizer, aurURL, completionPath string, interval int, force bool, dbExecutor PkgSynchronizer, aurURL, completionPath string, interval int, force bool) error {
) error {
err := Update(ctx, httpClient, dbExecutor, aurURL, completionPath, interval, force) err := Update(ctx, httpClient, dbExecutor, aurURL, completionPath, interval, force)
if err != nil { if err != nil {
return err return err
@ -45,9 +40,8 @@ func Show(ctx context.Context, httpClient httpRequestDoer,
} }
// Update updates completion cache to be used by Complete. // Update updates completion cache to be used by Complete.
func Update(ctx context.Context, httpClient httpRequestDoer, func Update(ctx context.Context, httpClient *http.Client,
dbExecutor PkgSynchronizer, aurURL, completionPath string, interval int, force bool, dbExecutor PkgSynchronizer, aurURL, completionPath string, interval int, force bool) error {
) error {
info, err := os.Stat(completionPath) info, err := os.Stat(completionPath)
if os.IsNotExist(err) || (interval != -1 && time.Since(info.ModTime()).Hours() >= float64(interval*24)) || force { if os.IsNotExist(err) || (interval != -1 && time.Since(info.ModTime()).Hours() >= float64(interval*24)) || force {
@ -76,7 +70,7 @@ func Update(ctx context.Context, httpClient httpRequestDoer,
} }
// CreateAURList creates a new completion file. // CreateAURList creates a new completion file.
func createAURList(ctx context.Context, client httpRequestDoer, aurURL string, out io.Writer) error { func createAURList(ctx context.Context, client *http.Client, aurURL string, out io.Writer) error {
u, err := url.Parse(aurURL) u, err := url.Parse(aurURL)
if err != nil { if err != nil {
return err return err
@ -84,7 +78,7 @@ func createAURList(ctx context.Context, client httpRequestDoer, aurURL string, o
u.Path = path.Join(u.Path, "packages.gz") u.Path = path.Join(u.Path, "packages.gz")
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), http.NoBody) req, err := http.NewRequestWithContext(ctx, "GET", u.String(), nil)
if err != nil { if err != nil {
return err return err
} }
@ -117,7 +111,7 @@ func createAURList(ctx context.Context, client httpRequestDoer, aurURL string, o
return nil return nil
} }
// createRepoList appends Repo packages to completion cache. // CreatePackageList appends Repo packages to completion cache.
func createRepoList(dbExecutor PkgSynchronizer, out io.Writer) error { func createRepoList(dbExecutor PkgSynchronizer, out io.Writer) error {
for _, pkg := range dbExecutor.SyncPackages() { for _, pkg := range dbExecutor.SyncPackages() {
_, err := io.WriteString(out, pkg.Name()+"\t"+pkg.DB().Name()+"\n") _, err := io.WriteString(out, pkg.Name()+"\t"+pkg.DB().Name()+"\n")
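A hedged usage sketch of the cache refresh above (the dbExecutor value is assumed to satisfy PkgSynchronizer; the cache path and 7-day interval are illustrative): the completion file is rewritten only when it is missing, older than the interval, or when force is set.
err := Update(context.Background(), &http.Client{}, dbExecutor,
	"https://aur.archlinux.org", "/home/user/.cache/yay/completion.cache", 7, false)
if err != nil {
	// Completion refresh failures are non-fatal and can simply be logged.
}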

View File

@ -1,17 +1,14 @@
//go:build !integration
// +build !integration
package completion package completion
import ( import (
"bytes" "bytes"
"context" "context"
"errors" "errors"
"io"
"net/http" "net/http"
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"gopkg.in/h2non/gock.v1"
) )
const samplePackageResp = ` const samplePackageResp = `
@ -36,64 +33,41 @@ lumina AUR
eternallands-sound AUR eternallands-sound AUR
` `
type mockDoer struct {
t *testing.T
returnBody string
returnStatusCode int
returnErr error
wantUrl string
}
func (m *mockDoer) Do(req *http.Request) (*http.Response, error) {
assert.Equal(m.t, m.wantUrl, req.URL.String())
return &http.Response{
StatusCode: m.returnStatusCode,
Body: io.NopCloser(bytes.NewBufferString(m.returnBody)),
}, m.returnErr
}
func Test_createAURList(t *testing.T) { func Test_createAURList(t *testing.T) {
t.Parallel() defer gock.Off()
doer := &mockDoer{
t: t, gock.New("https://aur.archlinux.org").
wantUrl: "https://aur.archlinux.org/packages.gz", Get("/packages.gz").
returnStatusCode: 200, Reply(200).
returnBody: samplePackageResp, BodyString(samplePackageResp)
returnErr: nil,
}
out := &bytes.Buffer{} out := &bytes.Buffer{}
err := createAURList(context.Background(), doer, "https://aur.archlinux.org", out) err := createAURList(context.TODO(), &http.Client{}, "https://aur.archlinux.org", out)
assert.NoError(t, err) assert.NoError(t, err)
gotOut := out.String() gotOut := out.String()
assert.Equal(t, expectPackageCompletion, gotOut) assert.Equal(t, expectPackageCompletion, gotOut)
} }
func Test_createAURListHTTPError(t *testing.T) { func Test_createAURListHTTPError(t *testing.T) {
t.Parallel() defer gock.Off()
doer := &mockDoer{
t: t, gock.New("https://aur.archlinux.org").
wantUrl: "https://aur.archlinux.org/packages.gz", Get("/packages.gz").
returnStatusCode: 200, ReplyError(errors.New("Not available"))
returnBody: samplePackageResp,
returnErr: errors.New("Not available"),
}
out := &bytes.Buffer{} out := &bytes.Buffer{}
err := createAURList(context.Background(), doer, "https://aur.archlinux.org", out) err := createAURList(context.TODO(), &http.Client{}, "https://aur.archlinux.org", out)
assert.EqualError(t, err, "Not available") assert.EqualError(t, err, "Get \"https://aur.archlinux.org/packages.gz\": Not available")
} }
func Test_createAURListStatusError(t *testing.T) { func Test_createAURListStatusError(t *testing.T) {
t.Parallel() defer gock.Off()
doer := &mockDoer{
t: t,
wantUrl: "https://aur.archlinux.org/packages.gz",
returnStatusCode: 503,
returnBody: samplePackageResp,
returnErr: nil,
}
gock.New("https://aur.archlinux.org").
Get("/packages.gz").
Reply(503).
BodyString(samplePackageResp)
out := &bytes.Buffer{} out := &bytes.Buffer{}
err := createAURList(context.Background(), doer, "https://aur.archlinux.org", out) err := createAURList(context.TODO(), &http.Client{}, "https://aur.archlinux.org", out)
assert.EqualError(t, err, "invalid status code: 503") assert.EqualError(t, err, "invalid status code: 503")
} }

View File

@ -4,8 +4,6 @@ import (
"time" "time"
alpm "github.com/Jguer/go-alpm/v2" alpm "github.com/Jguer/go-alpm/v2"
"github.com/Jguer/yay/v12/pkg/text"
) )
type ( type (
@ -13,56 +11,39 @@ type (
Depend = alpm.Depend Depend = alpm.Depend
) )
// VerCmp performs version comparison according to Pacman conventions. Return func VerCmp(a, b string) int {
// value is <0 if and only if v1 is older than v2. return alpm.VerCmp(a, b)
func VerCmp(v1, v2 string) int {
return alpm.VerCmp(v1, v2)
} }
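For example (values illustrative; ordering follows pacman's vercmp rules):
VerCmp("1.0-1", "1.0.1-1") // < 0: 1.0 is older than 1.0.1
VerCmp("2:1.0-1", "1.5-1") // > 0: a higher epoch always wins
VerCmp("1.0-1", "1.0-1")   // == 0: identical versions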
type Upgrade struct { type Upgrade struct {
Name string Name string
Base string
Repository string Repository string
LocalVersion string LocalVersion string
RemoteVersion string RemoteVersion string
Reason alpm.PkgReason Reason alpm.PkgReason
Extra string // Extra information to be displayed
}
type SyncUpgrade struct {
Package alpm.IPackage
LocalVersion string
Reason alpm.PkgReason
} }
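For illustration (all field values hypothetical), an Upgrade entry for an AUR package, using the newer definition above that adds Base and Extra, might look like:
u := Upgrade{
	Name:          "yay",
	Base:          "yay",
	Repository:    "aur",
	LocalVersion:  "12.0.3-1",
	RemoteVersion: "12.0.4-1",
	Extra:         "",
}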
type Executor interface { type Executor interface {
AlpmArchitectures() ([]string, error) AlpmArchitectures() ([]string, error)
BiggestPackages() []IPackage BiggestPackages() []IPackage
Cleanup() Cleanup()
InstalledRemotePackageNames() []string
InstalledRemotePackages() map[string]IPackage
InstalledSyncPackageNames() []string
IsCorrectVersionInstalled(string, string) bool IsCorrectVersionInstalled(string, string) bool
LastBuildTime() time.Time LastBuildTime() time.Time
LocalPackage(string) IPackage LocalPackage(string) IPackage
LocalPackages() []IPackage LocalPackages() []IPackage
LocalSatisfierExists(string) bool LocalSatisfierExists(string) bool
PackageConflicts(IPackage) []Depend
PackageDepends(IPackage) []Depend PackageDepends(IPackage) []Depend
PackageGroups(IPackage) []string PackageGroups(IPackage) []string
PackageOptionalDepends(IPackage) []Depend PackageOptionalDepends(IPackage) []Depend
PackageProvides(IPackage) []Depend PackageProvides(IPackage) []Depend
PackagesFromGroup(string) []IPackage PackagesFromGroup(string) []IPackage
PackagesFromGroupAndDB(string, string) ([]IPackage, error)
RefreshHandle() error RefreshHandle() error
SyncUpgrades(enableDowngrade bool) ( RepoUpgrades(bool) ([]Upgrade, error)
map[string]SyncUpgrade, error)
Repos() []string Repos() []string
SatisfierFromDB(string, string) (IPackage, error) SatisfierFromDB(string, string) IPackage
SyncPackage(string) IPackage SyncPackage(string) IPackage
SyncPackageFromDB(string, string) IPackage
SyncPackages(...string) []IPackage SyncPackages(...string) []IPackage
SyncSatisfier(string) IPackage SyncSatisfier(string) IPackage
SyncSatisfierExists(string) bool SyncSatisfierExists(string) bool
SetLogger(logger *text.Logger)
} }

View File

@ -1,6 +1,7 @@
package ialpm package ialpm
import ( import (
"bufio"
"errors" "errors"
"fmt" "fmt"
"os" "os"
@ -11,9 +12,10 @@ import (
pacmanconf "github.com/Morganamilo/go-pacmanconf" pacmanconf "github.com/Morganamilo/go-pacmanconf"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v12/pkg/settings" "github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/text"
"github.com/Jguer/yay/v11/pkg/upgrade"
) )
type AlpmExecutor struct { type AlpmExecutor struct {
@ -22,31 +24,16 @@ type AlpmExecutor struct {
syncDB alpm.IDBList syncDB alpm.IDBList
syncDBsCache []alpm.IDB syncDBsCache []alpm.IDB
conf *pacmanconf.Config conf *pacmanconf.Config
log *text.Logger
installedRemotePkgNames []string
installedRemotePkgMap map[string]alpm.IPackage
installedSyncPkgNames []string
} }
func NewExecutor(pacmanConf *pacmanconf.Config, logger *text.Logger) (*AlpmExecutor, error) { func NewExecutor(pacmanConf *pacmanconf.Config) (*AlpmExecutor, error) {
ae := &AlpmExecutor{ ae := &AlpmExecutor{conf: pacmanConf}
handle: nil,
localDB: nil,
syncDB: nil,
syncDBsCache: []alpm.IDB{},
conf: pacmanConf,
log: logger,
installedRemotePkgNames: nil,
installedRemotePkgMap: nil,
installedSyncPkgNames: nil,
}
if err := ae.RefreshHandle(); err != nil { err := ae.RefreshHandle()
if err != nil {
return nil, err return nil, err
} }
var err error
ae.localDB, err = ae.handle.LocalDB() ae.localDB, err = ae.handle.LocalDB()
if err != nil { if err != nil {
return nil, err return nil, err
@ -143,14 +130,12 @@ func configureAlpm(pacmanConf *pacmanconf.Config, alpmHandle *alpm.Handle) error
return alpmHandle.SetCheckSpace(pacmanConf.CheckSpace) return alpmHandle.SetCheckSpace(pacmanConf.CheckSpace)
} }
func (ae *AlpmExecutor) logCallback() func(level alpm.LogLevel, str string) { func logCallback(level alpm.LogLevel, str string) {
return func(level alpm.LogLevel, str string) {
switch level { switch level {
case alpm.LogWarning: case alpm.LogWarning:
ae.log.Warn(str) text.Warn(str)
case alpm.LogError: case alpm.LogError:
ae.log.Error(str) text.Error(str)
}
} }
} }
@ -176,7 +161,7 @@ func (ae *AlpmExecutor) questionCallback() func(question alpm.QuestionAny) {
return nil return nil
}) })
str := text.Bold(gotext.Get("There are %[1]d providers available for %[2]s:", size, qp.Dep())) str := text.Bold(gotext.Get("There are %d providers available for %s:\n", size, qp.Dep()))
size = 1 size = 1
@ -187,44 +172,52 @@ func (ae *AlpmExecutor) questionCallback() func(question alpm.QuestionAny) {
if dbName != thisDB { if dbName != thisDB {
dbName = thisDB dbName = thisDB
str += "\n" str += text.SprintOperationInfo(gotext.Get("Repository"), dbName, "\n ")
str += ae.log.SprintOperationInfo(gotext.Get("Repository"), " ", dbName, "\n ")
} }
str += fmt.Sprintf("%d) %s ", size, pkg.Name()) str += fmt.Sprintf("%d) %s ", size, pkg.Name())
size++ size++
return nil return nil
}) })
ae.log.OperationInfoln(str) text.OperationInfoln(str)
for { for {
ae.log.Println(gotext.Get("\nEnter a number (default=1): ")) fmt.Print(gotext.Get("\nEnter a number (default=1): "))
// TODO: reenable noconfirm // TODO: reenable noconfirm
if settings.NoConfirm { if settings.NoConfirm {
ae.log.Println() fmt.Println()
break break
} }
numberBuf, err := ae.log.GetInput("", false) reader := bufio.NewReader(os.Stdin)
numberBuf, overflow, err := reader.ReadLine()
if err != nil { if err != nil {
ae.log.Errorln(err) text.Errorln(err)
break break
} }
if numberBuf == "" { if overflow {
text.Errorln(gotext.Get(" Input too long"))
continue
}
if string(numberBuf) == "" {
break break
} }
num, err := strconv.Atoi(numberBuf) num, err := strconv.Atoi(string(numberBuf))
if err != nil { if err != nil {
ae.log.Errorln(gotext.Get("invalid number: %s", numberBuf)) text.Errorln(gotext.Get("invalid number: %s", string(numberBuf)))
continue continue
} }
if num < 1 || num > size { if num < 1 || num > size {
ae.log.Errorln(gotext.Get("invalid value: %d is not between %d and %d", num, 1, size)) text.Errorln(gotext.Get("invalid value: %d is not between %d and %d", num, 1, size))
continue continue
} }
@ -252,7 +245,7 @@ func (ae *AlpmExecutor) RefreshHandle() error {
} }
alpmSetQuestionCallback(alpmHandle, ae.questionCallback()) alpmSetQuestionCallback(alpmHandle, ae.questionCallback())
alpmSetLogCallback(alpmHandle, ae.logCallback()) alpmSetLogCallback(alpmHandle, logCallback)
ae.handle = alpmHandle ae.handle = alpmHandle
ae.syncDBsCache = nil ae.syncDBsCache = nil
@ -311,22 +304,6 @@ func (ae *AlpmExecutor) PackagesFromGroup(groupName string) []alpm.IPackage {
return groupPackages return groupPackages
} }
func (ae *AlpmExecutor) PackagesFromGroupAndDB(groupName, dbName string) ([]alpm.IPackage, error) {
singleDBList, err := ae.handle.SyncDBListByDBName(dbName)
if err != nil {
return nil, err
}
groupPackages := []alpm.IPackage{}
_ = singleDBList.FindGroupPkgs(groupName).ForEach(func(pkg alpm.IPackage) error {
groupPackages = append(groupPackages, pkg)
return nil
})
return groupPackages, nil
}
func (ae *AlpmExecutor) LocalPackages() []alpm.IPackage { func (ae *AlpmExecutor) LocalPackages() []alpm.IPackage {
localPackages := []alpm.IPackage{} localPackages := []alpm.IPackage{}
_ = ae.localDB.PkgCache().ForEach(func(pkg alpm.IPackage) error { _ = ae.localDB.PkgCache().ForEach(func(pkg alpm.IPackage) error {
@ -385,27 +362,18 @@ func (ae *AlpmExecutor) SyncPackage(pkgName string) alpm.IPackage {
return nil return nil
} }
func (ae *AlpmExecutor) SyncPackageFromDB(pkgName, dbName string) alpm.IPackage { func (ae *AlpmExecutor) SatisfierFromDB(pkgName, dbName string) alpm.IPackage {
singleDB, err := ae.handle.SyncDBByName(dbName) singleDB, err := ae.handle.SyncDBByName(dbName)
if err != nil { if err != nil {
return nil return nil
} }
return singleDB.Pkg(pkgName) foundPkg, err := singleDB.PkgCache().FindSatisfier(pkgName)
}
func (ae *AlpmExecutor) SatisfierFromDB(pkgName, dbName string) (alpm.IPackage, error) {
singleDBList, err := ae.handle.SyncDBListByDBName(dbName)
if err != nil { if err != nil {
return nil, err return nil
} }
foundPkg, err := singleDBList.FindSatisfier(pkgName) return foundPkg
if err != nil {
return nil, nil
}
return foundPkg, nil
} }
func (ae *AlpmExecutor) PackageDepends(pkg alpm.IPackage) []alpm.Depend { func (ae *AlpmExecutor) PackageDepends(pkg alpm.IPackage) []alpm.Depend {
@ -423,6 +391,11 @@ func (ae *AlpmExecutor) PackageProvides(pkg alpm.IPackage) []alpm.Depend {
return alpmPackage.Provides().Slice() return alpmPackage.Provides().Slice()
} }
func (ae *AlpmExecutor) PackageConflicts(pkg alpm.IPackage) []alpm.Depend {
alpmPackage := pkg.(*alpm.Package)
return alpmPackage.Conflicts().Slice()
}
func (ae *AlpmExecutor) PackageGroups(pkg alpm.IPackage) []string { func (ae *AlpmExecutor) PackageGroups(pkg alpm.IPackage) []string {
alpmPackage := pkg.(*alpm.Package) alpmPackage := pkg.(*alpm.Package)
return alpmPackage.Groups().Slice() return alpmPackage.Groups().Slice()
@ -430,19 +403,18 @@ func (ae *AlpmExecutor) PackageGroups(pkg alpm.IPackage) []string {
// upRepo gathers local packages and checks if they have new versions. // upRepo gathers local packages and checks if they have new versions.
// Output: Upgrade type package list. // Output: Upgrade type package list.
func (ae *AlpmExecutor) SyncUpgrades(enableDowngrade bool) ( func (ae *AlpmExecutor) RepoUpgrades(enableDowngrade bool) ([]db.Upgrade, error) {
map[string]db.SyncUpgrade, error,
) {
ups := map[string]db.SyncUpgrade{}
var errReturn error var errReturn error
slice := []db.Upgrade{}
localDB, errDB := ae.handle.LocalDB() localDB, errDB := ae.handle.LocalDB()
if errDB != nil { if errDB != nil {
return ups, errDB return slice, errDB
} }
if err := ae.handle.TransInit(alpm.TransFlagNoLock); err != nil { if err := ae.handle.TransInit(alpm.TransFlagNoLock); err != nil {
return ups, err return slice, err
} }
defer func() { defer func() {
@ -450,7 +422,7 @@ func (ae *AlpmExecutor) SyncUpgrades(enableDowngrade bool) (
}() }()
if err := ae.handle.SyncSysupgrade(enableDowngrade); err != nil { if err := ae.handle.SyncSysupgrade(enableDowngrade); err != nil {
return ups, err return slice, err
} }
_ = ae.handle.TransGetAdd().ForEach(func(pkg alpm.IPackage) error { _ = ae.handle.TransGetAdd().ForEach(func(pkg alpm.IPackage) error {
@ -462,16 +434,17 @@ func (ae *AlpmExecutor) SyncUpgrades(enableDowngrade bool) (
reason = localPkg.Reason() reason = localPkg.Reason()
} }
ups[pkg.Name()] = db.SyncUpgrade{ slice = append(slice, upgrade.Upgrade{
Package: pkg, Name: pkg.Name(),
Reason: reason, Repository: pkg.DB().Name(),
LocalVersion: localVer, LocalVersion: localVer,
} RemoteVersion: pkg.Version(),
Reason: reason,
})
return nil return nil
}) })
return ups, errReturn return slice, errReturn
} }
func (ae *AlpmExecutor) BiggestPackages() []alpm.IPackage { func (ae *AlpmExecutor) BiggestPackages() []alpm.IPackage {

View File

@ -1,18 +1,11 @@
//go:build !integration
// +build !integration
package ialpm package ialpm
import ( import (
"io"
"strings"
"testing" "testing"
alpm "github.com/Jguer/go-alpm/v2" alpm "github.com/Jguer/go-alpm/v2"
"github.com/Morganamilo/go-pacmanconf" "github.com/Morganamilo/go-pacmanconf"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/Jguer/yay/v12/pkg/text"
) )
func TestAlpmExecutor(t *testing.T) { func TestAlpmExecutor(t *testing.T) {
@ -48,7 +41,7 @@ func TestAlpmExecutor(t *testing.T) {
}, },
} }
aExec, err := NewExecutor(pacmanConf, text.NewLogger(io.Discard, io.Discard, strings.NewReader(""), false, "test")) aExec, err := NewExecutor(pacmanConf)
assert.NoError(t, err) assert.NoError(t, err)
assert.NotNil(t, aExec.conf) assert.NotNil(t, aExec.conf)

View File

@ -1,54 +0,0 @@
package ialpm
import (
alpm "github.com/Jguer/go-alpm/v2"
"github.com/Jguer/yay/v12/pkg/text"
)
// GetPackageNamesBySource returns package names with and without correspondence in SyncDBS respectively.
func (ae *AlpmExecutor) getPackageNamesBySource() {
if ae.installedRemotePkgMap == nil {
ae.installedRemotePkgMap = map[string]alpm.IPackage{}
}
for _, localpkg := range ae.LocalPackages() {
pkgName := localpkg.Name()
if ae.SyncPackage(pkgName) != nil {
ae.installedSyncPkgNames = append(ae.installedSyncPkgNames, pkgName)
} else {
ae.installedRemotePkgNames = append(ae.installedRemotePkgNames, pkgName)
ae.installedRemotePkgMap[pkgName] = localpkg
}
}
ae.log.Debugln("populating db executor package caches.",
"sync_len", len(ae.installedSyncPkgNames), "remote_len", len(ae.installedRemotePkgNames))
}
func (ae *AlpmExecutor) InstalledRemotePackages() map[string]alpm.IPackage {
if ae.installedRemotePkgMap == nil {
ae.getPackageNamesBySource()
}
return ae.installedRemotePkgMap
}
func (ae *AlpmExecutor) InstalledRemotePackageNames() []string {
if ae.installedRemotePkgNames == nil {
ae.getPackageNamesBySource()
}
return ae.installedRemotePkgNames
}
func (ae *AlpmExecutor) InstalledSyncPackageNames() []string {
if ae.installedSyncPkgNames == nil {
ae.getPackageNamesBySource()
}
return ae.installedSyncPkgNames
}
func (ae *AlpmExecutor) SetLogger(logger *text.Logger) {
ae.log = logger
}

View File

@ -1,214 +0,0 @@
package mock
import (
"time"
"github.com/Jguer/yay/v12/pkg/db"
"github.com/Jguer/yay/v12/pkg/text"
"github.com/Jguer/go-alpm/v2"
)
type (
IPackage = alpm.IPackage
Depend = alpm.Depend
Upgrade = db.Upgrade
)
type DBExecutor struct {
db.Executor
AlpmArchitecturesFn func() ([]string, error)
InstalledRemotePackageNamesFn func() []string
InstalledRemotePackagesFn func() map[string]IPackage
IsCorrectVersionInstalledFn func(string, string) bool
LocalPackageFn func(string) IPackage
LocalPackagesFn func() []IPackage
LocalSatisfierExistsFn func(string) bool
PackageDependsFn func(IPackage) []Depend
PackageOptionalDependsFn func(alpm.IPackage) []alpm.Depend
PackageProvidesFn func(IPackage) []Depend
PackagesFromGroupFn func(string) []IPackage
PackagesFromGroupAndDBFn func(string, string) ([]IPackage, error)
RefreshHandleFn func() error
ReposFn func() []string
SyncPackageFn func(string) IPackage
SyncPackagesFn func(...string) []IPackage
SyncSatisfierFn func(string) IPackage
SatisfierFromDBFn func(string, string) (IPackage, error)
SyncUpgradesFn func(bool) (map[string]db.SyncUpgrade, error)
SetLoggerFn func(*text.Logger)
}
func (t *DBExecutor) InstalledRemotePackageNames() []string {
if t.InstalledRemotePackageNamesFn != nil {
return t.InstalledRemotePackageNamesFn()
}
panic("implement me")
}
func (t *DBExecutor) InstalledRemotePackages() map[string]IPackage {
if t.InstalledRemotePackagesFn != nil {
return t.InstalledRemotePackagesFn()
}
panic("implement me")
}
func (t *DBExecutor) AlpmArchitectures() ([]string, error) {
if t.AlpmArchitecturesFn != nil {
return t.AlpmArchitecturesFn()
}
panic("implement me")
}
func (t *DBExecutor) BiggestPackages() []IPackage {
panic("implement me")
}
func (t *DBExecutor) Cleanup() {
panic("implement me")
}
func (t *DBExecutor) IsCorrectVersionInstalled(s, s2 string) bool {
if t.IsCorrectVersionInstalledFn != nil {
return t.IsCorrectVersionInstalledFn(s, s2)
}
panic("implement me")
}
func (t *DBExecutor) LastBuildTime() time.Time {
panic("implement me")
}
func (t *DBExecutor) LocalPackage(s string) IPackage {
if t.LocalPackageFn != nil {
return t.LocalPackageFn(s)
}
panic("implement me")
}
func (t *DBExecutor) LocalPackages() []IPackage {
if t.LocalPackagesFn != nil {
return t.LocalPackagesFn()
}
panic("implement me")
}
func (t *DBExecutor) LocalSatisfierExists(s string) bool {
if t.LocalSatisfierExistsFn != nil {
return t.LocalSatisfierExistsFn(s)
}
panic("implement me")
}
func (t *DBExecutor) PackageConflicts(iPackage IPackage) []Depend {
panic("implement me")
}
func (t *DBExecutor) PackageDepends(iPackage IPackage) []Depend {
if t.PackageDependsFn != nil {
return t.PackageDependsFn(iPackage)
}
panic("implement me")
}
func (t *DBExecutor) PackageGroups(iPackage IPackage) []string {
return []string{}
}
func (t *DBExecutor) PackageOptionalDepends(iPackage IPackage) []Depend {
if t.PackageOptionalDependsFn != nil {
return t.PackageOptionalDependsFn(iPackage)
}
panic("implement me")
}
func (t *DBExecutor) PackageProvides(iPackage IPackage) []Depend {
if t.PackageProvidesFn != nil {
return t.PackageProvidesFn(iPackage)
}
panic("implement me")
}
func (t *DBExecutor) PackagesFromGroup(s string) []IPackage {
if t.PackagesFromGroupFn != nil {
return t.PackagesFromGroupFn(s)
}
panic("implement me")
}
func (t *DBExecutor) PackagesFromGroupAndDB(s, s2 string) ([]IPackage, error) {
if t.PackagesFromGroupAndDBFn != nil {
return t.PackagesFromGroupAndDBFn(s, s2)
}
panic("implement me")
}
func (t *DBExecutor) RefreshHandle() error {
if t.RefreshHandleFn != nil {
return t.RefreshHandleFn()
}
panic("implement me")
}
func (t *DBExecutor) SyncUpgrades(b bool) (map[string]db.SyncUpgrade, error) {
if t.SyncUpgradesFn != nil {
return t.SyncUpgradesFn(b)
}
panic("implement me")
}
func (t *DBExecutor) Repos() []string {
if t.ReposFn != nil {
return t.ReposFn()
}
panic("implement me")
}
func (t *DBExecutor) SatisfierFromDB(s, s2 string) (IPackage, error) {
if t.SatisfierFromDBFn != nil {
return t.SatisfierFromDBFn(s, s2)
}
panic("implement me")
}
func (t *DBExecutor) SyncPackage(s string) IPackage {
if t.SyncPackageFn != nil {
return t.SyncPackageFn(s)
}
panic("implement me")
}
func (t *DBExecutor) SyncPackages(s ...string) []IPackage {
if t.SyncPackagesFn != nil {
return t.SyncPackagesFn(s...)
}
panic("implement me")
}
func (t *DBExecutor) SyncSatisfier(s string) IPackage {
if t.SyncSatisfierFn != nil {
return t.SyncSatisfierFn(s)
}
panic("implement me")
}
func (t *DBExecutor) SyncSatisfierExists(s string) bool {
if t.SyncSatisfierFn != nil {
return t.SyncSatisfierFn(s) != nil
}
panic("implement me")
}
func (t *DBExecutor) SetLogger(logger *text.Logger) {
if t.SetLoggerFn != nil {
t.SetLoggerFn(logger)
return
}
panic("implement me")
}
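
The mock above follows a simple function-field pattern: each interface method forwards to an optional XxxFn hook and panics if the hook was never set, so a test only wires up what it actually exercises. A minimal, hypothetical sketch of that pattern in use (the type and repository names below are illustrative, not taken from the yay test suite):

package main

import "fmt"

// Executor mimics the function-field mock style above: set only the hooks a
// test needs; anything else panics loudly if it is touched by accident.
type Executor struct {
	ReposFn func() []string
}

func (e *Executor) Repos() []string {
	if e.ReposFn != nil {
		return e.ReposFn()
	}
	panic("implement me")
}

func main() {
	mock := &Executor{ReposFn: func() []string { return []string{"core", "extra"} }}
	fmt.Println(mock.Repos()) // [core extra]
}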

View File

@ -6,30 +6,10 @@ import (
alpm "github.com/Jguer/go-alpm/v2" alpm "github.com/Jguer/go-alpm/v2"
) )
type DependList struct {
Depends []Depend
}
func (d DependList) Slice() []alpm.Depend {
return d.Depends
}
func (d DependList) ForEach(f func(*alpm.Depend) error) error {
for i := range d.Depends {
dep := &d.Depends[i]
err := f(dep)
if err != nil {
return err
}
}
return nil
}
type Package struct { type Package struct {
PBase string PBase string
PBuildDate time.Time PBuildDate time.Time
PDB *DB PDB alpm.IDB
PDescription string PDescription string
PISize int64 PISize int64
PName string PName string
@ -37,8 +17,6 @@ type Package struct {
PSize int64 PSize int64
PVersion string PVersion string
PReason alpm.PkgReason PReason alpm.PkgReason
PDepends alpm.IDependList
PProvides alpm.IDependList
} }
func (p *Package) Base() string { func (p *Package) Base() string {
@ -82,148 +60,129 @@ func (p *Package) Reason() alpm.PkgReason {
} }
func (p *Package) FileName() string { func (p *Package) FileName() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
func (p *Package) Base64Signature() string { func (p *Package) Base64Signature() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
func (p *Package) Validation() alpm.Validation { func (p *Package) Validation() alpm.Validation {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Architecture returns the package target Architecture. // Architecture returns the package target Architecture.
func (p *Package) Architecture() string { func (p *Package) Architecture() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Backup returns a list of package backups. // Backup returns a list of package backups.
func (p *Package) Backup() alpm.BackupList { func (p *Package) Backup() alpm.BackupList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Conflicts returns the conflicts of the package as a DependList. // Conflicts returns the conflicts of the package as a DependList.
func (p *Package) Conflicts() alpm.IDependList { func (p *Package) Conflicts() alpm.DependList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Depends returns the package's dependency list. // Depends returns the package's dependency list.
func (p *Package) Depends() alpm.IDependList { func (p *Package) Depends() alpm.DependList {
if p.PDepends != nil { panic("not implemented") // TODO: Implement
return p.PDepends
}
return alpm.DependList{}
} }
// Depends returns the package's optional dependency list. // Depends returns the package's optional dependency list.
func (p *Package) OptionalDepends() alpm.IDependList { func (p *Package) OptionalDepends() alpm.DependList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Depends returns the package's check dependency list. // Depends returns the package's check dependency list.
func (p *Package) CheckDepends() alpm.IDependList { func (p *Package) CheckDepends() alpm.DependList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Depends returns the package's make dependency list. // Depends returns the package's make dependency list.
func (p *Package) MakeDepends() alpm.IDependList { func (p *Package) MakeDepends() alpm.DependList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Files returns the file list of the package. // Files returns the file list of the package.
func (p *Package) Files() []alpm.File { func (p *Package) Files() []alpm.File {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// ContainsFile checks if the path is in the package filelist. // ContainsFile checks if the path is in the package filelist.
func (p *Package) ContainsFile(path string) (alpm.File, error) { func (p *Package) ContainsFile(path string) (alpm.File, error) {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Groups returns the groups the package belongs to. // Groups returns the groups the package belongs to.
func (p *Package) Groups() alpm.StringList { func (p *Package) Groups() alpm.StringList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// InstallDate returns the package install date. // InstallDate returns the package install date.
func (p *Package) InstallDate() time.Time { func (p *Package) InstallDate() time.Time {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Licenses returns the package license list. // Licenses returns the package license list.
func (p *Package) Licenses() alpm.StringList { func (p *Package) Licenses() alpm.StringList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// SHA256Sum returns package SHA256Sum. // SHA256Sum returns package SHA256Sum.
func (p *Package) SHA256Sum() string { func (p *Package) SHA256Sum() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// MD5Sum returns package MD5Sum. // MD5Sum returns package MD5Sum.
func (p *Package) MD5Sum() string { func (p *Package) MD5Sum() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Packager returns package packager name. // Packager returns package packager name.
func (p *Package) Packager() string { func (p *Package) Packager() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Provides returns DependList of packages provides by package. // Provides returns DependList of packages provides by package.
func (p *Package) Provides() alpm.IDependList { func (p *Package) Provides() alpm.DependList {
if p.PProvides == nil { panic("not implemented") // TODO: Implement
return alpm.DependList{}
}
return p.PProvides
} }
// Origin returns package origin. // Origin returns package origin.
func (p *Package) Origin() alpm.PkgFrom { func (p *Package) Origin() alpm.PkgFrom {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// Replaces returns a DependList with the packages this package replaces. // Replaces returns a DependList with the packages this package replaces.
func (p *Package) Replaces() alpm.IDependList { func (p *Package) Replaces() alpm.DependList {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// URL returns the upstream URL of the package. // URL returns the upstream URL of the package.
func (p *Package) URL() string { func (p *Package) URL() string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// ComputeRequiredBy returns the names of reverse dependencies of a package. // ComputeRequiredBy returns the names of reverse dependencies of a package.
func (p *Package) ComputeRequiredBy() []string { func (p *Package) ComputeRequiredBy() []string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// ComputeOptionalFor returns the names of packages that optionally // ComputeOptionalFor returns the names of packages that optionally
// require the given package. // require the given package.
func (p *Package) ComputeOptionalFor() []string { func (p *Package) ComputeOptionalFor() []string {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
// SyncNewVersion checks if there is a new version of the // SyncNewVersion checks if there is a new version of the
// package in a given DBlist. // package in a given DBlist.
func (p *Package) SyncNewVersion(l alpm.IDBList) alpm.IPackage { func (p *Package) SyncNewVersion(l alpm.IDBList) alpm.IPackage {
panic("not implemented") panic("not implemented") // TODO: Implement
} }
func (p *Package) Type() string { func (p *Package) Type() string {
panic("not implemented") panic("not implemented") // TODO: Implement
}
type DB struct {
alpm.IDB
name string
}
func NewDB(name string) *DB {
return &DB{name: name}
}
func (d *DB) Name() string {
return d.name
} }

View File

@ -1,15 +0,0 @@
package db
func ArchIsSupported(alpmArch []string, arch string) bool {
if arch == "any" {
return true
}
for _, a := range alpmArch {
if a == arch {
return true
}
}
return false
}

55 pkg/dep/base.go Normal file
View File

@ -0,0 +1,55 @@
package dep
import aur "github.com/Jguer/yay/v11/pkg/query"
// Base is an AUR base package.
type Base []*aur.Pkg
// Pkgbase returns the first base package.
func (b Base) Pkgbase() string {
return b[0].PackageBase
}
// Version returns the first base package version.
func (b Base) Version() string {
return b[0].Version
}
// URLPath returns the first base package URL.
func (b Base) URLPath() string {
return b[0].URLPath
}
// Packages foo and bar from a pkgbase named base would print like so:
// base (foo bar).
func (b Base) String() string {
pkg := b[0]
str := pkg.PackageBase
if len(b) > 1 || pkg.PackageBase != pkg.Name {
str2 := " ("
for _, split := range b {
str2 += split.Name + " "
}
str2 = str2[:len(str2)-1] + ")"
str += str2
}
return str
}
func GetBases(pkgs []*aur.Pkg) []Base {
basesMap := make(map[string]Base)
for _, pkg := range pkgs {
basesMap[pkg.PackageBase] = append(basesMap[pkg.PackageBase], pkg)
}
bases := make([]Base, 0, len(basesMap))
for _, base := range basesMap {
bases = append(bases, base)
}
return bases
}
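
As a quick illustration of the grouping and printing behaviour above, here is a self-contained sketch using a stripped-down Pkg with only the fields the logic touches (a simplification; the real type is the AUR RPC package from pkg/query):

package main

import "fmt"

// Pkg is a minimal stand-in for the AUR package type: only the fields used by
// the pkgbase grouping are kept.
type Pkg struct {
	Name        string
	PackageBase string
}

type Base []*Pkg

// String prints "base (foo bar)" for split packages, or just the name otherwise.
func (b Base) String() string {
	str := b[0].PackageBase
	if len(b) > 1 || b[0].PackageBase != b[0].Name {
		names := " ("
		for _, p := range b {
			names += p.Name + " "
		}
		str += names[:len(names)-1] + ")"
	}
	return str
}

func main() {
	pkgs := []*Pkg{
		{Name: "foo", PackageBase: "base"},
		{Name: "bar", PackageBase: "base"},
		{Name: "baz", PackageBase: "baz"},
	}
	basesMap := map[string]Base{}
	for _, p := range pkgs {
		basesMap[p.PackageBase] = append(basesMap[p.PackageBase], p)
	}
	for _, b := range basesMap {
		fmt.Println(b) // "base (foo bar)" and "baz" (map order is not fixed)
	}
}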

View File

@ -3,10 +3,43 @@ package dep
import ( import (
"strings" "strings"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/db"
aur "github.com/Jguer/yay/v12/pkg/query" aur "github.com/Jguer/yay/v11/pkg/query"
"github.com/Jguer/yay/v11/pkg/text"
) )
type providers struct {
lookfor string
Pkgs []*aur.Pkg
}
func makeProviders(name string) providers {
return providers{
name,
make([]*aur.Pkg, 0),
}
}
func (q providers) Len() int {
return len(q.Pkgs)
}
func (q providers) Less(i, j int) bool {
if q.lookfor == q.Pkgs[i].Name {
return true
}
if q.lookfor == q.Pkgs[j].Name {
return false
}
return text.LessRunes([]rune(q.Pkgs[i].Name), []rune(q.Pkgs[j].Name))
}
func (q providers) Swap(i, j int) {
q.Pkgs[i], q.Pkgs[j] = q.Pkgs[j], q.Pkgs[i]
}
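
The Less method above pins the exact name match to the front and orders the remaining candidates lexically. A rough equivalent with sort.Slice, just to show the resulting order (package names are examples):

package main

import (
	"fmt"
	"sort"
)

func main() {
	lookfor := "yay"
	pkgs := []string{"yay-git", "yay-bin", "yay"}

	// exact match first, everything else alphabetically
	sort.Slice(pkgs, func(i, j int) bool {
		if pkgs[i] == lookfor {
			return true
		}
		if pkgs[j] == lookfor {
			return false
		}
		return pkgs[i] < pkgs[j]
	})

	fmt.Println(pkgs) // [yay yay-bin yay-git]
}

The real implementation compares runes via text.LessRunes rather than plain string comparison, which for ASCII names like these gives the same order.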
func splitDep(dep string) (pkg, mod, ver string) { func splitDep(dep string) (pkg, mod, ver string) {
split := strings.FieldsFunc(dep, func(c rune) bool { split := strings.FieldsFunc(dep, func(c rune) bool {
match := c == '>' || c == '<' || c == '=' match := c == '>' || c == '<' || c == '='
@ -47,7 +80,7 @@ func provideSatisfies(provide, dep, pkgVersion string) bool {
return false return false
} }
// Unversioned provides can not satisfy a versioned dep // Unversioned provieds can not satisfy a versioned dep
if provideMod == "" && depMod != "" { if provideMod == "" && depMod != "" {
provideVersion = pkgVersion // Example package: pagure provideVersion = pkgVersion // Example package: pagure
} }
@ -85,3 +118,17 @@ func satisfiesAur(dep string, pkg *aur.Pkg) bool {
return false return false
} }
func satisfiesRepo(dep string, pkg db.IPackage, dbExecutor db.Executor) bool {
if pkgSatisfies(pkg.Name(), pkg.Version(), dep) {
return true
}
for _, provided := range dbExecutor.PackageProvides(pkg) {
if provideSatisfies(provided.String(), dep, pkg.Version()) {
return true
}
}
return false
}

325 pkg/dep/depCheck.go Normal file
View File

@ -0,0 +1,325 @@
package dep
import (
"fmt"
"os"
"strings"
"sync"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v11/pkg/stringset"
"github.com/Jguer/yay/v11/pkg/text"
)
func (dp *Pool) checkInnerConflict(name, conflict string, conflicts stringset.MapStringSet) {
for _, pkg := range dp.Aur {
if pkg.Name == name {
continue
}
if satisfiesAur(conflict, pkg) {
conflicts.Add(name, pkg.Name)
}
}
for _, pkg := range dp.Repo {
if pkg.Name() == name {
continue
}
if satisfiesRepo(conflict, pkg, dp.AlpmExecutor) {
conflicts.Add(name, pkg.Name())
}
}
}
func (dp *Pool) checkForwardConflict(name, conflict string, conflicts stringset.MapStringSet) {
for _, pkg := range dp.AlpmExecutor.LocalPackages() {
if pkg.Name() == name || dp.hasPackage(pkg.Name()) {
continue
}
if satisfiesRepo(conflict, pkg, dp.AlpmExecutor) {
n := pkg.Name()
if n != conflict {
n += " (" + conflict + ")"
}
conflicts.Add(name, n)
}
}
}
func (dp *Pool) checkReverseConflict(name, conflict string, conflicts stringset.MapStringSet) {
for _, pkg := range dp.Aur {
if pkg.Name == name {
continue
}
if satisfiesAur(conflict, pkg) {
if name != conflict {
name += " (" + conflict + ")"
}
conflicts.Add(pkg.Name, name)
}
}
for _, pkg := range dp.Repo {
if pkg.Name() == name {
continue
}
if satisfiesRepo(conflict, pkg, dp.AlpmExecutor) {
if name != conflict {
name += " (" + conflict + ")"
}
conflicts.Add(pkg.Name(), name)
}
}
}
func (dp *Pool) checkInnerConflicts(conflicts stringset.MapStringSet) {
for _, pkg := range dp.Aur {
for _, conflict := range pkg.Conflicts {
dp.checkInnerConflict(pkg.Name, conflict, conflicts)
}
}
for _, pkg := range dp.Repo {
for _, conflict := range dp.AlpmExecutor.PackageConflicts(pkg) {
dp.checkInnerConflict(pkg.Name(), conflict.String(), conflicts)
}
}
}
func (dp *Pool) checkForwardConflicts(conflicts stringset.MapStringSet) {
for _, pkg := range dp.Aur {
for _, conflict := range pkg.Conflicts {
dp.checkForwardConflict(pkg.Name, conflict, conflicts)
}
}
for _, pkg := range dp.Repo {
for _, conflict := range dp.AlpmExecutor.PackageConflicts(pkg) {
dp.checkForwardConflict(pkg.Name(), conflict.String(), conflicts)
}
}
}
func (dp *Pool) checkReverseConflicts(conflicts stringset.MapStringSet) {
for _, pkg := range dp.AlpmExecutor.LocalPackages() {
if dp.hasPackage(pkg.Name()) {
continue
}
for _, conflict := range dp.AlpmExecutor.PackageConflicts(pkg) {
dp.checkReverseConflict(pkg.Name(), conflict.String(), conflicts)
}
}
}
func (dp *Pool) CheckConflicts(useAsk, noConfirm, noDeps bool) (stringset.MapStringSet, error) {
conflicts := make(stringset.MapStringSet)
if noDeps {
return conflicts, nil
}
var wg sync.WaitGroup
innerConflicts := make(stringset.MapStringSet)
wg.Add(2)
text.OperationInfoln(gotext.Get("Checking for conflicts..."))
go func() {
dp.checkForwardConflicts(conflicts)
dp.checkReverseConflicts(conflicts)
wg.Done()
}()
text.OperationInfoln(gotext.Get("Checking for inner conflicts..."))
go func() {
dp.checkInnerConflicts(innerConflicts)
wg.Done()
}()
wg.Wait()
if len(innerConflicts) != 0 {
text.Errorln(gotext.Get("\nInner conflicts found:"))
for name, pkgs := range innerConflicts {
str := text.SprintError(name + ":")
for pkg := range pkgs {
str += " " + text.Cyan(pkg) + ","
}
str = strings.TrimSuffix(str, ",")
fmt.Println(str)
}
}
if len(conflicts) != 0 {
text.Errorln(gotext.Get("\nPackage conflicts found:"))
for name, pkgs := range conflicts {
str := text.SprintError(gotext.Get("Installing %s will remove:", text.Cyan(name)))
for pkg := range pkgs {
str += " " + text.Cyan(pkg) + ","
}
str = strings.TrimSuffix(str, ",")
fmt.Println(str)
}
}
// Add the inner conflicts to the conflicts map.
// These are used to decide what to pass --ask to (if set) or what not to pass --noconfirm to.
// As we have no idea what the order is yet, we add every inner conflict to the map.
for name, pkgs := range innerConflicts {
conflicts[name] = make(stringset.StringSet)
for pkg := range pkgs {
conflicts[pkg] = make(stringset.StringSet)
}
}
if len(conflicts) > 0 {
if !useAsk {
if noConfirm {
return nil, fmt.Errorf(gotext.Get("package conflicts can not be resolved with noconfirm, aborting"))
}
text.Errorln(gotext.Get("Conflicting packages will have to be confirmed manually"))
}
}
return conflicts, nil
}
type missing struct {
Good stringset.StringSet
Missing map[string][][]string
}
func (dp *Pool) _checkMissing(dep string, stack []string, missing *missing, noDeps, noCheckDeps bool) {
if missing.Good.Get(dep) {
return
}
if trees, ok := missing.Missing[dep]; ok {
for _, tree := range trees {
if stringSliceEqual(tree, stack) {
return
}
}
missing.Missing[dep] = append(missing.Missing[dep], stack)
return
}
if aurPkg := dp.findSatisfierAur(dep); aurPkg != nil {
missing.Good.Set(dep)
combinedDepList := ComputeCombinedDepList(aurPkg, noDeps, noCheckDeps)
for _, deps := range combinedDepList {
for _, aurDep := range deps {
if dp.AlpmExecutor.LocalSatisfierExists(aurDep) {
missing.Good.Set(aurDep)
continue
}
dp._checkMissing(aurDep, append(stack, aurPkg.Name), missing, noDeps, noCheckDeps)
}
}
return
}
if repoPkg := dp.findSatisfierRepo(dep); repoPkg != nil {
missing.Good.Set(dep)
if noDeps {
return
}
for _, dep := range dp.AlpmExecutor.PackageDepends(repoPkg) {
if dp.AlpmExecutor.LocalSatisfierExists(dep.String()) {
missing.Good.Set(dep.String())
continue
}
dp._checkMissing(dep.String(), append(stack, repoPkg.Name()), missing, noDeps, noCheckDeps)
}
return
}
missing.Missing[dep] = [][]string{stack}
}
func stringSliceEqual(a, b []string) bool {
if a == nil && b == nil {
return true
}
if a == nil || b == nil {
return false
}
if len(a) != len(b) {
return false
}
for i := 0; i < len(a); i++ {
if a[i] != b[i] {
return false
}
}
return true
}
func (dp *Pool) CheckMissing(noDeps, noCheckDeps bool) error {
missing := &missing{
make(stringset.StringSet),
make(map[string][][]string),
}
for _, target := range dp.Targets {
dp._checkMissing(target.DepString(), make([]string, 0), missing, noDeps, noCheckDeps)
}
if len(missing.Missing) == 0 {
return nil
}
text.Errorln(gotext.Get("Could not find all required packages:"))
for dep, trees := range missing.Missing {
for _, tree := range trees {
fmt.Fprintf(os.Stderr, "\t%s", text.Cyan(dep))
if len(tree) == 0 {
fmt.Fprint(os.Stderr, gotext.Get(" (Target"))
} else {
fmt.Fprint(os.Stderr, gotext.Get(" (Wanted by: "))
for n := 0; n < len(tree)-1; n++ {
fmt.Fprint(os.Stderr, text.Cyan(tree[n]), " -> ")
}
fmt.Fprint(os.Stderr, text.Cyan(tree[len(tree)-1]))
}
fmt.Fprintln(os.Stderr, ")")
}
}
return fmt.Errorf("")
}

201 pkg/dep/depOrder.go Normal file
View File

@ -0,0 +1,201 @@
package dep
import (
"fmt"
"github.com/Jguer/yay/v11/pkg/db"
aur "github.com/Jguer/yay/v11/pkg/query"
"github.com/Jguer/yay/v11/pkg/stringset"
"github.com/Jguer/yay/v11/pkg/text"
)
type Order struct {
Aur []Base
Repo []db.IPackage
Runtime stringset.StringSet
}
func makeOrder() *Order {
return &Order{
make([]Base, 0),
make([]db.IPackage, 0),
make(stringset.StringSet),
}
}
func GetOrder(dp *Pool, noDeps, noCheckDeps bool) *Order {
do := makeOrder()
for _, target := range dp.Targets {
dep := target.DepString()
if aurPkg := dp.Aur[dep]; aurPkg != nil && pkgSatisfies(aurPkg.Name, aurPkg.Version, dep) {
do.orderPkgAur(aurPkg, dp, true, noDeps, noCheckDeps)
} else if aurPkg := dp.findSatisfierAur(dep); aurPkg != nil {
do.orderPkgAur(aurPkg, dp, true, noDeps, noCheckDeps)
} else if repoPkg := dp.findSatisfierRepo(dep); repoPkg != nil {
do.orderPkgRepo(repoPkg, dp, true)
}
}
return do
}
func (do *Order) orderPkgAur(pkg *aur.Pkg, dp *Pool, runtime, noDeps, noCheckDeps bool) {
if runtime {
do.Runtime.Set(pkg.Name)
}
delete(dp.Aur, pkg.Name)
for i, deps := range ComputeCombinedDepList(pkg, noDeps, noCheckDeps) {
for _, dep := range deps {
if aurPkg := dp.findSatisfierAur(dep); aurPkg != nil {
do.orderPkgAur(aurPkg, dp, runtime && i == 0, noDeps, noCheckDeps)
}
if repoPkg := dp.findSatisfierRepo(dep); repoPkg != nil {
do.orderPkgRepo(repoPkg, dp, runtime && i == 0)
}
}
}
for i, base := range do.Aur {
if base.Pkgbase() == pkg.PackageBase {
do.Aur[i] = append(base, pkg)
return
}
}
do.Aur = append(do.Aur, Base{pkg})
}
func (do *Order) orderPkgRepo(pkg db.IPackage, dp *Pool, runtime bool) {
if runtime {
do.Runtime.Set(pkg.Name())
}
delete(dp.Repo, pkg.Name())
for _, dep := range dp.AlpmExecutor.PackageDepends(pkg) {
if repoPkg := dp.findSatisfierRepo(dep.String()); repoPkg != nil {
do.orderPkgRepo(repoPkg, dp, runtime)
}
}
do.Repo = append(do.Repo, pkg)
}
func (do *Order) HasMake() bool {
lenAur := 0
for _, base := range do.Aur {
lenAur += len(base)
}
return len(do.Runtime) != lenAur+len(do.Repo)
}
func (do *Order) GetMake() []string {
makeOnly := []string{}
for _, base := range do.Aur {
for _, pkg := range base {
if !do.Runtime.Get(pkg.Name) {
makeOnly = append(makeOnly, pkg.Name)
}
}
}
for _, pkg := range do.Repo {
if !do.Runtime.Get(pkg.Name()) {
makeOnly = append(makeOnly, pkg.Name())
}
}
return makeOnly
}
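
HasMake and GetMake both key off the Runtime set: anything ordered that is not marked runtime is a make-only dependency that can be removed after the build. A tiny sketch of that selection with hypothetical package names:

package main

import "fmt"

func main() {
	runtime := map[string]bool{"yay": true, "glibc": true}
	ordered := []string{"yay", "glibc", "go", "git"}

	// make-only packages are the ordered ones missing from the runtime set
	makeOnly := []string{}
	for _, name := range ordered {
		if !runtime[name] {
			makeOnly = append(makeOnly, name)
		}
	}

	fmt.Println(makeOnly) // [go git]
}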
// Print prints the summary of repo and AUR packages to be downloaded, split into runtime and make-only groups.
func (do *Order) Print() {
repo := ""
repoMake := ""
aurString := ""
aurMake := ""
repoLen := 0
repoMakeLen := 0
aurLen := 0
aurMakeLen := 0
for _, pkg := range do.Repo {
pkgStr := fmt.Sprintf(" %s-%s", pkg.Name(), pkg.Version())
if do.Runtime.Get(pkg.Name()) {
repo += pkgStr
repoLen++
} else {
repoMake += pkgStr
repoMakeLen++
}
}
for _, base := range do.Aur {
pkg := base.Pkgbase()
pkgStr := " " + pkg + "-" + base[0].Version
pkgStrMake := pkgStr
push := false
pushMake := false
switch {
case len(base) > 1, pkg != base[0].Name:
pkgStr += " ("
pkgStrMake += " ("
for _, split := range base {
if do.Runtime.Get(split.Name) {
pkgStr += split.Name + " "
aurLen++
push = true
} else {
pkgStrMake += split.Name + " "
aurMakeLen++
pushMake = true
}
}
pkgStr = pkgStr[:len(pkgStr)-1] + ")"
pkgStrMake = pkgStrMake[:len(pkgStrMake)-1] + ")"
case do.Runtime.Get(base[0].Name):
aurLen++
push = true
default:
aurMakeLen++
pushMake = true
}
if push {
aurString += pkgStr
}
if pushMake {
aurMake += pkgStrMake
}
}
printDownloads("Repo", repoLen, repo)
printDownloads("Repo Make", repoMakeLen, repoMake)
printDownloads("Aur", aurLen, aurString)
printDownloads("Aur Make", aurMakeLen, aurMake)
}
func printDownloads(repoName string, length int, packages string) {
if length < 1 {
return
}
repoInfo := fmt.Sprintf(text.Bold(text.Blue("[%s:%d]")), repoName, length)
fmt.Println(repoInfo + text.Cyan(packages))
}

592 pkg/dep/depPool.go Normal file
View File

@ -0,0 +1,592 @@
package dep
import (
"bufio"
"context"
"fmt"
"os"
"sort"
"strconv"
"strings"
"sync"
"github.com/Jguer/aur"
alpm "github.com/Jguer/go-alpm/v2"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v11/pkg/query"
"github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v11/pkg/settings/parser"
"github.com/Jguer/yay/v11/pkg/stringset"
"github.com/Jguer/yay/v11/pkg/text"
)
type Target struct {
DB string
Name string
Mod string
Version string
}
func ToTarget(pkg string) Target {
dbName, depString := text.SplitDBFromName(pkg)
name, mod, depVersion := splitDep(depString)
return Target{
DB: dbName,
Name: name,
Mod: mod,
Version: depVersion,
}
}
func (t Target) DepString() string {
return t.Name + t.Mod + t.Version
}
func (t Target) String() string {
if t.DB != "" {
return t.DB + "/" + t.DepString()
}
return t.DepString()
}
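
ToTarget splits a request such as extra/linux>=6.1 into its database, name, comparison operator, and version, and DepString/String reassemble it. A rough standalone illustration of that decomposition (the real code delegates to text.SplitDBFromName and splitDep, which handle more edge cases):

package main

import (
	"fmt"
	"strings"
)

func main() {
	target := "extra/linux>=6.1"

	// database prefix, split on the first "/"
	db, rest := "", target
	if i := strings.Index(target, "/"); i != -1 {
		db, rest = target[:i], target[i+1:]
	}

	// name, comparison operator, version
	name, mod, ver := rest, "", ""
	for _, op := range []string{">=", "<=", "=", ">", "<"} {
		if i := strings.Index(rest, op); i != -1 {
			name, mod, ver = rest[:i], op, rest[i+len(op):]
			break
		}
	}

	fmt.Printf("DB=%s Name=%s Mod=%s Version=%s\n", db, name, mod, ver)
	// DB=extra Name=linux Mod=>= Version=6.1
	fmt.Println(db + "/" + name + mod + ver) // round-trips to extra/linux>=6.1
}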
type Pool struct {
Targets []Target
Explicit stringset.StringSet
Repo map[string]db.IPackage
Aur map[string]*query.Pkg
AurCache map[string]*query.Pkg
Groups []string
AlpmExecutor db.Executor
Warnings *query.AURWarnings
aurClient *aur.Client
}
func makePool(dbExecutor db.Executor, aurClient *aur.Client) *Pool {
dp := &Pool{
Targets: []Target{},
Explicit: map[string]struct{}{},
Repo: map[string]alpm.IPackage{},
Aur: map[string]*aur.Pkg{},
AurCache: map[string]*aur.Pkg{},
Groups: []string{},
AlpmExecutor: dbExecutor,
Warnings: nil,
aurClient: aurClient,
}
return dp
}
// Includes db/ prefixes and group installs.
func (dp *Pool) ResolveTargets(ctx context.Context, pkgs []string,
mode parser.TargetMode,
ignoreProviders, noConfirm, provides bool, rebuild string, splitN int, noDeps, noCheckDeps bool, assumeInstalled []string) error {
// RPC requests are slow
// Combine as many AUR package requests as possible into a single RPC call
aurTargets := make(stringset.StringSet)
pkgs = query.RemoveInvalidTargets(pkgs, mode)
for _, pkg := range pkgs {
target := ToTarget(pkg)
// skip targets already satisfied
// even if the user enters db/pkg and aur/pkg the latter will
// still get skipped even if it's from a different database to
// the one specified
// this is how pacman behaves
if dp.hasPackage(target.DepString()) || isInAssumeInstalled(target.DepString(), assumeInstalled) {
continue
}
var foundPkg db.IPackage
// aur/ prefix means we only check the aur
if target.DB == "aur" || mode == parser.ModeAUR {
dp.Targets = append(dp.Targets, target)
aurTargets.Set(target.DepString())
continue
}
// If there's a different prefix only look in that repo
if target.DB != "" {
foundPkg = dp.AlpmExecutor.SatisfierFromDB(target.DepString(), target.DB)
} else {
// otherwise find it in any repo
foundPkg = dp.AlpmExecutor.SyncSatisfier(target.DepString())
}
if foundPkg != nil {
dp.Targets = append(dp.Targets, target)
dp.Explicit.Set(foundPkg.Name())
dp.ResolveRepoDependency(foundPkg, noDeps)
continue
} else {
// check for groups
// currently we don't resolve the packages in a group,
// we only check if the group exists.
// it would be better to check the groups from singleDB if
// the user specified a db, but there's no easy way to do
// that without making alpm_lists, so don't bother for now;
// db/group is probably a rare use case
groupPackages := dp.AlpmExecutor.PackagesFromGroup(target.Name)
if len(groupPackages) > 0 {
dp.Groups = append(dp.Groups, target.String())
for _, pkg := range groupPackages {
dp.Explicit.Set(pkg.Name())
}
continue
}
}
// if there was no db prefix check the aur
if target.DB == "" {
aurTargets.Set(target.DepString())
}
dp.Targets = append(dp.Targets, target)
}
if len(aurTargets) > 0 && mode.AtLeastAUR() {
return dp.resolveAURPackages(ctx, aurTargets, true, ignoreProviders,
noConfirm, provides, rebuild, splitN, noDeps, noCheckDeps)
}
return nil
}
// Pseudo provides finder.
// Try to find provides by performing a search of the package name
// This effectively performs -Ss on each package
// then runs -Si on each result to cache the information.
//
// For example if you were to -S yay then yay -Ss would give:
// yay-git yay-bin yay realyog pacui pacui-git ruby-yard
// These packages will all be added to the cache in case they are needed later
// Of course only the first three packages provide yay, the rest are just false
// positives.
//
// This method increases dependency resolve time.
func (dp *Pool) findProvides(ctx context.Context, pkgs stringset.StringSet) error {
var (
mux sync.Mutex
wg sync.WaitGroup
)
doSearch := func(pkg string) {
defer wg.Done()
var (
err error
results []query.Pkg
)
// Hack for a bigger search result: if the user wants
// java-environment we can search for just java instead and get
// more hits.
pkg, _, _ = splitDep(pkg) // openimagedenoise-git > ispc-git #1234
words := strings.Split(pkg, "-")
for i := range words {
results, err = dp.aurClient.Search(ctx, strings.Join(words[:i+1], "-"), aur.None)
if err == nil {
break
}
}
if err != nil {
return
}
for iR := range results {
mux.Lock()
if _, ok := dp.AurCache[results[iR].Name]; !ok {
pkgs.Set(results[iR].Name)
}
mux.Unlock()
}
}
for pkg := range pkgs {
if dp.AlpmExecutor.LocalPackage(pkg) != nil {
continue
}
wg.Add(1)
go doSearch(pkg)
}
wg.Wait()
return nil
}
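
The search widening described above splits the dependency name on "-" and queries progressively longer prefixes until one search succeeds. A sketch of just that widening step, with the AUR call replaced by a print (assumed behaviour; the real code stops at the first query that returns without error):

package main

import (
	"fmt"
	"strings"
)

func main() {
	dep := "java-environment"
	words := strings.Split(dep, "-")
	for i := range words {
		query := strings.Join(words[:i+1], "-")
		fmt.Println("would search the AUR for:", query) // "java", then "java-environment"
	}
}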
func (dp *Pool) cacheAURPackages(ctx context.Context, _pkgs stringset.StringSet, provides bool, splitN int) error {
pkgs := _pkgs.Copy()
toQuery := make([]string, 0)
for pkg := range pkgs {
if _, ok := dp.AurCache[pkg]; ok {
pkgs.Remove(pkg)
}
}
if len(pkgs) == 0 {
return nil
}
if provides {
err := dp.findProvides(ctx, pkgs)
if err != nil {
return err
}
}
for pkg := range pkgs {
if _, ok := dp.AurCache[pkg]; !ok {
name, _, ver := splitDep(pkg)
if ver != "" {
toQuery = append(toQuery, name, name+"-"+ver)
} else {
toQuery = append(toQuery, name)
}
}
}
info, err := query.AURInfo(ctx, dp.aurClient, toQuery, dp.Warnings, splitN)
if err != nil {
return err
}
for _, pkg := range info {
// Dump everything in cache just in case we need it later
dp.AurCache[pkg.Name] = pkg
}
return nil
}
// Compute dependency lists used in Package dep searching and ordering.
// Order sensitive TOFIX.
func ComputeCombinedDepList(pkg *aur.Pkg, noDeps, noCheckDeps bool) [][]string {
combinedDepList := make([][]string, 0, 3)
if !noDeps {
combinedDepList = append(combinedDepList, pkg.Depends)
}
combinedDepList = append(combinedDepList, pkg.MakeDepends)
if !noCheckDeps {
combinedDepList = append(combinedDepList, pkg.CheckDepends)
}
return combinedDepList
}
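
The flags map directly onto which dependency lists are considered: depends is dropped with noDeps, checkdepends with noCheckDeps, and makedepends is always kept. A small sketch with a hypothetical package holding plain string slices:

package main

import "fmt"

type pkg struct {
	Depends, MakeDepends, CheckDepends []string
}

// combined mirrors the selection above: make dependencies are always included.
func combined(p pkg, noDeps, noCheckDeps bool) [][]string {
	lists := make([][]string, 0, 3)
	if !noDeps {
		lists = append(lists, p.Depends)
	}
	lists = append(lists, p.MakeDepends)
	if !noCheckDeps {
		lists = append(lists, p.CheckDepends)
	}
	return lists
}

func main() {
	p := pkg{
		Depends:      []string{"glibc"},
		MakeDepends:  []string{"go"},
		CheckDepends: []string{"git"},
	}
	fmt.Println(combined(p, false, false)) // [[glibc] [go] [git]]
	fmt.Println(combined(p, true, true))   // [[go]]
}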
func (dp *Pool) resolveAURPackages(ctx context.Context,
pkgs stringset.StringSet,
explicit, ignoreProviders, noConfirm, provides bool,
rebuild string, splitN int, noDeps, noCheckDeps bool) error {
newPackages := make(stringset.StringSet)
newAURPackages := make(stringset.StringSet)
err := dp.cacheAURPackages(ctx, pkgs, provides, splitN)
if err != nil {
return err
}
if len(pkgs) == 0 {
return nil
}
for name := range pkgs {
_, ok := dp.Aur[name]
if ok {
continue
}
pkg := dp.findSatisfierAurCache(name, ignoreProviders, noConfirm, provides)
if pkg == nil {
continue
}
if explicit {
dp.Explicit.Set(pkg.Name)
}
dp.Aur[pkg.Name] = pkg
combinedDepList := ComputeCombinedDepList(pkg, noDeps, noCheckDeps)
for _, deps := range combinedDepList {
for _, dep := range deps {
newPackages.Set(dep)
}
}
}
for dep := range newPackages {
if dp.hasSatisfier(dep) {
continue
}
isInstalled := dp.AlpmExecutor.LocalSatisfierExists(dep)
hm := settings.HideMenus
settings.HideMenus = isInstalled
repoPkg := dp.AlpmExecutor.SyncSatisfier(dep) // has satisfier in repo: fetch it
settings.HideMenus = hm
if isInstalled && (rebuild != "tree" || repoPkg != nil) {
continue
}
if repoPkg != nil {
dp.ResolveRepoDependency(repoPkg, false)
continue
}
// assume it's in the aur
// ditch the versioning because the RPC can't handle it
newAURPackages.Set(dep)
}
err = dp.resolveAURPackages(ctx, newAURPackages, false, ignoreProviders,
noConfirm, provides, rebuild, splitN, noDeps, noCheckDeps)
return err
}
func (dp *Pool) ResolveRepoDependency(pkg db.IPackage, noDeps bool) {
dp.Repo[pkg.Name()] = pkg
if noDeps {
return
}
for _, dep := range dp.AlpmExecutor.PackageDepends(pkg) {
if dp.hasSatisfier(dep.String()) {
continue
}
// has satisfier installed: skip
if dp.AlpmExecutor.LocalSatisfierExists(dep.String()) {
continue
}
// has satisfier in repo: fetch it
if repoPkg := dp.AlpmExecutor.SyncSatisfier(dep.String()); repoPkg != nil {
dp.ResolveRepoDependency(repoPkg, noDeps)
}
}
}
func GetPool(ctx context.Context, pkgs []string,
warnings *query.AURWarnings,
dbExecutor db.Executor,
aurClient *aur.Client,
mode parser.TargetMode,
ignoreProviders, noConfirm, provides bool,
rebuild string, splitN int, noDeps bool, noCheckDeps bool, assumeInstalled []string) (*Pool, error) {
dp := makePool(dbExecutor, aurClient)
dp.Warnings = warnings
err := dp.ResolveTargets(ctx, pkgs, mode, ignoreProviders, noConfirm, provides,
rebuild, splitN, noDeps, noCheckDeps, assumeInstalled)
return dp, err
}
func (dp *Pool) findSatisfierAur(dep string) *query.Pkg {
for _, pkg := range dp.Aur {
if satisfiesAur(dep, pkg) {
return pkg
}
}
return nil
}
// This is mostly used to promote packages from the cache
// to the Install list.
// Provide a pacman-style provider menu if there's more than one candidate.
// This acts slightly differently from Pacman: it will give
// a menu even if a package with a matching name exists. I believe this
// method is better because most of the time you are choosing between
// foo and foo-git.
// With Pacman's behaviour, trying to install foo would never give you
// a menu.
// TODO: maybe intermix repo providers in the menu.
func (dp *Pool) findSatisfierAurCache(dep string, ignoreProviders, noConfirm, provides bool) *query.Pkg {
depName, _, _ := splitDep(dep)
seen := make(stringset.StringSet)
providerSlice := makeProviders(depName)
if dp.AlpmExecutor.LocalPackage(depName) != nil {
if pkg, ok := dp.AurCache[dep]; ok && pkgSatisfies(pkg.Name, pkg.Version, dep) {
return pkg
}
}
if ignoreProviders {
for _, pkg := range dp.AurCache {
if pkgSatisfies(pkg.Name, pkg.Version, dep) {
for _, target := range dp.Targets {
if target.Name == pkg.Name {
return pkg
}
}
}
}
}
for _, pkg := range dp.AurCache {
if seen.Get(pkg.Name) {
continue
}
if pkgSatisfies(pkg.Name, pkg.Version, dep) {
providerSlice.Pkgs = append(providerSlice.Pkgs, pkg)
seen.Set(pkg.Name)
continue
}
for _, provide := range pkg.Provides {
if provideSatisfies(provide, dep, pkg.Version) {
providerSlice.Pkgs = append(providerSlice.Pkgs, pkg)
seen.Set(pkg.Name)
continue
}
}
}
if !provides && providerSlice.Len() >= 1 {
return providerSlice.Pkgs[0]
}
if providerSlice.Len() == 1 {
return providerSlice.Pkgs[0]
}
if providerSlice.Len() > 1 {
sort.Sort(providerSlice)
return providerMenu(dep, providerSlice, noConfirm)
}
return nil
}
func (dp *Pool) findSatisfierRepo(dep string) db.IPackage {
for _, pkg := range dp.Repo {
if satisfiesRepo(dep, pkg, dp.AlpmExecutor) {
return pkg
}
}
return nil
}
func (dp *Pool) hasSatisfier(dep string) bool {
return dp.findSatisfierRepo(dep) != nil || dp.findSatisfierAur(dep) != nil
}
func (dp *Pool) hasPackage(name string) bool {
for _, pkg := range dp.Repo {
if pkg.Name() == name {
return true
}
}
for _, pkg := range dp.Aur {
if pkg.Name == name {
return true
}
}
for _, pkg := range dp.Groups {
if pkg == name {
return true
}
}
return false
}
func isInAssumeInstalled(name string, assumeInstalled []string) bool {
for _, pkgAndVersion := range assumeInstalled {
assumeName, _, _ := splitDep(pkgAndVersion)
depName, _, _ := splitDep(name)
if assumeName == depName {
return true
}
}
return false
}
func providerMenu(dep string, providers providers, noConfirm bool) *query.Pkg {
size := providers.Len()
str := text.Bold(gotext.Get("There are %d providers available for %s:\n", size, dep))
size = 1
str += text.SprintOperationInfo(gotext.Get("Repository AUR"), "\n ")
for _, pkg := range providers.Pkgs {
str += fmt.Sprintf("%d) %s ", size, pkg.Name)
size++
}
text.OperationInfoln(str)
for {
fmt.Print(gotext.Get("\nEnter a number (default=1): "))
if noConfirm {
fmt.Println("1")
return providers.Pkgs[0]
}
reader := bufio.NewReader(os.Stdin)
numberBuf, overflow, err := reader.ReadLine()
if err != nil {
fmt.Fprintln(os.Stderr, err)
break
}
if overflow {
text.Errorln(gotext.Get("input too long"))
continue
}
if string(numberBuf) == "" {
return providers.Pkgs[0]
}
num, err := strconv.Atoi(string(numberBuf))
if err != nil {
text.Errorln(gotext.Get("invalid number: %s", string(numberBuf)))
continue
}
if num < 1 || num >= size {
text.Errorln(gotext.Get("invalid value: %d is not between %d and %d", num, 1, size-1))
continue
}
return providers.Pkgs[num-1]
}
return nil
}

View File

@ -1,853 +0,0 @@
package dep
import (
"context"
"fmt"
"strconv"
aurc "github.com/Jguer/aur"
alpm "github.com/Jguer/go-alpm/v2"
gosrc "github.com/Morganamilo/go-srcinfo"
mapset "github.com/deckarep/golang-set/v2"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/db"
"github.com/Jguer/yay/v12/pkg/dep/topo"
"github.com/Jguer/yay/v12/pkg/intrange"
aur "github.com/Jguer/yay/v12/pkg/query"
"github.com/Jguer/yay/v12/pkg/text"
)
type InstallInfo struct {
Source Source
Reason Reason
Version string
LocalVersion string
SrcinfoPath *string
AURBase *string
SyncDBName *string
IsGroup bool
Upgrade bool
Devel bool
}
func (i *InstallInfo) String() string {
return fmt.Sprintf("InstallInfo{Source: %v, Reason: %v}", i.Source, i.Reason)
}
type (
Reason uint
Source int
)
func (r Reason) String() string {
return ReasonNames[r]
}
func (s Source) String() string {
return SourceNames[s]
}
const (
Explicit Reason = iota // 0
Dep // 1
MakeDep // 2
CheckDep // 3
)
var ReasonNames = map[Reason]string{
Explicit: gotext.Get("Explicit"),
Dep: gotext.Get("Dependency"),
MakeDep: gotext.Get("Make Dependency"),
CheckDep: gotext.Get("Check Dependency"),
}
const (
AUR Source = iota
Sync
Local
SrcInfo
Missing
)
var SourceNames = map[Source]string{
AUR: gotext.Get("AUR"),
Sync: gotext.Get("Sync"),
Local: gotext.Get("Local"),
SrcInfo: gotext.Get("SRCINFO"),
Missing: gotext.Get("Missing"),
}
var bgColorMap = map[Source]string{
AUR: "lightblue",
Sync: "lemonchiffon",
Local: "darkolivegreen1",
Missing: "tomato",
}
var colorMap = map[Reason]string{
Explicit: "black",
Dep: "deeppink",
MakeDep: "navyblue",
CheckDep: "forestgreen",
}
type Grapher struct {
logger *text.Logger
providerCache map[string][]aur.Pkg
dbExecutor db.Executor
aurClient aurc.QueryClient
fullGraph bool // If true, the graph will include all dependencies including already installed ones or repo
noConfirm bool // If true, the graph will not prompt for confirmation
noDeps bool // If true, the graph will not include dependencies
noCheckDeps bool // If true, the graph will not include check dependencies
needed bool // If true, the graph will only include packages that are not installed
}
func NewGrapher(dbExecutor db.Executor, aurCache aurc.QueryClient,
fullGraph, noConfirm, noDeps, noCheckDeps, needed bool,
logger *text.Logger,
) *Grapher {
return &Grapher{
dbExecutor: dbExecutor,
aurClient: aurCache,
fullGraph: fullGraph,
noConfirm: noConfirm,
noDeps: noDeps,
noCheckDeps: noCheckDeps,
needed: needed,
providerCache: make(map[string][]aurc.Pkg, 5),
logger: logger,
}
}
func NewGraph() *topo.Graph[string, *InstallInfo] {
return topo.New[string, *InstallInfo]()
}
func (g *Grapher) GraphFromTargets(ctx context.Context,
graph *topo.Graph[string, *InstallInfo], targets []string,
) (*topo.Graph[string, *InstallInfo], error) {
if graph == nil {
graph = NewGraph()
}
aurTargets := make([]string, 0, len(targets))
for _, targetString := range targets {
target := ToTarget(targetString)
switch target.DB {
case "": // unspecified db
if pkg := g.dbExecutor.SyncSatisfier(target.Name); pkg != nil {
g.GraphSyncPkg(ctx, graph, pkg, nil)
continue
}
groupPackages := g.dbExecutor.PackagesFromGroup(target.Name)
if len(groupPackages) > 0 {
dbName := groupPackages[0].DB().Name()
g.GraphSyncGroup(ctx, graph, target.Name, dbName)
continue
}
fallthrough
case "aur":
aurTargets = append(aurTargets, target.Name)
default:
pkg, err := g.dbExecutor.SatisfierFromDB(target.Name, target.DB)
if err != nil {
return nil, err
}
if pkg != nil {
g.GraphSyncPkg(ctx, graph, pkg, nil)
continue
}
groupPackages, err := g.dbExecutor.PackagesFromGroupAndDB(target.Name, target.DB)
if err != nil {
return nil, err
}
if len(groupPackages) > 0 {
g.GraphSyncGroup(ctx, graph, target.Name, target.DB)
continue
}
g.logger.Errorln(gotext.Get("No package found for"), " ", target)
}
}
var errA error
graph, errA = g.GraphFromAUR(ctx, graph, aurTargets)
if errA != nil {
return nil, errA
}
return graph, nil
}
func (g *Grapher) pickSrcInfoPkgs(pkgs []*aurc.Pkg) ([]*aurc.Pkg, error) {
final := make([]*aurc.Pkg, 0, len(pkgs))
for i := range pkgs {
g.logger.Println(text.Magenta(strconv.Itoa(i+1)+" ") + text.Bold(pkgs[i].Name) +
" " + text.Cyan(pkgs[i].Version))
g.logger.Println(" " + pkgs[i].Description)
}
g.logger.Infoln(gotext.Get("Packages to exclude") + " (eg: \"1 2 3\", \"1-3\", \"^4\"):")
numberBuf, err := g.logger.GetInput("", g.noConfirm)
if err != nil {
return nil, err
}
include, exclude, _, otherExclude := intrange.ParseNumberMenu(numberBuf)
isInclude := len(exclude) == 0 && otherExclude.Cardinality() == 0
for i := 1; i <= len(pkgs); i++ {
target := i - 1
if isInclude && !include.Get(i) {
final = append(final, pkgs[target])
}
if !isInclude && (exclude.Get(i)) {
final = append(final, pkgs[target])
}
}
return final, nil
}
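
The exclusion prompt accepts the usual yay number-menu syntax ("1 2 3", "1-3", "^4"). A loose sketch of how such input can be interpreted, with plain tokens selecting and "^" tokens deselecting (illustrative only; the real parsing lives in pkg/intrange.ParseNumberMenu):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parse expands tokens like "1-3" into ranges and routes "^"-prefixed tokens
// into the exclude set instead of the include set.
func parse(input string) (include, exclude map[int]bool) {
	include, exclude = map[int]bool{}, map[int]bool{}
	for _, tok := range strings.Fields(input) {
		target := include
		if strings.HasPrefix(tok, "^") {
			target = exclude
			tok = tok[1:]
		}
		lo, hi := tok, tok
		if i := strings.Index(tok, "-"); i != -1 {
			lo, hi = tok[:i], tok[i+1:]
		}
		a, errA := strconv.Atoi(lo)
		b, errB := strconv.Atoi(hi)
		if errA != nil || errB != nil {
			continue
		}
		for n := a; n <= b; n++ {
			target[n] = true
		}
	}
	return include, exclude
}

func main() {
	include, exclude := parse("1-3 ^2")
	fmt.Println(include, exclude) // map[1:true 2:true 3:true] map[2:true]
}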
func (g *Grapher) addAurPkgProvides(pkg *aurc.Pkg, graph *topo.Graph[string, *InstallInfo]) {
for i := range pkg.Provides {
depName, mod, version := splitDep(pkg.Provides[i])
g.logger.Debugln(pkg.String() + " provides: " + depName)
graph.Provides(depName, &alpm.Depend{
Name: depName,
Version: version,
Mod: aurDepModToAlpmDep(mod),
}, pkg.Name)
}
}
func (g *Grapher) GraphFromSrcInfos(ctx context.Context, graph *topo.Graph[string, *InstallInfo],
srcInfos map[string]*gosrc.Srcinfo,
) (*topo.Graph[string, *InstallInfo], error) {
if graph == nil {
graph = NewGraph()
}
aurPkgsAdded := []*aurc.Pkg{}
for pkgBuildDir, pkgbuild := range srcInfos {
pkgBuildDir := pkgBuildDir
aurPkgs, err := makeAURPKGFromSrcinfo(g.dbExecutor, pkgbuild)
if err != nil {
return nil, err
}
if len(aurPkgs) > 1 {
var errPick error
aurPkgs, errPick = g.pickSrcInfoPkgs(aurPkgs)
if errPick != nil {
return nil, errPick
}
}
for _, pkg := range aurPkgs {
pkg := pkg
reason := Explicit
if pkg := g.dbExecutor.LocalPackage(pkg.Name); pkg != nil {
reason = Reason(pkg.Reason())
}
graph.AddNode(pkg.Name)
g.addAurPkgProvides(pkg, graph)
g.ValidateAndSetNodeInfo(graph, pkg.Name, &topo.NodeInfo[*InstallInfo]{
Color: colorMap[reason],
Background: bgColorMap[AUR],
Value: &InstallInfo{
Source: SrcInfo,
Reason: reason,
SrcinfoPath: &pkgBuildDir,
AURBase: &pkg.PackageBase,
Version: pkg.Version,
},
})
}
aurPkgsAdded = append(aurPkgsAdded, aurPkgs...)
}
g.AddDepsForPkgs(ctx, aurPkgsAdded, graph)
return graph, nil
}
func (g *Grapher) AddDepsForPkgs(ctx context.Context, pkgs []*aur.Pkg, graph *topo.Graph[string, *InstallInfo]) {
for _, pkg := range pkgs {
g.addDepNodes(ctx, pkg, graph)
}
}
func (g *Grapher) addDepNodes(ctx context.Context, pkg *aur.Pkg, graph *topo.Graph[string, *InstallInfo]) {
if len(pkg.MakeDepends) > 0 {
g.addNodes(ctx, graph, pkg.Name, pkg.MakeDepends, MakeDep)
}
if !g.noDeps && len(pkg.Depends) > 0 {
g.addNodes(ctx, graph, pkg.Name, pkg.Depends, Dep)
}
if !g.noCheckDeps && !g.noDeps && len(pkg.CheckDepends) > 0 {
g.addNodes(ctx, graph, pkg.Name, pkg.CheckDepends, CheckDep)
}
}
func (g *Grapher) GraphSyncPkg(ctx context.Context,
graph *topo.Graph[string, *InstallInfo],
pkg alpm.IPackage, upgradeInfo *db.SyncUpgrade,
) *topo.Graph[string, *InstallInfo] {
if graph == nil {
graph = NewGraph()
}
graph.AddNode(pkg.Name())
_ = pkg.Provides().ForEach(func(p *alpm.Depend) error {
g.logger.Debugln(pkg.Name() + " provides: " + p.String())
graph.Provides(p.Name, p, pkg.Name())
return nil
})
dbName := pkg.DB().Name()
info := &InstallInfo{
Source: Sync,
Reason: Explicit,
Version: pkg.Version(),
SyncDBName: &dbName,
}
if upgradeInfo == nil {
if localPkg := g.dbExecutor.LocalPackage(pkg.Name()); localPkg != nil {
info.Reason = Reason(localPkg.Reason())
}
} else {
info.Upgrade = true
info.Reason = Reason(upgradeInfo.Reason)
info.LocalVersion = upgradeInfo.LocalVersion
}
g.ValidateAndSetNodeInfo(graph, pkg.Name(), &topo.NodeInfo[*InstallInfo]{
Color: colorMap[info.Reason],
Background: bgColorMap[info.Source],
Value: info,
})
return graph
}
func (g *Grapher) GraphSyncGroup(ctx context.Context,
graph *topo.Graph[string, *InstallInfo],
groupName, dbName string,
) *topo.Graph[string, *InstallInfo] {
if graph == nil {
graph = NewGraph()
}
graph.AddNode(groupName)
g.ValidateAndSetNodeInfo(graph, groupName, &topo.NodeInfo[*InstallInfo]{
Color: colorMap[Explicit],
Background: bgColorMap[Sync],
Value: &InstallInfo{
Source: Sync,
Reason: Explicit,
Version: "",
SyncDBName: &dbName,
IsGroup: true,
},
})
return graph
}
func (g *Grapher) GraphAURTarget(ctx context.Context,
graph *topo.Graph[string, *InstallInfo],
pkg *aurc.Pkg, instalInfo *InstallInfo,
) *topo.Graph[string, *InstallInfo] {
if graph == nil {
graph = NewGraph()
}
graph.AddNode(pkg.Name)
g.addAurPkgProvides(pkg, graph)
g.ValidateAndSetNodeInfo(graph, pkg.Name, &topo.NodeInfo[*InstallInfo]{
Color: colorMap[instalInfo.Reason],
Background: bgColorMap[AUR],
Value: instalInfo,
})
return graph
}
func (g *Grapher) GraphFromAUR(ctx context.Context,
graph *topo.Graph[string, *InstallInfo],
targets []string,
) (*topo.Graph[string, *InstallInfo], error) {
if graph == nil {
graph = NewGraph()
}
if len(targets) == 0 {
return graph, nil
}
aurPkgs, errCache := g.aurClient.Get(ctx, &aurc.Query{By: aurc.Name, Needles: targets})
if errCache != nil {
g.logger.Errorln(errCache)
}
for i := range aurPkgs {
pkg := &aurPkgs[i]
if _, ok := g.providerCache[pkg.Name]; !ok {
g.providerCache[pkg.Name] = []aurc.Pkg{*pkg}
}
}
aurPkgsAdded := []*aurc.Pkg{}
for _, target := range targets {
if cachedProvidePkg, ok := g.providerCache[target]; ok {
aurPkgs = cachedProvidePkg
} else {
var errA error
aurPkgs, errA = g.aurClient.Get(ctx, &aurc.Query{By: aurc.Provides, Needles: []string{target}, Contains: true})
if errA != nil {
g.logger.Errorln(gotext.Get("Failed to find AUR package for"), " ", target, ":", errA)
}
}
if len(aurPkgs) == 0 {
g.logger.Errorln(gotext.Get("No AUR package found for"), " ", target)
continue
}
aurPkg := &aurPkgs[0]
if len(aurPkgs) > 1 {
chosen := g.provideMenu(target, aurPkgs)
aurPkg = chosen
g.providerCache[target] = []aurc.Pkg{*aurPkg}
}
reason := Explicit
if pkg := g.dbExecutor.LocalPackage(aurPkg.Name); pkg != nil {
reason = Reason(pkg.Reason())
if g.needed {
if db.VerCmp(pkg.Version(), aurPkg.Version) >= 0 {
g.logger.Warnln(gotext.Get("%s is up to date -- skipping", text.Cyan(pkg.Name()+"-"+pkg.Version())))
continue
}
}
}
graph = g.GraphAURTarget(ctx, graph, aurPkg, &InstallInfo{
AURBase: &aurPkg.PackageBase,
Reason: reason,
Source: AUR,
Version: aurPkg.Version,
})
aurPkgsAdded = append(aurPkgsAdded, aurPkg)
}
g.AddDepsForPkgs(ctx, aurPkgsAdded, graph)
return graph, nil
}
// findDepsFromAUR resolves dependencies against the AUR, removes every dependency it could resolve from the deps set, and returns the chosen AUR packages.
func (g *Grapher) findDepsFromAUR(ctx context.Context,
deps mapset.Set[string],
) []aurc.Pkg {
pkgsToAdd := make([]aurc.Pkg, 0, deps.Cardinality())
if deps.Cardinality() == 0 {
return []aurc.Pkg{}
}
missingNeedles := make([]string, 0, deps.Cardinality())
for _, depString := range deps.ToSlice() {
if _, ok := g.providerCache[depString]; !ok {
depName, _, _ := splitDep(depString)
missingNeedles = append(missingNeedles, depName)
}
}
if len(missingNeedles) != 0 {
g.logger.Debugln("deps to find", missingNeedles)
// A provides search is more expensive than a plain name search,
// so look for exact name matches first and only then fall back to provides.
aurPkgs, errCache := g.aurClient.Get(ctx, &aurc.Query{
By: aurc.Name, Needles: missingNeedles, Contains: false,
})
if errCache != nil {
g.logger.Errorln(errCache)
}
for i := range aurPkgs {
pkg := &aurPkgs[i]
if deps.Contains(pkg.Name) {
g.providerCache[pkg.Name] = append(g.providerCache[pkg.Name], *pkg)
}
for _, val := range pkg.Provides {
if val == pkg.Name {
continue
}
if deps.Contains(val) {
g.providerCache[val] = append(g.providerCache[val], *pkg)
}
}
}
}
for _, depString := range deps.ToSlice() {
var aurPkgs []aurc.Pkg
depName, _, _ := splitDep(depString)
if cachedProvidePkg, ok := g.providerCache[depString]; ok {
aurPkgs = cachedProvidePkg
} else {
var errA error
aurPkgs, errA = g.aurClient.Get(ctx, &aurc.Query{By: aurc.Provides, Needles: []string{depName}, Contains: true})
if errA != nil {
g.logger.Errorln(gotext.Get("Failed to find AUR package for"), depString, ":", errA)
}
}
// remove packages that don't satisfy the dependency
satisfyingPkgs := make([]aurc.Pkg, 0, len(aurPkgs))
for i := range aurPkgs {
if satisfiesAur(depString, &aurPkgs[i]) {
satisfyingPkgs = append(satisfyingPkgs, aurPkgs[i])
}
}
aurPkgs = satisfyingPkgs
if len(aurPkgs) == 0 {
g.logger.Errorln(gotext.Get("No AUR package found for"), " ", depString)
continue
}
pkg := aurPkgs[0]
if len(aurPkgs) > 1 {
chosen := g.provideMenu(depString, aurPkgs)
pkg = *chosen
}
g.providerCache[depString] = []aurc.Pkg{pkg}
deps.Remove(depString)
pkgsToAdd = append(pkgsToAdd, pkg)
}
return pkgsToAdd
}
func (g *Grapher) ValidateAndSetNodeInfo(graph *topo.Graph[string, *InstallInfo],
node string, nodeInfo *topo.NodeInfo[*InstallInfo],
) {
info := graph.GetNodeInfo(node)
if info != nil && info.Value != nil {
if info.Value.Reason < nodeInfo.Value.Reason {
return // refuse to downgrade reason
}
if info.Value.Upgrade {
return // refuse to overwrite an upgrade
}
}
graph.SetNodeInfo(node, nodeInfo)
}
func (g *Grapher) addNodes(
ctx context.Context,
graph *topo.Graph[string, *InstallInfo],
parentPkgName string,
deps []string,
depType Reason,
) {
targetsToFind := mapset.NewThreadUnsafeSet(deps...)
// Check if in graph already
for _, depString := range targetsToFind.ToSlice() {
depName, _, _ := splitDep(depString)
if !graph.Exists(depName) && !graph.ProvidesExists(depName) {
continue
}
if graph.Exists(depName) {
if err := graph.DependOn(depName, parentPkgName); err != nil {
g.logger.Warnln(depString, parentPkgName, err)
}
targetsToFind.Remove(depString)
}
if p := graph.GetProviderNode(depName); p != nil {
if provideSatisfies(p.String(), depString, p.Version) {
if err := graph.DependOn(p.Provider, parentPkgName); err != nil {
g.logger.Warnln(p.Provider, parentPkgName, err)
}
targetsToFind.Remove(depString)
}
}
}
// Check installed
for _, depString := range targetsToFind.ToSlice() {
depName, _, _ := splitDep(depString)
if !g.dbExecutor.LocalSatisfierExists(depString) {
continue
}
if g.fullGraph {
g.ValidateAndSetNodeInfo(
graph,
depName,
&topo.NodeInfo[*InstallInfo]{Color: colorMap[depType], Background: bgColorMap[Local]})
if err := graph.DependOn(depName, parentPkgName); err != nil {
g.logger.Warnln(depName, parentPkgName, err)
}
}
targetsToFind.Remove(depString)
}
// Check Sync
for _, depString := range targetsToFind.ToSlice() {
alpmPkg := g.dbExecutor.SyncSatisfier(depString)
if alpmPkg == nil {
continue
}
if err := graph.DependOn(alpmPkg.Name(), parentPkgName); err != nil {
g.logger.Warnln("repo dep warn:", depString, parentPkgName, err)
}
dbName := alpmPkg.DB().Name()
g.ValidateAndSetNodeInfo(
graph,
alpmPkg.Name(),
&topo.NodeInfo[*InstallInfo]{
Color: colorMap[depType],
Background: bgColorMap[Sync],
Value: &InstallInfo{
Source: Sync,
Reason: depType,
Version: alpmPkg.Version(),
SyncDBName: &dbName,
},
})
if newDeps := alpmPkg.Depends().Slice(); len(newDeps) != 0 && g.fullGraph {
newDepsSlice := make([]string, 0, len(newDeps))
for _, newDep := range newDeps {
newDepsSlice = append(newDepsSlice, newDep.Name)
}
g.addNodes(ctx, graph, alpmPkg.Name(), newDepsSlice, Dep)
}
targetsToFind.Remove(depString)
}
// Check AUR
pkgsToAdd := g.findDepsFromAUR(ctx, targetsToFind)
for i := range pkgsToAdd {
aurPkg := &pkgsToAdd[i]
if err := graph.DependOn(aurPkg.Name, parentPkgName); err != nil {
g.logger.Warnln("aur dep warn:", aurPkg.Name, parentPkgName, err)
}
graph.SetNodeInfo(
aurPkg.Name,
&topo.NodeInfo[*InstallInfo]{
Color: colorMap[depType],
Background: bgColorMap[AUR],
Value: &InstallInfo{
Source: AUR,
Reason: depType,
AURBase: &aurPkg.PackageBase,
Version: aurPkg.Version,
},
})
g.addDepNodes(ctx, aurPkg, graph)
}
// Add missing to graph
for _, depString := range targetsToFind.ToSlice() {
depName, mod, ver := splitDep(depString)
// no dep found. add as missing
if err := graph.DependOn(depName, parentPkgName); err != nil {
g.logger.Warnln("missing dep warn:", depString, parentPkgName, err)
}
graph.SetNodeInfo(depName, &topo.NodeInfo[*InstallInfo]{
Color: colorMap[depType],
Background: bgColorMap[Missing],
Value: &InstallInfo{
Source: Missing,
Reason: depType,
Version: fmt.Sprintf("%s%s", mod, ver),
},
})
}
}
func (g *Grapher) provideMenu(dep string, options []aur.Pkg) *aur.Pkg {
size := len(options)
if size == 1 {
return &options[0]
}
str := text.Bold(gotext.Get("There are %[1]d providers available for %[2]s:", size, dep))
str += "\n"
size = 1
str += g.logger.SprintOperationInfo(gotext.Get("Repository AUR"), "\n ")
for i := range options {
str += fmt.Sprintf("%d) %s ", size, options[i].Name)
size++
}
g.logger.OperationInfoln(str)
for {
g.logger.Println(gotext.Get("\nEnter a number (default=1): "))
if g.noConfirm {
g.logger.Println("1")
return &options[0]
}
numberBuf, err := g.logger.GetInput("", false)
if err != nil {
g.logger.Errorln(err)
return &options[0]
}
if numberBuf == "" {
return &options[0]
}
num, err := strconv.Atoi(numberBuf)
if err != nil {
g.logger.Errorln(gotext.Get("invalid number: %s", numberBuf))
continue
}
if num < 1 || num >= size {
g.logger.Errorln(gotext.Get("invalid value: %d is not between %d and %d",
num, 1, size-1))
continue
}
return &options[num-1]
}
}
func makeAURPKGFromSrcinfo(dbExecutor db.Executor, srcInfo *gosrc.Srcinfo) ([]*aur.Pkg, error) {
pkgs := make([]*aur.Pkg, 0, 1)
alpmArch, err := dbExecutor.AlpmArchitectures()
if err != nil {
return nil, err
}
alpmArch = append(alpmArch, "") // srcinfo uses an empty arch string for arch-independent entries
getDesc := func(pkg *gosrc.Package) string {
if pkg.Pkgdesc != "" {
return pkg.Pkgdesc
}
return srcInfo.Pkgdesc
}
for i := range srcInfo.Packages {
pkg := &srcInfo.Packages[i]
pkgs = append(pkgs, &aur.Pkg{
ID: 0,
Name: pkg.Pkgname,
PackageBaseID: 0,
PackageBase: srcInfo.Pkgbase,
Version: srcInfo.Version(),
Description: getDesc(pkg),
URL: pkg.URL,
Depends: append(archStringToString(alpmArch, pkg.Depends),
archStringToString(alpmArch, srcInfo.Depends)...),
MakeDepends: archStringToString(alpmArch, srcInfo.MakeDepends),
CheckDepends: archStringToString(alpmArch, srcInfo.CheckDepends),
Conflicts: append(archStringToString(alpmArch, pkg.Conflicts),
archStringToString(alpmArch, srcInfo.Conflicts)...),
Provides: append(archStringToString(alpmArch, pkg.Provides),
archStringToString(alpmArch, srcInfo.Provides)...),
Replaces: append(archStringToString(alpmArch, pkg.Replaces),
archStringToString(alpmArch, srcInfo.Replaces)...),
OptDepends: append(archStringToString(alpmArch, pkg.OptDepends),
archStringToString(alpmArch, srcInfo.OptDepends)...),
Groups: pkg.Groups,
License: pkg.License,
Keywords: []string{},
})
}
return pkgs, nil
}
func archStringToString(alpmArches []string, archString []gosrc.ArchString) []string {
pkgs := make([]string, 0, len(archString))
for _, arch := range archString {
if db.ArchIsSupported(alpmArches, arch.Arch) {
pkgs = append(pkgs, arch.Value)
}
}
return pkgs
}
func aurDepModToAlpmDep(mod string) alpm.DepMod {
switch mod {
case "=":
return alpm.DepModEq
case ">=":
return alpm.DepModGE
case "<=":
return alpm.DepModLE
case ">":
return alpm.DepModGT
case "<":
return alpm.DepModLT
}
return alpm.DepModAny
}
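
For orientation, here is a minimal, illustrative sketch of how this grapher is typically driven. The constructor arguments mirror the unit tests further below; the flag values, the demoLayers name, and the stdout logger wiring are assumptions for illustration, not code from this diff, and imports (context, fmt, os plus the aurc, db and text packages already imported by this file) are elided.

func demoLayers(ctx context.Context, dbExecutor db.Executor, aurClient aurc.QueryClient, targets []string) error {
    // Flag positions copied from the tests below; their exact semantics are not shown here.
    g := NewGrapher(dbExecutor, aurClient, false, true, false, false, false,
        text.NewLogger(os.Stdout, os.Stderr, os.Stdin, false, "demo"))

    graph, err := g.GraphFromTargets(ctx, nil, targets)
    if err != nil {
        return err
    }

    // Walk the layered result: explicit targets come out in the first layers,
    // their dependencies in later ones (see TopoSortedLayerMap in the topo package below).
    for i, layer := range graph.TopoSortedLayerMap(nil) {
        for name, info := range layer {
            if info == nil {
                continue
            }
            fmt.Printf("layer %d: %s %s (reason %v)\n", i, name, info.Version, info.Reason)
        }
    }

    return nil
}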

View File

@ -1,811 +0,0 @@
//go:build !integration
// +build !integration
package dep
import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"testing"
aurc "github.com/Jguer/aur"
alpm "github.com/Jguer/go-alpm/v2"
"github.com/stretchr/testify/require"
"github.com/Jguer/yay/v12/pkg/db"
"github.com/Jguer/yay/v12/pkg/db/mock"
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
aur "github.com/Jguer/yay/v12/pkg/query"
"github.com/Jguer/yay/v12/pkg/text"
)
func ptrString(s string) *string {
return &s
}
func getFromFile(t *testing.T, filePath string) mockaur.GetFunc {
f, err := os.Open(filePath)
require.NoError(t, err)
fBytes, err := io.ReadAll(f)
require.NoError(t, err)
pkgs := []aur.Pkg{}
err = json.Unmarshal(fBytes, &pkgs)
require.NoError(t, err)
return func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
return pkgs, nil
}
}
func TestGrapher_GraphFromTargets_jellyfin(t *testing.T) {
mockDB := &mock.DBExecutor{
SyncPackageFn: func(string) mock.IPackage { return nil },
SyncSatisfierFn: func(s string) mock.IPackage {
switch s {
case "jellyfin":
return nil
case "dotnet-runtime-6.0":
return &mock.Package{
PName: "dotnet-runtime-6.0",
PBase: "dotnet-runtime-6.0",
PVersion: "6.0.100-1",
PDB: mock.NewDB("community"),
}
case "dotnet-sdk-6.0":
return &mock.Package{
PName: "dotnet-sdk-6.0",
PBase: "dotnet-sdk-6.0",
PVersion: "6.0.100-1",
PDB: mock.NewDB("community"),
}
}
return nil
},
PackagesFromGroupFn: func(string) []mock.IPackage { return nil },
LocalSatisfierExistsFn: func(s string) bool {
switch s {
case "dotnet-sdk-6.0", "dotnet-runtime-6.0", "jellyfin-server=10.8.8", "jellyfin-web=10.8.8":
return false
}
return true
},
LocalPackageFn: func(string) mock.IPackage { return nil },
}
mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
if query.Needles[0] == "jellyfin" {
jfinFn := getFromFile(t, "testdata/jellyfin.json")
return jfinFn(ctx, query)
}
if query.Needles[0] == "jellyfin-web" {
jfinWebFn := getFromFile(t, "testdata/jellyfin-web.json")
return jfinWebFn(ctx, query)
}
if query.Needles[0] == "jellyfin-server" {
jfinServerFn := getFromFile(t, "testdata/jellyfin-server.json")
return jfinServerFn(ctx, query)
}
panic(fmt.Sprintf("implement me %v", query.Needles))
}}
type fields struct {
dbExecutor db.Executor
aurCache aurc.QueryClient
noDeps bool
noCheckDeps bool
}
type args struct {
targets []string
}
tests := []struct {
name string
fields fields
args args
want []map[string]*InstallInfo
wantErr bool
}{
{
name: "noDeps",
fields: fields{
dbExecutor: mockDB,
aurCache: mockAUR,
noDeps: true,
noCheckDeps: false,
},
args: args{
targets: []string{"jellyfin"},
},
want: []map[string]*InstallInfo{
{
"jellyfin": {
Source: AUR,
Reason: Explicit,
Version: "10.8.8-1",
AURBase: ptrString("jellyfin"),
},
},
{
"dotnet-sdk-6.0": {
Source: Sync,
Reason: MakeDep,
Version: "6.0.100-1",
SyncDBName: ptrString("community"),
},
},
},
wantErr: false,
},
{
name: "deps",
fields: fields{
dbExecutor: mockDB,
aurCache: mockAUR,
noDeps: false,
noCheckDeps: false,
},
args: args{
targets: []string{"jellyfin"},
},
want: []map[string]*InstallInfo{
{
"jellyfin": {
Source: AUR,
Reason: Explicit,
Version: "10.8.8-1",
AURBase: ptrString("jellyfin"),
},
},
{
"jellyfin-web": {
Source: AUR,
Reason: Dep,
Version: "10.8.8-1",
AURBase: ptrString("jellyfin"),
},
"jellyfin-server": {
Source: AUR,
Reason: Dep,
Version: "10.8.8-1",
AURBase: ptrString("jellyfin"),
},
},
{
"dotnet-sdk-6.0": {
Source: Sync,
Reason: MakeDep,
Version: "6.0.100-1",
SyncDBName: ptrString("community"),
},
"dotnet-runtime-6.0": {
Source: Sync,
Reason: Dep,
Version: "6.0.100-1",
SyncDBName: ptrString("community"),
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
g := NewGrapher(tt.fields.dbExecutor,
tt.fields.aurCache, false, true,
tt.fields.noDeps, tt.fields.noCheckDeps, false,
text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
got, err := g.GraphFromTargets(context.Background(), nil, tt.args.targets)
require.NoError(t, err)
layers := got.TopoSortedLayerMap(nil)
require.EqualValues(t, tt.want, layers, layers)
})
}
}
func TestGrapher_GraphProvides_androidsdk(t *testing.T) {
mockDB := &mock.DBExecutor{
SyncPackageFn: func(string) mock.IPackage { return nil },
SyncSatisfierFn: func(s string) mock.IPackage {
switch s {
case "android-sdk":
return nil
case "jdk11-openjdk":
return &mock.Package{
PName: "jdk11-openjdk",
PVersion: "11.0.12.u7-1",
PDB: mock.NewDB("community"),
PProvides: mock.DependList{
Depends: []alpm.Depend{
{Name: "java-environment", Version: "11", Mod: alpm.DepModEq},
{Name: "java-environment-openjdk", Version: "11", Mod: alpm.DepModEq},
{Name: "jdk11-openjdk", Version: "11.0.19.u7-1", Mod: alpm.DepModEq},
},
},
}
case "java-environment":
panic("not supposed to be called")
}
panic("implement me " + s)
},
PackagesFromGroupFn: func(string) []mock.IPackage { return nil },
LocalSatisfierExistsFn: func(s string) bool {
switch s {
case "java-environment":
return false
}
switch s {
case "libxtst", "fontconfig", "freetype2", "lib32-gcc-libs", "lib32-glibc", "libx11", "libxext", "libxrender", "zlib", "gcc-libs":
return true
}
panic("implement me " + s)
},
LocalPackageFn: func(string) mock.IPackage { return nil },
}
mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
if query.Needles[0] == "android-sdk" {
jfinFn := getFromFile(t, "testdata/android-sdk.json")
return jfinFn(ctx, query)
}
panic(fmt.Sprintf("implement me %v", query.Needles))
}}
type fields struct {
dbExecutor db.Executor
aurCache aurc.QueryClient
noDeps bool
noCheckDeps bool
}
type args struct {
targets []string
}
tests := []struct {
name string
fields fields
args args
want []map[string]*InstallInfo
wantErr bool
}{
{
name: "explicit dep",
fields: fields{
dbExecutor: mockDB,
aurCache: mockAUR,
noDeps: false,
noCheckDeps: false,
},
args: args{
targets: []string{"android-sdk", "jdk11-openjdk"},
},
want: []map[string]*InstallInfo{
{
"android-sdk": {
Source: AUR,
Reason: Explicit,
Version: "26.1.1-2",
AURBase: ptrString("android-sdk"),
},
},
{
"jdk11-openjdk": {
Source: Sync,
Reason: Explicit,
Version: "11.0.12.u7-1",
SyncDBName: ptrString("community"),
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
g := NewGrapher(tt.fields.dbExecutor,
tt.fields.aurCache, false, true,
tt.fields.noDeps, tt.fields.noCheckDeps, false,
text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
got, err := g.GraphFromTargets(context.Background(), nil, tt.args.targets)
require.NoError(t, err)
layers := got.TopoSortedLayerMap(nil)
require.EqualValues(t, tt.want, layers, layers)
})
}
}
func TestGrapher_GraphFromAUR_Deps_ceph_bin(t *testing.T) {
mockDB := &mock.DBExecutor{
SyncPackageFn: func(string) mock.IPackage { return nil },
PackagesFromGroupFn: func(string) []mock.IPackage { return []mock.IPackage{} },
SyncSatisfierFn: func(s string) mock.IPackage {
switch s {
case "ceph-bin", "ceph-libs-bin":
return nil
case "ceph", "ceph-libs", "ceph-libs=17.2.6-2":
return nil
}
panic("implement me " + s)
},
LocalSatisfierExistsFn: func(s string) bool {
switch s {
case "ceph-libs", "ceph-libs=17.2.6-2":
return false
case "dep1", "dep2", "dep3", "makedep1", "makedep2", "checkdep1":
return true
}
panic("implement me " + s)
},
LocalPackageFn: func(string) mock.IPackage { return nil },
}
mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
mockPkgs := map[string]aur.Pkg{
"ceph-bin": {
Name: "ceph-bin",
PackageBase: "ceph-bin",
Version: "17.2.6-2",
Depends: []string{"ceph-libs=17.2.6-2", "dep1"},
Provides: []string{"ceph=17.2.6-2"},
},
"ceph-libs-bin": {
Name: "ceph-libs-bin",
PackageBase: "ceph-bin",
Version: "17.2.6-2",
Depends: []string{"dep1", "dep2"},
Provides: []string{"ceph-libs=17.2.6-2"},
},
"ceph": {
Name: "ceph",
PackageBase: "ceph",
Version: "17.2.6-2",
Depends: []string{"ceph-libs=17.2.6-2", "dep1"},
MakeDepends: []string{"makedep1"},
CheckDepends: []string{"checkdep1"},
Provides: []string{"ceph=17.2.6-2"},
},
"ceph-libs": {
Name: "ceph-libs",
PackageBase: "ceph",
Version: "17.2.6-2",
Depends: []string{"dep1", "dep2", "dep3"},
MakeDepends: []string{"makedep1", "makedep2"},
CheckDepends: []string{"checkdep1"},
Provides: []string{"ceph-libs=17.2.6-2"},
},
}
pkgs := []aur.Pkg{}
for _, needle := range query.Needles {
if pkg, ok := mockPkgs[needle]; ok {
pkgs = append(pkgs, pkg)
} else {
panic(fmt.Sprintf("implement me %v", needle))
}
}
return pkgs, nil
}}
installInfos := map[string]*InstallInfo{
"ceph-bin exp": {
Source: AUR,
Reason: Explicit,
Version: "17.2.6-2",
AURBase: ptrString("ceph-bin"),
},
"ceph-libs-bin exp": {
Source: AUR,
Reason: Explicit,
Version: "17.2.6-2",
AURBase: ptrString("ceph-bin"),
},
"ceph exp": {
Source: AUR,
Reason: Explicit,
Version: "17.2.6-2",
AURBase: ptrString("ceph"),
},
"ceph-libs exp": {
Source: AUR,
Reason: Explicit,
Version: "17.2.6-2",
AURBase: ptrString("ceph"),
},
"ceph-libs dep": {
Source: AUR,
Reason: Dep,
Version: "17.2.6-2",
AURBase: ptrString("ceph"),
},
}
tests := []struct {
name string
targets []string
wantLayers []map[string]*InstallInfo
wantErr bool
}{
{
name: "ceph-bin ceph-libs-bin",
targets: []string{"ceph-bin", "ceph-libs-bin"},
wantLayers: []map[string]*InstallInfo{
{"ceph-bin": installInfos["ceph-bin exp"]},
{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
},
wantErr: false,
},
{
name: "ceph-libs-bin ceph-bin (reversed order)",
targets: []string{"ceph-libs-bin", "ceph-bin"},
wantLayers: []map[string]*InstallInfo{
{"ceph-bin": installInfos["ceph-bin exp"]},
{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
},
wantErr: false,
},
{
name: "ceph",
targets: []string{"ceph"},
wantLayers: []map[string]*InstallInfo{
{"ceph": installInfos["ceph exp"]},
{"ceph-libs": installInfos["ceph-libs dep"]},
},
wantErr: false,
},
{
name: "ceph-bin",
targets: []string{"ceph-bin"},
wantLayers: []map[string]*InstallInfo{
{"ceph-bin": installInfos["ceph-bin exp"]},
{"ceph-libs": installInfos["ceph-libs dep"]},
},
wantErr: false,
},
{
name: "ceph-bin ceph-libs",
targets: []string{"ceph-bin", "ceph-libs"},
wantLayers: []map[string]*InstallInfo{
{"ceph-bin": installInfos["ceph-bin exp"]},
{"ceph-libs": installInfos["ceph-libs exp"]},
},
wantErr: false,
},
{
name: "ceph-libs ceph-bin (reversed order)",
targets: []string{"ceph-libs", "ceph-bin"},
wantLayers: []map[string]*InstallInfo{
{"ceph-bin": installInfos["ceph-bin exp"]},
{"ceph-libs": installInfos["ceph-libs exp"]},
},
wantErr: false,
},
{
name: "ceph ceph-libs-bin",
targets: []string{"ceph", "ceph-libs-bin"},
wantLayers: []map[string]*InstallInfo{
{"ceph": installInfos["ceph exp"]},
{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
},
wantErr: false,
},
{
name: "ceph-libs-bin ceph (reversed order)",
targets: []string{"ceph-libs-bin", "ceph"},
wantLayers: []map[string]*InstallInfo{
{"ceph": installInfos["ceph exp"]},
{"ceph-libs-bin": installInfos["ceph-libs-bin exp"]},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
g := NewGrapher(mockDB, mockAUR,
false, true, false, false, false,
text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
got, err := g.GraphFromTargets(context.Background(), nil, tt.targets)
require.NoError(t, err)
layers := got.TopoSortedLayerMap(nil)
require.EqualValues(t, tt.wantLayers, layers, layers)
})
}
}
func TestGrapher_GraphFromAUR_Deps_gourou(t *testing.T) {
mockDB := &mock.DBExecutor{
SyncPackageFn: func(string) mock.IPackage { return nil },
PackagesFromGroupFn: func(string) []mock.IPackage { return []mock.IPackage{} },
SyncSatisfierFn: func(s string) mock.IPackage {
switch s {
case "gourou", "libzip-git":
return nil
case "libzip":
return &mock.Package{
PName: "libzip",
PVersion: "1.9.2-1",
PDB: mock.NewDB("extra"),
}
}
panic("implement me " + s)
},
LocalSatisfierExistsFn: func(s string) bool {
switch s {
case "gourou", "libzip", "libzip-git":
return false
case "dep1", "dep2":
return true
}
panic("implement me " + s)
},
LocalPackageFn: func(string) mock.IPackage { return nil },
}
mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
mockPkgs := map[string]aur.Pkg{
"gourou": {
Name: "gourou",
PackageBase: "gourou",
Version: "0.8.1",
Depends: []string{"libzip"},
},
"libzip-git": {
Name: "libzip-git",
PackageBase: "libzip-git",
Version: "1.9.2.r159.gb3ac716c-1",
Depends: []string{"dep1", "dep2"},
Provides: []string{"libzip=1.9.2.r159.gb3ac716c"},
},
}
pkgs := []aur.Pkg{}
for _, needle := range query.Needles {
if pkg, ok := mockPkgs[needle]; ok {
pkgs = append(pkgs, pkg)
} else {
panic(fmt.Sprintf("implement me %v", needle))
}
}
return pkgs, nil
}}
installInfos := map[string]*InstallInfo{
"gourou exp": {
Source: AUR,
Reason: Explicit,
Version: "0.8.1",
AURBase: ptrString("gourou"),
},
"libzip dep": {
Source: Sync,
Reason: Dep,
Version: "1.9.2-1",
SyncDBName: ptrString("extra"),
},
"libzip exp": {
Source: Sync,
Reason: Explicit,
Version: "1.9.2-1",
SyncDBName: ptrString("extra"),
},
"libzip-git exp": {
Source: AUR,
Reason: Explicit,
Version: "1.9.2.r159.gb3ac716c-1",
AURBase: ptrString("libzip-git"),
},
}
tests := []struct {
name string
targets []string
wantLayers []map[string]*InstallInfo
wantErr bool
}{
{
name: "gourou",
targets: []string{"gourou"},
wantLayers: []map[string]*InstallInfo{
{"gourou": installInfos["gourou exp"]},
{"libzip": installInfos["libzip dep"]},
},
wantErr: false,
},
{
name: "gourou libzip",
targets: []string{"gourou", "libzip"},
wantLayers: []map[string]*InstallInfo{
{"gourou": installInfos["gourou exp"]},
{"libzip": installInfos["libzip exp"]},
},
wantErr: false,
},
{
name: "gourou libzip-git",
targets: []string{"gourou", "libzip-git"},
wantLayers: []map[string]*InstallInfo{
{"gourou": installInfos["gourou exp"]},
{"libzip-git": installInfos["libzip-git exp"]},
},
wantErr: false,
},
{
name: "libzip-git gourou (reversed order)",
targets: []string{"libzip-git", "gourou"},
wantLayers: []map[string]*InstallInfo{
{"gourou": installInfos["gourou exp"]},
{"libzip-git": installInfos["libzip-git exp"]},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
g := NewGrapher(mockDB, mockAUR,
false, true, false, false, false,
text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
got, err := g.GraphFromTargets(context.Background(), nil, tt.targets)
require.NoError(t, err)
layers := got.TopoSortedLayerMap(nil)
require.EqualValues(t, tt.wantLayers, layers, layers)
})
}
}
func TestGrapher_GraphFromTargets_ReinstalledDeps(t *testing.T) {
mockDB := &mock.DBExecutor{
SyncPackageFn: func(string) mock.IPackage { return nil },
PackagesFromGroupFn: func(string) []mock.IPackage { return []mock.IPackage{} },
SyncSatisfierFn: func(s string) mock.IPackage {
switch s {
case "gourou":
return nil
case "libzip":
return &mock.Package{
PName: "libzip",
PVersion: "1.9.2-1",
PDB: mock.NewDB("extra"),
}
}
panic("implement me " + s)
},
SatisfierFromDBFn: func(s, s2 string) (mock.IPackage, error) {
if s2 == "extra" {
switch s {
case "libzip":
return &mock.Package{
PName: "libzip",
PVersion: "1.9.2-1",
PDB: mock.NewDB("extra"),
}, nil
}
}
panic("implement me " + s2 + "/" + s)
},
LocalSatisfierExistsFn: func(s string) bool {
switch s {
case "gourou", "libzip":
return true
}
panic("implement me " + s)
},
LocalPackageFn: func(s string) mock.IPackage {
switch s {
case "libzip":
return &mock.Package{
PName: "libzip",
PVersion: "1.9.2-1",
PDB: mock.NewDB("extra"),
PReason: alpm.PkgReasonDepend,
}
case "gourou":
return &mock.Package{
PName: "gourou",
PVersion: "0.8.1",
PDB: mock.NewDB("aur"),
PReason: alpm.PkgReasonDepend,
}
}
return nil
},
}
mockAUR := &mockaur.MockAUR{GetFn: func(ctx context.Context, query *aurc.Query) ([]aur.Pkg, error) {
mockPkgs := map[string]aur.Pkg{
"gourou": {
Name: "gourou",
PackageBase: "gourou",
Version: "0.8.1",
Depends: []string{"libzip"},
},
}
pkgs := []aur.Pkg{}
for _, needle := range query.Needles {
if pkg, ok := mockPkgs[needle]; ok {
pkgs = append(pkgs, pkg)
} else {
panic(fmt.Sprintf("implement me %v", needle))
}
}
return pkgs, nil
}}
installInfos := map[string]*InstallInfo{
"gourou dep": {
Source: AUR,
Reason: Dep,
Version: "0.8.1",
AURBase: ptrString("gourou"),
},
"libzip dep": {
Source: Sync,
Reason: Dep,
Version: "1.9.2-1",
SyncDBName: ptrString("extra"),
},
}
tests := []struct {
name string
targets []string
wantLayers []map[string]*InstallInfo
wantErr bool
}{
{
name: "gourou libzip",
targets: []string{"gourou", "libzip"},
wantLayers: []map[string]*InstallInfo{
{"gourou": installInfos["gourou dep"]},
{"libzip": installInfos["libzip dep"]},
},
wantErr: false,
},
{
name: "aur/gourou extra/libzip",
targets: []string{"aur/gourou", "extra/libzip"},
wantLayers: []map[string]*InstallInfo{
{"gourou": installInfos["gourou dep"]},
{"libzip": installInfos["libzip dep"]},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
g := NewGrapher(mockDB, mockAUR,
false, true, false, false, false,
text.NewLogger(io.Discard, io.Discard, &os.File{}, true, "test"))
got, err := g.GraphFromTargets(context.Background(), nil, tt.targets)
require.NoError(t, err)
layers := got.TopoSortedLayerMap(nil)
require.EqualValues(t, tt.wantLayers, layers, layers)
})
}
}

View File

@ -1,21 +0,0 @@
package mock
import (
"context"
"github.com/Jguer/aur"
)
type GetFunc func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error)
type MockAUR struct {
GetFn GetFunc
}
func (m *MockAUR) Get(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
if m.GetFn != nil {
return m.GetFn(ctx, query)
}
panic("implement me")
}

View File

@ -1,34 +0,0 @@
package dep
import "github.com/Jguer/yay/v12/pkg/text"
type Target struct {
DB string
Name string
Mod string
Version string
}
func ToTarget(pkg string) Target {
dbName, depString := text.SplitDBFromName(pkg)
name, mod, depVersion := splitDep(depString)
return Target{
DB: dbName,
Name: name,
Mod: mod,
Version: depVersion,
}
}
func (t Target) DepString() string {
return t.Name + t.Mod + t.Version
}
func (t Target) String() string {
if t.DB != "" {
return t.DB + "/" + t.DepString()
}
return t.DepString()
}
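
A tiny illustrative example of the parsing round-trip (a sketch only; the concrete field values assume SplitDBFromName splits on the first '/' and splitDep splits name, operator and version, which matches how they are used throughout this package; fmt import elided):

func ExampleToTarget() {
    t := ToTarget("extra/linux>=6.1")
    fmt.Println(t.DB, t.Name, t.Mod, t.Version) // extra linux >= 6.1
    fmt.Println(t.DepString())                  // linux>=6.1
    fmt.Println(t.String())                     // extra/linux>=6.1
}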

View File

@ -1,3 +0,0 @@
[
{"ID":1055234,"Name":"android-sdk","PackageBaseID":13751,"PackageBase":"android-sdk","Version":"26.1.1-2","Description":"Google Android SDK","URL":"https://developer.android.com/studio/releases/sdk-tools.html","NumVotes":1487,"Popularity":0.802316,"OutOfDate":null,"Maintainer":"dreamingincode","Submitter":null,"FirstSubmitted":1194895596,"LastModified":1647982720,"URLPath":"/cgit/aur.git/snapshot/android-sdk.tar.gz","Depends":["java-environment","libxtst","fontconfig","freetype2","lib32-gcc-libs","lib32-glibc","libx11","libxext","libxrender","zlib","gcc-libs"],"OptDepends":["android-emulator","android-sdk-platform-tools","android-udev"],"License":["custom"],"Keywords":["android","development"]}
]

View File

@ -1,3 +0,0 @@
[
{"ID":1176791,"Name":"jellyfin-server","PackageBaseID":138631,"PackageBase":"jellyfin","Version":"10.8.8-1","Description":"Jellyfin server component","URL":"https://github.com/jellyfin/jellyfin","NumVotes":84,"Popularity":1.272964,"OutOfDate":null,"Maintainer":"z3ntu","Submitter":"z3ntu","FirstSubmitted":1547053171,"LastModified":1669830147,"URLPath":"/cgit/aur.git/snapshot/jellyfin-server.tar.gz","Depends":["dotnet-runtime-6.0","aspnet-runtime-6.0","ffmpeg","sqlite"],"MakeDepends":["dotnet-sdk-6.0","nodejs","npm","git"],"License":["GPL2"]}
]

View File

@ -1,3 +0,0 @@
[
{"ID":1176790,"Name":"jellyfin-web","PackageBaseID":138631,"PackageBase":"jellyfin","Version":"10.8.8-1","Description":"Jellyfin web client","URL":"https://github.com/jellyfin/jellyfin","NumVotes":84,"Popularity":1.272964,"OutOfDate":null,"Maintainer":"z3ntu","Submitter":"z3ntu","FirstSubmitted":1547053171,"LastModified":1669830147,"URLPath":"/cgit/aur.git/snapshot/jellyfin-web.tar.gz","MakeDepends":["dotnet-sdk-6.0","nodejs","npm","git"],"License":["GPL2"]}
]

View File

@ -1,3 +0,0 @@
[
{"ID":1176789,"Name":"jellyfin","PackageBaseID":138631,"PackageBase":"jellyfin","Version":"10.8.8-1","Description":"The Free Software Media System","URL":"https://github.com/jellyfin/jellyfin","NumVotes":84,"Popularity":1.272964,"OutOfDate":null,"Maintainer":"z3ntu","Submitter":"z3ntu","FirstSubmitted":1547053171,"LastModified":1669830147,"URLPath":"/cgit/aur.git/snapshot/jellyfin.tar.gz","Depends":["jellyfin-web=10.8.8","jellyfin-server=10.8.8"],"MakeDepends":["dotnet-sdk-6.0","nodejs","npm","git"],"License":["GPL2"]}
]

View File

@ -1,371 +0,0 @@
package topo
import (
"fmt"
"strings"
"github.com/Jguer/go-alpm/v2"
)
type (
NodeSet[T comparable] map[T]bool
ProvidesMap[T comparable] map[T]*DependencyInfo[T]
DepMap[T comparable] map[T]NodeSet[T]
)
func (n NodeSet[T]) Slice() []T {
var slice []T
for node := range n {
slice = append(slice, node)
}
return slice
}
type NodeInfo[V any] struct {
Color string
Background string
Value V
}
type DependencyInfo[T comparable] struct {
Provider T
alpm.Depend
}
type CheckFn[T comparable, V any] func(T, V) error
type Graph[T comparable, V any] struct {
nodes NodeSet[T]
// node info map
nodeInfo map[T]*NodeInfo[V]
// `provides` tracks provides -> node.
provides ProvidesMap[T]
// `dependencies` tracks child -> parents.
dependencies DepMap[T]
// `dependents` tracks parent -> children.
dependents DepMap[T]
}
func New[T comparable, V any]() *Graph[T, V] {
return &Graph[T, V]{
nodes: make(NodeSet[T]),
dependencies: make(DepMap[T]),
dependents: make(DepMap[T]),
nodeInfo: make(map[T]*NodeInfo[V]),
provides: make(ProvidesMap[T]),
}
}
func (g *Graph[T, V]) Len() int {
return len(g.nodes)
}
func (g *Graph[T, V]) Exists(node T) bool {
_, ok := g.nodes[node]
return ok
}
func (g *Graph[T, V]) AddNode(node T) {
g.nodes[node] = true
}
func (g *Graph[T, V]) ProvidesExists(provides T) bool {
_, ok := g.provides[provides]
return ok
}
func (g *Graph[T, V]) GetProviderNode(provides T) *DependencyInfo[T] {
return g.provides[provides]
}
func (g *Graph[T, V]) Provides(provides T, depInfo *alpm.Depend, node T) {
g.provides[provides] = &DependencyInfo[T]{
Provider: node,
Depend: *depInfo,
}
}
func (g *Graph[T, V]) ForEach(f CheckFn[T, V]) error {
for node := range g.nodes {
if err := f(node, g.nodeInfo[node].Value); err != nil {
return err
}
}
return nil
}
func (g *Graph[T, V]) SetNodeInfo(node T, nodeInfo *NodeInfo[V]) {
g.nodeInfo[node] = nodeInfo
}
func (g *Graph[T, V]) GetNodeInfo(node T) *NodeInfo[V] {
return g.nodeInfo[node]
}
func (g *Graph[T, V]) DependOn(child, parent T) error {
if child == parent {
return ErrSelfReferential
}
if g.DependsOn(parent, child) {
return ErrCircular
}
g.AddNode(parent)
g.AddNode(child)
// Add edges.
g.dependents.addNodeToNodeset(parent, child)
g.dependencies.addNodeToNodeset(child, parent)
return nil
}
func (g *Graph[T, V]) String() string {
var sb strings.Builder
sb.WriteString("digraph {\n")
sb.WriteString("compound=true;\n")
sb.WriteString("concentrate=true;\n")
sb.WriteString("node [shape = record, ordering=out];\n")
for node := range g.nodes {
extra := ""
if info, ok := g.nodeInfo[node]; ok {
if info.Background != "" || info.Color != "" {
extra = fmt.Sprintf("[color = %s, style = filled, fillcolor = %s]", info.Color, info.Background)
}
}
sb.WriteString(fmt.Sprintf("\t\"%v\"%s;\n", node, extra))
}
for parent, children := range g.dependencies {
for child := range children {
sb.WriteString(fmt.Sprintf("\t\"%v\" -> \"%v\";\n", parent, child))
}
}
sb.WriteString("}")
return sb.String()
}
func (g *Graph[T, V]) DependsOn(child, parent T) bool {
deps := g.Dependencies(child)
_, ok := deps[parent]
return ok
}
func (g *Graph[T, V]) HasDependent(parent, child T) bool {
deps := g.Dependents(parent)
_, ok := deps[child]
return ok
}
// leavesMap returns the current leaves (nodes with no remaining dependencies), mapping each leaf to its node info value.
func (g *Graph[T, V]) leavesMap() map[T]V {
leaves := make(map[T]V, 0)
for node := range g.nodes {
if _, ok := g.dependencies[node]; !ok {
nodeInfo := g.GetNodeInfo(node)
if nodeInfo == nil {
nodeInfo = &NodeInfo[V]{}
}
leaves[node] = nodeInfo.Value
}
}
return leaves
}
// TopoSortedLayerMap returns a slice of all of the graph nodes in topological sort order with their node info.
func (g *Graph[T, V]) TopoSortedLayerMap(checkFn CheckFn[T, V]) []map[T]V {
layers := []map[T]V{}
// Copy the graph
shrinkingGraph := g.clone()
for {
leaves := shrinkingGraph.leavesMap()
if len(leaves) == 0 {
break
}
layers = append(layers, leaves)
for leafNode := range leaves {
if checkFn != nil {
if err := checkFn(leafNode, leaves[leafNode]); err != nil {
return nil
}
}
shrinkingGraph.remove(leafNode)
}
}
return layers
}
// removeFromDepmap removes node from the nodeset stored under key and reports whether it was the last entry (in which case the key is deleted).
func (dm DepMap[T]) removeFromDepmap(key, node T) bool {
if nodes := dm[key]; len(nodes) == 1 {
// The only element in the nodeset must be `node`, so we
// can delete the entry entirely.
delete(dm, key)
return true
} else {
// Otherwise, remove the single node from the nodeset.
delete(nodes, node)
return false
}
}
// Prune removes the node, recursively pruning any dependent whose only
// dependency was this node and any dependency whose only dependent was this node.
// It returns every node that was pruned.
func (g *Graph[T, V]) Prune(node T) []T {
pruned := []T{node}
// Remove edges from things that depend on `node`.
for dependent := range g.dependents[node] {
last := g.dependencies.removeFromDepmap(dependent, node)
if last {
pruned = append(pruned, g.Prune(dependent)...)
}
}
delete(g.dependents, node)
// Remove all edges from node to the things it depends on.
for dependency := range g.dependencies[node] {
last := g.dependents.removeFromDepmap(dependency, node)
if last {
pruned = append(pruned, g.Prune(dependency)...)
}
}
delete(g.dependencies, node)
// Finally, remove the node itself.
delete(g.nodes, node)
return pruned
}
func (g *Graph[T, V]) remove(node T) {
// Remove edges from things that depend on `node`.
for dependent := range g.dependents[node] {
g.dependencies.removeFromDepmap(dependent, node)
}
delete(g.dependents, node)
// Remove all edges from node to the things it depends on.
for dependency := range g.dependencies[node] {
g.dependents.removeFromDepmap(dependency, node)
}
delete(g.dependencies, node)
// Finally, remove the node itself.
delete(g.nodes, node)
}
func (g *Graph[T, V]) Dependencies(child T) NodeSet[T] {
return g.buildTransitive(child, g.ImmediateDependencies)
}
func (g *Graph[T, V]) ImmediateDependencies(node T) NodeSet[T] {
return g.dependencies[node]
}
func (g *Graph[T, V]) Dependents(parent T) NodeSet[T] {
return g.buildTransitive(parent, g.immediateDependents)
}
func (g *Graph[T, V]) immediateDependents(node T) NodeSet[T] {
return g.dependents[node]
}
func (g *Graph[T, V]) clone() *Graph[T, V] {
return &Graph[T, V]{
dependencies: g.dependencies.copy(),
dependents: g.dependents.copy(),
nodes: g.nodes.copy(),
nodeInfo: g.nodeInfo, // not copied, as it is not modified
}
}
// buildTransitive starts at `root` and continues calling `nextFn` to keep discovering more nodes until
// the graph cannot produce any more. It returns the set of all discovered nodes.
func (g *Graph[T, V]) buildTransitive(root T, nextFn func(T) NodeSet[T]) NodeSet[T] {
if _, ok := g.nodes[root]; !ok {
return nil
}
out := make(NodeSet[T])
searchNext := []T{root}
for len(searchNext) > 0 {
// List of new nodes from this layer of the dependency graph. This is
// assigned to `searchNext` at the end of the outer "discovery" loop.
discovered := []T{}
for _, node := range searchNext {
// For each node to discover, find the next nodes.
for nextNode := range nextFn(node) {
// If we have not seen the node before, add it to the output as well
// as the list of nodes to traverse in the next iteration.
if _, ok := out[nextNode]; !ok {
out[nextNode] = true
discovered = append(discovered, nextNode)
}
}
}
searchNext = discovered
}
return out
}
func (s NodeSet[T]) copy() NodeSet[T] {
out := make(NodeSet[T], len(s))
for k, v := range s {
out[k] = v
}
return out
}
func (dm DepMap[T]) copy() DepMap[T] {
out := make(DepMap[T], len(dm))
for k := range dm {
out[k] = dm[k].copy()
}
return out
}
func (dm DepMap[T]) addNodeToNodeset(key, node T) {
nodes, ok := dm[key]
if !ok {
nodes = make(NodeSet[T])
dm[key] = nodes
}
nodes[node] = true
}
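
To make the generic graph's conventions concrete, a small self-contained sketch (illustrative only; it uses string values, ignores node info, and assumes just the standard fmt package on top of this file):

func exampleLayers() {
    g := New[string, string]()

    // DependOn takes the dependency first and the node that requires it second,
    // matching how the grapher calls it: here "app" requires "lib",
    // and "lib" requires "runtime".
    _ = g.DependOn("lib", "app")
    _ = g.DependOn("runtime", "lib")

    // Layers come out with the node nothing requires first:
    // 0 map[app:], 1 map[lib:], 2 map[runtime:]
    for i, layer := range g.TopoSortedLayerMap(nil) {
        fmt.Println(i, layer)
    }
}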

View File

@ -1,9 +0,0 @@
package topo
import "errors"
var (
ErrSelfReferential = errors.New(" self-referential dependencies not allowed")
ErrConflictingAlias = errors.New(" alias already defined")
ErrCircular = errors.New(" circular dependencies not allowed")
)
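
Both sentinels are surfaced by DependOn in the graph code above; a short illustrative check (a sketch assuming the standard errors and fmt packages):

func exampleErrors() {
    g := New[string, string]()

    fmt.Println(errors.Is(g.DependOn("a", "a"), ErrSelfReferential)) // true

    _ = g.DependOn("b", "a")                                  // "a" requires "b"
    fmt.Println(errors.Is(g.DependOn("a", "b"), ErrCircular)) // true: the reverse edge would close a cycle
}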

View File

@ -6,61 +6,63 @@ import (
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"regexp"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/settings/exe"
) )
const ( const (
MaxConcurrentFetch = 20 MaxConcurrentFetch = 20
absPackageURL = "https://gitlab.archlinux.org/archlinux/packaging/packages" _urlPackagePath = "%s/raw/packages/%s/trunk/PKGBUILD"
) )
var ( var (
ErrInvalidRepository = errors.New(gotext.Get("invalid repository")) ErrInvalidRepository = errors.New(gotext.Get("invalid repository"))
ErrABSPackageNotFound = errors.New(gotext.Get("package not found in repos")) ErrABSPackageNotFound = errors.New(gotext.Get("package not found in repos"))
ABSPackageURL = "https://github.com/archlinux/svntogit-packages"
ABSCommunityURL = "https://github.com/archlinux/svntogit-community"
) )
type regexReplace struct { func getRepoURL(db string) (string, error) {
repl string switch db {
match *regexp.Regexp case "core", "extra", "testing":
} return ABSPackageURL, nil
case "community", "multilib", "community-testing", "multilib-testing":
return ABSCommunityURL, nil
}
// regex replacements for Gitlab URLs return "", ErrInvalidRepository
// info: https://gitlab.archlinux.org/archlinux/devtools/-/blob/6ce666a1669235749c17d5c44d8a24dea4a135da/src/lib/api/gitlab.sh#L84
var gitlabRepl = []regexReplace{
{repl: `$1-$2`, match: regexp.MustCompile(`([a-zA-Z0-9]+)\+([a-zA-Z]+)`)},
{repl: `plus`, match: regexp.MustCompile(`\+`)},
{repl: `-`, match: regexp.MustCompile(`[^a-zA-Z0-9_\-.]`)},
{repl: `-`, match: regexp.MustCompile(`[_\-]{2,}`)},
{repl: `unix-tree`, match: regexp.MustCompile(`^tree$`)},
} }
// Return format for pkgbuild // Return format for pkgbuild
// https://gitlab.archlinux.org/archlinux/packaging/packages/0ad/-/raw/main/PKGBUILD // https://github.com/archlinux/svntogit-community/raw/packages/neovim/trunk/PKGBUILD
func getPackagePKGBUILDURL(pkgName string) string { func getPackageURL(db, pkgName string) (string, error) {
return fmt.Sprintf("%s/%s/-/raw/main/PKGBUILD", absPackageURL, convertPkgNameForURL(pkgName)) repoURL, err := getRepoURL(db)
if err != nil {
return "", err
}
return fmt.Sprintf(_urlPackagePath, repoURL, pkgName), err
} }
// Return format for pkgbuild repo // Return format for pkgbuild repo
// https://gitlab.archlinux.org/archlinux/packaging/packages/0ad.git // https://github.com/archlinux/svntogit-community.git
func getPackageRepoURL(pkgName string) string { func getPackageRepoURL(db string) (string, error) {
return fmt.Sprintf("%s/%s.git", absPackageURL, convertPkgNameForURL(pkgName)) repoURL, err := getRepoURL(db)
} if err != nil {
return "", err
// convert pkgName for Gitlab URL path (repo name)
func convertPkgNameForURL(pkgName string) string {
for _, regex := range gitlabRepl {
pkgName = regex.match.ReplaceAllString(pkgName, regex.repl)
} }
return pkgName
return repoURL + ".git", err
} }
// ABSPKGBUILD retrieves the PKGBUILD file to a dest directory. // ABSPKGBUILD retrieves the PKGBUILD file to a dest directory.
func ABSPKGBUILD(httpClient httpRequestDoer, dbName, pkgName string) ([]byte, error) { func ABSPKGBUILD(httpClient httpRequestDoer, dbName, pkgName string) ([]byte, error) {
packageURL := getPackagePKGBUILDURL(pkgName) packageURL, err := getPackageURL(dbName, pkgName)
if err != nil {
return nil, err
}
resp, err := httpClient.Get(packageURL) resp, err := httpClient.Get(packageURL)
if err != nil { if err != nil {
@ -83,10 +85,12 @@ func ABSPKGBUILD(httpClient httpRequestDoer, dbName, pkgName string) ([]byte, er
// ABSPKGBUILDRepo retrieves the PKGBUILD repository to a dest directory. // ABSPKGBUILDRepo retrieves the PKGBUILD repository to a dest directory.
func ABSPKGBUILDRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder, func ABSPKGBUILDRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder,
dbName, pkgName, dest string, force bool, dbName, pkgName, dest string, force bool) (bool, error) {
) (bool, error) { pkgURL, err := getPackageRepoURL(dbName)
pkgURL := getPackageRepoURL(pkgName) if err != nil {
return false, err
}
return downloadGitRepo(ctx, cmdBuilder, pkgURL, return downloadGitRepo(ctx, cmdBuilder, pkgURL,
pkgName, dest, force, "--single-branch") pkgName, dest, force, "--single-branch", "-b", "packages/"+pkgName)
} }
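
Because the two-column rendering of this hunk is hard to read, here is a small illustrative sketch of the left-hand column only (the `next` branch's GitLab naming rules); the expected outputs are taken directly from the test cases further down, and the example function name is an assumption:

func exampleGitlabURLs() {
    fmt.Println(convertPkgNameForURL("my+package"))  // my-package
    fmt.Println(convertPkgNameForURL("my++package")) // mypluspluspackage
    fmt.Println(convertPkgNameForURL("tree"))        // unix-tree

    fmt.Println(getPackagePKGBUILDURL("kitty"))
    // https://gitlab.archlinux.org/archlinux/packaging/packages/kitty/-/raw/main/PKGBUILD

    fmt.Println(getPackageRepoURL("zoxide"))
    // https://gitlab.archlinux.org/archlinux/packaging/packages/zoxide.git
}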

View File

@ -1,6 +1,3 @@
//go:build !integration
// +build !integration
package download package download
import ( import (
@ -13,7 +10,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/settings/exe"
) )
const gitExtrasPKGBUILD = `pkgname=git-extras const gitExtrasPKGBUILD = `pkgname=git-extras
@ -50,12 +47,12 @@ func Test_getPackageURL(t *testing.T) {
wantErr bool wantErr bool
}{ }{
{ {
name: "extra package", name: "community package",
args: args{ args: args{
db: "extra", db: "community",
pkgName: "kitty", pkgName: "kitty",
}, },
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/kitty/-/raw/main/PKGBUILD", want: "https://github.com/archlinux/svntogit-community/raw/packages/kitty/trunk/PKGBUILD",
wantErr: false, wantErr: false,
}, },
{ {
@ -64,69 +61,27 @@ func Test_getPackageURL(t *testing.T) {
db: "core", db: "core",
pkgName: "linux", pkgName: "linux",
}, },
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/linux/-/raw/main/PKGBUILD", want: "https://github.com/archlinux/svntogit-packages/raw/packages/linux/trunk/PKGBUILD",
wantErr: false, wantErr: false,
}, },
{ {
name: "personal repo package", name: "personal repo package",
args: args{ args: args{
db: "sweswe", db: "sweswe",
pkgName: "zabix", pkgName: "linux",
}, },
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/zabix/-/raw/main/PKGBUILD", want: "",
wantErr: false, wantErr: true,
},
{
name: "special name +",
args: args{
db: "core",
pkgName: "my+package",
},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package/-/raw/main/PKGBUILD",
wantErr: false,
},
{
name: "special name %",
args: args{
db: "core",
pkgName: "my%package",
},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package/-/raw/main/PKGBUILD",
wantErr: false,
},
{
name: "special name _-",
args: args{
db: "core",
pkgName: "my_-package",
},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package/-/raw/main/PKGBUILD",
wantErr: false,
},
{
name: "special name ++",
args: args{
db: "core",
pkgName: "my++package",
},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/mypluspluspackage/-/raw/main/PKGBUILD",
wantErr: false,
},
{
name: "special name tree",
args: args{
db: "sweswe",
pkgName: "tree",
},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/unix-tree/-/raw/main/PKGBUILD",
wantErr: false,
}, },
} }
for _, tt := range tests { for _, tt := range tests {
tt := tt tt := tt
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
t.Parallel() t.Parallel()
got := getPackagePKGBUILDURL(tt.args.pkgName) got, err := getPackageURL(tt.args.db, tt.args.pkgName)
if tt.wantErr {
assert.ErrorIs(t, err, ErrInvalidRepository)
}
assert.Equal(t, tt.want, got) assert.Equal(t, tt.want, got)
}) })
} }
@ -155,7 +110,7 @@ func TestGetABSPkgbuild(t *testing.T) {
body: gitExtrasPKGBUILD, body: gitExtrasPKGBUILD,
status: 200, status: 200,
pkgName: "git-extras", pkgName: "git-extras",
wantURL: "https://gitlab.archlinux.org/archlinux/packaging/packages/git-extras/-/raw/main/PKGBUILD", wantURL: "https://github.com/archlinux/svntogit-packages/raw/packages/git-extras/trunk/PKGBUILD",
}, },
want: gitExtrasPKGBUILD, want: gitExtrasPKGBUILD,
wantErr: false, wantErr: false,
@ -167,7 +122,7 @@ func TestGetABSPkgbuild(t *testing.T) {
body: "", body: "",
status: 404, status: 404,
pkgName: "git-git", pkgName: "git-git",
wantURL: "https://gitlab.archlinux.org/archlinux/packaging/packages/git-git/-/raw/main/PKGBUILD", wantURL: "https://github.com/archlinux/svntogit-packages/raw/packages/git-git/trunk/PKGBUILD",
}, },
want: "", want: "",
wantErr: true, wantErr: true,
@ -199,7 +154,7 @@ func Test_getPackageRepoURL(t *testing.T) {
t.Parallel() t.Parallel()
type args struct { type args struct {
pkgName string db string
} }
tests := []struct { tests := []struct {
name string name string
@ -208,59 +163,32 @@ func Test_getPackageRepoURL(t *testing.T) {
wantErr bool wantErr bool
}{ }{
{ {
name: "extra package", name: "community package",
args: args{pkgName: "zoxide"}, args: args{db: "community"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/zoxide.git", want: "https://github.com/archlinux/svntogit-community.git",
wantErr: false, wantErr: false,
}, },
{ {
name: "core package", name: "core package",
args: args{pkgName: "linux"}, args: args{db: "core"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/linux.git", want: "https://github.com/archlinux/svntogit-packages.git",
wantErr: false, wantErr: false,
}, },
{ {
name: "personal repo package", name: "personal repo package",
args: args{pkgName: "sweswe"}, args: args{db: "sweswe"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/sweswe.git", want: "",
wantErr: false, wantErr: true,
},
{
name: "special name +",
args: args{pkgName: "my+package"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package.git",
wantErr: false,
},
{
name: "special name %",
args: args{pkgName: "my%package"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package.git",
wantErr: false,
},
{
name: "special name _-",
args: args{pkgName: "my_-package"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/my-package.git",
wantErr: false,
},
{
name: "special name ++",
args: args{pkgName: "my++package"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/mypluspluspackage.git",
wantErr: false,
},
{
name: "special name tree",
args: args{pkgName: "tree"},
want: "https://gitlab.archlinux.org/archlinux/packaging/packages/unix-tree.git",
wantErr: false,
}, },
} }
for _, tt := range tests { for _, tt := range tests {
tt := tt tt := tt
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
t.Parallel() t.Parallel()
got := getPackageRepoURL(tt.args.pkgName) got, err := getPackageRepoURL(tt.args.db)
if tt.wantErr {
assert.ErrorIs(t, err, ErrInvalidRepository)
}
assert.Equal(t, tt.want, got) assert.Equal(t, tt.want, got)
}) })
} }
@ -272,13 +200,13 @@ func Test_getPackageRepoURL(t *testing.T) {
func TestABSPKGBUILDRepo(t *testing.T) { func TestABSPKGBUILDRepo(t *testing.T) {
t.Parallel() t.Parallel()
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
want := "/usr/local/bin/git --no-replace-objects -C /tmp/doesnt-exist clone --no-progress --single-branch https://gitlab.archlinux.org/archlinux/packaging/packages/linux.git linux" want := "/usr/local/bin/git --no-replace-objects -C /tmp/doesnt-exist clone --no-progress --single-branch -b packages/linux https://github.com/archlinux/svntogit-packages.git linux"
if os.Getuid() == 0 { if os.Getuid() == 0 {
ld := "systemd-run" ld := "systemd-run"
if path, _ := exec.LookPath(ld); path != "" { if path, _ := exec.LookPath(ld); path != "" {
ld = path ld = path
} }
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C /tmp/doesnt-exist clone --no-progress --single-branch https://gitlab.archlinux.org/archlinux/packaging/packages/linux.git linux", ld) want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C /tmp/doesnt-exist clone --no-progress --single-branch -b packages/linux https://github.com/archlinux/svntogit-packages.git linux", ld)
} }
cmdBuilder := &testGitBuilder{ cmdBuilder := &testGitBuilder{
@ -291,7 +219,7 @@ func TestABSPKGBUILDRepo(t *testing.T) {
GitFlags: []string{"--no-replace-objects"}, GitFlags: []string{"--no-replace-objects"},
}, },
} }
newClone, err := ABSPKGBUILDRepo(context.Background(), cmdBuilder, "core", "linux", "/tmp/doesnt-exist", false) newClone, err := ABSPKGBUILDRepo(context.TODO(), cmdBuilder, "core", "linux", "/tmp/doesnt-exist", false)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, true, newClone) assert.Equal(t, true, newClone)
} }
@ -301,17 +229,18 @@ func TestABSPKGBUILDRepo(t *testing.T) {
// THEN a pull command should be formed // THEN a pull command should be formed
func TestABSPKGBUILDRepoExistsPerms(t *testing.T) { func TestABSPKGBUILDRepoExistsPerms(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
os.MkdirAll(filepath.Join(dir, "linux", ".git"), 0o777) os.MkdirAll(filepath.Join(dir, "linux", ".git"), 0o777)
want := fmt.Sprintf("/usr/local/bin/git --no-replace-objects -C %s/linux pull --rebase --autostash", dir) want := fmt.Sprintf("/usr/local/bin/git --no-replace-objects -C %s/linux pull --ff-only", dir)
if os.Getuid() == 0 { if os.Getuid() == 0 {
ld := "systemd-run" ld := "systemd-run"
if path, _ := exec.LookPath(ld); path != "" { if path, _ := exec.LookPath(ld); path != "" {
ld = path ld = path
} }
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C %s/linux pull --rebase --autostash", ld, dir) want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C %s/linux pull --ff-only", ld, dir)
} }
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
@ -325,7 +254,7 @@ func TestABSPKGBUILDRepoExistsPerms(t *testing.T) {
GitFlags: []string{"--no-replace-objects"}, GitFlags: []string{"--no-replace-objects"},
}, },
} }
newClone, err := ABSPKGBUILDRepo(context.Background(), cmdBuilder, "core", "linux", dir, false) newClone, err := ABSPKGBUILDRepo(context.TODO(), cmdBuilder, "core", "linux", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, false, newClone) assert.Equal(t, false, newClone)
} }

View File

@ -10,9 +10,9 @@ import (
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/multierror" "github.com/Jguer/yay/v11/pkg/multierror"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/text"
) )
func AURPKGBUILD(httpClient httpRequestDoer, pkgName, aurURL string) ([]byte, error) { func AURPKGBUILD(httpClient httpRequestDoer, pkgName, aurURL string) ([]byte, error) {
@ -48,9 +48,8 @@ func AURPKGBUILDRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder, aurURL,
func AURPKGBUILDRepos( func AURPKGBUILDRepos(
ctx context.Context, ctx context.Context,
cmdBuilder exe.GitCmdBuilder, logger *text.Logger, cmdBuilder exe.GitCmdBuilder,
targets []string, aurURL, dest string, force bool, targets []string, aurURL, dest string, force bool) (map[string]bool, error) {
) (map[string]bool, error) {
cloned := make(map[string]bool, len(targets)) cloned := make(map[string]bool, len(targets))
var ( var (
@ -63,34 +62,30 @@ func AURPKGBUILDRepos(
for _, target := range targets { for _, target := range targets {
sem <- 1 sem <- 1
wg.Add(1) wg.Add(1)
go func(target string) { go func(target string) {
defer func() {
<-sem
wg.Done()
}()
newClone, err := AURPKGBUILDRepo(ctx, cmdBuilder, aurURL, target, dest, force) newClone, err := AURPKGBUILDRepo(ctx, cmdBuilder, aurURL, target, dest, force)
mux.Lock() progress := 0
progress := len(cloned)
if err != nil { if err != nil {
errs.Add(err) errs.Add(err)
mux.Unlock() } else {
logger.OperationInfoln( mux.Lock()
gotext.Get("(%d/%d) Failed to download PKGBUILD: %s",
progress, len(targets), text.Cyan(target)))
return
}
cloned[target] = newClone cloned[target] = newClone
progress = len(cloned) progress = len(cloned)
mux.Unlock() mux.Unlock()
}
logger.OperationInfoln( text.OperationInfoln(
gotext.Get("(%d/%d) Downloaded PKGBUILD: %s", gotext.Get("(%d/%d) Downloaded PKGBUILD: %s",
progress, len(targets), text.Cyan(target))) progress, len(targets), text.Cyan(target)))
<-sem
wg.Done()
}(target) }(target)
} }

View File

@ -1,6 +1,3 @@
//go:build !integration
// +build !integration
package download package download
import ( import (
@ -13,7 +10,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/settings/exe"
) )
func TestGetAURPkgbuild(t *testing.T) { func TestGetAURPkgbuild(t *testing.T) {
@ -87,7 +84,7 @@ func TestAURPKGBUILDRepo(t *testing.T) {
if path, _ := exec.LookPath(ld); path != "" { if path, _ := exec.LookPath(ld); path != "" {
ld = path ld = path
} }
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C /tmp/doesnt-exist clone --no-progress https://aur.archlinux.org/yay-bin.git yay-bin", ld) want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C /tmp/doesnt-exist clone --no-progress https://aur.archlinux.org/yay-bin.git yay-bin", ld)
} }
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
@ -101,7 +98,7 @@ func TestAURPKGBUILDRepo(t *testing.T) {
GitFlags: []string{"--no-replace-objects"}, GitFlags: []string{"--no-replace-objects"},
}, },
} }
newCloned, err := AURPKGBUILDRepo(context.Background(), cmdBuilder, "https://aur.archlinux.org", "yay-bin", "/tmp/doesnt-exist", false) newCloned, err := AURPKGBUILDRepo(context.TODO(), cmdBuilder, "https://aur.archlinux.org", "yay-bin", "/tmp/doesnt-exist", false)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, true, newCloned) assert.Equal(t, true, newCloned)
} }
@ -111,17 +108,18 @@ func TestAURPKGBUILDRepo(t *testing.T) {
// THEN a pull command should be formed // THEN a pull command should be formed
func TestAURPKGBUILDRepoExistsPerms(t *testing.T) { func TestAURPKGBUILDRepoExistsPerms(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
os.MkdirAll(filepath.Join(dir, "yay-bin", ".git"), 0o777) os.MkdirAll(filepath.Join(dir, "yay-bin", ".git"), 0o777)
want := fmt.Sprintf("/usr/local/bin/git --no-replace-objects -C %s/yay-bin pull --rebase --autostash", dir) want := fmt.Sprintf("/usr/local/bin/git --no-replace-objects -C %s/yay-bin pull --ff-only", dir)
if os.Getuid() == 0 { if os.Getuid() == 0 {
ld := "systemd-run" ld := "systemd-run"
if path, _ := exec.LookPath(ld); path != "" { if path, _ := exec.LookPath(ld); path != "" {
ld = path ld = path
} }
want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty --quiet -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C %s/yay-bin pull --rebase --autostash", ld, dir) want = fmt.Sprintf("%s --service-type=oneshot --pipe --wait --pty -p DynamicUser=yes -p CacheDirectory=yay -E HOME=/tmp --no-replace-objects -C %s/yay-bin pull --ff-only", ld, dir)
} }
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
@ -135,14 +133,15 @@ func TestAURPKGBUILDRepoExistsPerms(t *testing.T) {
GitFlags: []string{"--no-replace-objects"}, GitFlags: []string{"--no-replace-objects"},
}, },
} }
cloned, err := AURPKGBUILDRepo(context.Background(), cmdBuilder, "https://aur.archlinux.org", "yay-bin", dir, false) cloned, err := AURPKGBUILDRepo(context.TODO(), cmdBuilder, "https://aur.archlinux.org", "yay-bin", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
assert.Equal(t, false, cloned) assert.Equal(t, false, cloned)
} }
func TestAURPKGBUILDRepos(t *testing.T) { func TestAURPKGBUILDRepos(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
os.MkdirAll(filepath.Join(dir, "yay-bin", ".git"), 0o777) os.MkdirAll(filepath.Join(dir, "yay-bin", ".git"), 0o777)
@ -158,7 +157,7 @@ func TestAURPKGBUILDRepos(t *testing.T) {
GitFlags: []string{}, GitFlags: []string{},
}, },
} }
cloned, err := AURPKGBUILDRepos(context.Background(), cmdBuilder, newTestLogger(), targets, "https://aur.archlinux.org", dir, false) cloned, err := AURPKGBUILDRepos(context.TODO(), cmdBuilder, targets, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
assert.EqualValues(t, map[string]bool{"yay": true, "yay-bin": false, "yay-git": true}, cloned) assert.EqualValues(t, map[string]bool{"yay": true, "yay-bin": false, "yay-git": true}, cloned)

View File

@ -9,13 +9,11 @@ import (
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/aur" "github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v11/pkg/multierror"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/multierror" "github.com/Jguer/yay/v11/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/text"
"github.com/Jguer/yay/v12/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text"
) )
type httpRequestDoer interface { type httpRequestDoer interface {
@ -24,12 +22,11 @@ type httpRequestDoer interface {
type DBSearcher interface { type DBSearcher interface {
SyncPackage(string) db.IPackage SyncPackage(string) db.IPackage
SyncPackageFromDB(string, string) db.IPackage SatisfierFromDB(string, string) db.IPackage
} }
func downloadGitRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder, func downloadGitRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder,
pkgURL, pkgName, dest string, force bool, gitArgs ...string, pkgURL, pkgName, dest string, force bool, gitArgs ...string) (bool, error) {
) (bool, error) {
finalDir := filepath.Join(dest, pkgName) finalDir := filepath.Join(dest, pkgName)
newClone := true newClone := true
@ -59,7 +56,7 @@ func downloadGitRepo(ctx context.Context, cmdBuilder exe.GitCmdBuilder,
errOut: gotext.Get("error reading %s", filepath.Join(dest, pkgName, ".git")), errOut: gotext.Get("error reading %s", filepath.Join(dest, pkgName, ".git")),
} }
default: default:
cmd := cmdBuilder.BuildGitCmd(ctx, filepath.Join(dest, pkgName), "pull", "--rebase", "--autostash") cmd := cmdBuilder.BuildGitCmd(ctx, filepath.Join(dest, pkgName), "pull", "--ff-only")
_, stderr, errCmd := cmdBuilder.Capture(cmd) _, stderr, errCmd := cmdBuilder.Capture(cmd)
if errCmd != nil { if errCmd != nil {
@ -81,9 +78,8 @@ func getURLName(pkg db.IPackage) string {
return name return name
} }
func PKGBUILDs(dbExecutor DBSearcher, aurClient aur.QueryClient, httpClient *http.Client, func PKGBUILDs(dbExecutor DBSearcher, httpClient *http.Client, targets []string,
logger *text.Logger, targets []string, aurURL string, mode parser.TargetMode, aurURL string, mode parser.TargetMode) (map[string][]byte, error) {
) (map[string][]byte, error) {
pkgbuilds := make(map[string][]byte, len(targets)) pkgbuilds := make(map[string][]byte, len(targets))
var ( var (
@ -96,7 +92,7 @@ func PKGBUILDs(dbExecutor DBSearcher, aurClient aur.QueryClient, httpClient *htt
for _, target := range targets { for _, target := range targets {
// Probably replaceable by something in query. // Probably replaceable by something in query.
dbName, name, isAUR, toSkip := getPackageUsableName(dbExecutor, aurClient, logger, target, mode) dbName, name, aur, toSkip := getPackageUsableName(dbExecutor, target, mode)
if toSkip { if toSkip {
continue continue
} }
@ -127,7 +123,7 @@ func PKGBUILDs(dbExecutor DBSearcher, aurClient aur.QueryClient, httpClient *htt
<-sem <-sem
wg.Done() wg.Done()
}(target, dbName, name, isAUR) }(target, dbName, name, aur)
} }
wg.Wait() wg.Wait()
@ -135,10 +131,9 @@ func PKGBUILDs(dbExecutor DBSearcher, aurClient aur.QueryClient, httpClient *htt
return pkgbuilds, errs.Return() return pkgbuilds, errs.Return()
} }
func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher, aurClient aur.QueryClient, func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher,
cmdBuilder exe.GitCmdBuilder, logger *text.Logger, cmdBuilder exe.GitCmdBuilder,
targets []string, mode parser.TargetMode, aurURL, dest string, force bool, targets []string, mode parser.TargetMode, aurURL, dest string, force bool) (map[string]bool, error) {
) (map[string]bool, error) {
cloned := make(map[string]bool, len(targets)) cloned := make(map[string]bool, len(targets))
var ( var (
@ -151,7 +146,7 @@ func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher, aurClient aur.Que
for _, target := range targets { for _, target := range targets {
// Probably replaceable by something in query. // Probably replaceable by something in query.
dbName, name, isAUR, toSkip := getPackageUsableName(dbExecutor, aurClient, logger, target, mode) dbName, name, aur, toSkip := getPackageUsableName(dbExecutor, target, mode)
if toSkip { if toSkip {
continue continue
} }
@ -184,11 +179,11 @@ func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher, aurClient aur.Que
} }
if aur { if aur {
logger.OperationInfoln( text.OperationInfoln(
gotext.Get("(%d/%d) Downloaded PKGBUILD: %s", gotext.Get("(%d/%d) Downloaded PKGBUILD: %s",
progress, len(targets), text.Cyan(pkgName))) progress, len(targets), text.Cyan(pkgName)))
} else { } else {
logger.OperationInfoln( text.OperationInfoln(
gotext.Get("(%d/%d) Downloaded PKGBUILD from ABS: %s", gotext.Get("(%d/%d) Downloaded PKGBUILD from ABS: %s",
progress, len(targets), text.Cyan(pkgName))) progress, len(targets), text.Cyan(pkgName)))
} }
@ -196,7 +191,7 @@ func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher, aurClient aur.Que
<-sem <-sem
wg.Done() wg.Done()
}(target, dbName, name, isAUR) }(target, dbName, name, aur)
} }
wg.Wait() wg.Wait()
@ -205,47 +200,34 @@ func PKGBUILDRepos(ctx context.Context, dbExecutor DBSearcher, aurClient aur.Que
} }
// TODO: replace with dep.ResolveTargets. // TODO: replace with dep.ResolveTargets.
func getPackageUsableName(dbExecutor DBSearcher, aurClient aur.QueryClient, func getPackageUsableName(dbExecutor DBSearcher, target string, mode parser.TargetMode) (dbname, pkgname string, aur, toSkip bool) {
logger *text.Logger, target string, mode parser.TargetMode, aur = true
) (dbname, pkgname string, isAUR, toSkip bool) {
dbName, name := text.SplitDBFromName(target) dbName, name := text.SplitDBFromName(target)
if dbName != "aur" && mode.AtLeastRepo() { if dbName != "aur" && mode.AtLeastRepo() {
var pkg db.IPackage var pkg db.IPackage
if dbName != "" { if dbName != "" {
pkg = dbExecutor.SyncPackageFromDB(name, dbName) pkg = dbExecutor.SatisfierFromDB(name, dbName)
if pkg == nil {
// if the user specified a db but the package is not in the db
// then it is missing
// Mode does not allow AUR packages
return dbName, name, aur, true
}
} else { } else {
pkg = dbExecutor.SyncPackage(name) pkg = dbExecutor.SyncPackage(name)
} }
if pkg != nil { if pkg != nil {
aur = false
name = getURLName(pkg) name = getURLName(pkg)
dbName = pkg.DB().Name() dbName = pkg.DB().Name()
return dbName, name, false, false
}
// If the package is not found in the database and it was expected to be there
if pkg == nil && dbName != "" {
return dbName, name, true, true
} }
} }
if mode == parser.ModeRepo { if aur && mode == parser.ModeRepo {
return dbName, name, true, true return dbName, name, aur, true
} }
pkgs, err := aurClient.Get(context.Background(), &aur.Query{ return dbName, name, aur, false
By: aur.Name,
Contains: false,
Needles: []string{name},
})
if err != nil {
logger.Warnln(err)
return dbName, name, true, true
}
if len(pkgs) == 0 {
return dbName, name, true, true
}
return "aur", name, true, false
} }
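In the reworked `getPackageUsableName`, a target is first split into an optional `db/` prefix and a package name, resolved against the sync databases, and only then checked against the AUR; targets pinned to a repo that does not contain them are skipped instead of silently falling through. A small sketch of the prefix-split step, assuming `text.SplitDBFromName` behaves like a cut on the first `/` (an illustrative re-implementation, not the real helper):

```go
package main

import (
	"fmt"
	"strings"
)

// splitDBFromName mimics what text.SplitDBFromName is used for above:
// an optional "db/" prefix pins the lookup to one sync database,
// otherwise all sync databases and finally the AUR are consulted.
func splitDBFromName(target string) (db, name string) {
	if before, after, found := strings.Cut(target, "/"); found {
		return before, after
	}
	return "", target
}

func main() {
	fmt.Println(splitDBFromName("core/linux")) // core linux
	fmt.Println(splitDBFromName("yay-bin"))    //       yay-bin
}
```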

View File

@ -1,106 +0,0 @@
//go:build integration
// +build integration
package download
import (
"context"
"net/http"
"os"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/Jguer/aur"
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
"github.com/Jguer/yay/v12/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text"
)
func TestIntegrationPKGBUILDReposDefinedDBClone(t *testing.T) {
dir := t.TempDir()
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil // fakes a package found for all
},
}
targets := []string{"core/linux", "yay-bin", "yay-git"}
testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
cmdRunner := &exe.OSRunner{Log: testLogger}
cmdBuilder := &exe.CmdBuilder{
Runner: cmdRunner,
GitBin: "git",
GitFlags: []string{},
Log: testLogger,
}
searcher := &testDBSearcher{
absPackagesDB: map[string]string{"linux": "core"},
}
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
cmdBuilder, testLogger.Child("test"),
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err)
assert.EqualValues(t, map[string]bool{"core/linux": true, "yay-bin": true, "yay-git": true}, cloned)
}
func TestIntegrationPKGBUILDReposNotExist(t *testing.T) {
dir := t.TempDir()
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil // fakes a package found for all
},
}
targets := []string{"core/yay", "yay-bin", "yay-git"}
testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
cmdRunner := &exe.OSRunner{Log: testLogger}
cmdBuilder := &exe.CmdBuilder{
Runner: cmdRunner,
GitBin: "git",
GitFlags: []string{},
Log: testLogger,
}
searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"},
}
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
cmdBuilder, testLogger.Child("test"),
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.Error(t, err)
assert.EqualValues(t, map[string]bool{"yay-bin": true, "yay-git": true}, cloned)
}
// GIVEN 2 aur packages and 1 in repo
// WHEN defining as specified targets
// THEN all aur be found and cloned
func TestIntegrationPKGBUILDFull(t *testing.T) {
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil
},
}
testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
targets := []string{"core/linux", "aur/yay-bin", "yay-git"}
searcher := &testDBSearcher{
absPackagesDB: map[string]string{"linux": "core"},
}
fetched, err := PKGBUILDs(searcher, mockClient, &http.Client{}, testLogger.Child("test"),
targets, "https://aur.archlinux.org", parser.ModeAny)
assert.NoError(t, err)
for _, target := range targets {
assert.Contains(t, fetched, target)
assert.NotEmpty(t, fetched[target])
}
}

View File

@ -1,47 +1,27 @@
//go:build !integration
// +build !integration
package download package download
import ( import (
"context" "context"
"io"
"net/http" "net/http"
"os" "os"
"path/filepath" "path/filepath"
"strings"
"testing" "testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"gopkg.in/h2non/gock.v1" "gopkg.in/h2non/gock.v1"
"github.com/Jguer/aur" "github.com/Jguer/yay/v11/pkg/settings/exe"
"github.com/Jguer/yay/v11/pkg/settings/parser"
mockaur "github.com/Jguer/yay/v12/pkg/dep/mock"
"github.com/Jguer/yay/v12/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/settings/parser"
"github.com/Jguer/yay/v12/pkg/text"
) )
func newTestLogger() *text.Logger {
return text.NewLogger(io.Discard, io.Discard, strings.NewReader(""), true, "test")
}
// GIVEN 2 aur packages and 1 in repo // GIVEN 2 aur packages and 1 in repo
// GIVEN package in repo is already present // GIVEN package in repo is already present
// WHEN defining package db as a target // WHEN defining package db as a target
// THEN all should be found and cloned, except the repo one // THEN all should be found and cloned, except the repo one
func TestPKGBUILDReposDefinedDBPull(t *testing.T) { func TestPKGBUILDReposDefinedDBPull(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil // fakes a package found for all
},
}
testLogger := text.NewLogger(os.Stdout, os.Stderr, strings.NewReader(""), true, "test")
os.MkdirAll(filepath.Join(dir, "yay", ".git"), 0o777) os.MkdirAll(filepath.Join(dir, "yay", ".git"), 0o777)
@ -54,14 +34,13 @@ func TestPKGBUILDReposDefinedDBPull(t *testing.T) {
Runner: cmdRunner, Runner: cmdRunner,
GitBin: "/usr/local/bin/git", GitBin: "/usr/local/bin/git",
GitFlags: []string{}, GitFlags: []string{},
Log: testLogger,
}, },
} }
searcher := &testDBSearcher{ searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"}, absPackagesDB: map[string]string{"yay": "core"},
} }
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient, cloned, err := PKGBUILDRepos(context.TODO(), searcher,
cmdBuilder, newTestLogger(), cmdBuilder,
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false) targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
@ -73,13 +52,9 @@ func TestPKGBUILDReposDefinedDBPull(t *testing.T) {
// THEN all should be found and cloned // THEN all should be found and cloned
func TestPKGBUILDReposDefinedDBClone(t *testing.T) { func TestPKGBUILDReposDefinedDBClone(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil // fakes a package found for all
},
}
targets := []string{"core/yay", "yay-bin", "yay-git"} targets := []string{"core/yay", "yay-bin", "yay-git"}
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
cmdBuilder := &testGitBuilder{ cmdBuilder := &testGitBuilder{
@ -94,8 +69,8 @@ func TestPKGBUILDReposDefinedDBClone(t *testing.T) {
searcher := &testDBSearcher{ searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"}, absPackagesDB: map[string]string{"yay": "core"},
} }
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient, cloned, err := PKGBUILDRepos(context.TODO(), searcher,
cmdBuilder, newTestLogger(), cmdBuilder,
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false) targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
@ -107,13 +82,9 @@ func TestPKGBUILDReposDefinedDBClone(t *testing.T) {
// THEN all should be found and cloned // THEN all should be found and cloned
func TestPKGBUILDReposClone(t *testing.T) { func TestPKGBUILDReposClone(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil // fakes a package found for all
},
}
targets := []string{"yay", "yay-bin", "yay-git"} targets := []string{"yay", "yay-bin", "yay-git"}
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
cmdBuilder := &testGitBuilder{ cmdBuilder := &testGitBuilder{
@ -128,8 +99,8 @@ func TestPKGBUILDReposClone(t *testing.T) {
searcher := &testDBSearcher{ searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"}, absPackagesDB: map[string]string{"yay": "core"},
} }
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient, cloned, err := PKGBUILDRepos(context.TODO(), searcher,
cmdBuilder, newTestLogger(), cmdBuilder,
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false) targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
@ -141,13 +112,9 @@ func TestPKGBUILDReposClone(t *testing.T) {
// THEN all aur be found and cloned // THEN all aur be found and cloned
func TestPKGBUILDReposNotFound(t *testing.T) { func TestPKGBUILDReposNotFound(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil // fakes a package found for all
},
}
targets := []string{"extra/yay", "yay-bin", "yay-git"} targets := []string{"extra/yay", "yay-bin", "yay-git"}
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
cmdBuilder := &testGitBuilder{ cmdBuilder := &testGitBuilder{
@ -162,8 +129,8 @@ func TestPKGBUILDReposNotFound(t *testing.T) {
searcher := &testDBSearcher{ searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"}, absPackagesDB: map[string]string{"yay": "core"},
} }
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient, cloned, err := PKGBUILDRepos(context.TODO(), searcher,
cmdBuilder, newTestLogger(), cmdBuilder,
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false) targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
@ -175,13 +142,9 @@ func TestPKGBUILDReposNotFound(t *testing.T) {
// THEN only repo should be cloned // THEN only repo should be cloned
func TestPKGBUILDReposRepoMode(t *testing.T) { func TestPKGBUILDReposRepoMode(t *testing.T) {
t.Parallel() t.Parallel()
dir := t.TempDir() dir, _ := os.MkdirTemp("/tmp/", "yay-test")
defer os.RemoveAll(dir)
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{}, nil // fakes a package found for all
},
}
targets := []string{"yay", "yay-bin", "yay-git"} targets := []string{"yay", "yay-bin", "yay-git"}
cmdRunner := &testRunner{} cmdRunner := &testRunner{}
cmdBuilder := &testGitBuilder{ cmdBuilder := &testGitBuilder{
@ -196,8 +159,8 @@ func TestPKGBUILDReposRepoMode(t *testing.T) {
searcher := &testDBSearcher{ searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"}, absPackagesDB: map[string]string{"yay": "core"},
} }
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient, cloned, err := PKGBUILDRepos(context.TODO(), searcher,
cmdBuilder, newTestLogger(), cmdBuilder,
targets, parser.ModeRepo, "https://aur.archlinux.org", dir, false) targets, parser.ModeRepo, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err) assert.NoError(t, err)
@ -210,11 +173,6 @@ func TestPKGBUILDReposRepoMode(t *testing.T) {
func TestPKGBUILDFull(t *testing.T) { func TestPKGBUILDFull(t *testing.T) {
t.Parallel() t.Parallel()
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{{}}, nil
},
}
gock.New("https://aur.archlinux.org"). gock.New("https://aur.archlinux.org").
Get("/cgit/aur.git/plain/PKGBUILD").MatchParam("h", "yay-git"). Get("/cgit/aur.git/plain/PKGBUILD").MatchParam("h", "yay-git").
Reply(200). Reply(200).
@ -224,8 +182,8 @@ func TestPKGBUILDFull(t *testing.T) {
Reply(200). Reply(200).
BodyString("example_yay-bin") BodyString("example_yay-bin")
gock.New("https://gitlab.archlinux.org/"). gock.New("https://github.com/").
Get("archlinux/packaging/packages/yay/-/raw/main/PKGBUILD"). Get("/archlinux/svntogit-packages/raw/packages/yay/trunk/PKGBUILD").
Reply(200). Reply(200).
BodyString("example_yay") BodyString("example_yay")
@ -235,7 +193,7 @@ func TestPKGBUILDFull(t *testing.T) {
absPackagesDB: map[string]string{"yay": "core"}, absPackagesDB: map[string]string{"yay": "core"},
} }
fetched, err := PKGBUILDs(searcher, mockClient, &http.Client{}, newTestLogger(), fetched, err := PKGBUILDs(searcher, &http.Client{},
targets, "https://aur.archlinux.org", parser.ModeAny) targets, "https://aur.archlinux.org", parser.ModeAny)
assert.NoError(t, err) assert.NoError(t, err)
@ -245,37 +203,3 @@ func TestPKGBUILDFull(t *testing.T) {
"yay-git": []byte("example_yay-git"), "yay-git": []byte("example_yay-git"),
}, fetched) }, fetched)
} }
// GIVEN 2 aur packages and 1 in repo
// WHEN aur packages are not found
// only repo should be cloned
func TestPKGBUILDReposMissingAUR(t *testing.T) {
t.Parallel()
dir := t.TempDir()
mockClient := &mockaur.MockAUR{
GetFn: func(ctx context.Context, query *aur.Query) ([]aur.Pkg, error) {
return []aur.Pkg{}, nil // fakes a package found for all
},
}
targets := []string{"core/yay", "aur/yay-bin", "aur/yay-git"}
cmdRunner := &testRunner{}
cmdBuilder := &testGitBuilder{
index: 0,
test: t,
parentBuilder: &exe.CmdBuilder{
Runner: cmdRunner,
GitBin: "/usr/local/bin/git",
GitFlags: []string{},
},
}
searcher := &testDBSearcher{
absPackagesDB: map[string]string{"yay": "core"},
}
cloned, err := PKGBUILDRepos(context.Background(), searcher, mockClient,
cmdBuilder, newTestLogger(),
targets, parser.ModeAny, "https://aur.archlinux.org", dir, false)
assert.NoError(t, err)
assert.EqualValues(t, map[string]bool{"core/yay": true}, cloned)
}

View File

@ -12,8 +12,8 @@ import (
"github.com/Jguer/go-alpm/v2" "github.com/Jguer/go-alpm/v2"
"github.com/Jguer/yay/v12/pkg/db" "github.com/Jguer/yay/v11/pkg/db"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/settings/exe"
) )
type testRunner struct{} type testRunner struct{}
@ -102,7 +102,7 @@ func (d *testDBSearcher) SyncPackage(name string) db.IPackage {
return nil return nil
} }
func (d *testDBSearcher) SyncPackageFromDB(name string, db string) db.IPackage { func (d *testDBSearcher) SatisfierFromDB(name string, db string) db.IPackage {
if v, ok := d.absPackagesDB[name]; ok && v == db { if v, ok := d.absPackagesDB[name]; ok && v == db {
return &testPackage{ return &testPackage{
name: name, name: name,

View File

@ -5,7 +5,7 @@ import (
"strings" "strings"
"unicode" "unicode"
mapset "github.com/deckarep/golang-set/v2" "github.com/Jguer/yay/v11/pkg/stringset"
) )
// IntRange stores a max and min amount for range. // IntRange stores a max and min amount for range.
@ -17,10 +17,10 @@ type IntRange struct {
// IntRanges is a slice of IntRange. // IntRanges is a slice of IntRange.
type IntRanges []IntRange type IntRanges []IntRange
func makeIntRange(minVal, maxVal int) IntRange { func makeIntRange(min, max int) IntRange {
return IntRange{ return IntRange{
min: minVal, min,
max: maxVal, max,
} }
} }
@ -42,6 +42,24 @@ func (rs IntRanges) Get(n int) bool {
return false return false
} }
// Min returns min value between a and b.
func Min(a, b int) int {
if a < b {
return a
}
return b
}
// Max returns max value between a and b.
func Max(a, b int) int {
if a < b {
return b
}
return a
}
// ParseNumberMenu parses input for number menus split by spaces or commas // ParseNumberMenu parses input for number menus split by spaces or commas
// supports individual selection: 1 2 3 4 // supports individual selection: 1 2 3 4
// supports range selections: 1-4 10-20 // supports range selections: 1-4 10-20
@ -53,12 +71,11 @@ func (rs IntRanges) Get(n int) bool {
// of course the implementation is up to the caller, this function merely parses // the input and organizes it.
// the input and organizes it. // the input and organizes it.
func ParseNumberMenu(input string) (include, exclude IntRanges, func ParseNumberMenu(input string) (include, exclude IntRanges,
otherInclude, otherExclude mapset.Set[string], otherInclude, otherExclude stringset.StringSet) {
) {
include = make(IntRanges, 0) include = make(IntRanges, 0)
exclude = make(IntRanges, 0) exclude = make(IntRanges, 0)
otherInclude = mapset.NewThreadUnsafeSet[string]() otherInclude = make(stringset.StringSet)
otherExclude = mapset.NewThreadUnsafeSet[string]() otherExclude = make(stringset.StringSet)
words := strings.FieldsFunc(input, func(c rune) bool { words := strings.FieldsFunc(input, func(c rune) bool {
return unicode.IsSpace(c) || c == ',' return unicode.IsSpace(c) || c == ','
@ -84,22 +101,22 @@ func ParseNumberMenu(input string) (include, exclude IntRanges,
num1, err = strconv.Atoi(ranges[0]) num1, err = strconv.Atoi(ranges[0])
if err != nil { if err != nil {
other.Add(strings.ToLower(word)) other.Set(strings.ToLower(word))
continue continue
} }
if len(ranges) == 2 { if len(ranges) == 2 {
num2, err = strconv.Atoi(ranges[1]) num2, err = strconv.Atoi(ranges[1])
if err != nil { if err != nil {
other.Add(strings.ToLower(word)) other.Set(strings.ToLower(word))
continue continue
} }
} else { } else {
num2 = num1 num2 = num1
} }
mi := min(num1, num2) mi := Min(num1, num2)
ma := max(num1, num2) ma := Max(num1, num2)
if !invert { if !invert {
include = append(include, makeIntRange(mi, ma)) include = append(include, makeIntRange(mi, ma))
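The docstring in this hunk defines the number-menu grammar: single selections (`1 2 3 4`), ranges (`1-4 10-20`), `^`-prefixed exclusions, and non-numeric word tokens that land in the "other" sets. A hedged usage sketch of the new `mapset`-based signature, assuming the package is importable at its v12 path:

```go
package main

import (
	"fmt"

	"github.com/Jguer/yay/v12/pkg/intrange"
)

func main() {
	// "1-3 ^2 all" mixes a range, an exclusion and a word token,
	// matching the grammar documented in the ParseNumberMenu comment.
	include, exclude, otherInclude, _ := intrange.ParseNumberMenu("1-3 ^2 all")

	fmt.Println(include.Get(1))               // true: inside the 1-3 range
	fmt.Println(exclude.Get(2))               // true: excluded via ^2
	fmt.Println(otherInclude.Contains("all")) // true: non-numeric token
}
```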

View File

@ -1,13 +1,9 @@
//go:build !integration
// +build !integration
package intrange package intrange
import ( import (
"testing" "testing"
mapset "github.com/deckarep/golang-set/v2" "github.com/Jguer/yay/v11/pkg/stringset"
"github.com/stretchr/testify/assert"
) )
func TestParseNumberMenu(t *testing.T) { func TestParseNumberMenu(t *testing.T) {
@ -15,8 +11,8 @@ func TestParseNumberMenu(t *testing.T) {
type result struct { type result struct {
Include IntRanges Include IntRanges
Exclude IntRanges Exclude IntRanges
OtherInclude mapset.Set[string] OtherInclude stringset.StringSet
OtherExclude mapset.Set[string] OtherExclude stringset.StringSet
} }
inputs := []string{ inputs := []string{
@ -41,15 +37,15 @@ func TestParseNumberMenu(t *testing.T) {
makeIntRange(3, 3), makeIntRange(3, 3),
makeIntRange(4, 4), makeIntRange(4, 4),
makeIntRange(5, 5), makeIntRange(5, 5),
}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, }, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{IntRanges{ {IntRanges{
makeIntRange(1, 10), makeIntRange(1, 10),
makeIntRange(5, 15), makeIntRange(5, 15),
}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, }, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{IntRanges{ {IntRanges{
makeIntRange(5, 10), makeIntRange(5, 10),
makeIntRange(85, 90), makeIntRange(85, 90),
}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, }, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{ {
IntRanges{ IntRanges{
makeIntRange(1, 1), makeIntRange(1, 1),
@ -62,18 +58,18 @@ func TestParseNumberMenu(t *testing.T) {
makeIntRange(38, 40), makeIntRange(38, 40),
makeIntRange(123, 123), makeIntRange(123, 123),
}, },
mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string](), make(stringset.StringSet), make(stringset.StringSet),
}, },
{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("abort", "all", "none"), mapset.NewThreadUnsafeSet[string]()}, {IntRanges{}, IntRanges{}, stringset.Make("abort", "all", "none"), make(stringset.StringSet)},
{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("a-b"), mapset.NewThreadUnsafeSet("abort", "a-b")}, {IntRanges{}, IntRanges{}, stringset.Make("a-b"), stringset.Make("abort", "a-b")},
{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("-9223372036854775809-9223372036854775809"), mapset.NewThreadUnsafeSet[string]()}, {IntRanges{}, IntRanges{}, stringset.Make("-9223372036854775809-9223372036854775809"), make(stringset.StringSet)},
{IntRanges{ {IntRanges{
makeIntRange(1, 1), makeIntRange(1, 1),
makeIntRange(2, 2), makeIntRange(2, 2),
makeIntRange(3, 3), makeIntRange(3, 3),
makeIntRange(4, 4), makeIntRange(4, 4),
makeIntRange(5, 5), makeIntRange(5, 5),
}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, }, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{IntRanges{ {IntRanges{
makeIntRange(1, 1), makeIntRange(1, 1),
makeIntRange(2, 2), makeIntRange(2, 2),
@ -83,20 +79,23 @@ func TestParseNumberMenu(t *testing.T) {
makeIntRange(6, 6), makeIntRange(6, 6),
makeIntRange(7, 7), makeIntRange(7, 7),
makeIntRange(8, 8), makeIntRange(8, 8),
}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, }, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, {IntRanges{}, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet[string](), mapset.NewThreadUnsafeSet[string]()}, {IntRanges{}, IntRanges{}, make(stringset.StringSet), make(stringset.StringSet)},
{IntRanges{}, IntRanges{}, mapset.NewThreadUnsafeSet("a", "b", "c", "d", "e"), mapset.NewThreadUnsafeSet[string]()}, {IntRanges{}, IntRanges{}, stringset.Make("a", "b", "c", "d", "e"), make(stringset.StringSet)},
} }
for n, in := range inputs { for n, in := range inputs {
res := expected[n] res := expected[n]
include, exclude, otherInclude, otherExclude := ParseNumberMenu(in) include, exclude, otherInclude, otherExclude := ParseNumberMenu(in)
assert.True(t, intRangesEqual(include, res.Include), "Test %d Failed: Expected: include=%+v got include=%+v", n+1, res.Include, include) if !intRangesEqual(include, res.Include) ||
assert.True(t, intRangesEqual(exclude, res.Exclude), "Test %d Failed: Expected: exclude=%+v got exclude=%+v", n+1, res.Exclude, exclude) !intRangesEqual(exclude, res.Exclude) ||
assert.True(t, otherInclude.Equal(res.OtherInclude), "Test %d Failed: Expected: otherInclude=%+v got otherInclude=%+v", n+1, res.OtherInclude, otherInclude) !stringset.Equal(otherInclude, res.OtherInclude) ||
assert.True(t, otherExclude.Equal(res.OtherExclude), "Test %d Failed: Expected: otherExclude=%+v got otherExclude=%+v", n+1, res.OtherExclude, otherExclude) !stringset.Equal(otherExclude, res.OtherExclude) {
t.Fatalf("Test %d Failed: Expected: include=%+v exclude=%+v otherInclude=%+v otherExclude=%+v got include=%+v excluive=%+v otherInclude=%+v otherExclude=%+v",
n+1, res.Include, res.Exclude, res.OtherInclude, res.OtherExclude, include, exclude, otherInclude, otherExclude)
}
} }
} }

View File

@ -1,78 +0,0 @@
// Clean Build Menu functions
package menus
import (
"context"
"io"
"os"
mapset "github.com/deckarep/golang-set/v2"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/text"
)
func anyExistInCache(pkgbuildDirs map[string]string) bool {
for _, dir := range pkgbuildDirs {
if _, err := os.Stat(dir); !os.IsNotExist(err) {
return true
}
}
return false
}
func CleanFn(ctx context.Context, run *runtime.Runtime, w io.Writer,
pkgbuildDirsByBase map[string]string, installed mapset.Set[string],
) error {
if len(pkgbuildDirsByBase) == 0 {
return nil // no work to do
}
if !anyExistInCache(pkgbuildDirsByBase) {
return nil
}
skipFunc := func(pkg string) bool {
dir := pkgbuildDirsByBase[pkg]
// TOFIX: new install engine dir will always exist, check if unclean instead
if _, err := os.Stat(dir); os.IsNotExist(err) {
return true
}
return false
}
bases := make([]string, 0, len(pkgbuildDirsByBase))
for pkg := range pkgbuildDirsByBase {
bases = append(bases, pkg)
}
toClean, errClean := selectionMenu(run.Logger, pkgbuildDirsByBase, bases, installed,
gotext.Get("Packages to cleanBuild?"),
settings.NoConfirm, run.Cfg.AnswerClean, skipFunc)
if errClean != nil {
return errClean
}
for i, base := range toClean {
dir := pkgbuildDirsByBase[base]
run.Logger.OperationInfoln(gotext.Get("Deleting (%d/%d): %s", i+1, len(toClean), text.Cyan(dir)))
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildGitCmd(ctx, dir, "reset", "--hard", "origin/HEAD")); err != nil {
run.Logger.Warnln(gotext.Get("Unable to clean:"), dir)
return err
}
if err := run.CmdBuilder.Show(run.CmdBuilder.BuildGitCmd(ctx, dir, "clean", "-fdx")); err != nil {
run.Logger.Warnln(gotext.Get("Unable to clean:"), dir)
return err
}
}
return nil
}

View File

@ -1,181 +0,0 @@
// file dedicated to diff menu
package menus
import (
"context"
"fmt"
"io"
"strings"
mapset "github.com/deckarep/golang-set/v2"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/multierror"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/settings/exe"
"github.com/Jguer/yay/v12/pkg/text"
)
const (
gitEmptyTree = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
gitDiffRefName = "AUR_SEEN"
)
func showPkgbuildDiffs(ctx context.Context, cmdBuilder exe.ICmdBuilder, logger *text.Logger,
pkgbuildDirs map[string]string, bases []string,
) error {
var errMulti multierror.MultiError
for _, pkg := range bases {
dir := pkgbuildDirs[pkg]
start, err := getLastSeenHash(ctx, cmdBuilder, dir)
if err != nil {
errMulti.Add(err)
continue
}
if start != gitEmptyTree {
hasDiff, err := gitHasDiff(ctx, cmdBuilder, dir)
if err != nil {
errMulti.Add(err)
continue
}
if !hasDiff {
logger.Warnln(gotext.Get("%s: No changes -- skipping", text.Cyan(pkg)))
continue
}
}
args := []string{
"diff",
start + "..HEAD@{upstream}", "--src-prefix",
dir + "/", "--dst-prefix", dir + "/", "--", ".", ":(exclude).SRCINFO",
}
if text.UseColor {
args = append(args, "--color=always")
} else {
args = append(args, "--color=never")
}
_ = cmdBuilder.Show(cmdBuilder.BuildGitCmd(ctx, dir, args...))
}
return errMulti.Return()
}
// Check whether or not a diff exists between the last reviewed diff and
// HEAD@{upstream}.
func gitHasDiff(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) (bool, error) {
if gitHasLastSeenRef(ctx, cmdBuilder, dir) {
stdout, stderr, err := cmdBuilder.Capture(
cmdBuilder.BuildGitCmd(ctx, dir, "rev-parse", gitDiffRefName, "HEAD@{upstream}"))
if err != nil {
return false, fmt.Errorf("%s%w", stderr, err)
}
lines := strings.Split(stdout, "\n")
lastseen := lines[0]
upstream := lines[1]
return lastseen != upstream, nil
}
// If AUR_SEEN does not exist, we have never reviewed a diff for this package
// and should display it.
return true, nil
}
// Return whether or not we have reviewed a diff yet. It checks for the existence of
// AUR_SEEN in the git ref-list.
func gitHasLastSeenRef(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) bool {
_, _, err := cmdBuilder.Capture(
cmdBuilder.BuildGitCmd(ctx,
dir, "rev-parse", "--quiet", "--verify", gitDiffRefName))
return err == nil
}
// Returns the last reviewed hash. If AUR_SEEN exists it will return this hash.
// If it does not, it will return the empty tree as no diffs have been reviewed yet.
func getLastSeenHash(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) (string, error) {
if gitHasLastSeenRef(ctx, cmdBuilder, dir) {
stdout, stderr, err := cmdBuilder.Capture(
cmdBuilder.BuildGitCmd(ctx,
dir, "rev-parse", gitDiffRefName))
if err != nil {
return "", fmt.Errorf("%s %w", stderr, err)
}
lines := strings.Split(stdout, "\n")
return lines[0], nil
}
return gitEmptyTree, nil
}
// Update the AUR_SEEN ref to HEAD. We use this ref to determine which diffs were
// reviewed by the user.
func gitUpdateSeenRef(ctx context.Context, cmdBuilder exe.ICmdBuilder, dir string) error {
_, stderr, err := cmdBuilder.Capture(
cmdBuilder.BuildGitCmd(ctx,
dir, "update-ref", gitDiffRefName, "HEAD"))
if err != nil {
return fmt.Errorf("%s %w", stderr, err)
}
return nil
}
func updatePkgbuildSeenRef(ctx context.Context, cmdBuilder exe.ICmdBuilder, pkgbuildDirs map[string]string, bases []string) error {
var errMulti multierror.MultiError
for _, pkg := range bases {
dir := pkgbuildDirs[pkg]
if err := gitUpdateSeenRef(ctx, cmdBuilder, dir); err != nil {
errMulti.Add(err)
}
}
return errMulti.Return()
}
func DiffFn(ctx context.Context, run *runtime.Runtime, w io.Writer,
pkgbuildDirsByBase map[string]string, installed mapset.Set[string],
) error {
if len(pkgbuildDirsByBase) == 0 {
return nil // no work to do
}
bases := make([]string, 0, len(pkgbuildDirsByBase))
for base := range pkgbuildDirsByBase {
bases = append(bases, base)
}
toDiff, errMenu := selectionMenu(run.Logger, pkgbuildDirsByBase, bases, installed, gotext.Get("Diffs to show?"),
settings.NoConfirm, run.Cfg.AnswerDiff, nil)
if errMenu != nil || len(toDiff) == 0 {
return errMenu
}
if errD := showPkgbuildDiffs(ctx, run.CmdBuilder, run.Logger, pkgbuildDirsByBase, toDiff); errD != nil {
return errD
}
run.Logger.Println()
if !run.Logger.ContinueTask(gotext.Get("Proceed with install?"), true, false) {
return settings.ErrUserAbort{}
}
if errUpd := updatePkgbuildSeenRef(ctx, run.CmdBuilder, pkgbuildDirsByBase, toDiff); errUpd != nil {
return errUpd
}
return nil
}
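The removed diff menu remembers what the user has already reviewed by keeping a git ref named `AUR_SEEN` in each package clone: diffs are shown from that ref (or the well-known empty tree) up to `HEAD@{upstream}`, and the ref is moved to `HEAD` once the user proceeds. A simplified standalone sketch of the same bookkeeping with plain `exec.Command` calls, outside yay's cmdBuilder abstraction and with error handling trimmed:

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

const (
	gitEmptyTree   = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
	gitDiffRefName = "AUR_SEEN"
)

// lastSeen returns the commit the user last reviewed, or the empty tree
// when no diff has ever been shown for this clone.
func lastSeen(dir string) string {
	out, err := exec.Command("git", "-C", dir,
		"rev-parse", "--quiet", "--verify", gitDiffRefName).Output()
	if err != nil {
		return gitEmptyTree
	}
	return strings.TrimSpace(string(out))
}

// markSeen moves AUR_SEEN to HEAD after the user has reviewed the diff.
func markSeen(dir string) error {
	return exec.Command("git", "-C", dir,
		"update-ref", gitDiffRefName, "HEAD").Run()
}

func main() {
	dir := "/path/to/pkg-clone" // illustrative path
	fmt.Println("showing diff from", lastSeen(dir), "to HEAD@{upstream}")
	_ = markSeen(dir)
}
```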

View File

@ -1,148 +0,0 @@
// edit menu
package menus
import (
"context"
"errors"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
gosrc "github.com/Morganamilo/go-srcinfo"
mapset "github.com/deckarep/golang-set/v2"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/runtime"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/text"
)
// Editor returns the preferred system editor.
func editor(log *text.Logger, editorConfig, editorFlags string, noConfirm bool) (editor string, args []string) {
switch {
case editorConfig != "":
editor, err := exec.LookPath(editorConfig)
if err != nil {
log.Errorln(err)
} else {
return editor, strings.Fields(editorFlags)
}
fallthrough
case os.Getenv("VISUAL") != "":
if editorArgs := strings.Fields(os.Getenv("VISUAL")); len(editorArgs) != 0 {
editor, err := exec.LookPath(editorArgs[0])
if err != nil {
log.Errorln(err)
} else {
return editor, editorArgs[1:]
}
}
fallthrough
case os.Getenv("EDITOR") != "":
if editorArgs := strings.Fields(os.Getenv("EDITOR")); len(editorArgs) != 0 {
editor, err := exec.LookPath(editorArgs[0])
if err != nil {
log.Errorln(err)
} else {
return editor, editorArgs[1:]
}
}
fallthrough
default:
log.Errorln("\n", gotext.Get("%s is not set", text.Bold(text.Cyan("$EDITOR"))))
log.Warnln(gotext.Get("Add %s or %s to your environment variables", text.Bold(text.Cyan("$EDITOR")), text.Bold(text.Cyan("$VISUAL"))))
for {
log.Infoln(gotext.Get("Edit PKGBUILD with?"))
editorInput, err := log.GetInput("", noConfirm)
if err != nil {
log.Errorln(err)
continue
}
editorArgs := strings.Fields(editorInput)
if len(editorArgs) == 0 {
continue
}
editor, err := exec.LookPath(editorArgs[0])
if err != nil {
log.Errorln(err)
continue
}
return editor, editorArgs[1:]
}
}
}
func editPkgbuilds(log *text.Logger, pkgbuildDirs map[string]string, bases []string, editorConfig,
editorFlags string, srcinfos map[string]*gosrc.Srcinfo, noConfirm bool,
) error {
pkgbuilds := make([]string, 0, len(bases))
for _, pkg := range bases {
dir := pkgbuildDirs[pkg]
pkgbuilds = append(pkgbuilds, filepath.Join(dir, "PKGBUILD"))
if srcinfos != nil {
for _, splitPkg := range srcinfos[pkg].SplitPackages() {
if splitPkg.Install != "" {
pkgbuilds = append(pkgbuilds, filepath.Join(dir, splitPkg.Install))
}
}
}
}
if len(pkgbuilds) > 0 {
editor, editorArgs := editor(log, editorConfig, editorFlags, noConfirm)
editorArgs = append(editorArgs, pkgbuilds...)
editcmd := exec.Command(editor, editorArgs...)
editcmd.Stdin, editcmd.Stdout, editcmd.Stderr = os.Stdin, os.Stdout, os.Stderr
if err := editcmd.Run(); err != nil {
return errors.New(gotext.Get("editor did not exit successfully, aborting: %s", err))
}
}
return nil
}
func EditFn(ctx context.Context, run *runtime.Runtime, w io.Writer,
pkgbuildDirsByBase map[string]string, installed mapset.Set[string],
) error {
if len(pkgbuildDirsByBase) == 0 {
return nil // no work to do
}
bases := make([]string, 0, len(pkgbuildDirsByBase))
for pkg := range pkgbuildDirsByBase {
bases = append(bases, pkg)
}
toEdit, errMenu := selectionMenu(run.Logger, pkgbuildDirsByBase, bases, installed,
gotext.Get("PKGBUILDs to edit?"), settings.NoConfirm, run.Cfg.AnswerEdit, nil)
if errMenu != nil || len(toEdit) == 0 {
return errMenu
}
// TOFIX: remove or use srcinfo data
if errEdit := editPkgbuilds(run.Logger, pkgbuildDirsByBase,
toEdit, run.Cfg.Editor, run.Cfg.EditorFlags, nil, settings.NoConfirm); errEdit != nil {
return errEdit
}
run.Logger.Println()
if !run.Logger.ContinueTask(gotext.Get("Proceed with install?"), true, false) {
return settings.ErrUserAbort{}
}
return nil
}
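The removed edit menu resolves the editor in a fixed order: the configured editor first, then `$VISUAL`, then `$EDITOR`, and finally an interactive prompt. A minimal sketch of that fallback chain without the prompt step; the names here are illustrative, not yay's:

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// resolveEditor mirrors the fallthrough order in the removed menu code:
// configured editor, then $VISUAL, then $EDITOR. The real code keeps
// prompting the user when all of these fail.
func resolveEditor(configured string) (string, []string, error) {
	candidates := []string{configured, os.Getenv("VISUAL"), os.Getenv("EDITOR")}
	for _, candidate := range candidates {
		fields := strings.Fields(candidate)
		if len(fields) == 0 {
			continue
		}
		if path, err := exec.LookPath(fields[0]); err == nil {
			return path, fields[1:], nil
		}
	}
	return "", nil, fmt.Errorf("no usable editor found")
}

func main() {
	editor, args, err := resolveEditor("")
	fmt.Println(editor, args, err)
}
```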

View File

@ -1,103 +0,0 @@
package menus
import (
"fmt"
"os"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/intrange"
"github.com/Jguer/yay/v12/pkg/settings"
"github.com/Jguer/yay/v12/pkg/text"
mapset "github.com/deckarep/golang-set/v2"
)
func pkgbuildNumberMenu(logger *text.Logger, pkgbuildDirs map[string]string,
bases []string, installed mapset.Set[string],
) {
toPrint := ""
for n, pkgBase := range bases {
dir := pkgbuildDirs[pkgBase]
toPrint += fmt.Sprintf(text.Magenta("%3d")+" %-40s", len(pkgbuildDirs)-n,
text.Bold(pkgBase))
if installed.Contains(pkgBase) {
toPrint += text.Bold(text.Green(gotext.Get(" (Installed)")))
}
// TODO: remove or refactor to check if git dir is unclean
if _, err := os.Stat(dir); !os.IsNotExist(err) {
toPrint += text.Bold(text.Green(gotext.Get(" (Build Files Exist)")))
}
toPrint += "\n"
}
logger.Print(toPrint)
}
func selectionMenu(logger *text.Logger, pkgbuildDirs map[string]string, bases []string, installed mapset.Set[string],
message string, noConfirm bool, defaultAnswer string, skipFunc func(string) bool,
) ([]string, error) {
selected := make([]string, 0)
pkgbuildNumberMenu(logger, pkgbuildDirs, bases, installed)
logger.Infoln(message)
logger.Infoln(gotext.Get("%s [A]ll [Ab]ort [I]nstalled [No]tInstalled or (1 2 3, 1-3, ^4)", text.Cyan(gotext.Get("[N]one"))))
selectInput, err := logger.GetInput(defaultAnswer, noConfirm)
if err != nil {
return nil, err
}
eInclude, eExclude, eOtherInclude, eOtherExclude := intrange.ParseNumberMenu(selectInput)
eIsInclude := len(eExclude) == 0 && eOtherExclude.Cardinality() == 0
if eOtherInclude.Contains("abort") || eOtherInclude.Contains("ab") {
return nil, settings.ErrUserAbort{}
}
if eOtherInclude.Contains("n") || eOtherInclude.Contains("none") {
return selected, nil
}
for i, pkgBase := range bases {
if skipFunc != nil && skipFunc(pkgBase) {
continue
}
anyInstalled := installed.Contains(pkgBase)
if !eIsInclude && eExclude.Get(len(bases)-i) {
continue
}
if anyInstalled && (eOtherInclude.Contains("i") || eOtherInclude.Contains("installed")) {
selected = append(selected, pkgBase)
continue
}
if !anyInstalled && (eOtherInclude.Contains("no") || eOtherInclude.Contains("notinstalled")) {
selected = append(selected, pkgBase)
continue
}
if eOtherInclude.Contains("a") || eOtherInclude.Contains("all") {
selected = append(selected, pkgBase)
continue
}
if eIsInclude && (eInclude.Get(len(bases)-i) || eOtherInclude.Contains(pkgBase)) {
selected = append(selected, pkgBase)
}
if !eIsInclude && (!eExclude.Get(len(bases)-i) && !eOtherExclude.Contains(pkgBase)) {
selected = append(selected, pkgBase)
}
}
return selected, nil
}

View File

@ -4,13 +4,16 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/xml" "encoding/xml"
"fmt"
"html" "html"
"io" "io"
"net/http" "net/http"
"os"
"strings" "strings"
"time" "time"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/settings"
"github.com/Jguer/yay/v11/pkg/text"
) )
type item struct { type item struct {
@ -21,13 +24,13 @@ type item struct {
Creator string `xml:"dc:creator"` Creator string `xml:"dc:creator"`
} }
func (item *item) printNews(logger *text.Logger, buildTime time.Time, all, quiet bool) { func (item *item) print(buildTime time.Time, all, quiet bool) {
var fd string var fd string
date, err := time.Parse(time.RFC1123Z, item.PubDate) date, err := time.Parse(time.RFC1123Z, item.PubDate)
if err != nil { if err != nil {
logger.Errorln(err) fmt.Fprintln(os.Stderr, err)
} else { } else {
fd = text.FormatTime(int(date.Unix())) fd = text.FormatTime(int(date.Unix()))
if !all && !buildTime.IsZero() { if !all && !buildTime.IsZero() {
@ -37,11 +40,11 @@ func (item *item) printNews(logger *text.Logger, buildTime time.Time, all, quiet
} }
} }
logger.Println(text.Bold(text.Magenta(fd)), text.Bold(strings.TrimSpace(item.Title))) fmt.Println(text.Bold(text.Magenta(fd)), text.Bold(strings.TrimSpace(item.Title)))
if !quiet { if !quiet {
desc := strings.TrimSpace(parseNews(item.Description)) desc := strings.TrimSpace(parseNews(item.Description))
logger.Println(desc) fmt.Println(desc)
} }
} }
@ -58,10 +61,8 @@ type rss struct {
Channel channel `xml:"channel"` Channel channel `xml:"channel"`
} }
func PrintNewsFeed(ctx context.Context, client *http.Client, logger *text.Logger, func PrintNewsFeed(ctx context.Context, client *http.Client, cutOffDate time.Time, sortMode int, all, quiet bool) error {
cutOffDate time.Time, bottomUp, all, quiet bool, req, err := http.NewRequestWithContext(ctx, "GET", "https://archlinux.org/feeds/news", nil)
) error {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, "https://archlinux.org/feeds/news", http.NoBody)
if err != nil { if err != nil {
return err return err
} }
@ -85,13 +86,13 @@ func PrintNewsFeed(ctx context.Context, client *http.Client, logger *text.Logger
return err return err
} }
if bottomUp { if sortMode == settings.BottomUp {
for i := len(rssGot.Channel.Items) - 1; i >= 0; i-- { for i := len(rssGot.Channel.Items) - 1; i >= 0; i-- {
rssGot.Channel.Items[i].printNews(logger, cutOffDate, all, quiet) rssGot.Channel.Items[i].print(cutOffDate, all, quiet)
} }
} else { } else {
for i := 0; i < len(rssGot.Channel.Items); i++ { for i := 0; i < len(rssGot.Channel.Items); i++ {
rssGot.Channel.Items[i].printNews(logger, cutOffDate, all, quiet) rssGot.Channel.Items[i].print(cutOffDate, all, quiet)
} }
} }
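Both versions of `PrintNewsFeed` fetch the Arch Linux news RSS feed over HTTP and decode it with `encoding/xml` before printing the items top-down or bottom-up. A small self-contained sketch of just the fetch-and-decode step; the struct and field names are illustrative, not yay's types:

```go
package main

import (
	"context"
	"encoding/xml"
	"fmt"
	"net/http"
)

// feed pulls only the item titles out of the RSS document.
type feed struct {
	Titles []string `xml:"channel>item>title"`
}

func main() {
	req, err := http.NewRequestWithContext(context.Background(), http.MethodGet,
		"https://archlinux.org/feeds/news", http.NoBody)
	if err != nil {
		panic(err)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var f feed
	if err := xml.NewDecoder(resp.Body).Decode(&f); err != nil {
		panic(err)
	}

	for _, title := range f.Titles {
		fmt.Println(title)
	}
}
```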

View File

@ -1,6 +1,3 @@
//go:build !integration
// +build !integration
package news package news
import ( import (
@ -8,15 +5,12 @@ import (
"io" "io"
"net/http" "net/http"
"os" "os"
"strings"
"testing" "testing"
"time" "time"
"github.com/bradleyjkemp/cupaloy" "github.com/bradleyjkemp/cupaloy"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"gopkg.in/h2non/gock.v1" "gopkg.in/h2non/gock.v1"
"github.com/Jguer/yay/v12/pkg/text"
) )
const lastNews = ` const lastNews = `
@ -113,7 +107,7 @@ func TestPrintNewsFeed(t *testing.T) {
type args struct { type args struct {
cutOffDate time.Time cutOffDate time.Time
bottomUp bool sortMode int
all bool all bool
quiet bool quiet bool
} }
@ -122,12 +116,11 @@ func TestPrintNewsFeed(t *testing.T) {
args args args args
wantErr bool wantErr bool
}{ }{
{name: "all-verbose", args: args{bottomUp: true, cutOffDate: time.Now(), all: true, quiet: false}, wantErr: false}, {name: "all-verbose", args: args{cutOffDate: time.Now(), all: true, quiet: false}, wantErr: false},
{name: "all-quiet", args: args{bottomUp: true, cutOffDate: lastNewsTime, all: true, quiet: true}, wantErr: false}, {name: "all-quiet", args: args{cutOffDate: lastNewsTime, all: true, quiet: true}, wantErr: false},
{name: "latest-quiet", args: args{bottomUp: true, cutOffDate: lastNewsTime, all: false, quiet: true}, wantErr: false}, {name: "latest-quiet", args: args{cutOffDate: lastNewsTime, all: false, quiet: true}, wantErr: false},
{name: "latest-quiet-topdown", args: args{bottomUp: false, cutOffDate: lastNewsTime, all: false, quiet: true}, wantErr: false}, {name: "latest-quiet-topdown", args: args{sortMode: 1, cutOffDate: lastNewsTime, all: false, quiet: true}, wantErr: false},
} }
t.Setenv("TZ", "UTC")
for _, tt := range tests { for _, tt := range tests {
tt := tt tt := tt
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
@ -138,16 +131,17 @@ func TestPrintNewsFeed(t *testing.T) {
defer gock.Off() defer gock.Off()
rescueStdout := os.Stdout
r, w, _ := os.Pipe() r, w, _ := os.Pipe()
logger := text.NewLogger(w, w, strings.NewReader(""), false, "logger") os.Stdout = w
err := PrintNewsFeed(context.Background(), &http.Client{}, logger, err := PrintNewsFeed(context.TODO(), &http.Client{}, tt.args.cutOffDate, tt.args.sortMode, tt.args.all, tt.args.quiet)
tt.args.cutOffDate, tt.args.bottomUp, tt.args.all, tt.args.quiet)
assert.NoError(t, err) assert.NoError(t, err)
w.Close() w.Close()
out, _ := io.ReadAll(r) out, _ := io.ReadAll(r)
cupaloy.SnapshotT(t, out) cupaloy.SnapshotT(t, out)
os.Stdout = rescueStdout
}) })
} }
} }
@ -166,14 +160,15 @@ func TestPrintNewsFeedSameDay(t *testing.T) {
defer gock.Off() defer gock.Off()
rescueStdout := os.Stdout
r, w, _ := os.Pipe() r, w, _ := os.Pipe()
logger := text.NewLogger(w, w, strings.NewReader(""), false, "logger") os.Stdout = w
err := PrintNewsFeed(context.Background(), &http.Client{}, logger, err := PrintNewsFeed(context.TODO(), &http.Client{}, lastNewsTime, 0, false, false)
lastNewsTime, true, false, false)
assert.NoError(t, err) assert.NoError(t, err)
w.Close() w.Close()
out, _ := io.ReadAll(r) out, _ := io.ReadAll(r)
cupaloy.SnapshotT(t, out) cupaloy.SnapshotT(t, out)
os.Stdout = rescueStdout
} }

View File

@ -0,0 +1,5 @@
 -> ABAF11C65A2970B130ABE3C479BE3E4300411886, required by: dummy-1 (dummy-1 dummy-2)
:: Importing keys with gpg...
:: PGP keys need importing:

View File

@ -0,0 +1,5 @@
 -> 487EACC08557AD082088DABA1EB2638FF56C0C53, required by: cower
:: Importing keys with gpg...
:: PGP keys need importing:

View File

@ -0,0 +1,5 @@
 -> C52048C0C0748FEE227D47A2702353E0F7E48EDB, required by: dummy-3
:: Importing keys with gpg...
:: PGP keys need importing:

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,6 @@
 -> 11E521D646982372EB577A1F8F0871F202119294, required by: libc++
 -> B6C8F98282B944E3B0D5C2530FC3042E345AD05D, required by: libc++
:: Importing keys with gpg...
:: PGP keys need importing:

View File

@ -2,21 +2,22 @@ package pgp
import ( import (
"bytes" "bytes"
"context"
"errors" "errors"
"fmt"
"os"
"os/exec" "os/exec"
"strings" "strings"
gosrc "github.com/Morganamilo/go-srcinfo" gosrc "github.com/Morganamilo/go-srcinfo"
"github.com/leonelquinteros/gotext" "github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v12/pkg/settings/exe" "github.com/Jguer/yay/v11/pkg/dep"
"github.com/Jguer/yay/v12/pkg/text" "github.com/Jguer/yay/v11/pkg/text"
) )
// pgpKeySet maps a PGP key with a list of PKGBUILDs that require it. // pgpKeySet maps a PGP key with a list of PKGBUILDs that require it.
// This is similar to stringSet, used throughout the code. // This is similar to stringSet, used throughout the code.
type pgpKeySet map[string][]string type pgpKeySet map[string][]dep.Base
func (set pgpKeySet) toSlice() []string { func (set pgpKeySet) toSlice() []string {
slice := make([]string, 0, len(set)) slice := make([]string, 0, len(set))
@ -27,7 +28,7 @@ func (set pgpKeySet) toSlice() []string {
return slice return slice
} }
func (set pgpKeySet) set(key, p string) { func (set pgpKeySet) set(key string, p dep.Base) {
// Using ToUpper to make sure keys with a different case will be // Using ToUpper to make sure keys with a different case will be
// considered the same. // considered the same.
upperKey := strings.ToUpper(key) upperKey := strings.ToUpper(key)
@ -41,62 +42,65 @@ func (set pgpKeySet) get(key string) bool {
return exists return exists
} }
type GPGCmdBuilder interface {
exe.Runner
BuildGPGCmd(ctx context.Context, extraArgs ...string) *exec.Cmd
}
// CheckPgpKeys iterates through the keys listed in the PKGBUILDs and if needed, // CheckPgpKeys iterates through the keys listed in the PKGBUILDs and if needed,
// asks the user whether yay should try to import them. // asks the user whether yay should try to import them.
func CheckPgpKeys(ctx context.Context, logger *text.Logger, pkgbuildDirsByBase map[string]string, srcinfos map[string]*gosrc.Srcinfo, func CheckPgpKeys(bases []dep.Base, srcinfos map[string]*gosrc.Srcinfo,
cmdBuilder GPGCmdBuilder, noConfirm bool, gpgBin, gpgFlags string, noConfirm bool) error {
) ([]string, error) {
// Let's check the keys individually, and then we can offer to import // Let's check the keys individually, and then we can offer to import
// the problematic ones. // the problematic ones.
problematic := make(pgpKeySet) problematic := make(pgpKeySet)
args := append(strings.Fields(gpgFlags), "--list-keys")
// Mapping all the keys. // Mapping all the keys.
for pkg := range pkgbuildDirsByBase { for _, base := range bases {
pkg := base.Pkgbase()
srcinfo := srcinfos[pkg] srcinfo := srcinfos[pkg]
for _, key := range srcinfo.ValidPGPKeys { for _, key := range srcinfo.ValidPGPKeys {
// If key already marked as problematic, indicate the current // If key already marked as problematic, indicate the current
// PKGBUILD requires it. // PKGBUILD requires it.
if problematic.get(key) { if problematic.get(key) {
problematic.set(key, pkg) problematic.set(key, base)
continue continue
} }
if err := cmdBuilder.Show(cmdBuilder.BuildGPGCmd(ctx, "--list-keys", key)); err != nil { cmd := exec.Command(gpgBin, append(args, key)...)
problematic.set(key, pkg) if err := cmd.Run(); err != nil {
problematic.set(key, base)
} }
} }
} }
// No key issues! // No key issues!
if len(problematic) == 0 { if len(problematic) == 0 {
return []string{}, nil return nil
} }
str, err := formatKeysToImport(logger, problematic) str, err := formatKeysToImport(problematic)
if err != nil { if err != nil {
return nil, err return err
} }
logger.Println("\n", str) fmt.Println()
fmt.Println(str)
if logger.ContinueTask(gotext.Get("Import?"), true, noConfirm) { if text.ContinueTask(gotext.Get("Import?"), true, noConfirm) {
return problematic.toSlice(), importKeys(ctx, logger, cmdBuilder, problematic.toSlice()) return importKeys(problematic.toSlice(), gpgBin, gpgFlags)
} }
return problematic.toSlice(), nil return nil
} }
// importKeys tries to import the list of keys specified in its argument. // importKeys tries to import the list of keys specified in its argument.
func importKeys(ctx context.Context, logger *text.Logger, cmdBuilder GPGCmdBuilder, keys []string) error { func importKeys(keys []string, gpgBin, gpgFlags string) error {
logger.OperationInfoln(gotext.Get("Importing keys with gpg...")) args := append(strings.Fields(gpgFlags), "--recv-keys")
cmd := exec.Command(gpgBin, append(args, keys...)...)
cmd.Stdin, cmd.Stdout, cmd.Stderr = os.Stdin, os.Stdout, os.Stderr
if err := cmdBuilder.Show(cmdBuilder.BuildGPGCmd(ctx, append([]string{"--recv-keys"}, keys...)...)); err != nil { text.OperationInfoln(gotext.Get("Importing keys with gpg..."))
if err := cmd.Run(); err != nil {
return errors.New(gotext.Get("problem importing keys")) return errors.New(gotext.Get("problem importing keys"))
} }
@ -105,23 +109,23 @@ func importKeys(ctx context.Context, logger *text.Logger, cmdBuilder GPGCmdBuild
// formatKeysToImport receives a set of keys and returns a string containing the // formatKeysToImport receives a set of keys and returns a string containing the
// question asking whether the user wants to import the problematic keys. // question asking whether the user wants to import the problematic keys.
func formatKeysToImport(logger *text.Logger, keys pgpKeySet) (string, error) { func formatKeysToImport(keys pgpKeySet) (string, error) {
if len(keys) == 0 { if len(keys) == 0 {
return "", errors.New(gotext.Get("no keys to import")) return "", errors.New(gotext.Get("no keys to import"))
} }
var buffer bytes.Buffer var buffer bytes.Buffer
buffer.WriteString(logger.SprintOperationInfo(gotext.Get("PGP keys need importing:"))) buffer.WriteString(text.SprintOperationInfo(gotext.Get("PGP keys need importing:")))
for key, bases := range keys { for key, bases := range keys {
pkglist := "" pkglist := ""
for _, base := range bases { for _, base := range bases {
pkglist += base + " " pkglist += base.String() + " "
} }
pkglist = strings.TrimRight(pkglist, " ") pkglist = strings.TrimRight(pkglist, " ")
buffer.WriteString("\n" + logger.SprintWarn(gotext.Get("%s, required by: %s", text.Cyan(key), text.Cyan(pkglist)))) buffer.WriteString("\n" + text.SprintWarn(gotext.Get("%s, required by: %s", text.Cyan(key), text.Cyan(pkglist))))
} }
return buffer.String(), nil return buffer.String(), nil
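Both sides of this hunk probe each `ValidPGPKeys` fingerprint with `gpg --list-keys` and, after the user confirms, import the missing ones with `gpg --recv-keys`; the new code routes those calls through a `GPGCmdBuilder`, while the old one built `exec.Command` directly. A standalone sketch of the underlying gpg interaction, calling `gpg` directly and omitting yay's builder and extra keyserver flags:

```go
package main

import (
	"fmt"
	"os/exec"
)

// missingKeys returns the fingerprints gpg does not know about yet,
// mirroring the --list-keys probe in CheckPgpKeys (sketch only).
func missingKeys(keys []string) []string {
	var missing []string
	for _, key := range keys {
		if err := exec.Command("gpg", "--list-keys", key).Run(); err != nil {
			missing = append(missing, key)
		}
	}
	return missing
}

// importKeys fetches the given fingerprints from the default keyserver,
// as the --recv-keys call in the hunk above does.
func importKeys(keys []string) error {
	return exec.Command("gpg", append([]string{"--recv-keys"}, keys...)...).Run()
}

func main() {
	missing := missingKeys([]string{"487EACC08557AD082088DABA1EB2638FF56C0C53"})
	if len(missing) > 0 {
		fmt.Println("importing:", missing)
		_ = importKeys(missing)
	}
}
```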

273 pkg/pgp/keys_test.go Normal file
View File

@ -0,0 +1,273 @@
package pgp
import (
"bytes"
"context"
"fmt"
"io"
"net/http"
"os"
"path"
"regexp"
"sort"
"strings"
"testing"
aur "github.com/Jguer/aur"
gosrc "github.com/Morganamilo/go-srcinfo"
"github.com/bradleyjkemp/cupaloy"
"github.com/Jguer/yay/v11/pkg/dep"
)
const (
// The default port used by the PGP key server.
gpgServerPort = 11371
)
func init() {
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
regex := regexp.MustCompile(`search=0[xX]([a-fA-F0-9]+)`)
matches := regex.FindStringSubmatch(r.RequestURI)
data := ""
if matches != nil {
data = getPgpKey(matches[1])
}
w.Header().Set("Content-Type", "application/pgp-keys")
_, err := w.Write([]byte(data))
if err != nil {
fmt.Fprintln(os.Stderr, err)
}
})
}
func newPkg(basename string) *aur.Pkg {
return &aur.Pkg{Name: basename, PackageBase: basename}
}
func getPgpKey(key string) string {
var buffer bytes.Buffer
if contents, err := os.ReadFile(path.Join("testdata", key)); err == nil {
buffer.WriteString("-----BEGIN PGP PUBLIC KEY BLOCK-----\n")
buffer.WriteString("Version: SKS 1.1.6\n")
buffer.WriteString("Comment: Hostname: yay\n\n")
buffer.Write(contents)
buffer.WriteString("\n-----END PGP PUBLIC KEY BLOCK-----\n")
}
return buffer.String()
}
func startPgpKeyServer() *http.Server {
srv := &http.Server{Addr: fmt.Sprintf("127.0.0.1:%d", gpgServerPort)}
go func() {
err := srv.ListenAndServe()
if err != nil {
fmt.Fprintln(os.Stderr, err)
}
}()
return srv
}
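
For orientation, a minimal sketch (not part of the diff) of the kind of request the mock keyserver above answers. gpg, pointed at `--keyserver 127.0.0.1`, speaks HKP on port 11371; the `/pks/lookup` path and the `op`/`options` parameters below are the conventional HKP form and an assumption here, since the handler in `init()` only inspects the `search=0x<KEYID>` parameter. The fingerprint is one of the keys used in the tests below.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Conventional HKP lookup URL against the local test server; only the
	// search=0x<KEYID> parameter matters to the handler above.
	url := "http://127.0.0.1:11371/pks/lookup?op=get&options=mr&search=0x487EACC08557AD082088DABA1EB2638FF56C0C53"

	resp, err := http.Get(url)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// Prints the armored key block assembled by getPgpKey when a matching
	// file exists under testdata/.
	fmt.Println(string(body))
}
```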
func TestImportKeys(t *testing.T) {
keyringDir, err := os.MkdirTemp("/tmp", "yay-test-keyring")
if err != nil {
t.Fatalf("Unable to init test keyring %q: %v\n", keyringDir, err)
}
defer os.RemoveAll(keyringDir)
server := startPgpKeyServer()
defer func() {
err := server.Shutdown(context.TODO())
if err != nil {
fmt.Fprintln(os.Stderr, err)
}
}()
casetests := []struct {
keys []string
wantError bool
}{
// Single key, should succeed.
// C52048C0C0748FEE227D47A2702353E0F7E48EDB: Thomas Dickey.
{
keys: []string{"C52048C0C0748FEE227D47A2702353E0F7E48EDB"},
wantError: false,
},
// Two keys, should succeed as well.
// 11E521D646982372EB577A1F8F0871F202119294: Tom Stellard.
// B6C8F98282B944E3B0D5C2530FC3042E345AD05D: Hans Wennborg.
{
keys: []string{
"11E521D646982372EB577A1F8F0871F202119294",
"B6C8F98282B944E3B0D5C2530FC3042E345AD05D",
},
wantError: false,
},
// Single invalid key, should fail.
{
keys: []string{"THIS-SHOULD-FAIL"},
wantError: true,
},
// Two invalid keys, should fail.
{
keys: []string{"THIS-SHOULD-FAIL", "THIS-ONE-SHOULD-FAIL-TOO"},
wantError: true,
},
// Invalid + valid key. Should fail as well.
// 647F28654894E3BD457199BE38DBBDC86092693E: Greg Kroah-Hartman.
{
keys: []string{
"THIS-SHOULD-FAIL",
"647F28654894E3BD457199BE38DBBDC86092693E",
},
wantError: true,
},
}
for _, tt := range casetests {
err := importKeys(tt.keys, "gpg", fmt.Sprintf("--homedir %s --keyserver 127.0.0.1", keyringDir))
if !tt.wantError {
if err != nil {
t.Fatalf("Got error %q, want no error", err)
}
continue
}
// Here, we want to see the error.
if err == nil {
t.Fatalf("Got no error; want error")
}
}
}
func makeSrcinfo(pkgbase string, pgpkeys ...string) *gosrc.Srcinfo {
srcinfo := gosrc.Srcinfo{}
srcinfo.Pkgbase = pkgbase
srcinfo.ValidPGPKeys = pgpkeys
return &srcinfo
}
func TestCheckPgpKeys(t *testing.T) {
keyringDir, err := os.MkdirTemp("/tmp", "yay-test-keyring")
if err != nil {
t.Fatalf("Unable to init test keyring: %v\n", err)
}
defer os.RemoveAll(keyringDir)
server := startPgpKeyServer()
defer func() {
err := server.Shutdown(context.TODO())
if err != nil {
fmt.Fprintln(os.Stderr, err)
}
}()
casetests := []struct {
name string
pkgs dep.Base
srcinfos map[string]*gosrc.Srcinfo
wantError bool
}{
// cower: single package, one valid key not yet in the keyring.
// 487EACC08557AD082088DABA1EB2638FF56C0C53: Dave Reisner.
{
name: " one valid key not yet in the keyring",
pkgs: dep.Base{newPkg("cower")},
srcinfos: map[string]*gosrc.Srcinfo{"cower": makeSrcinfo("cower", "487EACC08557AD082088DABA1EB2638FF56C0C53")},
wantError: false,
},
// libc++: single package, two valid keys not yet in the keyring.
// 11E521D646982372EB577A1F8F0871F202119294: Tom Stellard.
// B6C8F98282B944E3B0D5C2530FC3042E345AD05D: Hans Wennborg.
{
name: "two valid keys not yet in the keyring",
pkgs: dep.Base{newPkg("libc++")},
srcinfos: map[string]*gosrc.Srcinfo{
"libc++": makeSrcinfo("libc++", "11E521D646982372EB577A1F8F0871F202119294", "B6C8F98282B944E3B0D5C2530FC3042E345AD05D"),
},
wantError: false,
},
// Two dummy packages requiring the same key.
// ABAF11C65A2970B130ABE3C479BE3E4300411886: Linus Torvalds.
{
name: "Two dummy packages requiring the same key",
pkgs: dep.Base{newPkg("dummy-1"), newPkg("dummy-2")},
srcinfos: map[string]*gosrc.Srcinfo{
"dummy-1": makeSrcinfo("dummy-1",
"ABAF11C65A2970B130ABE3C479BE3E4300411886"),
"dummy-2": makeSrcinfo("dummy-2", "ABAF11C65A2970B130ABE3C479BE3E4300411886"),
},
wantError: false,
},
// dummy package: single package, two valid keys, one of them already
// in the keyring.
// 11E521D646982372EB577A1F8F0871F202119294: Tom Stellard.
// C52048C0C0748FEE227D47A2702353E0F7E48EDB: Thomas Dickey.
{
name: "one already in keyring",
pkgs: dep.Base{newPkg("dummy-3")},
srcinfos: map[string]*gosrc.Srcinfo{
"dummy-3": makeSrcinfo("dummy-3", "11E521D646982372EB577A1F8F0871F202119294", "C52048C0C0748FEE227D47A2702353E0F7E48EDB"),
},
wantError: false,
},
// Two dummy packages with existing keys.
{
name: "two existing",
pkgs: dep.Base{newPkg("dummy-4"), newPkg("dummy-5")},
srcinfos: map[string]*gosrc.Srcinfo{
"dummy-4": makeSrcinfo("dummy-4", "11E521D646982372EB577A1F8F0871F202119294"),
"dummy-5": makeSrcinfo("dummy-5", "C52048C0C0748FEE227D47A2702353E0F7E48EDB"),
},
wantError: false,
},
// Dummy package with invalid key, should fail.
{
name: "one invalid",
pkgs: dep.Base{newPkg("dummy-7")},
srcinfos: map[string]*gosrc.Srcinfo{"dummy-7": makeSrcinfo("dummy-7", "THIS-SHOULD-FAIL")},
wantError: true,
},
// Dummy package with both an invalid an another valid key, should fail.
// A314827C4E4250A204CE6E13284FC34C8E4B1A25: Thomas Bächler.
{
name: "one invalid, one valid",
pkgs: dep.Base{newPkg("dummy-8")},
srcinfos: map[string]*gosrc.Srcinfo{"dummy-8": makeSrcinfo("dummy-8", "A314827C4E4250A204CE6E13284FC34C8E4B1A25", "THIS-SHOULD-FAIL")},
wantError: true,
},
}
for _, tt := range casetests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
rescueStdout := os.Stdout
r, w, _ := os.Pipe()
os.Stdout = w
err := CheckPgpKeys([]dep.Base{tt.pkgs}, tt.srcinfos, "gpg",
fmt.Sprintf("--homedir %s --keyserver 127.0.0.1", keyringDir), true)
if !tt.wantError {
if err != nil {
t.Fatalf("Got error %q, want no error", err)
}
w.Close()
out, _ := io.ReadAll(r)
os.Stdout = rescueStdout
splitLines := strings.Split(string(out), "\n")
sort.Strings(splitLines)
cupaloy.SnapshotT(t, strings.Join(splitLines, "\n"))
return
}
// Here, we want to see the error.
if err == nil {
t.Fatalf("Got no error; want error")
}
})
}
}

pkg/query/aur_info.go (new file, 100 lines)

@ -0,0 +1,100 @@
package query
import (
"context"
"sync"
"github.com/Jguer/aur"
"github.com/leonelquinteros/gotext"
"github.com/Jguer/yay/v11/pkg/intrange"
"github.com/Jguer/yay/v11/pkg/multierror"
"github.com/Jguer/yay/v11/pkg/text"
)
type Pkg = aur.Pkg
// Queries the aur for information about specified packages.
// All packages should be queried in a single aur request except when the number
// of packages exceeds the number set in config.RequestSplitN.
// If the number does exceed config.RequestSplitN multiple aur requests will be
// performed concurrently.
func AURInfo(ctx context.Context, aurClient *aur.Client, names []string, warnings *AURWarnings, splitN int) ([]*Pkg, error) {
info := make([]*Pkg, 0, len(names))
seen := make(map[string]int)
var (
mux sync.Mutex
wg sync.WaitGroup
errs multierror.MultiError
)
makeRequest := func(n, max int) {
defer wg.Done()
tempInfo, requestErr := aurClient.Info(ctx, names[n:max])
if requestErr != nil {
errs.Add(requestErr)
return
}
mux.Lock()
for i := range tempInfo {
info = append(info, &tempInfo[i])
}
mux.Unlock()
}
for n := 0; n < len(names); n += splitN {
max := intrange.Min(len(names), n+splitN)
wg.Add(1)
go makeRequest(n, max)
}
wg.Wait()
if err := errs.Return(); err != nil {
return info, err
}
for k, pkg := range info {
seen[pkg.Name] = k
}
for _, name := range names {
i, ok := seen[name]
if !ok && !warnings.Ignore.Get(name) {
warnings.Missing = append(warnings.Missing, name)
continue
}
pkg := info[i]
if pkg.Maintainer == "" && !warnings.Ignore.Get(name) {
warnings.Orphans = append(warnings.Orphans, name)
}
if pkg.OutOfDate != 0 && !warnings.Ignore.Get(name) {
warnings.OutOfDate = append(warnings.OutOfDate, name)
}
}
return info, nil
}
func AURInfoPrint(ctx context.Context, aurClient *aur.Client, names []string, splitN int) ([]*Pkg, error) {
text.OperationInfoln(gotext.Get("Querying AUR..."))
warnings := &AURWarnings{}
info, err := AURInfo(ctx, aurClient, names, warnings, splitN)
if err != nil {
return info, err
}
warnings.Print()
return info, nil
}
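
A usage sketch, not part of the diff, of the v11.0.0 `AURInfo` above, assuming an already-constructed `*aur.Client`. The helper name `lookupPkgs` and the split value of 100 are illustrative: with 150 names and splitN of 100, the call results in two concurrent AUR info requests, as the comment on `AURInfo` describes.

```go
package example

import (
	"context"
	"fmt"

	"github.com/Jguer/aur"

	"github.com/Jguer/yay/v11/pkg/query"
)

// lookupPkgs is a hypothetical caller of query.AURInfo.
func lookupPkgs(ctx context.Context, client *aur.Client, names []string) error {
	warnings := query.NewWarnings() // v11.0.0 signature, shown later in this diff
	info, err := query.AURInfo(ctx, client, names, warnings, 100)
	if err != nil {
		return err
	}
	warnings.Print()

	for _, pkg := range info {
		fmt.Println(pkg.Name, pkg.Version)
	}
	return nil
}
```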

View File

@ -1,92 +1,47 @@
 package query
 import (
+	"fmt"
 	"strings"
 	"github.com/leonelquinteros/gotext"
-	"github.com/Jguer/aur"
-	"github.com/Jguer/go-alpm/v2"
-	"github.com/Jguer/yay/v12/pkg/db"
-	"github.com/Jguer/yay/v12/pkg/text"
+	"github.com/Jguer/yay/v11/pkg/stringset"
+	"github.com/Jguer/yay/v11/pkg/text"
 )
 type AURWarnings struct {
 	Orphans   []string
 	OutOfDate []string
 	Missing   []string
-	LocalNewer []string
-	log *text.Logger
+	Ignore stringset.StringSet
 }
-func NewWarnings(logger *text.Logger) *AURWarnings {
-	return &AURWarnings{log: logger}
+func NewWarnings() *AURWarnings {
+	return &AURWarnings{Ignore: make(stringset.StringSet)}
 }
-func (warnings *AURWarnings) AddToWarnings(remote map[string]alpm.IPackage, aurPkg *aur.Pkg) {
-	name := aurPkg.Name
-	pkg, ok := remote[name]
-	if !ok {
-		return
-	}
-	if aurPkg.Maintainer == "" && !pkg.ShouldIgnore() {
-		warnings.Orphans = append(warnings.Orphans, name)
-	}
-	if aurPkg.OutOfDate != 0 && !pkg.ShouldIgnore() {
-		warnings.OutOfDate = append(warnings.OutOfDate, name)
-	}
-	if !pkg.ShouldIgnore() && !isDevelPackage(pkg) && db.VerCmp(pkg.Version(), aurPkg.Version) > 0 {
-		left, right := GetVersionDiff(pkg.Version(), aurPkg.Version)
-		newerMsg := gotext.Get("%s: local (%s) is newer than AUR (%s)",
-			text.Cyan(name),
-			left, right,
-		)
-		warnings.LocalNewer = append(warnings.LocalNewer, newerMsg)
-	}
-}
-func (warnings *AURWarnings) CalculateMissing(remoteNames []string,
-	remote map[string]alpm.IPackage, aurData map[string]*aur.Pkg,
-) {
-	for _, name := range remoteNames {
-		if _, ok := aurData[name]; !ok && !remote[name].ShouldIgnore() {
-			if _, ok := aurData[strings.TrimSuffix(name, "-debug")]; !ok {
-				warnings.Missing = append(warnings.Missing, name)
-			}
-		}
-	}
-}
 func (warnings *AURWarnings) Print() {
 	normalMissing, debugMissing := filterDebugPkgs(warnings.Missing)
 	if len(normalMissing) > 0 {
-		warnings.log.Warnln(gotext.Get("Packages not in AUR:"), formatNames(normalMissing))
+		text.Warn(gotext.Get("Missing AUR Packages:"))
+		printRange(normalMissing)
 	}
 	if len(debugMissing) > 0 {
-		warnings.log.Warnln(gotext.Get("Missing AUR Debug Packages:"), formatNames(debugMissing))
+		text.Warn(gotext.Get("Missing AUR Debug Packages:"))
+		printRange(debugMissing)
 	}
 	if len(warnings.Orphans) > 0 {
-		warnings.log.Warnln(gotext.Get("Orphan (unmaintained) AUR Packages:"), formatNames(warnings.Orphans))
+		text.Warn(gotext.Get("Orphaned AUR Packages:"))
+		printRange(warnings.Orphans)
 	}
 	if len(warnings.OutOfDate) > 0 {
-		warnings.log.Warnln(gotext.Get("Flagged Out Of Date AUR Packages:"), formatNames(warnings.OutOfDate))
-	}
-	if len(warnings.LocalNewer) > 0 {
-		for _, newer := range warnings.LocalNewer {
-			warnings.log.Warnln(newer)
-		}
+		text.Warn(gotext.Get("Flagged Out Of Date AUR Packages:"))
+		printRange(warnings.OutOfDate)
 	}
 }
@ -105,6 +60,10 @@ func filterDebugPkgs(names []string) (normal, debug []string) {
 	return
 }
-func formatNames(names []string) string {
-	return " " + text.Cyan(strings.Join(names, " "))
+func printRange(names []string) {
+	for _, name := range names {
+		fmt.Print(" " + text.Cyan(name))
+	}
+	fmt.Println()
 }

View File

@ -1,21 +0,0 @@
package query
import (
"github.com/leonelquinteros/gotext"
)
// ErrAURSearch means that it was not possible to connect to the AUR.
type ErrAURSearch struct {
inner error
}
func (e ErrAURSearch) Error() string {
return gotext.Get("Error during AUR search: %s\n", e.inner.Error())
}
// ErrNoQuery means that query was not executed.
type ErrNoQuery struct{}
func (e ErrNoQuery) Error() string {
return gotext.Get("no query was executed")
}

View File

@ -3,23 +3,53 @@ package query
 import (
 	"github.com/leonelquinteros/gotext"
-	"github.com/Jguer/yay/v12/pkg/settings/parser"
-	"github.com/Jguer/yay/v12/pkg/text"
+	"github.com/Jguer/yay/v11/pkg/db"
+	"github.com/Jguer/yay/v11/pkg/settings/parser"
+	"github.com/Jguer/yay/v11/pkg/text"
 )
-func RemoveInvalidTargets(logger *text.Logger, targets []string, mode parser.TargetMode) []string {
+// GetPackageNamesBySource returns package names with and without correspondence in SyncDBS respectively.
+func GetPackageNamesBySource(dbExecutor db.Executor) (local, remote []string, err error) {
+	for _, localpkg := range dbExecutor.LocalPackages() {
+		pkgName := localpkg.Name()
+		if dbExecutor.SyncPackage(pkgName) != nil {
+			local = append(local, pkgName)
+		} else {
+			remote = append(remote, pkgName)
+		}
+	}
+	return local, remote, err
+}
+// GetRemotePackages returns packages with no correspondence in SyncDBS.
+func GetRemotePackages(dbExecutor db.Executor) (
+	remote []db.IPackage,
+	remoteNames []string) {
+	for _, localpkg := range dbExecutor.LocalPackages() {
+		pkgName := localpkg.Name()
+		if dbExecutor.SyncPackage(pkgName) == nil {
+			remote = append(remote, localpkg)
+			remoteNames = append(remoteNames, pkgName)
+		}
+	}
+	return remote, remoteNames
+}
+func RemoveInvalidTargets(targets []string, mode parser.TargetMode) []string {
 	filteredTargets := make([]string, 0)
 	for _, target := range targets {
 		dbName, _ := text.SplitDBFromName(target)
 		if dbName == "aur" && !mode.AtLeastAUR() {
-			logger.Warnln(gotext.Get("%s: can't use target with option --repo -- skipping", text.Cyan(target)))
+			text.Warnln(gotext.Get("%s: can't use target with option --repo -- skipping", text.Cyan(target)))
 			continue
 		}
 		if dbName != "aur" && dbName != "" && !mode.AtLeastRepo() {
-			logger.Warnln(gotext.Get("%s: can't use target with option --aur -- skipping", text.Cyan(target)))
+			text.Warnln(gotext.Get("%s: can't use target with option --aur -- skipping", text.Cyan(target)))
 			continue
 		}
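
A usage sketch, not part of the diff, of the v11.0.0 `GetPackageNamesBySource` helper added in the hunk above, assuming a `db.Executor` is already available. The helper name `printSources` is illustrative.

```go
package example

import (
	"fmt"

	"github.com/Jguer/yay/v11/pkg/db"
	"github.com/Jguer/yay/v11/pkg/query"
)

// printSources is a hypothetical caller: "local" holds names that resolve in a
// sync DB, "remote" the foreign (typically AUR) ones, per the code above.
func printSources(dbExecutor db.Executor) error {
	local, remote, err := query.GetPackageNamesBySource(dbExecutor)
	if err != nil {
		return err
	}

	fmt.Println("repo packages:", len(local))
	fmt.Println("foreign/AUR packages:", len(remote))
	return nil
}
```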

Some files were not shown because too many files have changed in this diff.