Compare commits

No commits in common. "main" and "v3.0" have entirely different histories.
main ... v3.0

57 changed files with 54022 additions and 134134 deletions

4
.eslintignore Normal file
View file

@ -0,0 +1,4 @@
dist/
lib/
node_modules/
jest.config.js

61
.eslintrc.json Normal file
View file

@ -0,0 +1,61 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"no-shadow": "off",
"@typescript-eslint/no-shadow": ["error"],
"i18n-text/no-en": "off",
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": [
"error",
{ "accessibility": "no-public" }
],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-comment": "error",
"camelcase": "off",
"@typescript-eslint/consistent-type-assertions": "error",
"@typescript-eslint/explicit-function-return-type": [
"error",
{ "allowExpressions": true }
],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

View file

@ -1,9 +0,0 @@
self-hosted-runner:
# Custom labels of self-hosted or large GitHub hosted runners
# so that actionlint knows that they are not a typo
labels:
- selfhosted-ubuntu-arm64
# Configuration variables in array of strings defined in your repository or
# organization. `null` means disabling configuration variables check.
# Empty array means no configuration variable is allowed.
config-variables: null

49
.github/workflows/check-dist.yml vendored Normal file
View file

@ -0,0 +1,49 @@
# `dist/index.js` is a special file in Actions.
# When you reference an action with `uses:` in a workflow,
# `index.js` is the code that will run.
# For our project, we generate this file through a build process from other source files.
# We need to make sure the checked-in `index.js` actually matches what we expect it to be.
name: Check dist/
on:
push:
branches:
- main
pull_request:
workflow_dispatch:
jobs:
check-dist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Node.js 20
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install dependencies
run: npm ci
- name: Rebuild the dist/ directory
run: |
npm run build
npm run package
- name: Compare the expected and actual dist/ directories
run: |
if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then
echo "Detected uncommitted changes after build. See status below:"
git diff --text -v
exit 1
fi
id: diff
# If index.js was different than expected, upload the expected version as an artifact
- uses: actions/upload-artifact@v4
if: ${{ failure() && steps.diff.conclusion == 'failure' }}
with:
name: dist
path: dist/

View file

@ -12,14 +12,13 @@
name: "CodeQL"
on:
workflow_dispatch:
push:
branches:
- main
branches: [main]
pull_request:
# The branches below must be a subset of the branches above
branches:
- main
branches: [main]
schedule:
- cron: "31 7 * * 3"
jobs:
analyze:

View file

@ -3,20 +3,17 @@ name: Release Drafter
# yamllint disable-line rule:truthy
on:
workflow_dispatch:
push:
branches:
- main
workflow_dispatch:
jobs:
update_release_draft:
name: ✏️ Draft release
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: read
steps:
- name: 🚀 Run Release Drafter
uses: release-drafter/release-drafter@v6.1.0
uses: release-drafter/release-drafter@v6.0.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@ -0,0 +1,49 @@
name: "test-cache-windows"
on:
pull_request:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test-setup-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- run: uv sync
working-directory: __tests__\fixtures\uv-project
test-restore-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was hit
run: |
if ($env:CACHE_HIT -ne "true") {
exit 1
}
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__\fixtures\uv-project

123
.github/workflows/test-cache.yml vendored Normal file
View file

@ -0,0 +1,123 @@
name: "test-cache"
on:
pull_request:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test-setup-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-setup-cache-dependency-glob:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-dependency-glob:
runs-on: ubuntu-latest
needs: test-setup-cache-dependency-glob
steps:
- uses: actions/checkout@v4
- name: Change pyproject.toml
run: |
echo '[tool.uv]' >> __tests__/fixtures/uv-project/pyproject.toml
echo 'dev-dependencies = []' >> __tests__/fixtures/uv-project/pyproject.toml
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was not hit
run: |
if [ "$CACHE_HIT" == "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
test-setup-cache-local:
runs-on: oracle-aarch64
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-local-path: /tmp/uv-cache
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-local:
runs-on: oracle-aarch64
needs: test-setup-cache-local
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-local-path: /tmp/uv-cache
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project

27
.github/workflows/test-windows.yml vendored Normal file
View file

@ -0,0 +1,27 @@
name: "test-windows"
on:
pull_request:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test-default-version:
runs-on: windows-latest
steps:
- uses: actions/checkout@v4
- name: Should not be on path
run: |
if (!(Get-Command -Name "uv" -ErrorAction SilentlyContinue)) {
exit 0
} else {
exit 1
}
- name: Setup uv
uses: ./
- run: uv sync
working-directory: __tests__\fixtures\uv-project

View file

@ -1,9 +1,6 @@
name: "test"
on:
workflow_dispatch:
pull_request:
branches:
- main
push:
branches:
- main
@ -12,16 +9,11 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: read
jobs:
lint:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Actionlint
uses: eifinger/actionlint-action@23c85443d840cd73bbecb9cddfc933cc21649a38 # v1.9.1
- uses: actions/setup-node@v4
with:
node-version: "20"
@ -31,31 +23,24 @@ jobs:
npm run all
- name: Make sure no changes from linters are detected
run: |
git diff --exit-code || (echo "::error::Please run 'npm run all' to fix the issues" && exit 1)
git diff --exit-code
test-default-version:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14, windows-latest]
os: [ubuntu-latest, macos-latest, macos-14, oracle-aarch64]
steps:
- uses: actions/checkout@v4
- name: Install latest version
id: setup-uv
- name: Install default version
uses: ./
- run: uv sync
working-directory: __tests__/fixtures/uv-project
shell: bash
- name: Check uv-path is set
run: ${{ steps.setup-uv.outputs.uv-path }} --version
- name: Check uvx-path is set
run: ${{ steps.setup-uv.outputs.uvx-path }} --version
test-specific-version:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
uv-version: ["0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
os: [ubuntu-latest, macos-latest, macos-14, oracle-aarch64]
uv-version: ["latest", "0.3.0", "0.3.2"]
steps:
- uses: actions/checkout@v4
- name: Install version ${{ matrix.uv-version }}
@ -64,107 +49,26 @@ jobs:
version: ${{ matrix.uv-version }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-semver-range:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install version 0.3
id: setup-uv
uses: ./
with:
version: "0.3"
- name: Correct version gets installed
run: |
if [ "$UV_VERSION" != "0.3.5" ]; then
exit 1
fi
env:
UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}
test-pep440-version:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install version 0.4.30
id: setup-uv
uses: ./
with:
version: ">=0.4.25,<0.5"
- name: Correct version gets installed
run: |
if [ "$UV_VERSION" != "0.4.30" ]; then
exit 1
fi
env:
UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}
test-pyproject-file-version:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install version 0.5.14
id: setup-uv
uses: ./
with:
pyproject-file: "__tests__/fixtures/pyproject-toml-project/pyproject.toml"
- name: Correct version gets installed
run: |
if [ "$UV_VERSION" != "0.5.14" ]; then
exit 1
fi
env:
UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}
test-malformed-pyproject-file-fallback:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install using malformed pyproject.toml
id: setup-uv
uses: ./
with:
pyproject-file: "__tests__/fixtures/malformed-pyproject-toml-project/pyproject.toml"
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-uv-file-version:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install version 0.5.15
id: setup-uv
uses: ./
with:
pyproject-file: "__tests__/fixtures/uv-toml-project/pyproject.toml"
uv-file: "__tests__/fixtures/uv-toml-project/uv.toml"
- name: Correct version gets installed
run: |
if [ "$UV_VERSION" != "0.5.15" ]; then
exit 1
fi
env:
UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}
test-checksum:
runs-on: ${{ matrix.inputs.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
inputs:
- os: ubuntu-latest
os: [ubuntu-latest, oracle-aarch64]
checksum:
["4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"]
exclude:
- os: oracle-aarch64
checksum: "4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"
- os: macos-latest
checksum: "a70cbfbf3bb5c08b2f84963b4f12c94e08fbb2468ba418a3bfe1066fbe9e7218"
include:
- os: oracle-aarch64
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
steps:
- uses: actions/checkout@v4
- name: Checksum matches expected
uses: ./
with:
version: "0.3.2"
checksum: ${{ matrix.inputs.checksum }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
checksum: ${{ matrix.checksum }}
test-with-explicit-token:
runs-on: ubuntu-latest
steps:
@ -175,7 +79,6 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-uvx:
runs-on: ubuntu-latest
steps:
@ -183,338 +86,3 @@ jobs:
- name: Install default version
uses: ./
- run: uvx ruff --version
test-tool-install:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
[
ubuntu-latest,
macos-latest,
macos-14,
windows-latest,
]
steps:
- uses: actions/checkout@v4
- name: Install default version
uses: ./
- run: uv tool install ruff
- run: ruff --version
test-tilde-expansion-tool-dirs:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
tool-bin-dir: "~/tool-bin-dir"
tool-dir: "~/tool-dir"
- name: "Check if tool dirs are expanded"
run: |
if ! echo "$PATH" | grep -q "/home/ubuntu/tool-bin-dir"; then
echo "PATH does not contain /home/ubuntu/tool-bin-dir: $PATH"
exit 1
fi
if [ "$UV_TOOL_DIR" != "/home/ubuntu/tool-dir" ]; then
echo "UV_TOOL_DIR does not contain /home/ubuntu/tool-dir: $UV_TOOL_DIR"
exit 1
fi
test-python-version:
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
steps:
- uses: actions/checkout@v4
- name: Install latest version
uses: ./
with:
python-version: 3.13.1t
- name: Verify UV_PYTHON is set to correct version
run: |
echo "$UV_PYTHON"
if [ "$UV_PYTHON" != "3.13.1t" ]; then
exit 1
fi
shell: bash
- name: Verify packages can be installed
run: uv pip install --python=3.13.1t pip
shell: bash
- name: Verify python version is correct
run: |
python --version
if [ "$(python --version)" != "Python 3.13.1" ]; then
exit 1
fi
shell: bash
test-musl:
runs-on: ubuntu-latest
container: alpine
steps:
- uses: actions/checkout@v4
- name: Install latest version
uses: ./
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-setup-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
enable-cache: [ "true", "false", "auto" ]
os: [ "ubuntu-latest", "selfhosted-ubuntu-arm64", "windows-latest" ]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: ${{ matrix.enable-cache }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-${{ matrix.os }}-${{ matrix.enable-cache }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
shell: bash
test-restore-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
enable-cache: [ "true", "false", "auto" ]
os: [ "ubuntu-latest", "selfhosted-ubuntu-arm64", "windows-latest" ]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: ${{ matrix.enable-cache }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-${{ matrix.os }}-${{ matrix.enable-cache }}
- name: Cache was hit
if: ${{ matrix.enable-cache == 'true' || (matrix.enable-cache == 'auto' && matrix.os == 'ubuntu-latest') }}
run: |
if [ "$CACHE_HIT" != "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
shell: bash
- name: Cache was not hit
if: ${{ matrix.enable-cache == 'false' || (matrix.enable-cache == 'auto' && matrix.os == 'selfhosted-ubuntu-arm64') }}
run: |
if [ "$CACHE_HIT" == "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
shell: bash
- run: uv sync
working-directory: __tests__/fixtures/uv-project
shell: bash
test-setup-cache-requirements-txt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-requirements-txt
- run: |
uv venv
uv pip install -r requirements.txt
working-directory: __tests__/fixtures/requirements-txt-project
test-restore-cache-requirements-txt:
runs-on: ubuntu-latest
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-requirements-txt
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: |
uv venv
uv pip install -r requirements.txt
working-directory: __tests__/fixtures/requirements-txt-project
test-setup-cache-dependency-glob:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-dependency-glob:
runs-on: ubuntu-latest
needs: test-setup-cache-dependency-glob
steps:
- uses: actions/checkout@v4
- name: Change pyproject.toml
run: |
echo '[tool.uv]' >> __tests__/fixtures/uv-project/pyproject.toml
echo 'dev-dependencies = []' >> __tests__/fixtures/uv-project/pyproject.toml
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
ignore-nothing-to-cache: true
- name: Cache was not hit
run: |
if [ "$CACHE_HIT" == "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
test-setup-cache-local:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-local-path: /tmp/uv-cache
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-local:
runs-on: selfhosted-ubuntu-arm64
needs: test-setup-cache-local
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-local-path: /tmp/uv-cache
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-tilde-expansion-cache-local-path:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Setup with cache
uses: ./
with:
cache-local-path: ~/uv-cache/cache-local-path
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-tilde-expansion-cache-dependency-glob:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Create cache dependency glob file
run: touch ~/uv-cache.glob
shell: bash
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-local-path: ~/uv-cache/cache-dependency-glob
cache-dependency-glob: "~/uv-cache.glob"
- run: uv sync
working-directory: __tests__/fixtures/uv-project
cleanup-tilde-expansion-tests:
needs:
- test-tilde-expansion-cache-local-path
- test-tilde-expansion-cache-dependency-glob
if: always()
runs-on: selfhosted-ubuntu-arm64
steps:
- name: Remove cache directory
run: rm -rf ~/uv-cache
shell: bash
- name: Remove cache dependency glob file
run: rm -f ~/uv-cache.glob
shell: bash
test-no-python-version:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Fake pyproject.toml at root
run: cp __tests__/fixtures/old-python-constraint-project/pyproject.toml pyproject.toml
- name: Setup with cache
uses: ./
with:
enable-cache: true
- run: uv sync
working-directory: __tests__/fixtures/old-python-constraint-project
all-tests-passed:
runs-on: ubuntu-latest
needs:
- lint
- test-default-version
- test-specific-version
- test-semver-range
- test-pep440-version
- test-pyproject-file-version
- test-malformed-pyproject-file-fallback
- test-uv-file-version
- test-checksum
- test-with-explicit-token
- test-uvx
- test-tool-install
- test-tilde-expansion-tool-dirs
- test-python-version
- test-musl
- test-restore-cache
- test-restore-cache-requirements-txt
- test-restore-cache-dependency-glob
- test-restore-cache-local
- test-tilde-expansion-cache-local-path
- test-tilde-expansion-cache-dependency-glob
- cleanup-tilde-expansion-tests
- test-no-python-version
if: always()
steps:
- name: All tests passed
run: |
echo "All jobs passed: ${{ !contains(needs.*.result, 'failure') }}"
# shellcheck disable=SC2242
exit ${{ contains(needs.*.result, 'failure') && 1 || 0 }}

View file

@ -1,15 +1,10 @@
name: "Update known checksums"
on:
workflow_dispatch:
schedule:
- cron: "0 4 * * *" # Run every day at 4am UTC
jobs:
build:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
@ -22,7 +17,7 @@ jobs:
src/download/checksum/known-checksums.ts ${{ secrets.GITHUB_TOKEN }}
- run: npm install && npm run all
- name: Create Pull Request
uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
uses: peter-evans/create-pull-request@6cd32fd93684475c31847837f87bb135d40a2b79 # v7.0.3
with:
commit-message: "chore: update known checksums"
title:

View file

@ -1,6 +1,7 @@
---
name: Update Major Minor Tags
# yamllint disable-line rule:truthy
on:
push:
branches-ignore:
@ -12,36 +13,7 @@ jobs:
update_major_minor_tags:
name: Make sure major and minor tags are up to date on a patch release
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- uses: actions/checkout@v4
- name: Update Major Minor Tags
run: |
set -x
cd "${GITHUB_WORKSPACE}" || exit
# Set up variables.
TAG="${GITHUB_REF#refs/tags/}" # v1.2.3
MINOR="${TAG%.*}" # v1.2
MAJOR="${MINOR%.*}" # v1
if [ "${GITHUB_REF}" = "${TAG}" ]; then
echo "This workflow is not triggered by tag push: GITHUB_REF=${GITHUB_REF}"
exit 1
fi
MESSAGE="Release ${TAG}"
# Set up Git.
git config user.name "${GITHUB_ACTOR}"
git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"
# Update MAJOR/MINOR tag
git tag -fa "${MAJOR}" -m "${MESSAGE}"
git tag -fa "${MINOR}" -m "${MESSAGE}"
# Push
git push --force origin "${MINOR}"
git push --force origin "${MAJOR}"
- name: Run Update semver
uses: haya14busa/action-update-semver@v1.2.1

3
.prettierignore Normal file
View file

@ -0,0 +1,3 @@
dist/
lib/
node_modules/

4
.prettierrc.json Normal file
View file

@ -0,0 +1,4 @@
{
"trailingComma": "all",
"proseWrap": "always"
}

331
README.md
View file

@ -11,150 +11,54 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
## Contents
- [Usage](#usage)
- [Install a required-version or latest (default)](#install-a-required-version-or-latest-default)
- [Install the latest version](#install-the-latest-version)
- [Install the latest version (default)](#install-the-latest-version-default)
- [Install a specific version](#install-a-specific-version)
- [Install a version by supplying a semver range or pep440 specifier](#install-a-version-by-supplying-a-semver-range-or-pep440-specifier)
- [Install a required-version](#install-a-required-version)
- [Python version](#python-version)
- [Validate checksum](#validate-checksum)
- [Enable Caching](#enable-caching)
- [Cache dependency glob](#cache-dependency-glob)
- [Local cache path](#local-cache-path)
- [Disable cache pruning](#disable-cache-pruning)
- [Ignore nothing to cache](#ignore-nothing-to-cache)
- [GitHub authentication token](#github-authentication-token)
- [UV_TOOL_DIR](#uv_tool_dir)
- [UV_TOOL_BIN_DIR](#uv_tool_bin_dir)
- [Tilde Expansion](#tilde-expansion)
- [How it works](#how-it-works)
- [FAQ](#faq)
## Usage
### Install a required-version or latest (default)
### Install the latest version (default)
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
version: "latest"
```
If you do not specify a version, this action will look for a [required-version](https://docs.astral.sh/uv/reference/settings/#required-version)
in a `uv.toml` or `pyproject.toml` file in the repository root. If none is found, the latest version will be installed.
For an example workflow, see
[here](https://github.com/charliermarsh/autobot/blob/e42c66659bf97b90ca9ff305a19cc99952d0d43f/.github/workflows/ci.yaml).
### Install the latest version
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
with:
version: "latest"
```
> [!TIP]
>
> Using `latest` requires that uv download the executable on every run, which incurs a cost
> (especially on self-hosted runners). As a best practice, consider pinning the version to a
> specific release.
### Install a specific version
```yaml
- name: Install a specific version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
version: "0.4.4"
```
### Install a version by supplying a semver range or pep440 specifier
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
or [pep440 specifier](https://peps.python.org/pep-0440/#version-specifiers)
to install the latest version that satisfies the range.
```yaml
- name: Install a semver range of uv
uses: astral-sh/setup-uv@v5
with:
version: ">=0.4.0"
```
```yaml
- name: Pinning a minor version of uv
uses: astral-sh/setup-uv@v5
with:
version: "0.4.x"
```
```yaml
- name: Install a pep440-specifier-satisfying version of uv
uses: astral-sh/setup-uv@v5
with:
version: ">=0.4.25,<0.5"
```
### Install a required-version
You can specify a [required-version](https://docs.astral.sh/uv/reference/settings/#required-version)
in either a `uv.toml` or `pyproject.toml` file:
```yaml
- name: Install required-version defined in uv.toml
uses: astral-sh/setup-uv@v5
with:
uv-file: "path/to/uv.toml"
```
```yaml
- name: Install required-version defined in pyproject.toml
uses: astral-sh/setup-uv@v5
with:
pyproject-file: "path/to/pyproject.toml"
```
### Python version
You can use the input `python-version` to
- set the environment variable `UV_PYTHON` for the rest of your workflow
- create a new virtual environment with the specified python version
- activate the virtual environment for the rest of your workflow
This will override any python version specifications in `pyproject.toml` and `.python-version`
```yaml
- name: Install the latest version of uv and set the python version to 3.13t
uses: astral-sh/setup-uv@v5
with:
python-version: 3.13t
- run: uv pip install --python=3.13t pip
```
You can combine this with a matrix to test multiple python versions:
```yaml
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- name: Install the latest version of uv and set the python version
uses: astral-sh/setup-uv@v5
with:
python-version: ${{ matrix.python-version }}
- name: Test with python ${{ matrix.python-version }}
run: uv run --frozen pytest
```
### Validate checksum
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
are automatically verified by this action. The sha256 hashes can be found on the
You can also specify a checksum to validate the downloaded file. Checksums up to the default version
are automatically verified by this action. The sha265 hashes can be found on the
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.
```yaml
- name: Install a specific version and validate the checksum
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
version: "0.3.1"
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
@ -162,9 +66,8 @@ are automatically verified by this action. The sha256 hashes can be found on the
### Enable caching
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.
Caching is enabled by default on GitHub-hosted runners.
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be cached to
the GitHub Actions Cache. This can speed up runs that reuse the cache by several minutes.
> [!TIP]
>
@ -176,7 +79,7 @@ You can optionally define a custom cache key suffix.
```yaml
- name: Enable caching and define a custom cache key suffix
id: setup-uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
enable-cache: true
cache-suffix: "optional-suffix"
@ -193,32 +96,23 @@ use it in subsequent steps. For example, to use the cache in the above case:
#### Cache dependency glob
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
changes. If you use relative paths, they are relative to the repository root.
If you want to control when the cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The cache will be invalidated if any file matching the glob pattern
changes. The glob matches files relative to the repository root.
> [!NOTE]
>
> You can look up supported patterns [here](https://github.com/actions/toolkit/tree/main/packages/glob#patterns)
>
> The default is
> ```yaml
> cache-dependency-glob: |
> **/requirements*.txt
> **/uv.lock
> ```
> [!NOTE] The default is `**/uv.lock`.
```yaml
- name: Define a cache dependency glob
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
enable-cache: true
cache-dependency-glob: "**/pyproject.toml"
cache-dependency-glob: "**/requirements*.txt"
```
```yaml
- name: Define a list of cache dependency globs
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
enable-cache: true
cache-dependency-glob: |
@ -226,17 +120,9 @@ changes. If you use relative paths, they are relative to the repository root.
**/pyproject.toml
```
```yaml
- name: Define an absolute cache dependency glob
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
```
```yaml
- name: Never invalidate the cache
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
enable-cache: true
cache-dependency-glob: ""
@ -244,65 +130,16 @@ changes. If you use relative paths, they are relative to the repository root.
### Local cache path
This action controls where uv stores its cache on the runner's filesystem by setting `UV_CACHE_DIR`.
It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\setup-uv-cache` on Windows and
`/tmp/setup-uv-cache` on Linux/macOS. You can change the default by specifying the path with the
`cache-local-path` input.
This action controls where uv stores its cache on the runner's filesystem. You can change the
default (`/tmp/setup-uv-cache`) by specifying the path with the `cache-local-path` input.
```yaml
- name: Define a custom uv cache path
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
cache-local-path: "/path/to/cache"
```
### Disable cache pruning
By default, the uv cache is pruned after every run, removing pre-built wheels, but retaining any
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
pre-built wheels from the cache (and instead re-download them from the registry on each run).
However, on self-hosted or local runners, preserving the cache may be more efficient. See
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more information.
If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
input.
```yaml
- name: Don't prune the cache before saving it
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
prune-cache: false
```
### Ignore nothing to cache
By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.
```yaml
- name: Ignore nothing to cache
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
ignore-nothing-to-cache: true
```
### Ignore empty workdir
By default, the action will warn if the workdir is empty, because this is usually the case when
`actions/checkout` is configured to run after `setup-uv`, which is not supported.
If you want to ignore this, set the `ignore-empty-workdir` input to `true`.
```yaml
- name: Ignore empty workdir
uses: astral-sh/setup-uv@v5
with:
ignore-empty-workdir: true
```
### GitHub authentication token
This action uses the GitHub API to fetch the uv release artifacts. To avoid hitting the GitHub API
@ -315,67 +152,11 @@ are not sufficient, you can provide a custom GitHub token with the necessary per
```yaml
- name: Install the latest version of uv with a custom GitHub token
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
```
### UV_TOOL_DIR
On Windows `UV_TOOL_DIR` is set to `uv-tool-dir` in the `TMP` dir (e.g. `D:\a\_temp\uv-tool-dir`).
On GitHub hosted runners this is on the much faster `D:` drive.
On all other platforms the tool environments are placed in the
[default location](https://docs.astral.sh/uv/concepts/tools/#tools-directory).
If you want to change this behaviour (especially on self-hosted runners) you can use the `tool-dir`
input:
```yaml
- name: Install the latest version of uv with a custom tool dir
uses: astral-sh/setup-uv@v5
with:
tool-dir: "/path/to/tool/dir"
```
### UV_TOOL_BIN_DIR
On Windows `UV_TOOL_BIN_DIR` is set to `uv-tool-bin-dir` in the `TMP` dir (e.g.
`D:\a\_temp\uv-tool-bin-dir`). On GitHub hosted runners this is on the much faster `D:` drive. This
path is also automatically added to the PATH.
On all other platforms the tool binaries get installed to the
[default location](https://docs.astral.sh/uv/concepts/tools/#the-bin-directory).
If you want to change this behaviour (especially on self-hosted runners) you can use the
`tool-bin-dir` input:
```yaml
- name: Install the latest version of uv with a custom tool bin dir
uses: astral-sh/setup-uv@v5
with:
tool-bin-dir: "/path/to/tool-bin/dir"
```
### Tilde Expansion
This action supports expanding the `~` character to the user's home directory for the following inputs:
- `cache-local-path`
- `tool-dir`
- `tool-bin-dir`
- `cache-dependency-glob`
```yaml
- name: Expand the tilde character
uses: astral-sh/setup-uv@v5
with:
cache-local-path: "~/path/to/cache"
tool-dir: "~/path/to/tool/dir"
tool-bin-dir: "~/path/to/tool-bin/dir"
cache-dependency-glob: "~/my-cache-buster"
```
## How it works
This action downloads uv from the uv repo's official
@ -390,22 +171,21 @@ by name (`uv`).
### Do I still need `actions/setup-python` alongside `setup-uv`?
With `setup-uv`, you can install a specific version of Python using `uv python install` rather than
No. This action is modelled as a drop-in replacement for `actions/setup-python` when using uv. With
`setup-uv`, you can install a specific version of Python using `uv python install` rather than
relying on `actions/setup-python`.
Using `actions/setup-python` can be faster, because GitHub caches the Python versions alongside the runner.
For example:
```yaml
- name: Checkout the repository
uses: actions/checkout@main
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
enable-cache: true
- name: Test
run: uv run --frozen pytest # Uses the Python version automatically installed by uv
run: uv run --frozen pytest
```
To install a specific version of Python, use
@ -413,7 +193,7 @@ To install a specific version of Python, use
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
with:
enable-cache: true
- name: Install Python 3.12
@ -432,52 +212,11 @@ output:
uses: actions/checkout@main
- name: Install the default version of uv
id: setup-uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v2
- name: Print the installed version
run: echo "Installed uv version is ${{ steps.setup-uv.outputs.uv-version }}"
```
### Should I include the resolution strategy in the cache key?
**Yes!**
The cache key gets computed by using the [cache-dependency-glob](#cache-dependency-glob).
If you
have jobs which use the same dependency definitions from `requirements.txt` or
`pyproject.toml` but different
[resolution strategies](https://docs.astral.sh/uv/concepts/resolution/#resolution-strategy),
each job will have different dependencies or dependency versions.
But if you do not add the resolution strategy as a [cache-suffix](#enable-caching),
they will have the same cache key.
This means the first job which starts uploading its cache will win, and all other jobs will fail
to upload the cache because they try to upload with the same cache key.
You might see errors like
`Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists`
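As an illustration, here is a minimal sketch of two jobs that share the same lockfile but use
different resolution strategies (the job names, suffix strings and `--resolution` values below are
only examples, not inputs of this action):
```yaml
jobs:
  test-highest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          # Encode the resolution strategy in the cache key so this job
          # does not collide with test-lowest-direct below.
          cache-suffix: "resolution-highest"
      - run: uv sync --resolution highest
  test-lowest-direct:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          cache-suffix: "resolution-lowest-direct"
      - run: uv sync --resolution lowest-direct
```
With distinct suffixes each job computes its own cache key, so both caches can be saved and reused.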
### Why do I see warnings like `No GitHub Actions cache found for key`
When a workflow runs for the first time on a branch and has a new cache key, because the
[cache-dependency-glob](#cache-dependency-glob) found changed files (changed dependencies),
the cache will not be found and the warning `No GitHub Actions cache found for key` will be printed.
While this might be irritating at first, it is expected behaviour and the cache will be created
and reused in later workflows.
The reason for the warning is that we have no way of knowing whether this is the first run of a new
cache key or whether the user accidentally misconfigured the [cache-dependency-glob](#cache-dependency-glob)
or [cache-suffix](#enable-caching) so that the cache never gets used.
### Do I have to run `actions/checkout` before or after `setup-uv`?
Some workflows need uv but do not need to access the repository content.
But **if** you need to access the repository content, you have to run `actions/checkout` before running `setup-uv`.
Running `actions/checkout` after `setup-uv` **is not supported**.
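A minimal sketch of the supported ordering (the `uv sync` step is just an example command):
```yaml
steps:
  # Check out first so setup-uv can see pyproject.toml / uv.lock
  # for required-version detection and cache-key hashing.
  - uses: actions/checkout@v4
  - name: Install the latest version of uv
    uses: astral-sh/setup-uv@v5
    with:
      enable-cache: true
  - run: uv sync
```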
## Acknowledgements
`setup-uv` was initially written and published by [Kevin Stillhammer](https://github.com/eifinger)

View file

@ -1,6 +0,0 @@
def main():
print("Hello from malformed-pyproject-toml-project!")
if __name__ == "__main__":
main()

View file

@ -1,9 +0,0 @@
[project]
name = "malformed-pyproject-toml-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = []
[malformed-toml

View file

@ -1,13 +0,0 @@
[project]
name = "old-python-constraint-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.8,<=3.9"
dependencies = [
"ruff>=0.6.2",
]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

View file

@ -1,2 +0,0 @@
def hello() -> str:
return "Hello from uv-project!"

View file

@ -1,38 +0,0 @@
version = 1
requires-python = ">=3.12"
[[package]]
name = "ruff"
version = "0.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/23/f4/279d044f66b79261fd37df76bf72b64471afab5d3b7906a01499c4451910/ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be", size = 2460281 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/72/4b/47dd7a69287afb4069fa42c198e899463605460a58120196711bfcf0446b/ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c", size = 9695871 },
{ url = "https://files.pythonhosted.org/packages/ae/c3/8aac62ac4638c14a740ee76a755a925f2d0d04580ab790a9887accb729f6/ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570", size = 9459354 },
{ url = "https://files.pythonhosted.org/packages/2f/cf/77fbd8d4617b9b9c503f9bffb8552c4e3ea1a58dc36975e7a9104ffb0f85/ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158", size = 9163871 },
{ url = "https://files.pythonhosted.org/packages/05/1c/765192bab32b79efbb498b06f0b9dcb3629112b53b8777ae1d19b8209e09/ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534", size = 10096250 },
{ url = "https://files.pythonhosted.org/packages/08/d0/86f3cb0f6934c99f759c232984a5204d67a26745cad2d9edff6248adf7d2/ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b", size = 9475376 },
{ url = "https://files.pythonhosted.org/packages/cd/cc/4c8d0e225b559a3fae6092ec310d7150d3b02b4669e9223f783ef64d82c0/ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d", size = 10295634 },
{ url = "https://files.pythonhosted.org/packages/db/96/d2699cfb1bb5a01c68122af43454c76c31331e1c8a9bd97d653d7c82524b/ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66", size = 11024941 },
{ url = "https://files.pythonhosted.org/packages/8b/a9/6ecd66af8929e0f2a1ed308a4137f3521789f28f0eb97d32c2ca3aa7000c/ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8", size = 10606894 },
{ url = "https://files.pythonhosted.org/packages/e4/73/2ee4cd19f44992fedac1cc6db9e3d825966072f6dcbd4032f21cbd063170/ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1", size = 11552886 },
{ url = "https://files.pythonhosted.org/packages/60/4c/c0f1cd35ce4a93c54a6bb1ee6934a3a205fa02198dd076678193853ceea1/ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1", size = 10264945 },
{ url = "https://files.pythonhosted.org/packages/c4/89/e45c9359b9cdd4245512ea2b9f2bb128a997feaa5f726fc9e8c7a66afadf/ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23", size = 10100007 },
{ url = "https://files.pythonhosted.org/packages/06/74/0bd4e0a7ed5f6908df87892f9bf60a2356c0fd74102d8097298bd9b4f346/ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a", size = 9559267 },
{ url = "https://files.pythonhosted.org/packages/54/03/3dc6dc9419f276f05805bf888c279e3e0b631284abd548d9e87cebb93aec/ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c", size = 9905304 },
{ url = "https://files.pythonhosted.org/packages/5c/5b/d6a72a6a6bbf097c09de468326ef5fa1c9e7aa5e6e45979bc0d984b0dbe7/ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56", size = 10341480 },
{ url = "https://files.pythonhosted.org/packages/79/a9/0f2f21fe15ba537c46598f96aa9ae4a3d4b9ec64926664617ca6a8c772f4/ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da", size = 7961901 },
{ url = "https://files.pythonhosted.org/packages/b0/80/fff12ffe11853d9f4ea3e5221e6dd2e93640a161c05c9579833e09ad40a7/ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2", size = 8783320 },
{ url = "https://files.pythonhosted.org/packages/56/91/577cdd64cce5e74d3f8b5ecb93f29566def569c741eb008aed4f331ef821/ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9", size = 8225886 },
]
[[package]]
name = "uv-project"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "ruff" },
]
[package.metadata]
requires-dist = [{ name = "ruff" }]

View file

@ -1,6 +0,0 @@
def main():
print("Hello from pyproject-toml-project!")
if __name__ == "__main__":
main()

View file

@ -1,19 +0,0 @@
[project]
name = "pyproject-toml-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = []
[dependency-groups]
dev = [
"reuse==5.0.2",
{include-group = "lint"},
]
lint = [
"flake8==4.0.1",
]
[tool.uv]
required-version = "==0.5.14"

View file

@ -1 +0,0 @@
print("Hello world")

View file

@ -1 +0,0 @@
ruff>=0.6.2

View file

@ -1,6 +0,0 @@
def main():
print("Hello from uv-toml-project!")
if __name__ == "__main__":
main()

View file

@ -1,10 +0,0 @@
[project]
name = "uv-toml-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = []
[tool.uv]
required-version = "==0.5.14"

View file

@ -1 +0,0 @@
required-version = "==0.5.15"

View file

@ -1,66 +1,36 @@
name: "astral-sh/setup-uv"
description:
"Set up your GitHub Actions workflow with a specific version of uv."
author: "astral-sh"
name: "Python setup uv"
description: "Set up your GitHub Actions workflow with a specific version of uv"
author: "eifinger"
inputs:
version:
description: "The version of uv to install e.g., `0.5.0` Defaults to the version in pyproject.toml or 'latest'."
default: ""
pyproject-file:
description: "Path to a pyproject.toml"
default: ""
uv-file:
description: "Path to a uv.toml"
default: ""
python-version:
description: "The version of Python to set UV_PYTHON to"
required: false
description: "The version of uv to install"
default: "latest"
checksum:
description: "The checksum of the uv version to install"
required: false
github-token:
description:
"Used to increase the rate limit when retrieving versions and downloading uv."
"Used to increase the rate limit when retrieving versions and downloading
uv."
required: false
default: ${{ github.token }}
enable-cache:
description: "Enable uploading of the uv cache"
default: "auto"
description: "Enable caching of the uv cache"
default: "false"
cache-dependency-glob:
description:
"Glob pattern to match files relative to the repository root to control
the cache."
default: |
**/uv.lock
**/requirements*.txt
default: "**/uv.lock"
cache-suffix:
description: "Suffix for the cache key"
required: false
cache-local-path:
description: "Local path to store the cache."
default: ""
prune-cache:
description: "Prune cache before saving."
default: "true"
ignore-nothing-to-cache:
description: "Ignore when nothing is found to cache."
default: "false"
ignore-empty-workdir:
description: "Ignore when the working directory is empty."
default: "false"
tool-dir:
description: "Custom path to set UV_TOOL_DIR to."
required: false
tool-bin-dir:
description: "Custom path to set UV_TOOL_BIN_DIR to."
required: false
outputs:
uv-version:
description: "The installed uv version. Useful when using latest."
uv-path:
description: "The path to the installed uv binary."
uvx-path:
description: "The path to the installed uvx binary."
cache-hit:
description: "A boolean value to indicate a cache entry was found"
runs:
@ -70,4 +40,4 @@ runs:
post-if: success()
branding:
icon: "package"
color: "black"
color: "blue"

View file

@ -1,31 +0,0 @@
{
"$schema": "https://biomejs.dev/schemas/1.9.2/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",
"useIgnoreFile": false
},
"files": {
"ignoreUnknown": false,
"ignore": ["dist", "lib", "node_modules"]
},
"formatter": {
"enabled": true,
"indentStyle": "space"
},
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
},
"javascript": {
"formatter": {
"quoteStyle": "double",
"trailingCommas": "all"
}
}
}

53689
dist/save-cache/index.js generated vendored

File diff suppressed because one or more lines are too long

71437
dist/setup/index.js generated vendored

File diff suppressed because one or more lines are too long

50859
dist/update-known-checksums/index.js generated vendored

File diff suppressed because one or more lines are too long

7486
package-lock.json generated

File diff suppressed because it is too large

View file

@ -6,44 +6,52 @@
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"format": "biome format --fix",
"format-check": "biome format",
"lint": "biome lint --fix",
"format": "prettier --write .",
"format-check": "prettier --check .",
"lint": "eslint src/**/*.ts --fix",
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-checksums src/update-known-checksums.ts",
"test": "jest",
"act": "act pull_request -W .github/workflows/test.yml --container-architecture linux/amd64 -s GITHUB_TOKEN=\"$(gh auth token)\"",
"update-known-checksums": "RUNNER_TEMP=known_checksums node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"update-known-checksums": "node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"all": "npm run build && npm run format && npm run lint && npm run package && npm test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/astral-sh/setup-uv.git"
},
"keywords": ["actions", "python", "setup", "uv"],
"keywords": [
"actions",
"python",
"setup",
"uv"
],
"author": "@eifinger",
"license": "MIT",
"dependencies": {
"@actions/cache": "^4.0.3",
"@actions/core": "^1.11.1",
"@actions/cache": "^3.2.4",
"@actions/core": "^1.10.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.5.0",
"@actions/io": "^1.1.3",
"@actions/tool-cache": "^2.0.2",
"@octokit/core": "^6.1.4",
"@octokit/plugin-paginate-rest": "^11.4.3",
"@octokit/plugin-rest-endpoint-methods": "^13.3.1",
"@renovatebot/pep440": "^4.1.0",
"smol-toml": "^1.3.1",
"undici": "^7.5.0"
"@actions/tool-cache": "^2.0.1",
"@octokit/rest": "^21.0.2"
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^22.13.10",
"@types/node": "^22.5.5",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.3",
"@typescript-eslint/eslint-plugin": "^7.15.0",
"@typescript-eslint/parser": "^7.18.0",
"@vercel/ncc": "^0.38.1",
"eslint": "^8.57.0",
"eslint-plugin-github": "^5.0.2",
"eslint-plugin-import": "^2.30.0",
"eslint-plugin-jest": "^28.8.3",
"eslint-plugin-prettier": "^5.2.1",
"jest": "^29.7.0",
"js-yaml": "^4.1.0",
"ts-jest": "^29.2.6",
"typescript": "^5.8.2"
"prettier": "^3.3.3",
"ts-jest": "^29.2.5",
"typescript": "^5.6.2"
}
}

View file

@ -1,21 +1,19 @@
import * as cache from "@actions/cache";
import * as glob from "@actions/glob";
import * as core from "@actions/core";
import {
cacheDependencyGlob,
cacheLocalPath,
cacheSuffix,
pythonVersion as pythonVersionInput,
} from "../utils/inputs";
import { getArch, getPlatform } from "../utils/platforms";
import { hashFiles } from "../hash/hash-files";
import * as exec from "@actions/exec";
export const STATE_CACHE_KEY = "cache-key";
export const STATE_CACHE_MATCHED_KEY = "cache-matched-key";
const CACHE_VERSION = "1";
export async function restoreCache(): Promise<void> {
const cacheKey = await computeKeys();
export async function restoreCache(version: string): Promise<void> {
const cacheKey = await computeKeys(version);
let matchedKey: string | undefined;
core.info(
@ -35,57 +33,28 @@ export async function restoreCache(): Promise<void> {
handleMatchResult(matchedKey, cacheKey);
}
async function computeKeys(): Promise<string> {
async function computeKeys(version: string): Promise<string> {
let cacheDependencyPathHash = "-";
if (cacheDependencyGlob !== "") {
core.info(
`Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
);
cacheDependencyPathHash += await hashFiles(cacheDependencyGlob, true);
cacheDependencyPathHash += await glob.hashFiles(
cacheDependencyGlob,
undefined,
undefined,
true,
);
if (cacheDependencyPathHash === "-") {
core.warning(
`No file matched to [${cacheDependencyGlob.split("\n").join(",")}]. The cache will never get invalidated. Make sure you have checked out the target repository and configured the cache-dependency-glob input correctly.`,
throw new Error(
`No file in ${process.cwd()} matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
);
}
}
if (cacheDependencyPathHash === "-") {
cacheDependencyPathHash = "-no-dependency-glob";
} else {
cacheDependencyPathHash += "no-dependency-glob";
}
const suffix = cacheSuffix ? `-${cacheSuffix}` : "";
const pythonVersion = await getPythonVersion();
const platform = await getPlatform();
return `setup-uv-${CACHE_VERSION}-${getArch()}-${platform}-${pythonVersion}${cacheDependencyPathHash}${suffix}`;
}
async function getPythonVersion(): Promise<string> {
if (pythonVersionInput !== "") {
return pythonVersionInput;
}
let output = "";
const options: exec.ExecOptions = {
silent: !core.isDebug(),
listeners: {
stdout: (data: Buffer) => {
output += data.toString();
},
},
};
try {
const execArgs = ["python", "find"];
await exec.exec("uv", execArgs, options);
const pythonPath = output.trim();
output = "";
await exec.exec(pythonPath, ["--version"], options);
// output is like "Python 3.8.10"
return output.split(" ")[1].trim();
} catch (error) {
const err = error as Error;
core.debug(`Failed to get python version from uv. Error: ${err.message}`);
return "unknown";
}
return `setup-uv-${CACHE_VERSION}-${getArch()}-${getPlatform()}-${version}${cacheDependencyPathHash}${suffix}`;
}
function handleMatchResult(

View file

@ -1,9 +1,9 @@
import * as fs from "node:fs";
import * as crypto from "node:crypto";
import * as fs from "fs";
import * as crypto from "crypto";
import * as core from "@actions/core";
import { KNOWN_CHECKSUMS } from "./known-checksums";
import type { Architecture, Platform } from "../../utils/platforms";
import { Architecture, Platform } from "../../utils/platforms";
export async function validateChecksum(
checkSum: string | undefined,
@ -12,11 +12,11 @@ export async function validateChecksum(
platform: Platform,
version: string,
): Promise<void> {
let isValid: boolean | undefined = undefined;
let isValid = true;
if (checkSum !== undefined && checkSum !== "") {
isValid = await validateFileCheckSum(downloadPath, checkSum);
} else {
core.debug("Checksum not provided. Checking known checksums.");
core.debug(`Checksum not provided. Checking known checksums.`);
const key = `${arch}-${platform}-${version}`;
if (key in KNOWN_CHECKSUMS) {
const knownChecksum = KNOWN_CHECKSUMS[`${arch}-${platform}-${version}`];
@ -27,12 +27,10 @@ export async function validateChecksum(
}
}
if (isValid === false) {
if (!isValid) {
throw new Error(`Checksum for ${downloadPath} did not match ${checkSum}.`);
}
if (isValid === true) {
core.debug(`Checksum for ${downloadPath} is valid.`);
}
core.debug(`Checksum for ${downloadPath} is valid.`);
}
async function validateFileCheckSum(

File diff suppressed because it is too large

View file

@ -1,6 +1,5 @@
import { promises as fs } from "node:fs";
import { promises as fs } from "fs";
import * as tc from "@actions/tool-cache";
import { KNOWN_CHECKSUMS } from "./known-checksums";
export async function updateChecksums(
filePath: string,
downloadUrls: string[],
@ -8,50 +7,31 @@ export async function updateChecksums(
await fs.rm(filePath);
await fs.appendFile(
filePath,
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: { [key: string]: string } = {\n",
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: {[key: string]: string} = {\n",
);
let firstLine = true;
for (const downloadUrl of downloadUrls) {
const content = await downloadAssetContent(downloadUrl);
const checksum = content.split(" ")[0].trim();
const key = getKey(downloadUrl);
if (key === undefined) {
continue;
}
const checksum = await getOrDownloadChecksum(key, downloadUrl);
if (!firstLine) {
await fs.appendFile(filePath, ",\n");
}
await fs.appendFile(filePath, ` "${key}":\n "${checksum}"`);
await fs.appendFile(filePath, ` '${key}':\n '${checksum}'`);
firstLine = false;
}
await fs.appendFile(filePath, ",\n};\n");
await fs.appendFile(filePath, "}\n");
}
function getKey(downloadUrl: string): string | undefined {
function getKey(downloadUrl: string): string {
// https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256
const parts = downloadUrl.split("/");
const fileName = parts[parts.length - 1];
if (fileName.startsWith("source")) {
return undefined;
}
const name = fileName.split(".")[0].split("uv-")[1];
const version = parts[parts.length - 2];
return `${name}-${version}`;
}
async function getOrDownloadChecksum(
key: string,
downloadUrl: string,
): Promise<string> {
let checksum = "";
if (key in KNOWN_CHECKSUMS) {
checksum = KNOWN_CHECKSUMS[key];
} else {
const content = await downloadAssetContent(downloadUrl);
checksum = content.split(" ")[0].trim();
}
return checksum;
}
async function downloadAssetContent(downloadUrl: string): Promise<string> {
const downloadPath = await tc.downloadTool(downloadUrl);
const content = await fs.readFile(downloadPath, "utf8");


@ -0,0 +1,71 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as exec from "@actions/exec";
import * as path from "path";
import { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";
export async function downloadLatest(
platform: Platform,
arch: Architecture,
checkSum: string | undefined,
githubToken: string | undefined,
): Promise<{ cachedToolDir: string; version: string }> {
const artifact = `uv-${arch}-${platform}`;
let downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/latest/download/${artifact}`;
if (platform === "pc-windows-msvc") {
downloadUrl += ".zip";
} else {
downloadUrl += ".tar.gz";
}
core.info(`Downloading uv from "${downloadUrl}" ...`);
const downloadPath = await tc.downloadTool(
downloadUrl,
undefined,
githubToken,
);
let uvExecutablePath: string;
let uvDir: string;
if (platform === "pc-windows-msvc") {
uvDir = await tc.extractZip(downloadPath);
// On windows extracting the zip does not create an intermediate directory
uvExecutablePath = path.join(uvDir, "uv.exe");
} else {
const extractedDir = await tc.extractTar(downloadPath);
uvDir = path.join(extractedDir, artifact);
uvExecutablePath = path.join(uvDir, "uv");
}
const version = await getVersion(uvExecutablePath);
await validateChecksum(checkSum, downloadPath, arch, platform, version);
const cachedToolDir = await tc.cacheDir(
uvDir,
TOOL_CACHE_NAME,
version,
arch,
);
return { cachedToolDir, version };
}
async function getVersion(uvExecutablePath: string): Promise<string> {
// Parse the output of `uv --version` to get the version
// The output looks like
// uv 0.3.1 (be17d132a 2024-08-21)
const options: exec.ExecOptions = {
silent: !core.isDebug(),
};
const execArgs = ["--version"];
let output = "";
options.listeners = {
stdout: (data: Buffer) => {
output += data.toString();
},
};
await exec.exec(uvExecutablePath, execArgs, options);
const parts = output.split(" ");
return parts[1];
}
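As a concrete illustration of the parsing above (the sample string is taken from the comment in the code; nothing else is implied):
const sample = "uv 0.3.1 (be17d132a 2024-08-21)";
const parsed = sample.split(" ")[1]; // "0.3.1"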


@ -1,26 +1,18 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as path from "node:path";
import * as pep440 from "@renovatebot/pep440";
import { promises as fs } from "node:fs";
import * as path from "path";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";
import type { Architecture, Platform } from "../utils/platforms";
import { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import { Octokit } from "../utils/octokit";
export function tryGetFromToolCache(
arch: Architecture,
version: string,
): { version: string; installedPath: string | undefined } {
): string | undefined {
core.debug(`Trying to get uv from tool cache for ${version}...`);
const cachedVersions = tc.findAllVersions(TOOL_CACHE_NAME, arch);
core.debug(`Cached versions: ${cachedVersions}`);
let resolvedVersion = tc.evaluateVersions(cachedVersions, version);
if (resolvedVersion === "") {
resolvedVersion = version;
}
const installedPath = tc.find(TOOL_CACHE_NAME, resolvedVersion, arch);
return { version: resolvedVersion, installedPath };
return tc.find(TOOL_CACHE_NAME, version, arch);
}
export async function downloadVersion(
@ -28,15 +20,15 @@ export async function downloadVersion(
arch: Architecture,
version: string,
checkSum: string | undefined,
githubToken: string,
): Promise<{ version: string; cachedToolDir: string }> {
const resolvedVersion = await resolveVersion(version, githubToken);
githubToken: string | undefined,
): Promise<string> {
const artifact = `uv-${arch}-${platform}`;
let extension = ".tar.gz";
let downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/download/${version}/${artifact}`;
if (platform === "pc-windows-msvc") {
extension = ".zip";
downloadUrl += ".zip";
} else {
downloadUrl += ".tar.gz";
}
const downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/download/${resolvedVersion}/${artifact}${extension}`;
core.info(`Downloading uv from "${downloadUrl}" ...`);
const downloadPath = await tc.downloadTool(
@ -44,133 +36,16 @@ export async function downloadVersion(
undefined,
githubToken,
);
await validateChecksum(
checkSum,
downloadPath,
arch,
platform,
resolvedVersion,
);
await validateChecksum(checkSum, downloadPath, arch, platform, version);
let uvDir: string;
if (platform === "pc-windows-msvc") {
const fullPathWithExtension = `${downloadPath}${extension}`;
await fs.copyFile(downloadPath, fullPathWithExtension);
uvDir = await tc.extractZip(fullPathWithExtension);
uvDir = await tc.extractZip(downloadPath);
// On windows extracting the zip does not create an intermediate directory
} else {
const extractedDir = await tc.extractTar(downloadPath);
uvDir = path.join(extractedDir, artifact);
}
const cachedToolDir = await tc.cacheDir(
uvDir,
TOOL_CACHE_NAME,
resolvedVersion,
arch,
);
return { version: resolvedVersion, cachedToolDir };
}
export async function resolveVersion(
versionInput: string,
githubToken: string,
): Promise<string> {
core.debug(`Resolving version: ${versionInput}`);
const version =
versionInput === "latest"
? await getLatestVersion(githubToken)
: versionInput;
if (tc.isExplicitVersion(version)) {
core.debug(`Version ${version} is an explicit version.`);
return version;
}
const availableVersions = await getAvailableVersions(githubToken);
core.debug(`Available versions: ${availableVersions}`);
const resolvedVersion = maxSatisfying(availableVersions, version);
if (resolvedVersion === undefined) {
throw new Error(`No version found for ${version}`);
}
return resolvedVersion;
}
async function getAvailableVersions(githubToken: string): Promise<string[]> {
try {
const octokit = new Octokit({
auth: githubToken,
});
return await getReleaseTagNames(octokit);
} catch (err) {
if ((err as Error).message.includes("Bad credentials")) {
core.info(
"No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
);
const octokit = new Octokit();
return await getReleaseTagNames(octokit);
}
throw err;
}
}
async function getReleaseTagNames(
octokit: InstanceType<typeof Octokit>,
): Promise<string[]> {
const response = await octokit.paginate(octokit.rest.repos.listReleases, {
owner: OWNER,
repo: REPO,
});
return response.map((release) => release.tag_name);
}
async function getLatestVersion(githubToken: string) {
core.debug("Getting latest version...");
const octokit = new Octokit({
auth: githubToken,
});
let latestRelease: { tag_name: string } | undefined;
try {
latestRelease = await getLatestRelease(octokit);
} catch (err) {
core.info(
"No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
);
if (err instanceof Error) {
core.debug(err.message);
}
const octokit = new Octokit();
latestRelease = await getLatestRelease(octokit);
}
if (!latestRelease) {
throw new Error("Could not determine latest release.");
}
core.debug(`Latest version: ${latestRelease.tag_name}`);
return latestRelease.tag_name;
}
async function getLatestRelease(octokit: InstanceType<typeof Octokit>) {
const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
owner: OWNER,
repo: REPO,
});
return latestRelease;
}
function maxSatisfying(
versions: string[],
version: string,
): string | undefined {
const maxSemver = tc.evaluateVersions(versions, version);
if (maxSemver !== "") {
core.debug(`Found a version that satisfies the semver range: ${maxSemver}`);
return maxSemver;
}
const maxPep440 = pep440.maxSatisfying(versions, version);
if (maxPep440 !== null) {
core.debug(
`Found a version that satisfies the pep440 specifier: ${maxPep440}`,
);
return maxPep440;
}
return undefined;
return await tc.cacheDir(uvDir, TOOL_CACHE_NAME, version, arch);
}
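To illustrate the two-step resolution above (semver via the tool-cache helper first, then a PEP 440 fallback), here is a minimal sketch; the version list and ranges are made up for the example:
import * as tc from "@actions/tool-cache";
import * as pep440 from "@renovatebot/pep440";
const available = ["0.4.0", "0.4.30", "0.5.0"]; // hypothetical release tags
tc.evaluateVersions(available, ">=0.4.0 <0.5.0"); // "0.4.30" (semver-style range)
pep440.maxSatisfying(available, ">=0.4.0,<0.5.0"); // "0.4.30" (PEP 440 specifier)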


@ -1,48 +0,0 @@
import * as crypto from "node:crypto";
import * as core from "@actions/core";
import * as fs from "node:fs";
import * as stream from "node:stream";
import * as util from "node:util";
import { create } from "@actions/glob";
/**
* Hashes files matching the given glob pattern.
*
* Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
* but, unlike the original, it supports hashing files outside the GITHUB_WORKSPACE.
* @param pattern The glob pattern to match files.
* @param verbose Whether to log the files being hashed.
*/
export async function hashFiles(
pattern: string,
verbose = false,
): Promise<string> {
const globber = await create(pattern);
let hasMatch = false;
const writeDelegate = verbose ? core.info : core.debug;
const result = crypto.createHash("sha256");
let count = 0;
for await (const file of globber.globGenerator()) {
writeDelegate(file);
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash("sha256");
const pipeline = util.promisify(stream.pipeline);
await pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
result.end();
if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest("hex");
}
writeDelegate("No matches found for glob");
return "";
}
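A hypothetical call (the glob patterns are examples, not the action's defaults):
// Newline-separated patterns, as supported by @actions/glob:
const depsHash = await hashFiles("**/uv.lock\n**/requirements*.txt", true);
// Returns "" when nothing matched, otherwise a hex SHA-256 over all matched files.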


@ -1,33 +1,22 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as fs from "node:fs";
import {
STATE_CACHE_MATCHED_KEY,
STATE_CACHE_KEY,
} from "./cache/restore-cache";
import {
cacheLocalPath,
enableCache,
ignoreNothingToCache,
pruneCache as shouldPruneCache,
} from "./utils/inputs";
import { cacheLocalPath, enableCache } from "./utils/inputs";
export async function run(): Promise<void> {
try {
if (enableCache) {
await saveCache();
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
process.exit(0);
}
} catch (error) {
const err = error as Error;
core.setFailed(err.message);
}
process.exit(0);
}
async function saveCache(): Promise<void> {
@ -37,38 +26,17 @@ async function saveCache(): Promise<void> {
if (!cacheKey) {
core.warning("Error retrieving cache key from state.");
return;
}
if (matchedKey === cacheKey) {
} else if (matchedKey === cacheKey) {
core.info(`Cache hit occurred on key ${cacheKey}, not saving cache.`);
return;
}
if (shouldPruneCache) {
await pruneCache();
}
await pruneCache();
core.info(`Saving cache path: ${cacheLocalPath}`);
if (!fs.existsSync(cacheLocalPath) && !ignoreNothingToCache) {
throw new Error(
`Cache path ${cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
);
}
try {
await cache.saveCache([cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
} catch (e) {
if (
e instanceof Error &&
e.message ===
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved."
) {
core.info(
"No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.",
);
} else {
throw e;
}
}
await cache.saveCache([cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
}
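The body of pruneCache is cut off below; a plausible sketch (an assumption about the implementation, not confirmed by this diff) is to shell out to uv's CI-oriented prune command before saving:
async function pruneCacheSketch(): Promise<void> {
  const options: exec.ExecOptions = { silent: !core.isDebug() };
  // `uv cache prune --ci` drops cache entries that are cheap to re-download,
  // keeping the saved cache small for CI runners.
  await exec.exec("uv", ["cache", "prune", "--ci"], options);
}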
async function pruneCache(): Promise<void> {


@ -1,38 +1,28 @@
import * as core from "@actions/core";
import * as path from "node:path";
import * as path from "path";
import {
downloadVersion,
tryGetFromToolCache,
resolveVersion,
} from "./download/download-version";
import { restoreCache } from "./cache/restore-cache";
import { downloadLatest } from "./download/download-latest";
import {
type Architecture,
Architecture,
getArch,
getPlatform,
type Platform,
Platform,
} from "./utils/platforms";
import {
cacheLocalPath,
checkSum,
ignoreEmptyWorkdir,
enableCache,
githubToken,
pyProjectFile,
pythonVersion,
toolBinDir,
toolDir,
uvFile,
version as versionInput,
version,
} from "./utils/inputs";
import * as exec from "@actions/exec";
import fs from "node:fs";
import { getUvVersionFromConfigFile } from "./utils/pyproject";
async function run(): Promise<void> {
detectEmptyWorkdir();
const platform = await getPlatform();
const platform = getPlatform();
const arch = getArch();
try {
@ -42,148 +32,68 @@ async function run(): Promise<void> {
if (arch === undefined) {
throw new Error(`Unsupported architecture: ${process.arch}`);
}
const setupResult = await setupUv(platform, arch, checkSum, githubToken);
const setupResult = await setupUv(
platform,
arch,
version,
checkSum,
githubToken,
);
addUvToPath(setupResult.uvDir);
core.setOutput("uv-version", version);
core.info(`Successfully installed uv version ${version}`);
addUvToPathAndOutput(setupResult.uvDir);
addToolBinToPath();
setToolDir();
await setupPython();
addMatchers();
setCacheDir(cacheLocalPath);
core.setOutput("uv-version", setupResult.version);
core.info(`Successfully installed uv version ${setupResult.version}`);
if (enableCache) {
await restoreCache();
await restoreCache(setupResult.version);
}
process.exit(0);
} catch (err) {
core.setFailed((err as Error).message);
}
}
function detectEmptyWorkdir(): void {
if (fs.readdirSync(".").length === 0) {
if (ignoreEmptyWorkdir) {
core.info(
"Empty workdir detected. Ignoring because ignore-empty-workdir is enabled",
);
} else {
core.warning(
"Empty workdir detected. This may cause unexpected behavior. You can enable ignore-empty-workdir to mute this warning.",
);
}
}
process.exit(0);
}
async function setupUv(
platform: Platform,
arch: Architecture,
versionInput: string,
checkSum: string | undefined,
githubToken: string,
githubToken: string | undefined,
): Promise<{ uvDir: string; version: string }> {
const resolvedVersion = await determineVersion();
const toolCacheResult = tryGetFromToolCache(arch, resolvedVersion);
if (toolCacheResult.installedPath) {
core.info(`Found uv in tool-cache for ${toolCacheResult.version}`);
return {
uvDir: toolCacheResult.installedPath,
version: toolCacheResult.version,
};
}
const downloadVersionResult = await downloadVersion(
platform,
arch,
resolvedVersion,
checkSum,
githubToken,
);
return {
uvDir: downloadVersionResult.cachedToolDir,
version: downloadVersionResult.version,
};
}
async function determineVersion(): Promise<string> {
if (versionInput !== "") {
return await resolveVersion(versionInput, githubToken);
}
const configFile = uvFile !== "" ? uvFile : pyProjectFile;
if (configFile !== "") {
const versionFromConfigFile = getUvVersionFromConfigFile(configFile);
if (versionFromConfigFile === undefined) {
core.warning(
`Could not find required-version under [tool.uv] in ${configFile}. Falling back to latest`,
);
let installedPath: string | undefined;
let cachedToolDir: string;
let version: string;
if (versionInput === "latest") {
const result = await downloadLatest(platform, arch, checkSum, githubToken);
version = result.version;
cachedToolDir = result.cachedToolDir;
} else {
version = versionInput;
installedPath = tryGetFromToolCache(arch, versionInput);
if (installedPath) {
core.info(`Found uv in tool-cache for ${versionInput}`);
return { uvDir: installedPath, version };
}
return await resolveVersion(versionFromConfigFile || "latest", githubToken);
cachedToolDir = await downloadVersion(
platform,
arch,
versionInput,
checkSum,
githubToken,
);
}
if (!fs.existsSync("uv.toml") && !fs.existsSync("pyproject.toml")) {
return await resolveVersion("latest", githubToken);
}
const versionFile = fs.existsSync("uv.toml") ? "uv.toml" : "pyproject.toml";
const versionFromConfigFile = getUvVersionFromConfigFile(versionFile);
return await resolveVersion(versionFromConfigFile || "latest", githubToken);
return { uvDir: cachedToolDir, version };
}
function addUvToPathAndOutput(cachedPath: string): void {
core.setOutput("uv-path", `${cachedPath}${path.sep}uv`);
core.setOutput("uvx-path", `${cachedPath}${path.sep}uvx`);
function addUvToPath(cachedPath: string): void {
core.addPath(cachedPath);
core.info(`Added ${cachedPath} to the path`);
}
function addToolBinToPath(): void {
if (toolBinDir !== undefined) {
core.exportVariable("UV_TOOL_BIN_DIR", toolBinDir);
core.info(`Set UV_TOOL_BIN_DIR to ${toolBinDir}`);
core.addPath(toolBinDir);
core.info(`Added ${toolBinDir} to the path`);
} else {
if (process.env.XDG_BIN_HOME !== undefined) {
core.addPath(process.env.XDG_BIN_HOME);
core.info(`Added ${process.env.XDG_BIN_HOME} to the path`);
} else if (process.env.XDG_DATA_HOME !== undefined) {
core.addPath(`${process.env.XDG_DATA_HOME}/../bin`);
core.info(`Added ${process.env.XDG_DATA_HOME}/../bin to the path`);
} else {
core.addPath(`${process.env.HOME}/.local/bin`);
core.info(`Added ${process.env.HOME}/.local/bin to the path`);
}
}
}
function setToolDir(): void {
if (toolDir !== undefined) {
core.exportVariable("UV_TOOL_DIR", toolDir);
core.info(`Set UV_TOOL_DIR to ${toolDir}`);
}
}
async function setupPython(): Promise<void> {
if (pythonVersion !== "") {
core.exportVariable("UV_PYTHON", pythonVersion);
core.info(`Set UV_PYTHON to ${pythonVersion}`);
const options: exec.ExecOptions = {
silent: !core.isDebug(),
};
const execArgs = ["venv", "--python", pythonVersion];
core.info("Activating python venv...");
await exec.exec("uv", execArgs, options);
let venvBinPath = ".venv/bin";
if (process.platform === "win32") {
venvBinPath = ".venv/Scripts";
}
core.addPath(path.resolve(venvBinPath));
core.exportVariable("VIRTUAL_ENV", path.resolve(".venv"));
}
}
function setCacheDir(cacheLocalPath: string): void {
core.exportVariable("UV_CACHE_DIR", cacheLocalPath);
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`);


@ -1,8 +1,8 @@
import * as semver from "semver";
import * as github from "@actions/github";
import * as core from "@actions/core";
import { Octokit } from "./utils/octokit";
import { OWNER, REPO } from "./utils/constants";
import * as semver from "semver";
import { updateChecksums } from "./download/checksum/update-known-checksums";
@ -10,9 +10,7 @@ async function run(): Promise<void> {
const checksumFilePath = process.argv.slice(2)[0];
const github_token = process.argv.slice(2)[1];
const octokit = new Octokit({
auth: github_token,
});
const octokit = github.getOctokit(github_token);
const response = await octokit.paginate(octokit.rest.repos.listReleases, {
owner: OWNER,


@ -1,80 +1,24 @@
import * as core from "@actions/core";
import path from "node:path";
import path from "path";
export const version = core.getInput("version");
export const pyProjectFile = core.getInput("pyproject-file");
export const uvFile = core.getInput("uv-file");
export const pythonVersion = core.getInput("python-version");
export const checkSum = core.getInput("checksum");
export const enableCache = getEnableCache();
export const enableCache = core.getInput("enable-cache") === "true";
export const cacheSuffix = core.getInput("cache-suffix") || "";
export const cacheLocalPath = getCacheLocalPath();
export const cacheDependencyGlob = core.getInput("cache-dependency-glob");
export const pruneCache = core.getInput("prune-cache") === "true";
export const ignoreNothingToCache =
core.getInput("ignore-nothing-to-cache") === "true";
export const ignoreEmptyWorkdir =
core.getInput("ignore-empty-workdir") === "true";
export const toolBinDir = getToolBinDir();
export const toolDir = getToolDir();
export const githubToken = core.getInput("github-token");
function getEnableCache(): boolean {
const enableCacheInput = core.getInput("enable-cache");
if (enableCacheInput === "auto") {
return process.env.RUNNER_ENVIRONMENT === "github-hosted";
}
return enableCacheInput === "true";
}
function getToolBinDir(): string | undefined {
const toolBinDirInput = core.getInput("tool-bin-dir");
if (toolBinDirInput !== "") {
return expandTilde(toolBinDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}uv-tool-bin-dir`;
}
throw Error(
"Could not determine UV_TOOL_BIN_DIR. Please make sure RUNNER_TEMP is set or provide the tool-bin-dir input",
);
}
return undefined;
}
function getToolDir(): string | undefined {
const toolDirInput = core.getInput("tool-dir");
if (toolDirInput !== "") {
return expandTilde(toolDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}uv-tool-dir`;
}
throw Error(
"Could not determine UV_TOOL_DIR. Please make sure RUNNER_TEMP is set or provide the tool-dir input",
);
}
return undefined;
}
export const cacheDependencyGlob = core.getInput("cache-dependency-glob");
function getCacheLocalPath(): string {
const cacheLocalPathInput = core.getInput("cache-local-path");
if (cacheLocalPathInput !== "") {
return expandTilde(cacheLocalPathInput);
return cacheLocalPathInput;
}
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`;
}
throw Error(
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
);
}
function expandTilde(input: string): string {
if (input.startsWith("~")) {
return `${process.env.HOME}${input.substring(1)}`;
if (process.platform === "win32") {
return "D:\\a\\_temp\\setup-uv-cache";
}
return input;
return "/tmp/setup-uv-cache";
}
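Illustrative behaviour of the tilde expansion and the RUNNER_TEMP fallback above (the environment values and paths are hypothetical):
process.env.HOME = "/home/runner";                    // hypothetical
expandTilde("~/uv-cache");                            // -> "/home/runner/uv-cache"
process.env.RUNNER_TEMP = "/home/runner/work/_temp";  // hypothetical
getCacheLocalPath();                                  // -> "/home/runner/work/_temp/setup-uv-cache"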


@ -1,58 +0,0 @@
import { Octokit as Core } from "@octokit/core";
import type {
Constructor,
OctokitOptions,
} from "@octokit/core/dist-types/types";
import {
paginateRest,
type PaginateInterface,
} from "@octokit/plugin-paginate-rest";
import { legacyRestEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";
import { fetch as undiciFetch, ProxyAgent, type RequestInit } from "undici";
export type { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods";
const DEFAULTS = {
baseUrl: "https://api.github.com",
userAgent: "setup-uv",
};
export function getProxyAgent() {
const httpProxy = process.env.HTTP_PROXY || process.env.http_proxy;
if (httpProxy) {
return new ProxyAgent(httpProxy);
}
const httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy;
if (httpsProxy) {
return new ProxyAgent(httpsProxy);
}
return undefined;
}
export const customFetch = async (url: string, opts: RequestInit) =>
await undiciFetch(url, {
dispatcher: getProxyAgent(),
...opts,
});
export const Octokit: typeof Core &
Constructor<
{
paginate: PaginateInterface;
} & ReturnType<typeof legacyRestEndpointMethods>
> = Core.plugin(paginateRest, legacyRestEndpointMethods).defaults(
function buildDefaults(options: OctokitOptions): OctokitOptions {
return {
...DEFAULTS,
...options,
request: {
fetch: customFetch,
...options.request,
},
};
},
);
export type Octokit = InstanceType<typeof Octokit>;


@ -1,17 +1,10 @@
import * as exec from "@actions/exec";
import * as core from "@actions/core";
export type Platform =
| "unknown-linux-gnu"
| "unknown-linux-musl"
| "unknown-linux-musleabihf"
| "apple-darwin"
| "pc-windows-msvc";
export type Architecture =
| "i686"
| "x86_64"
| "aarch64"
| "s390x"
| "powerpc64le";
export type Architecture = "i686" | "x86_64" | "aarch64";
export function getArch(): Architecture | undefined {
const arch = process.arch;
@ -19,8 +12,6 @@ export function getArch(): Architecture | undefined {
ia32: "i686",
x64: "x86_64",
arm64: "aarch64",
s390x: "s390x",
ppc64: "powerpc64le",
};
if (arch in archMapping) {
@ -28,49 +19,15 @@ export function getArch(): Architecture | undefined {
}
}
export async function getPlatform(): Promise<Platform | undefined> {
const processPlatform = process.platform;
export function getPlatform(): Platform | undefined {
const platform = process.platform;
const platformMapping: { [key: string]: Platform } = {
linux: "unknown-linux-gnu",
darwin: "apple-darwin",
win32: "pc-windows-msvc",
};
if (processPlatform in platformMapping) {
const platform = platformMapping[processPlatform];
if (platform === "unknown-linux-gnu") {
const isMusl = await isMuslOs();
return isMusl ? "unknown-linux-musl" : platform;
}
return platform;
}
}
async function isMuslOs(): Promise<boolean> {
let stdOutput = "";
let errOutput = "";
const options: exec.ExecOptions = {
silent: !core.isDebug(),
listeners: {
stdout: (data: Buffer) => {
stdOutput += data.toString();
},
stderr: (data: Buffer) => {
errOutput += data.toString();
},
},
ignoreReturnCode: true,
};
try {
const execArgs = ["--version"];
await exec.exec("ldd", execArgs, options);
return stdOutput.includes("musl") || errOutput.includes("musl");
} catch (error) {
const err = error as Error;
core.warning(
`Failed to determine glibc or musl. Falling back to glibc. Error: ${err.message}`,
);
return false;
if (platform in platformMapping) {
return platformMapping[platform];
}
}
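For illustration, the musl check above only looks for the substring "musl" in the ldd output; the sample output below is an assumption of what an Alpine-based runner prints:
const sampleLddOutput = "musl libc (x86_64)\nVersion 1.2.4"; // assumed Alpine `ldd --version` output
sampleLddOutput.includes("musl"); // true -> platform resolves to "unknown-linux-musl"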


@ -1,46 +0,0 @@
import fs from "node:fs";
import * as core from "@actions/core";
import * as toml from "smol-toml";
export function getUvVersionFromConfigFile(
filePath: string,
): string | undefined {
core.debug(`Trying to find required-version for uv in: ${filePath}`);
if (!fs.existsSync(filePath)) {
core.warning(`Could not find file: ${filePath}`);
return undefined;
}
let requiredVersion: string | undefined;
try {
requiredVersion = getRequiredVersion(filePath);
} catch (err) {
const message = (err as Error).message;
core.warning(`Error while parsing ${filePath}: ${message}`);
return undefined;
}
if (requiredVersion?.startsWith("==")) {
requiredVersion = requiredVersion.slice(2);
}
if (requiredVersion !== undefined) {
core.info(
`Found required-version for uv in ${filePath}: ${requiredVersion}`,
);
}
return requiredVersion;
}
function getRequiredVersion(filePath: string): string | undefined {
const fileContent = fs.readFileSync(filePath, "utf-8");
if (filePath.endsWith("pyproject.toml")) {
const tomlContent = toml.parse(fileContent) as {
tool?: { uv?: { "required-version"?: string } };
};
return tomlContent?.tool?.uv?.["required-version"];
}
const tomlContent = toml.parse(fileContent) as {
"required-version"?: string;
};
return tomlContent["required-version"];
}
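As an illustration of the configuration this parser looks for (the file content and version are hypothetical):
// pyproject.toml:
//   [tool.uv]
//   required-version = "==0.4.30"
getUvVersionFromConfigFile("pyproject.toml"); // -> "0.4.30" (a leading "==" is stripped)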


@ -1,6 +1,6 @@
{
"compilerOptions": {
"target": "ES2022" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
"target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
"outDir": "./lib" /* Redirect output structure to the directory. */,
"rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,