Mirror of https://github.com/astral-sh/setup-uv.git (synced 2025-04-05 23:25:19 -04:00)

Compare commits: 116 commits (SHA1)

9e2c33a082 839076380b 9bf3815166 0c5e2b8115 794ea9455c 2d49baf2b6 4fa25599ce 224dce1d79
22695119d7 bf8ec1ea35 1fb7cdfc29 57fe17c2c5 72002e8b87 19df292e24 7d9a2d93c4 389b596663
04c950a723 d02c4c2d68 a4fd982317 a05a582c56 0e855c90d0 d8a276f11f 59ae1ec55b f94ec6bedd
0313224678 754a7d4c2d b498c74bf4 b9ef7bd2eb 1edb52594c a4fbf7b827 c122541d0b 7c47ef9ebd
e2e9087257 bb8d247e1a 1ffa6dc3ad ee84cf5cb8 f95cd8710c 61ee7954c6 cad8337f4e a4c8ae423e
afa3c8c42b 4db96194c3 2625dd350b f9e15a1be8 1c21f62d98 982fbca0f8 35cf70845a 7cf65ded99
6ade4fc248 6e6e5a74f6 20980170aa 02dfe76bef 3548439624 9d3a8b144e 14dc0be27c b5f58b2abc
4e3dbecc19 2487ffc9aa 118b7214ec d942048030 77cc1aee22 169ed2a5f2 9fffe05b88 5ce9ee0011
d577e74f98 7174288630 94a861f4b5 e9f61537d9 4cd05096c3 7768fe6bf0 7b290f7b85 949720bc7f
d837751086 9869cbc19a 03fe035094 887a942a15 d174a24c07 12c852e6ba 180f8b4439 e3fb95a689
2af22b5b2d dd578776bb 85aa0bf0c1 1f2cbfa7bb 25b3ce6330 856099c958 e3017a763c 3460fe1a9a
884a30e33c f064c84ddb be4207d29e bdcda7e77f 1e4d4ea9ff f0b64e0d53 38f3f10444 8bdd012be5
5f42d5af6c 26ddfef6e1 ee4fa33003 420915557e 9839fa9fb5 196fe5f098 49d8a3d9a8 d8db0a86d3
ed171c292b 691a091485 9b71657bb2 caf0cab7a6 7c238111e6 3eca4c2715 aee2e918ee 4ffb6d766c
e779db7426 cb1ce8a914 cf7bbf8f13 2e657c127d
51 changed files with 130398 additions and 49127 deletions
.git-blame-ignore-revs (new file, 0 lines)

.github/actionlint.yaml (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
self-hosted-runner:
  # Custom labels of self-hosted or large GitHub hosted runners
  # so that actionlint knows that they are not a typo
  labels:
    - selfhosted-ubuntu-arm64
# Configuration variables in array of strings defined in your repository or
# organization. `null` means disabling configuration variables check.
# Empty array means no configuration variable is allowed.
config-variables: null
.github/workflows/check-dist.yml (vendored, deleted, 49 lines)

@@ -1,49 +0,0 @@
# `dist/index.js` is a special file in Actions.
# When you reference an action with `uses:` in a workflow,
# `index.js` is the code that will run.
# For our project, we generate this file through a build process from other source files.
# We need to make sure the checked-in `index.js` actually matches what we expect it to be.
name: Check dist/

on:
  push:
    branches:
      - main
  pull_request:
  workflow_dispatch:

jobs:
  check-dist:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Set up Node.js 20
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: Install dependencies
        run: npm ci

      - name: Rebuild the dist/ directory
        run: |
          npm run build
          npm run package

      - name: Compare the expected and actual dist/ directories
        run: |
          if [ "$(git diff --ignore-space-at-eol dist/ | wc -l)" -gt "0" ]; then
            echo "Detected uncommitted changes after build. See status below:"
            git diff --text -v
            exit 1
          fi
        id: diff

      # If index.js was different than expected, upload the expected version as an artifact
      - uses: actions/upload-artifact@v4
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: dist
          path: dist/
.github/workflows/codeql-analysis.yml (vendored, 9 lines changed)

@@ -12,13 +12,14 @@
name: "CodeQL"

on:
  workflow_dispatch:
  push:
    branches: [main]
    branches:
      - main
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [main]
  schedule:
    - cron: "31 7 * * 3"
    branches:
      - main

jobs:
  analyze:
.github/workflows/release-drafter.yml (vendored, 7 lines changed)

@@ -3,17 +3,20 @@ name: Release Drafter

# yamllint disable-line rule:truthy
on:
  workflow_dispatch:
  push:
    branches:
      - main
  workflow_dispatch:

jobs:
  update_release_draft:
    name: ✏️ Draft release
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: read
    steps:
      - name: 🚀 Run Release Drafter
        uses: release-drafter/release-drafter@v6.0.0
        uses: release-drafter/release-drafter@v6.1.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/test-cache-windows.yml (vendored, deleted, 49 lines)

@@ -1,49 +0,0 @@
name: "test-cache-windows"
on:
  pull_request:
  push:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-setup-cache:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest]
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
      - run: uv sync
        working-directory: __tests__\fixtures\uv-project
  test-restore-cache:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [windows-latest]
    needs: test-setup-cache
    steps:
      - uses: actions/checkout@v4
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
      - name: Cache was hit
        run: |
          if ($env:CACHE_HIT -ne "true") {
            exit 1
          }
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
      - run: uv sync
        working-directory: __tests__\fixtures\uv-project
.github/workflows/test-cache.yml (vendored, deleted, 123 lines)

@@ -1,123 +0,0 @@
name: "test-cache"
on:
  pull_request:
  push:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-setup-cache:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, macos-14]
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
  test-restore-cache:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, macos-14]
    needs: test-setup-cache
    steps:
      - uses: actions/checkout@v4
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
      - name: Cache was hit
        run: |
          if [ "$CACHE_HIT" != "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-setup-cache-dependency-glob:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-dependency-glob: |
            __tests__/fixtures/uv-project/uv.lock
            **/pyproject.toml
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
  test-restore-cache-dependency-glob:
    runs-on: ubuntu-latest
    needs: test-setup-cache-dependency-glob
    steps:
      - uses: actions/checkout@v4
      - name: Change pyproject.toml
        run: |
          echo '[tool.uv]' >> __tests__/fixtures/uv-project/pyproject.toml
          echo 'dev-dependencies = []' >> __tests__/fixtures/uv-project/pyproject.toml
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-dependency-glob: |
            __tests__/fixtures/uv-project/uv.lock
            **/pyproject.toml
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
      - name: Cache was not hit
        run: |
          if [ "$CACHE_HIT" == "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}

  test-setup-cache-local:
    runs-on: selfhosted-ubuntu-arm64
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
          cache-local-path: /tmp/uv-cache
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
  test-restore-cache-local:
    runs-on: selfhosted-ubuntu-arm64
    needs: test-setup-cache-local
    steps:
      - uses: actions/checkout@v4
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
          cache-local-path: /tmp/uv-cache
      - name: Cache was hit
        run: |
          if [ "$CACHE_HIT" != "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
.github/workflows/test-windows.yml (vendored, deleted, 27 lines)

@@ -1,27 +0,0 @@
name: "test-windows"
on:
  pull_request:
  push:
    branches:
      - main

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  test-default-version:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v4
      - name: Should not be on path
        run: |
          if (!(Get-Command -Name "uv" -ErrorAction SilentlyContinue)) {
            exit 0
          } else {
            exit 1
          }
      - name: Setup uv
        uses: ./
      - run: uv sync
        working-directory: __tests__\fixtures\uv-project
.github/workflows/test.yml (vendored, 439 lines changed)

@@ -1,6 +1,9 @@
name: "test"
on:
  workflow_dispatch:
  pull_request:
    branches:
      - main
  push:
    branches:
      - main
@@ -9,11 +12,16 @@ concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  build:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Actionlint
        uses: eifinger/actionlint-action@23c85443d840cd73bbecb9cddfc933cc21649a38 # v1.9.1
      - uses: actions/setup-node@v4
        with:
          node-version: "20"

@@ -23,24 +31,31 @@ jobs:
          npm run all
      - name: Make sure no changes from linters are detected
        run: |
          git diff --exit-code
          git diff --exit-code || (echo "::error::Please run 'npm run all' to fix the issues" && exit 1)

  test-default-version:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, macos-14, selfhosted-ubuntu-arm64]
        os: [ubuntu-latest, macos-latest, macos-14, windows-latest]
    steps:
      - uses: actions/checkout@v4
      - name: Install default version
      - name: Install latest version
        id: setup-uv
        uses: ./
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
        shell: bash
      - name: Check uv-path is set
        run: ${{ steps.setup-uv.outputs.uv-path }} --version
      - name: Check uvx-path is set
        run: ${{ steps.setup-uv.outputs.uvx-path }} --version

  test-specific-version:
    runs-on: ${{ matrix.os }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, macos-14, selfhosted-ubuntu-arm64]
        uv-version: ["latest", "0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
        uv-version: ["0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
    steps:
      - uses: actions/checkout@v4
      - name: Install version ${{ matrix.uv-version }}
@@ -49,11 +64,9 @@ jobs:
          version: ${{ matrix.uv-version }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-semver-range:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, selfhosted-ubuntu-arm64]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install version 0.3
@@ -68,26 +81,90 @@ jobs:
          fi
        env:
          UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}

  test-pep440-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install version 0.4.30
        id: setup-uv
        uses: ./
        with:
          version: ">=0.4.25,<0.5"
      - name: Correct version gets installed
        run: |
          if [ "$UV_VERSION" != "0.4.30" ]; then
            exit 1
          fi
        env:
          UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}

  test-pyproject-file-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install version 0.5.14
        id: setup-uv
        uses: ./
        with:
          pyproject-file: "__tests__/fixtures/pyproject-toml-project/pyproject.toml"
      - name: Correct version gets installed
        run: |
          if [ "$UV_VERSION" != "0.5.14" ]; then
            exit 1
          fi
        env:
          UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}

  test-malformed-pyproject-file-fallback:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install using malformed pyproject.toml
        id: setup-uv
        uses: ./
        with:
          pyproject-file: "__tests__/fixtures/malformed-pyproject-toml-project/pyproject.toml"
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-uv-file-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install version 0.5.15
        id: setup-uv
        uses: ./
        with:
          pyproject-file: "__tests__/fixtures/uv-toml-project/pyproject.toml"
          uv-file: "__tests__/fixtures/uv-toml-project/uv.toml"
      - name: Correct version gets installed
        run: |
          if [ "$UV_VERSION" != "0.5.15" ]; then
            exit 1
          fi
        env:
          UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}

  test-checksum:
    runs-on: ${{ matrix.os }}
    runs-on: ${{ matrix.inputs.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, selfhosted-ubuntu-arm64]
        checksum:
          ["4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"]
        exclude:
          - os: selfhosted-ubuntu-arm64
        inputs:
          - os: ubuntu-latest
            checksum: "4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"
        include:
          - os: selfhosted-ubuntu-arm64
            checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
          - os: macos-latest
            checksum: "a70cbfbf3bb5c08b2f84963b4f12c94e08fbb2468ba418a3bfe1066fbe9e7218"
    steps:
      - uses: actions/checkout@v4
      - name: Checksum matches expected
        uses: ./
        with:
          version: "0.3.2"
          checksum: ${{ matrix.checksum }}
          checksum: ${{ matrix.inputs.checksum }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-with-explicit-token:
    runs-on: ubuntu-latest
    steps:
@@ -98,6 +175,7 @@ jobs:
          github-token: ${{ secrets.GITHUB_TOKEN }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-uvx:
    runs-on: ubuntu-latest
    steps:
@@ -105,6 +183,7 @@ jobs:
      - name: Install default version
        uses: ./
      - run: uvx ruff --version

  test-tool-install:
    runs-on: ${{ matrix.os }}
    strategy:
@@ -115,7 +194,6 @@ jobs:
          macos-latest,
          macos-14,
          windows-latest,
          selfhosted-ubuntu-arm64,
        ]
    steps:
      - uses: actions/checkout@v4
@@ -123,3 +201,320 @@ jobs:
        uses: ./
      - run: uv tool install ruff
      - run: ruff --version

  test-tilde-expansion-tool-dirs:
    runs-on: selfhosted-ubuntu-arm64
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          tool-bin-dir: "~/tool-bin-dir"
          tool-dir: "~/tool-dir"
      - name: "Check if tool dirs are expanded"
        run: |
          if ! echo "$PATH" | grep -q "/home/ubuntu/tool-bin-dir"; then
            echo "PATH does not contain /home/ubuntu/tool-bin-dir: $PATH"
            exit 1
          fi
          if [ "$UV_TOOL_DIR" != "/home/ubuntu/tool-dir" ]; then
            echo "UV_TOOL_DIR does not contain /home/ubuntu/tool-dir: $UV_TOOL_DIR"
            exit 1
          fi

  test-python-version:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
    steps:
      - uses: actions/checkout@v4
      - name: Install latest version
        uses: ./
        with:
          python-version: 3.13.1t
      - name: Verify UV_PYTHON is set to correct version
        run: |
          echo "$UV_PYTHON"
          if [ "$UV_PYTHON" != "3.13.1t" ]; then
            exit 1
          fi
        shell: bash
      - name: Verify packages can be installed
        run: uv pip install --python=3.13.1t pip
        shell: bash
      - name: Verify python version is correct
        run: |
          python --version
          if [ "$(python --version)" != "Python 3.13.1" ]; then
            exit 1
          fi
        shell: bash

  test-musl:
    runs-on: ubuntu-latest
    container: alpine
    steps:
      - uses: actions/checkout@v4
      - name: Install latest version
        uses: ./
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-setup-cache:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        enable-cache: [ "true", "false", "auto" ]
        os: [ "ubuntu-latest", "selfhosted-ubuntu-arm64", "windows-latest" ]
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: ${{ matrix.enable-cache }}
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-${{ matrix.os }}-${{ matrix.enable-cache }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
        shell: bash
  test-restore-cache:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        enable-cache: [ "true", "false", "auto" ]
        os: [ "ubuntu-latest", "selfhosted-ubuntu-arm64", "windows-latest" ]
    needs: test-setup-cache
    steps:
      - uses: actions/checkout@v4
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: ${{ matrix.enable-cache }}
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-${{ matrix.os }}-${{ matrix.enable-cache }}
      - name: Cache was hit
        if: ${{ matrix.enable-cache == 'true' || (matrix.enable-cache == 'auto' && matrix.os == 'ubuntu-latest') }}
        run: |
          if [ "$CACHE_HIT" != "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
        shell: bash
      - name: Cache was not hit
        if: ${{ matrix.enable-cache == 'false' || (matrix.enable-cache == 'auto' && matrix.os == 'selfhosted-ubuntu-arm64') }}
        run: |
          if [ "$CACHE_HIT" == "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
        shell: bash
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
        shell: bash

  test-setup-cache-requirements-txt:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-requirements-txt
      - run: |
          uv venv
          uv pip install -r requirements.txt
        working-directory: __tests__/fixtures/requirements-txt-project
  test-restore-cache-requirements-txt:
    runs-on: ubuntu-latest
    needs: test-setup-cache
    steps:
      - uses: actions/checkout@v4
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-requirements-txt
      - name: Cache was hit
        run: |
          if [ "$CACHE_HIT" != "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
      - run: |
          uv venv
          uv pip install -r requirements.txt
        working-directory: __tests__/fixtures/requirements-txt-project

  test-setup-cache-dependency-glob:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-dependency-glob: |
            __tests__/fixtures/uv-project/uv.lock
            **/pyproject.toml
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
  test-restore-cache-dependency-glob:
    runs-on: ubuntu-latest
    needs: test-setup-cache-dependency-glob
    steps:
      - uses: actions/checkout@v4
      - name: Change pyproject.toml
        run: |
          echo '[tool.uv]' >> __tests__/fixtures/uv-project/pyproject.toml
          echo 'dev-dependencies = []' >> __tests__/fixtures/uv-project/pyproject.toml
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-dependency-glob: |
            __tests__/fixtures/uv-project/uv.lock
            **/pyproject.toml
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
          ignore-nothing-to-cache: true
      - name: Cache was not hit
        run: |
          if [ "$CACHE_HIT" == "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}

  test-setup-cache-local:
    runs-on: selfhosted-ubuntu-arm64
    steps:
      - uses: actions/checkout@v4
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
          cache-local-path: /tmp/uv-cache
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project
  test-restore-cache-local:
    runs-on: selfhosted-ubuntu-arm64
    needs: test-setup-cache-local
    steps:
      - uses: actions/checkout@v4
      - name: Restore with cache
        id: restore
        uses: ./
        with:
          enable-cache: true
          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
          cache-local-path: /tmp/uv-cache
      - name: Cache was hit
        run: |
          if [ "$CACHE_HIT" != "true" ]; then
            exit 1
          fi
        env:
          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-tilde-expansion-cache-local-path:
    runs-on: selfhosted-ubuntu-arm64
    steps:
      - uses: actions/checkout@v4
      - name: Create cache directory
        run: mkdir -p ~/uv-cache
        shell: bash
      - name: Setup with cache
        uses: ./
        with:
          cache-local-path: ~/uv-cache/cache-local-path
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  test-tilde-expansion-cache-dependency-glob:
    runs-on: selfhosted-ubuntu-arm64
    steps:
      - uses: actions/checkout@v4
      - name: Create cache directory
        run: mkdir -p ~/uv-cache
        shell: bash
      - name: Create cache dependency glob file
        run: touch ~/uv-cache.glob
        shell: bash
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
          cache-local-path: ~/uv-cache/cache-dependency-glob
          cache-dependency-glob: "~/uv-cache.glob"
      - run: uv sync
        working-directory: __tests__/fixtures/uv-project

  cleanup-tilde-expansion-tests:
    needs:
      - test-tilde-expansion-cache-local-path
      - test-tilde-expansion-cache-dependency-glob
    if: always()
    runs-on: selfhosted-ubuntu-arm64
    steps:
      - name: Remove cache directory
        run: rm -rf ~/uv-cache
        shell: bash
      - name: Remove cache dependency glob file
        run: rm -f ~/uv-cache.glob
        shell: bash

  test-no-python-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Fake pyproject.toml at root
        run: cp __tests__/fixtures/old-python-constraint-project/pyproject.toml pyproject.toml
      - name: Setup with cache
        uses: ./
        with:
          enable-cache: true
      - run: uv sync
        working-directory: __tests__/fixtures/old-python-constraint-project

  all-tests-passed:
    runs-on: ubuntu-latest
    needs:
      - lint
      - test-default-version
      - test-specific-version
      - test-semver-range
      - test-pep440-version
      - test-pyproject-file-version
      - test-malformed-pyproject-file-fallback
      - test-uv-file-version
      - test-checksum
      - test-with-explicit-token
      - test-uvx
      - test-tool-install
      - test-tilde-expansion-tool-dirs
      - test-python-version
      - test-musl
      - test-restore-cache
      - test-restore-cache-requirements-txt
      - test-restore-cache-dependency-glob
      - test-restore-cache-local
      - test-tilde-expansion-cache-local-path
      - test-tilde-expansion-cache-dependency-glob
      - cleanup-tilde-expansion-tests
      - test-no-python-version
    if: always()
    steps:
      - name: All tests passed
        run: |
          echo "All jobs passed: ${{ !contains(needs.*.result, 'failure') }}"
          # shellcheck disable=SC2242
          exit ${{ contains(needs.*.result, 'failure') && 1 || 0 }}
.github/workflows/update-known-checksums.yml (vendored, 7 lines changed)

@@ -1,10 +1,15 @@
name: "Update known checksums"
on:
  workflow_dispatch:
  schedule:
    - cron: "0 4 * * *" # Run every day at 4am UTC

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4

@@ -17,7 +22,7 @@ jobs:
          src/download/checksum/known-checksums.ts ${{ secrets.GITHUB_TOKEN }}
      - run: npm install && npm run all
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5
        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
        with:
          commit-message: "chore: update known checksums"
          title:
.github/workflows/update-major-minor-tags.yml (vendored, 34 lines changed)

@@ -1,7 +1,6 @@
---
name: Update Major Minor Tags

# yamllint disable-line rule:truthy
on:
  push:
    branches-ignore:

@@ -13,7 +12,36 @@ jobs:
  update_major_minor_tags:
    name: Make sure major and minor tags are up to date on a patch release
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4
      - name: Run Update semver
        uses: haya14busa/action-update-semver@v1.2.1
      - name: Update Major Minor Tags
        run: |
          set -x

          cd "${GITHUB_WORKSPACE}" || exit

          # Set up variables.
          TAG="${GITHUB_REF#refs/tags/}" # v1.2.3
          MINOR="${TAG%.*}"              # v1.2
          MAJOR="${MINOR%.*}"            # v1

          if [ "${GITHUB_REF}" = "${TAG}" ]; then
            echo "This workflow is not triggered by tag push: GITHUB_REF=${GITHUB_REF}"
            exit 1
          fi

          MESSAGE="Release ${TAG}"

          # Set up Git.
          git config user.name "${GITHUB_ACTOR}"
          git config user.email "${GITHUB_ACTOR}@users.noreply.github.com"

          # Update MAJOR/MINOR tag
          git tag -fa "${MAJOR}" -m "${MESSAGE}"
          git tag -fa "${MINOR}" -m "${MESSAGE}"

          # Push
          git push --force origin "${MINOR}"
          git push --force origin "${MAJOR}"
README.md (262 lines changed)

@@ -11,76 +11,150 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs.
## Contents

- [Usage](#usage)
  - [Install the latest version (default)](#install-the-latest-version-default)
  - [Install a required-version or latest (default)](#install-a-required-version-or-latest-default)
  - [Install the latest version](#install-the-latest-version)
  - [Install a specific version](#install-a-specific-version)
  - [Install a version by supplying a semver range](#install-a-version-by-supplying-a-semver-range)
  - [Install a version by supplying a semver range or pep440 specifier](#install-a-version-by-supplying-a-semver-range-or-pep440-specifier)
  - [Install a required-version](#install-a-required-version)
  - [Python version](#python-version)
  - [Validate checksum](#validate-checksum)
  - [Enable Caching](#enable-caching)
    - [Cache dependency glob](#cache-dependency-glob)
    - [Local cache path](#local-cache-path)
    - [Disable cache pruning](#disable-cache-pruning)
    - [Ignore nothing to cache](#ignore-nothing-to-cache)
  - [GitHub authentication token](#github-authentication-token)
  - [UV_TOOL_DIR](#uv_tool_dir)
  - [UV_TOOL_BIN_DIR](#uv_tool_bin_dir)
  - [Tilde Expansion](#tilde-expansion)
- [How it works](#how-it-works)
- [FAQ](#faq)

## Usage

### Install the latest version (default)
### Install a required-version or latest (default)

```yaml
- name: Install the latest version of uv
  uses: astral-sh/setup-uv@v3
  with:
    version: "latest"
  uses: astral-sh/setup-uv@v5
```

If you do not specify a version, this action will look for a [required-version](https://docs.astral.sh/uv/reference/settings/#required-version)
in a `uv.toml` or `pyproject.toml` file in the repository root. If none is found, the latest version will be installed.

For an example workflow, see
[here](https://github.com/charliermarsh/autobot/blob/e42c66659bf97b90ca9ff305a19cc99952d0d43f/.github/workflows/ci.yaml).

> [!TIP]
>
> Using `latest` requires that uv download the executable on every run, which incurs a cost
> (especially on self-hosted runners). As a best practice, consider pinning the version to a
> specific release.

### Install the latest version

```yaml
- name: Install the latest version of uv
  uses: astral-sh/setup-uv@v5
  with:
    version: "latest"
```

### Install a specific version

```yaml
- name: Install a specific version of uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    version: "0.4.4"
```

### Install a version by supplying a semver range
### Install a version by supplying a semver range or pep440 specifier

You can also specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
or [pep440 specifier](https://peps.python.org/pep-0440/#version-specifiers)
to install the latest version that satisfies the range.

```yaml
- name: Install a semver range of uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    version: ">=0.4.0"
```

```yaml
- name: Pinning a minor version of uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    version: "0.4.x"
```

```yaml
- name: Install a pep440-specifier-satisfying version of uv
  uses: astral-sh/setup-uv@v5
  with:
    version: ">=0.4.25,<0.5"
```

### Install a required-version

You can specify a [required-version](https://docs.astral.sh/uv/reference/settings/#required-version)
in either a `uv.toml` or `pyproject.toml` file:

```yaml
- name: Install required-version defined in uv.toml
  uses: astral-sh/setup-uv@v5
  with:
    uv-file: "path/to/uv.toml"
```

```yaml
- name: Install required-version defined in pyproject.toml
  uses: astral-sh/setup-uv@v5
  with:
    pyproject-file: "path/to/pyproject.toml"
```

### Python version

You can use the input `python-version` to

- set the environment variable `UV_PYTHON` for the rest of your workflow
- create a new virtual environment with the specified python version
- activate the virtual environment for the rest of your workflow

This will override any python version specifications in `pyproject.toml` and `.python-version`.

```yaml
- name: Install the latest version of uv and set the python version to 3.13t
  uses: astral-sh/setup-uv@v5
  with:
    python-version: 3.13t
- run: uv pip install --python=3.13t pip
```

You can combine this with a matrix to test multiple python versions:

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.9", "3.10", "3.11", "3.12"]
    steps:
      - uses: actions/checkout@v4
      - name: Install the latest version of uv and set the python version
        uses: astral-sh/setup-uv@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Test with python ${{ matrix.python-version }}
        run: uv run --frozen pytest
```

### Validate checksum

You can also specify a checksum to validate the downloaded file. Checksums up to the default version
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
are automatically verified by this action. The sha256 hashes can be found on the
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.

```yaml
- name: Install a specific version and validate the checksum
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    version: "0.3.1"
    checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
@@ -88,8 +162,9 @@ are automatically verified by this action. The sha256 hashes can be found on the

### Enable caching

If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be cached to
the GitHub Actions Cache. This can speed up runs that reuse the cache by several minutes.
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.
Caching is enabled by default on GitHub-hosted runners.

> [!TIP]
>

@@ -101,7 +176,7 @@ You can optionally define a custom cache key suffix.
```yaml
- name: Enable caching and define a custom cache key suffix
  id: setup-uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    cache-suffix: "optional-suffix"

@@ -118,25 +193,32 @@ use it in subsequent steps. For example, to use the cache in the above case:

#### Cache dependency glob

If you want to control when the cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The cache will be invalidated if any file matching the glob pattern
changes. The glob matches files relative to the repository root.
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
changes. If you use relative paths, they are relative to the repository root.

> [!NOTE]
>
> The default is `**/uv.lock`.
> You can look up supported patterns [here](https://github.com/actions/toolkit/tree/main/packages/glob#patterns)
>
> The default is
> ```yaml
> cache-dependency-glob: |
>   **/requirements*.txt
>   **/uv.lock
> ```

```yaml
- name: Define a cache dependency glob
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    cache-dependency-glob: "**/requirements*.txt"
    cache-dependency-glob: "**/pyproject.toml"
```

```yaml
- name: Define a list of cache dependency globs
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    cache-dependency-glob: |

@@ -144,9 +226,17 @@ changes. The glob matches files relative to the repository root.
      **/pyproject.toml
```

```yaml
- name: Define an absolute cache dependency glob
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
```

```yaml
- name: Never invalidate the cache
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    cache-dependency-glob: ""
@@ -161,7 +251,7 @@ It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\uv-tool-dir` on Wi

```yaml
- name: Define a custom uv cache path
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    cache-local-path: "/path/to/cache"
```

@@ -172,20 +262,47 @@ By default, the uv cache is pruned after every run, removing pre-built wheels, b
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
pre-built wheels from the cache (and instead re-download them from the registry on each run).
However, on self-hosted or local runners, preserving the cache may be more efficient. See
the[documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more.
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more information.

If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
input.

```yaml
- name: Don't prune the cache before saving it
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    prune-cache: false
```

### Ignore nothing to cache

By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.

```yaml
- name: Ignore nothing to cache
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
    ignore-nothing-to-cache: true
```

### Ignore empty workdir

By default, the action will warn if the workdir is empty, because this is usually the case when
`actions/checkout` is configured to run after `setup-uv`, which is not supported.

If you want to ignore this, set the `ignore-empty-workdir` input to `true`.

```yaml
- name: Ignore empty workdir
  uses: astral-sh/setup-uv@v5
  with:
    ignore-empty-workdir: true
```

### GitHub authentication token

This action uses the GitHub API to fetch the uv release artifacts. To avoid hitting the GitHub API

@@ -198,7 +315,7 @@ are not sufficient, you can provide a custom GitHub token with the necessary per

```yaml
- name: Install the latest version of uv with a custom GitHub token
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
```

@@ -216,7 +333,7 @@ input:

```yaml
- name: Install the latest version of uv with a custom tool dir
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    tool-dir: "/path/to/tool/dir"
```

@@ -235,11 +352,30 @@ If you want to change this behaviour (especially on self-hosted runners) you can

```yaml
- name: Install the latest version of uv with a custom tool bin dir
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    tool-bin-dir: "/path/to/tool-bin/dir"
```

### Tilde Expansion

This action supports expanding the `~` character to the user's home directory for the following inputs:

- `cache-local-path`
- `tool-dir`
- `tool-bin-dir`
- `cache-dependency-glob`

```yaml
- name: Expand the tilde character
  uses: astral-sh/setup-uv@v5
  with:
    cache-local-path: "~/path/to/cache"
    tool-dir: "~/path/to/tool/dir"
    tool-bin-dir: "~/path/to/tool-bin/dir"
    cache-dependency-glob: "~/my-cache-buster"
```
## How it works

This action downloads uv from the uv repo's official

@@ -254,21 +390,22 @@ by name (`uv`).

### Do I still need `actions/setup-python` alongside `setup-uv`?

No. This action is modelled as a drop-in replacement for `actions/setup-python` when using uv. With
`setup-uv`, you can install a specific version of Python using `uv python install` rather than
With `setup-uv`, you can install a specific version of Python using `uv python install` rather than
relying on `actions/setup-python`.

Using `actions/setup-python` can be faster, because GitHub caches the Python versions alongside the runner.

For example:

```yaml
- name: Checkout the repository
  uses: actions/checkout@main
- name: Install the latest version of uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
- name: Test
  run: uv run --frozen pytest
  run: uv run --frozen pytest # Uses the Python version automatically installed by uv
```

To install a specific version of Python, use

@@ -276,7 +413,7 @@ To install a specific version of Python, use

```yaml
- name: Install the latest version of uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
  with:
    enable-cache: true
- name: Install Python 3.12

@@ -295,11 +432,52 @@ output:
  uses: actions/checkout@main
- name: Install the default version of uv
  id: setup-uv
  uses: astral-sh/setup-uv@v3
  uses: astral-sh/setup-uv@v5
- name: Print the installed version
  run: echo "Installed uv version is ${{ steps.setup-uv.outputs.uv-version }}"
```

### Should I include the resolution strategy in the cache key?

**Yes!**

The cache key gets computed by using the [cache-dependency-glob](#cache-dependency-glob).

If you have jobs which use the same dependency definitions from `requirements.txt` or
`pyproject.toml` but different
[resolution strategies](https://docs.astral.sh/uv/concepts/resolution/#resolution-strategy),
each job will have different dependencies or dependency versions.
But if you do not add the resolution strategy as a [cache-suffix](#enable-caching),
they will have the same cache key.

This means the first job which starts uploading its cache will win, and all other jobs will fail
to upload the cache, because they try to upload with the same cache key.

You might see errors like
`Failed to save: Failed to CreateCacheEntry: Received non-retryable error: Failed request: (409) Conflict: cache entry with the same key, version, and scope already exists`
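
A minimal sketch of one way to avoid this, assuming a hypothetical `resolution` matrix variable (the matrix values and job name are illustrative, not defined by the action):

```yaml
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # hypothetical matrix of resolution strategies used elsewhere in the job
        resolution: ["highest", "lowest-direct"]
    steps:
      - uses: actions/checkout@v4
      - name: Install uv with a resolution-specific cache key
        uses: astral-sh/setup-uv@v5
        with:
          enable-cache: true
          # include the resolution strategy so each matrix job gets its own cache entry
          cache-suffix: ${{ matrix.resolution }}
```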

### Why do I see warnings like `No GitHub Actions cache found for key`

When a workflow runs for the first time on a branch and has a new cache key, because the
[cache-dependency-glob](#cache-dependency-glob) found changed files (changed dependencies),
the cache will not be found and the warning `No GitHub Actions cache found for key` will be printed.

While this might be irritating at first, it is expected behaviour and the cache will be created
and reused in later workflows.

The reason for the warning is that we have no way to know if this is the first run of a new
cache key or the user accidentally misconfigured the [cache-dependency-glob](#cache-dependency-glob)
or [cache-suffix](#enable-caching) and the cache never gets used.

### Do I have to run `actions/checkout` before or after `setup-uv`?

Some workflows need uv but do not need to access the repository content.

But **if** you need to access the repository content, you have to run `actions/checkout` before running `setup-uv`.
Running `actions/checkout` after `setup-uv` **is not supported**.
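
A minimal sketch of the supported ordering (the same ordering used throughout the test workflows above):

```yaml
steps:
  - uses: actions/checkout@v4   # checkout first, so the workdir is populated
  - uses: astral-sh/setup-uv@v5 # then install uv
  - run: uv sync
```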

## Acknowledgements

`setup-uv` was initially written and published by [Kevin Stillhammer](https://github.com/eifinger)
@@ -0,0 +1 @@
3.11

@@ -0,0 +1,6 @@
def main():
    print("Hello from malformed-pyproject-toml-project!")


if __name__ == "__main__":
    main()

@@ -0,0 +1,9 @@
[project]
name = "malformed-pyproject-toml-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = []

[malformed-toml

@@ -0,0 +1,13 @@
[project]
name = "old-python-constraint-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.8,<=3.9"
dependencies = [
    "ruff>=0.6.2",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

@@ -0,0 +1,2 @@
def hello() -> str:
    return "Hello from uv-project!"
__tests__/fixtures/old-python-constraint-project/uv.lock (generated, new file, 38 lines)
@@ -0,0 +1,38 @@
version = 1
requires-python = ">=3.12"

[[package]]
name = "ruff"
version = "0.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/23/f4/279d044f66b79261fd37df76bf72b64471afab5d3b7906a01499c4451910/ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be", size = 2460281 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/72/4b/47dd7a69287afb4069fa42c198e899463605460a58120196711bfcf0446b/ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c", size = 9695871 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/c3/8aac62ac4638c14a740ee76a755a925f2d0d04580ab790a9887accb729f6/ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570", size = 9459354 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/cf/77fbd8d4617b9b9c503f9bffb8552c4e3ea1a58dc36975e7a9104ffb0f85/ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158", size = 9163871 },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/1c/765192bab32b79efbb498b06f0b9dcb3629112b53b8777ae1d19b8209e09/ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534", size = 10096250 },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/d0/86f3cb0f6934c99f759c232984a5204d67a26745cad2d9edff6248adf7d2/ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b", size = 9475376 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/cc/4c8d0e225b559a3fae6092ec310d7150d3b02b4669e9223f783ef64d82c0/ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d", size = 10295634 },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/96/d2699cfb1bb5a01c68122af43454c76c31331e1c8a9bd97d653d7c82524b/ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66", size = 11024941 },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/a9/6ecd66af8929e0f2a1ed308a4137f3521789f28f0eb97d32c2ca3aa7000c/ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8", size = 10606894 },
|
||||
{ url = "https://files.pythonhosted.org/packages/e4/73/2ee4cd19f44992fedac1cc6db9e3d825966072f6dcbd4032f21cbd063170/ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1", size = 11552886 },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/4c/c0f1cd35ce4a93c54a6bb1ee6934a3a205fa02198dd076678193853ceea1/ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1", size = 10264945 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/89/e45c9359b9cdd4245512ea2b9f2bb128a997feaa5f726fc9e8c7a66afadf/ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23", size = 10100007 },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/74/0bd4e0a7ed5f6908df87892f9bf60a2356c0fd74102d8097298bd9b4f346/ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a", size = 9559267 },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/03/3dc6dc9419f276f05805bf888c279e3e0b631284abd548d9e87cebb93aec/ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c", size = 9905304 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/5b/d6a72a6a6bbf097c09de468326ef5fa1c9e7aa5e6e45979bc0d984b0dbe7/ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56", size = 10341480 },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/a9/0f2f21fe15ba537c46598f96aa9ae4a3d4b9ec64926664617ca6a8c772f4/ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da", size = 7961901 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/80/fff12ffe11853d9f4ea3e5221e6dd2e93640a161c05c9579833e09ad40a7/ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2", size = 8783320 },
|
||||
{ url = "https://files.pythonhosted.org/packages/56/91/577cdd64cce5e74d3f8b5ecb93f29566def569c741eb008aed4f331ef821/ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9", size = 8225886 },
|
||||
]

[[package]]
name = "uv-project"
version = "0.1.0"
source = { editable = "." }
dependencies = [
    { name = "ruff" },
]

[package.metadata]
requires-dist = [{ name = "ruff" }]
@ -0,0 +1 @@
|
|||
3.11
|
0
__tests__/fixtures/pyproject-toml-project/README.md
Normal file
0
__tests__/fixtures/pyproject-toml-project/README.md
Normal file
6
__tests__/fixtures/pyproject-toml-project/hello.py
Normal file
6
__tests__/fixtures/pyproject-toml-project/hello.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
def main():
|
||||
print("Hello from pyproject-toml-project!")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
19
__tests__/fixtures/pyproject-toml-project/pyproject.toml
Normal file
19
__tests__/fixtures/pyproject-toml-project/pyproject.toml
Normal file
|
@ -0,0 +1,19 @@
|
|||
[project]
|
||||
name = "pyproject-toml-project"
|
||||
version = "0.1.0"
|
||||
description = "Add your description here"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = []
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"reuse==5.0.2",
|
||||
{include-group = "lint"},
|
||||
]
|
||||
lint = [
|
||||
"flake8==4.0.1",
|
||||
]
|
||||
|
||||
[tool.uv]
|
||||
required-version = "==0.5.14"
|
|
@ -0,0 +1 @@
|
|||
print("Hello world")
|
|
@ -0,0 +1 @@
|
|||
ruff>=0.6.2
|
1
__tests__/fixtures/uv-toml-project/.python-version
Normal file
1
__tests__/fixtures/uv-toml-project/.python-version
Normal file
|
@ -0,0 +1 @@
|
|||
3.11
|
0
__tests__/fixtures/uv-toml-project/README.md
Normal file
0
__tests__/fixtures/uv-toml-project/README.md
Normal file
6
__tests__/fixtures/uv-toml-project/hello.py
Normal file
6
__tests__/fixtures/uv-toml-project/hello.py
Normal file
|
@ -0,0 +1,6 @@
|
|||
def main():
|
||||
print("Hello from uv-toml-project!")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
10 __tests__/fixtures/uv-toml-project/pyproject.toml Normal file

@@ -0,0 +1,10 @@
[project]
name = "uv-toml-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = []

[tool.uv]
required-version = "==0.5.14"
1 __tests__/fixtures/uv-toml-project/uv.toml Normal file

@@ -0,0 +1 @@
required-version = "==0.5.15"
36 action.yml

@@ -4,25 +4,35 @@ description:
author: "astral-sh"
inputs:
  version:
    description: "The version of uv to install"
    default: "latest"
    description: "The version of uv to install e.g., `0.5.0` Defaults to the version in pyproject.toml or 'latest'."
    default: ""
  pyproject-file:
    description: "Path to a pyproject.toml"
    default: ""
  uv-file:
    description: "Path to a uv.toml"
    default: ""
  python-version:
    description: "The version of Python to set UV_PYTHON to"
    required: false
  checksum:
    description: "The checksum of the uv version to install"
    required: false
  github-token:
    description:
      "Used to increase the rate limit when retrieving versions and downloading
      uv."
      "Used to increase the rate limit when retrieving versions and downloading uv."
    required: false
    default: ${{ github.token }}
  enable-cache:
    description: "Enable caching of the uv cache"
    default: "false"
    description: "Enable uploading of the uv cache"
    default: "auto"
  cache-dependency-glob:
    description:
      "Glob pattern to match files relative to the repository root to control
      the cache."
    default: "**/uv.lock"
    default: |
      **/uv.lock
      **/requirements*.txt
  cache-suffix:
    description: "Suffix for the cache key"
    required: false

@@ -31,7 +41,13 @@ inputs:
    default: ""
  prune-cache:
    description: "Prune cache before saving."
    default: true
    default: "true"
  ignore-nothing-to-cache:
    description: "Ignore when nothing is found to cache."
    default: "false"
  ignore-empty-workdir:
    description: "Ignore when the working directory is empty."
    default: "false"
  tool-dir:
    description: "Custom path to set UV_TOOL_DIR to."
    required: false

@@ -41,6 +57,10 @@ inputs:
outputs:
  uv-version:
    description: "The installed uv version. Useful when using latest."
  uv-path:
    description: "The path to the installed uv binary."
  uvx-path:
    description: "The path to the installed uvx binary."
  cache-hit:
    description: "A boolean value to indicate a cache entry was found"
runs:
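The enable-cache default above moves from "false" to "auto". A minimal TypeScript sketch (not part of the changeset; the helper name is hypothetical) of how that value is interpreted, mirroring getEnableCache() in src/utils/inputs.ts later in this changeset:

import * as core from "@actions/core";

// Hypothetical helper; the real logic lives in src/utils/inputs.ts further down.
function resolveEnableCache(): boolean {
  const input = core.getInput("enable-cache"); // new default: "auto"
  if (input === "auto") {
    // Only upload the cache on GitHub-hosted runners.
    return process.env.RUNNER_ENVIRONMENT === "github-hosted";
  }
  return input === "true";
}

With the version default now empty, an empty value means "resolve the version from pyproject.toml or uv.toml, otherwise use latest" (see determineVersion() in src/setup-uv.ts below).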
51338 dist/save-cache/index.js generated vendored
File diff suppressed because one or more lines are too long

73956 dist/setup/index.js generated vendored
File diff suppressed because one or more lines are too long

49252 dist/update-known-checksums/index.js generated vendored
File diff suppressed because one or more lines are too long

1367 package-lock.json generated
File diff suppressed because it is too large
22 package.json

@@ -12,7 +12,7 @@
    "package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-checksums src/update-known-checksums.ts",
    "test": "jest",
    "act": "act pull_request -W .github/workflows/test.yml --container-architecture linux/amd64 -s GITHUB_TOKEN=\"$(gh auth token)\"",
    "update-known-checksums": "node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
    "update-known-checksums": "RUNNER_TEMP=known_checksums node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
    "all": "npm run build && npm run format && npm run lint && npm run package && npm test"
  },
  "repository": {

@@ -23,23 +23,27 @@
  "author": "@eifinger",
  "license": "MIT",
  "dependencies": {
    "@actions/cache": "^3.3.0",
    "@actions/cache": "^4.0.3",
    "@actions/core": "^1.11.1",
    "@actions/exec": "^1.1.1",
    "@actions/github": "^6.0.0",
    "@actions/glob": "^0.5.0",
    "@actions/io": "^1.1.3",
    "@actions/tool-cache": "^2.0.1",
    "@octokit/rest": "^21.0.2"
    "@actions/tool-cache": "^2.0.2",
    "@octokit/core": "^6.1.4",
    "@octokit/plugin-paginate-rest": "^11.4.3",
    "@octokit/plugin-rest-endpoint-methods": "^13.3.1",
    "@renovatebot/pep440": "^4.1.0",
    "smol-toml": "^1.3.1",
    "undici": "^7.5.0"
  },
  "devDependencies": {
    "@biomejs/biome": "1.9.4",
    "@types/node": "^22.9.0",
    "@types/node": "^22.13.10",
    "@types/semver": "^7.5.8",
    "@vercel/ncc": "^0.38.2",
    "@vercel/ncc": "^0.38.3",
    "jest": "^29.7.0",
    "js-yaml": "^4.1.0",
    "ts-jest": "^29.2.5",
    "typescript": "^5.6.3"
    "ts-jest": "^29.2.6",
    "typescript": "^5.8.2"
  }
}
61 src/cache/restore-cache.ts vendored

@@ -1,19 +1,21 @@
import * as cache from "@actions/cache";
import * as glob from "@actions/glob";
import * as core from "@actions/core";
import {
  cacheDependencyGlob,
  cacheLocalPath,
  cacheSuffix,
  pythonVersion as pythonVersionInput,
} from "../utils/inputs";
import { getArch, getPlatform } from "../utils/platforms";
import { hashFiles } from "../hash/hash-files";
import * as exec from "@actions/exec";

export const STATE_CACHE_KEY = "cache-key";
export const STATE_CACHE_MATCHED_KEY = "cache-matched-key";
const CACHE_VERSION = "1";

export async function restoreCache(version: string): Promise<void> {
  const cacheKey = await computeKeys(version);
export async function restoreCache(): Promise<void> {
  const cacheKey = await computeKeys();

  let matchedKey: string | undefined;
  core.info(

@@ -33,28 +35,57 @@ export async function restoreCache(version: string): Promise<void> {
  handleMatchResult(matchedKey, cacheKey);
}

async function computeKeys(version: string): Promise<string> {
async function computeKeys(): Promise<string> {
  let cacheDependencyPathHash = "-";
  if (cacheDependencyGlob !== "") {
    core.info(
      `Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
    );
    cacheDependencyPathHash += await glob.hashFiles(
      cacheDependencyGlob,
      undefined,
      undefined,
      true,
    );
    cacheDependencyPathHash += await hashFiles(cacheDependencyGlob, true);
    if (cacheDependencyPathHash === "-") {
      throw new Error(
        `No file in ${process.cwd()} matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
      core.warning(
        `No file matched to [${cacheDependencyGlob.split("\n").join(",")}]. The cache will never get invalidated. Make sure you have checked out the target repository and configured the cache-dependency-glob input correctly.`,
      );
    }
  } else {
    cacheDependencyPathHash += "no-dependency-glob";
  }
  if (cacheDependencyPathHash === "-") {
    cacheDependencyPathHash = "-no-dependency-glob";
  }
  const suffix = cacheSuffix ? `-${cacheSuffix}` : "";
  return `setup-uv-${CACHE_VERSION}-${getArch()}-${getPlatform()}-${version}${cacheDependencyPathHash}${suffix}`;
  const pythonVersion = await getPythonVersion();
  const platform = await getPlatform();
  return `setup-uv-${CACHE_VERSION}-${getArch()}-${platform}-${pythonVersion}${cacheDependencyPathHash}${suffix}`;
}

async function getPythonVersion(): Promise<string> {
  if (pythonVersionInput !== "") {
    return pythonVersionInput;
  }

  let output = "";
  const options: exec.ExecOptions = {
    silent: !core.isDebug(),
    listeners: {
      stdout: (data: Buffer) => {
        output += data.toString();
      },
    },
  };

  try {
    const execArgs = ["python", "find"];
    await exec.exec("uv", execArgs, options);
    const pythonPath = output.trim();

    output = "";
    await exec.exec(pythonPath, ["--version"], options);
    // output is like "Python 3.8.10"
    return output.split(" ")[1].trim();
  } catch (error) {
    const err = error as Error;
    core.debug(`Failed to get python version from uv. Error: ${err.message}`);
    return "unknown";
  }
}

function handleMatchResult(
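For illustration only (all values below are assumed, not taken from a real run): with the change above, the restore key is derived from the Python version reported by uv rather than from the uv version, so a key now looks roughly like this:

// Hypothetical example of the key shape produced by computeKeys() above.
const exampleCacheKey =
  "setup-uv-1-x86_64-unknown-linux-gnu-3.12.3-<sha256 of the matched dependency files>-my-suffix";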
File diff suppressed because it is too large
@@ -1,5 +1,6 @@
import { promises as fs } from "node:fs";
import * as tc from "@actions/tool-cache";
import { KNOWN_CHECKSUMS } from "./known-checksums";
export async function updateChecksums(
  filePath: string,
  downloadUrls: string[],

@@ -7,31 +8,50 @@ export async function updateChecksums(
  await fs.rm(filePath);
  await fs.appendFile(
    filePath,
    "// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: {[key: string]: string} = {\n",
    "// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: { [key: string]: string } = {\n",
  );
  let firstLine = true;
  for (const downloadUrl of downloadUrls) {
    const content = await downloadAssetContent(downloadUrl);
    const checksum = content.split(" ")[0].trim();
    const key = getKey(downloadUrl);
    if (key === undefined) {
      continue;
    }
    const checksum = await getOrDownloadChecksum(key, downloadUrl);
    if (!firstLine) {
      await fs.appendFile(filePath, ",\n");
    }
    await fs.appendFile(filePath, ` '${key}':\n '${checksum}'`);
    await fs.appendFile(filePath, ` "${key}":\n "${checksum}"`);
    firstLine = false;
  }
  await fs.appendFile(filePath, "}\n");
  await fs.appendFile(filePath, ",\n};\n");
}

function getKey(downloadUrl: string): string {
function getKey(downloadUrl: string): string | undefined {
  // https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256
  const parts = downloadUrl.split("/");
  const fileName = parts[parts.length - 1];
  if (fileName.startsWith("source")) {
    return undefined;
  }
  const name = fileName.split(".")[0].split("uv-")[1];
  const version = parts[parts.length - 2];
  return `${name}-${version}`;
}

async function getOrDownloadChecksum(
  key: string,
  downloadUrl: string,
): Promise<string> {
  let checksum = "";
  if (key in KNOWN_CHECKSUMS) {
    checksum = KNOWN_CHECKSUMS[key];
  } else {
    const content = await downloadAssetContent(downloadUrl);
    checksum = content.split(" ")[0].trim();
  }
  return checksum;
}

async function downloadAssetContent(downloadUrl: string): Promise<string> {
  const downloadPath = await tc.downloadTool(downloadUrl);
  const content = await fs.readFile(downloadPath, "utf8");
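A worked example of getKey() above, using the asset URL already quoted in the code comment:

// Input URL (from the comment in getKey()):
const downloadUrl =
  "https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256";
// file name -> "uv-aarch64-apple-darwin.tar.gz.sha256", name part -> "aarch64-apple-darwin",
// version segment -> "0.3.2", so the resulting map key is:
const key = "aarch64-apple-darwin-0.3.2"; // "source" tarballs return undefined and are skipped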
@@ -1,71 +0,0 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as exec from "@actions/exec";
import * as path from "node:path";
import { promises as fs } from "node:fs";
import type { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";

export async function downloadLatest(
  platform: Platform,
  arch: Architecture,
  checkSum: string | undefined,
  githubToken: string | undefined,
): Promise<{ cachedToolDir: string; version: string }> {
  const artifact = `uv-${arch}-${platform}`;
  let extension = ".tar.gz";
  if (platform === "pc-windows-msvc") {
    extension = ".zip";
  }
  const downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/latest/download/${artifact}${extension}`;
  core.info(`Downloading uv from "${downloadUrl}" ...`);

  const downloadPath = await tc.downloadTool(
    downloadUrl,
    `${artifact}${extension}`,
    githubToken,
  );
  let uvExecutablePath: string;
  let uvDir: string;
  if (platform === "pc-windows-msvc") {
    uvDir = await tc.extractZip(downloadPath);
    // On windows extracting the zip does not create an intermediate directory
    uvExecutablePath = path.join(uvDir, "uv.exe");
  } else {
    const extractedDir = await tc.extractTar(downloadPath);
    uvDir = path.join(extractedDir, artifact);
    uvExecutablePath = path.join(uvDir, "uv");
  }
  const version = await getVersion(uvExecutablePath);
  await validateChecksum(checkSum, downloadPath, arch, platform, version);
  const cachedToolDir = await tc.cacheDir(
    uvDir,
    TOOL_CACHE_NAME,
    version,
    arch,
  );

  return { cachedToolDir, version };
}

async function getVersion(uvExecutablePath: string): Promise<string> {
  // Parse the output of `uv --version` to get the version
  // The output looks like
  // uv 0.3.1 (be17d132a 2024-08-21)

  const options: exec.ExecOptions = {
    silent: !core.isDebug(),
  };
  const execArgs = ["--version"];

  let output = "";
  options.listeners = {
    stdout: (data: Buffer) => {
      output += data.toString();
    },
  };
  await exec.exec(uvExecutablePath, execArgs, options);
  const parts = output.split(" ");
  return parts[1].trim();
}
@@ -1,11 +1,12 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as path from "node:path";
import * as pep440 from "@renovatebot/pep440";
import { promises as fs } from "node:fs";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";
import type { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import * as github from "@actions/github";
import { Octokit } from "../utils/octokit";

export function tryGetFromToolCache(
  arch: Architecture,

@@ -40,7 +41,7 @@ export async function downloadVersion(

  const downloadPath = await tc.downloadTool(
    downloadUrl,
    `${artifact}${extension}`,
    undefined,
    githubToken,
  );
  await validateChecksum(

@@ -53,7 +54,9 @@ export async function downloadVersion(

  let uvDir: string;
  if (platform === "pc-windows-msvc") {
    uvDir = await tc.extractZip(downloadPath);
    const fullPathWithExtension = `${downloadPath}${extension}`;
    await fs.copyFile(downloadPath, fullPathWithExtension);
    uvDir = await tc.extractZip(fullPathWithExtension);
    // On windows extracting the zip does not create an intermediate directory
  } else {
    const extractedDir = await tc.extractTar(downloadPath);

@@ -68,28 +71,106 @@ export async function downloadVersion(
  return { version: resolvedVersion, cachedToolDir };
}

async function resolveVersion(
  version: string,
export async function resolveVersion(
  versionInput: string,
  githubToken: string,
): Promise<string> {
  core.debug(`Resolving version: ${versionInput}`);
  const version =
    versionInput === "latest"
      ? await getLatestVersion(githubToken)
      : versionInput;
  if (tc.isExplicitVersion(version)) {
    core.debug(`Version ${version} is an explicit version.`);
    return version;
  }
  const availableVersions = await getAvailableVersions(githubToken);
  const resolvedVersion = tc.evaluateVersions(availableVersions, version);
  if (resolvedVersion === "") {
  core.debug(`Available versions: ${availableVersions}`);
  const resolvedVersion = maxSatisfying(availableVersions, version);
  if (resolvedVersion === undefined) {
    throw new Error(`No version found for ${version}`);
  }
  return resolvedVersion;
}

async function getAvailableVersions(githubToken: string): Promise<string[]> {
  const octokit = github.getOctokit(githubToken);
  try {
    const octokit = new Octokit({
      auth: githubToken,
    });
    return await getReleaseTagNames(octokit);
  } catch (err) {
    if ((err as Error).message.includes("Bad credentials")) {
      core.info(
        "No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
      );
      const octokit = new Octokit();
      return await getReleaseTagNames(octokit);
    }
    throw err;
  }
}

async function getReleaseTagNames(
  octokit: InstanceType<typeof Octokit>,
): Promise<string[]> {
  const response = await octokit.paginate(octokit.rest.repos.listReleases, {
    owner: OWNER,
    repo: REPO,
  });
  return response.map((release) => release.tag_name);
}

async function getLatestVersion(githubToken: string) {
  core.debug("Getting latest version...");
  const octokit = new Octokit({
    auth: githubToken,
  });

  let latestRelease: { tag_name: string } | undefined;
  try {
    latestRelease = await getLatestRelease(octokit);
  } catch (err) {
    core.info(
      "No (valid) GitHub token provided. Falling back to anonymous. Requests might be rate limited.",
    );
    if (err instanceof Error) {
      core.debug(err.message);
    }
    const octokit = new Octokit();
    latestRelease = await getLatestRelease(octokit);
  }

  if (!latestRelease) {
    throw new Error("Could not determine latest release.");
  }
  core.debug(`Latest version: ${latestRelease.tag_name}`);
  return latestRelease.tag_name;
}

async function getLatestRelease(octokit: InstanceType<typeof Octokit>) {
  const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
    owner: OWNER,
    repo: REPO,
  });
  return latestRelease;
}

function maxSatisfying(
  versions: string[],
  version: string,
): string | undefined {
  const maxSemver = tc.evaluateVersions(versions, version);
  if (maxSemver !== "") {
    core.debug(`Found a version that satisfies the semver range: ${maxSemver}`);
    return maxSemver;
  }
  const maxPep440 = pep440.maxSatisfying(versions, version);
  if (maxPep440 !== null) {
    core.debug(
      `Found a version that satisfies the pep440 specifier: ${maxPep440}`,
    );
    return maxPep440;
  }
  return undefined;
}
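A usage sketch (not part of the changeset; the import path is assumed) of the now-exported resolveVersion() above: tc.evaluateVersions handles semver-style ranges first, and pep440.maxSatisfying is the fallback for PEP 440 specifiers.

import { resolveVersion } from "./src/download/download-version";

(async () => {
  const token = process.env.GITHUB_TOKEN ?? "";
  // Semver range, expected to resolve via tc.evaluateVersions:
  console.log(await resolveVersion("0.5.x", token));
  // Comma-separated PEP 440 specifier; this should only match through the
  // pep440.maxSatisfying fallback:
  console.log(await resolveVersion(">=0.5.0,<0.6.0", token));
})();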
48 src/hash/hash-files.ts Normal file

@@ -0,0 +1,48 @@
import * as crypto from "node:crypto";
import * as core from "@actions/core";
import * as fs from "node:fs";
import * as stream from "node:stream";
import * as util from "node:util";
import { create } from "@actions/glob";

/**
 * Hashes files matching the given glob pattern.
 *
 * Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
 * But supports hashing files outside the GITHUB_WORKSPACE.
 * @param pattern The glob pattern to match files.
 * @param verbose Whether to log the files being hashed.
 */
export async function hashFiles(
  pattern: string,
  verbose = false,
): Promise<string> {
  const globber = await create(pattern);
  let hasMatch = false;
  const writeDelegate = verbose ? core.info : core.debug;
  const result = crypto.createHash("sha256");
  let count = 0;
  for await (const file of globber.globGenerator()) {
    writeDelegate(file);
    if (fs.statSync(file).isDirectory()) {
      writeDelegate(`Skip directory '${file}'.`);
      continue;
    }
    const hash = crypto.createHash("sha256");
    const pipeline = util.promisify(stream.pipeline);
    await pipeline(fs.createReadStream(file), hash);
    result.write(hash.digest());
    count++;
    if (!hasMatch) {
      hasMatch = true;
    }
  }
  result.end();

  if (hasMatch) {
    writeDelegate(`Found ${count} files to hash.`);
    return result.digest("hex");
  }
  writeDelegate("No matches found for glob");
  return "";
}
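A usage sketch of the new hashFiles() helper (import path assumed): it takes the same newline-separated glob patterns as the cache-dependency-glob input and returns a hex SHA-256 over all matched files, or an empty string when nothing matches.

import { hashFiles } from "./src/hash/hash-files";

(async () => {
  const digest = await hashFiles("**/uv.lock\n**/requirements*.txt", true);
  console.log(digest === "" ? "no matches" : digest);
})();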
@@ -1,6 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as fs from "node:fs";
import {
  STATE_CACHE_MATCHED_KEY,
  STATE_CACHE_KEY,

@@ -8,6 +9,7 @@ import {
import {
  cacheLocalPath,
  enableCache,
  ignoreNothingToCache,
  pruneCache as shouldPruneCache,
} from "./utils/inputs";

@@ -15,12 +17,17 @@ export async function run(): Promise<void> {
  try {
    if (enableCache) {
      await saveCache();
      // node will stay alive if any promises are not resolved,
      // which is a possibility if HTTP requests are dangling
      // due to retries or timeouts. We know that if we got here
      // that all promises that we care about have successfully
      // resolved, so simply exit with success.
      process.exit(0);
    }
  } catch (error) {
    const err = error as Error;
    core.setFailed(err.message);
  }
  process.exit(0);
}

async function saveCache(): Promise<void> {

@@ -41,9 +48,27 @@ async function saveCache(): Promise<void> {
  }

  core.info(`Saving cache path: ${cacheLocalPath}`);
  await cache.saveCache([cacheLocalPath], cacheKey);

  core.info(`cache saved with the key: ${cacheKey}`);
  if (!fs.existsSync(cacheLocalPath) && !ignoreNothingToCache) {
    throw new Error(
      `Cache path ${cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
    );
  }
  try {
    await cache.saveCache([cacheLocalPath], cacheKey);
    core.info(`cache saved with the key: ${cacheKey}`);
  } catch (e) {
    if (
      e instanceof Error &&
      e.message ===
        "Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved."
    ) {
      core.info(
        "No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.",
      );
    } else {
      throw e;
    }
  }
}

async function pruneCache(): Promise<void> {
145 src/setup-uv.ts

@@ -3,10 +3,10 @@ import * as path from "node:path";
import {
  downloadVersion,
  tryGetFromToolCache,
  resolveVersion,
} from "./download/download-version";
import { restoreCache } from "./cache/restore-cache";

import { downloadLatest } from "./download/download-latest";
import {
  type Architecture,
  getArch,

@@ -16,15 +16,23 @@ import {
import {
  cacheLocalPath,
  checkSum,
  ignoreEmptyWorkdir,
  enableCache,
  githubToken,
  pyProjectFile,
  pythonVersion,
  toolBinDir,
  toolDir,
  version,
  uvFile,
  version as versionInput,
} from "./utils/inputs";
import * as exec from "@actions/exec";
import fs from "node:fs";
import { getUvVersionFromConfigFile } from "./utils/pyproject";

async function run(): Promise<void> {
  const platform = getPlatform();
  detectEmptyWorkdir();
  const platform = await getPlatform();
  const arch = getArch();

  try {

@@ -34,25 +42,20 @@ async function run(): Promise<void> {
    if (arch === undefined) {
      throw new Error(`Unsupported architecture: ${process.arch}`);
    }
    const setupResult = await setupUv(
      platform,
      arch,
      version,
      checkSum,
      githubToken,
    );
    const setupResult = await setupUv(platform, arch, checkSum, githubToken);

    addUvToPath(setupResult.uvDir);
    addUvToPathAndOutput(setupResult.uvDir);
    addToolBinToPath();
    setToolDir();
    core.setOutput("uv-version", setupResult.version);
    core.info(`Successfully installed uv version ${setupResult.version}`);

    await setupPython();
    addMatchers();
    setCacheDir(cacheLocalPath);

    core.setOutput("uv-version", setupResult.version);
    core.info(`Successfully installed uv version ${setupResult.version}`);

    if (enableCache) {
      await restoreCache(setupResult.version);
      await restoreCache();
    }
    process.exit(0);
  } catch (err) {

@@ -60,48 +63,75 @@ async function run(): Promise<void> {
  }
}

function detectEmptyWorkdir(): void {
  if (fs.readdirSync(".").length === 0) {
    if (ignoreEmptyWorkdir) {
      core.info(
        "Empty workdir detected. Ignoring because ignore-empty-workdir is enabled",
      );
    } else {
      core.warning(
        "Empty workdir detected. This may cause unexpected behavior. You can enable ignore-empty-workdir to mute this warning.",
      );
    }
  }
}

async function setupUv(
  platform: Platform,
  arch: Architecture,
  versionInput: string,
  checkSum: string | undefined,
  githubToken: string,
): Promise<{ uvDir: string; version: string }> {
  let installedPath: string | undefined;
  let cachedToolDir: string;
  let version: string;
  if (versionInput === "latest") {
    const latestResult = await downloadLatest(
      platform,
      arch,
      checkSum,
      githubToken,
    );
    version = latestResult.version;
    cachedToolDir = latestResult.cachedToolDir;
  } else {
    const toolCacheResult = tryGetFromToolCache(arch, versionInput);
    version = toolCacheResult.version;
    installedPath = toolCacheResult.installedPath;
    if (installedPath) {
      core.info(`Found uv in tool-cache for ${versionInput}`);
      return { uvDir: installedPath, version };
    }
    const versionResult = await downloadVersion(
      platform,
      arch,
      versionInput,
      checkSum,
      githubToken,
    );
    cachedToolDir = versionResult.cachedToolDir;
    version = versionResult.version;
  const resolvedVersion = await determineVersion();
  const toolCacheResult = tryGetFromToolCache(arch, resolvedVersion);
  if (toolCacheResult.installedPath) {
    core.info(`Found uv in tool-cache for ${toolCacheResult.version}`);
    return {
      uvDir: toolCacheResult.installedPath,
      version: toolCacheResult.version,
    };
  }

  return { uvDir: cachedToolDir, version };
  const downloadVersionResult = await downloadVersion(
    platform,
    arch,
    resolvedVersion,
    checkSum,
    githubToken,
  );

  return {
    uvDir: downloadVersionResult.cachedToolDir,
    version: downloadVersionResult.version,
  };
}

function addUvToPath(cachedPath: string): void {
async function determineVersion(): Promise<string> {
  if (versionInput !== "") {
    return await resolveVersion(versionInput, githubToken);
  }
  const configFile = uvFile !== "" ? uvFile : pyProjectFile;
  if (configFile !== "") {
    const versionFromConfigFile = getUvVersionFromConfigFile(configFile);
    if (versionFromConfigFile === undefined) {
      core.warning(
        `Could not find required-version under [tool.uv] in ${configFile}. Falling back to latest`,
      );
    }
    return await resolveVersion(versionFromConfigFile || "latest", githubToken);
  }
  if (!fs.existsSync("uv.toml") && !fs.existsSync("pyproject.toml")) {
    return await resolveVersion("latest", githubToken);
  }
  const versionFile = fs.existsSync("uv.toml") ? "uv.toml" : "pyproject.toml";
  const versionFromConfigFile = getUvVersionFromConfigFile(versionFile);
  return await resolveVersion(versionFromConfigFile || "latest", githubToken);
}

function addUvToPathAndOutput(cachedPath: string): void {
  core.setOutput("uv-path", `${cachedPath}${path.sep}uv`);
  core.setOutput("uvx-path", `${cachedPath}${path.sep}uvx`);
  core.addPath(cachedPath);
  core.info(`Added ${cachedPath} to the path`);
}

@@ -133,6 +163,27 @@ function setToolDir(): void {
  }
}

async function setupPython(): Promise<void> {
  if (pythonVersion !== "") {
    core.exportVariable("UV_PYTHON", pythonVersion);
    core.info(`Set UV_PYTHON to ${pythonVersion}`);
    const options: exec.ExecOptions = {
      silent: !core.isDebug(),
    };
    const execArgs = ["venv", "--python", pythonVersion];

    core.info("Activating python venv...");
    await exec.exec("uv", execArgs, options);

    let venvBinPath = ".venv/bin";
    if (process.platform === "win32") {
      venvBinPath = ".venv/Scripts";
    }
    core.addPath(path.resolve(venvBinPath));
    core.exportVariable("VIRTUAL_ENV", path.resolve(".venv"));
  }
}

function setCacheDir(cacheLocalPath: string): void {
  core.exportVariable("UV_CACHE_DIR", cacheLocalPath);
  core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`);
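Reading of determineVersion() above, summarized as a sketch (not part of the diff): explicit inputs win, config files come next, and "latest" is the final fallback.

// Order in which determineVersion() picks a uv version:
const versionResolutionOrder = [
  "version input",
  "uv-file input",
  "pyproject-file input",
  "uv.toml in the working directory",
  "pyproject.toml in the working directory",
  "latest release",
];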
@@ -1,8 +1,8 @@
import * as github from "@actions/github";
import * as semver from "semver";
import * as core from "@actions/core";
import { Octokit } from "./utils/octokit";

import { OWNER, REPO } from "./utils/constants";
import * as semver from "semver";

import { updateChecksums } from "./download/checksum/update-known-checksums";

@@ -10,7 +10,9 @@ async function run(): Promise<void> {
  const checksumFilePath = process.argv.slice(2)[0];
  const github_token = process.argv.slice(2)[1];

  const octokit = github.getOctokit(github_token);
  const octokit = new Octokit({
    auth: github_token,
  });

  const response = await octokit.paginate(octokit.rest.repos.listReleases, {
    owner: OWNER,
|
|||
import path from "node:path";
|
||||
|
||||
export const version = core.getInput("version");
|
||||
export const pyProjectFile = core.getInput("pyproject-file");
|
||||
export const uvFile = core.getInput("uv-file");
|
||||
export const pythonVersion = core.getInput("python-version");
|
||||
export const checkSum = core.getInput("checksum");
|
||||
export const enableCache = core.getInput("enable-cache") === "true";
|
||||
export const enableCache = getEnableCache();
|
||||
export const cacheSuffix = core.getInput("cache-suffix") || "";
|
||||
export const cacheLocalPath = getCacheLocalPath();
|
||||
export const cacheDependencyGlob = core.getInput("cache-dependency-glob");
|
||||
export const pruneCache = core.getInput("prune-cache") === "true";
|
||||
export const ignoreNothingToCache =
|
||||
core.getInput("ignore-nothing-to-cache") === "true";
|
||||
export const ignoreEmptyWorkdir =
|
||||
core.getInput("ignore-empty-workdir") === "true";
|
||||
export const toolBinDir = getToolBinDir();
|
||||
export const toolDir = getToolDir();
|
||||
export const githubToken = core.getInput("github-token");
|
||||
|
||||
function getEnableCache(): boolean {
|
||||
const enableCacheInput = core.getInput("enable-cache");
|
||||
if (enableCacheInput === "auto") {
|
||||
return process.env.RUNNER_ENVIRONMENT === "github-hosted";
|
||||
}
|
||||
return enableCacheInput === "true";
|
||||
}
|
||||
|
||||
function getToolBinDir(): string | undefined {
|
||||
const toolBinDirInput = core.getInput("tool-bin-dir");
|
||||
if (toolBinDirInput !== "") {
|
||||
return toolBinDirInput;
|
||||
return expandTilde(toolBinDirInput);
|
||||
}
|
||||
if (process.platform === "win32") {
|
||||
if (process.env.RUNNER_TEMP !== undefined) {
|
||||
|
@ -31,7 +46,7 @@ function getToolBinDir(): string | undefined {
|
|||
function getToolDir(): string | undefined {
|
||||
const toolDirInput = core.getInput("tool-dir");
|
||||
if (toolDirInput !== "") {
|
||||
return toolDirInput;
|
||||
return expandTilde(toolDirInput);
|
||||
}
|
||||
if (process.platform === "win32") {
|
||||
if (process.env.RUNNER_TEMP !== undefined) {
|
||||
|
@ -47,7 +62,7 @@ function getToolDir(): string | undefined {
|
|||
function getCacheLocalPath(): string {
|
||||
const cacheLocalPathInput = core.getInput("cache-local-path");
|
||||
if (cacheLocalPathInput !== "") {
|
||||
return cacheLocalPathInput;
|
||||
return expandTilde(cacheLocalPathInput);
|
||||
}
|
||||
if (process.env.RUNNER_TEMP !== undefined) {
|
||||
return `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`;
|
||||
|
@ -56,3 +71,10 @@ function getCacheLocalPath(): string {
|
|||
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
|
||||
);
|
||||
}
|
||||
|
||||
function expandTilde(input: string): string {
|
||||
if (input.startsWith("~")) {
|
||||
return `${process.env.HOME}${input.substring(1)}`;
|
||||
}
|
||||
return input;
|
||||
}
|
||||
|
|
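A small sketch of the new tilde expansion applied to tool-dir, tool-bin-dir and cache-local-path (the input value and HOME below are assumptions, not from a real run):

// e.g. HOME=/home/runner on a hosted runner
const toolDirInput = "~/uv-tools";
const expanded = toolDirInput.startsWith("~")
  ? `${process.env.HOME}${toolDirInput.substring(1)}`
  : toolDirInput; // -> "/home/runner/uv-tools"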
58 src/utils/octokit.ts Normal file

@@ -0,0 +1,58 @@
import { Octokit as Core } from "@octokit/core";
import type {
  Constructor,
  OctokitOptions,
} from "@octokit/core/dist-types/types";
import {
  paginateRest,
  type PaginateInterface,
} from "@octokit/plugin-paginate-rest";
import { legacyRestEndpointMethods } from "@octokit/plugin-rest-endpoint-methods";
import { fetch as undiciFetch, ProxyAgent, type RequestInit } from "undici";

export type { RestEndpointMethodTypes } from "@octokit/plugin-rest-endpoint-methods";

const DEFAULTS = {
  baseUrl: "https://api.github.com",
  userAgent: "setup-uv",
};

export function getProxyAgent() {
  const httpProxy = process.env.HTTP_PROXY || process.env.http_prox;
  if (httpProxy) {
    return new ProxyAgent(httpProxy);
  }

  const httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy;
  if (httpsProxy) {
    return new ProxyAgent(httpsProxy);
  }

  return undefined;
}

export const customFetch = async (url: string, opts: RequestInit) =>
  await undiciFetch(url, {
    dispatcher: getProxyAgent(),
    ...opts,
  });

export const Octokit: typeof Core &
  Constructor<
    {
      paginate: PaginateInterface;
    } & ReturnType<typeof legacyRestEndpointMethods>
  > = Core.plugin(paginateRest, legacyRestEndpointMethods).defaults(
  function buildDefaults(options: OctokitOptions): OctokitOptions {
    return {
      ...DEFAULTS,
      ...options,
      request: {
        fetch: customFetch,
        ...options.request,
      },
    };
  },
);

export type Octokit = InstanceType<typeof Octokit>;
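A usage sketch (import path assumed) of the customized Octokit above, which bundles pagination, the REST endpoint methods and the proxy-aware undici fetch; this is how download-version.ts earlier in this changeset lists the uv releases.

import { Octokit } from "./src/utils/octokit";

(async () => {
  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const releases = await octokit.paginate(octokit.rest.repos.listReleases, {
    owner: "astral-sh",
    repo: "uv",
  });
  console.log(releases.map((release) => release.tag_name).slice(0, 5));
})();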
@@ -1,10 +1,17 @@
import * as exec from "@actions/exec";
import * as core from "@actions/core";
export type Platform =
  | "unknown-linux-gnu"
  | "unknown-linux-musl"
  | "unknown-linux-musleabihf"
  | "apple-darwin"
  | "pc-windows-msvc";
export type Architecture = "i686" | "x86_64" | "aarch64";
export type Architecture =
  | "i686"
  | "x86_64"
  | "aarch64"
  | "s390x"
  | "powerpc64le";

export function getArch(): Architecture | undefined {
  const arch = process.arch;

@@ -12,6 +19,8 @@ export function getArch(): Architecture | undefined {
    ia32: "i686",
    x64: "x86_64",
    arm64: "aarch64",
    s390x: "s390x",
    ppc64: "powerpc64le",
  };

  if (arch in archMapping) {

@@ -19,15 +28,49 @@ export function getArch(): Architecture | undefined {
  }
}

export function getPlatform(): Platform | undefined {
  const platform = process.platform;
export async function getPlatform(): Promise<Platform | undefined> {
  const processPlatform = process.platform;
  const platformMapping: { [key: string]: Platform } = {
    linux: "unknown-linux-gnu",
    darwin: "apple-darwin",
    win32: "pc-windows-msvc",
  };

  if (platform in platformMapping) {
    return platformMapping[platform];
  if (processPlatform in platformMapping) {
    const platform = platformMapping[processPlatform];
    if (platform === "unknown-linux-gnu") {
      const isMusl = await isMuslOs();
      return isMusl ? "unknown-linux-musl" : platform;
    }
    return platform;
  }
}

async function isMuslOs(): Promise<boolean> {
  let stdOutput = "";
  let errOutput = "";
  const options: exec.ExecOptions = {
    silent: !core.isDebug(),
    listeners: {
      stdout: (data: Buffer) => {
        stdOutput += data.toString();
      },
      stderr: (data: Buffer) => {
        errOutput += data.toString();
      },
    },
    ignoreReturnCode: true,
  };

  try {
    const execArgs = ["--version"];
    await exec.exec("ldd", execArgs, options);
    return stdOutput.includes("musl") || errOutput.includes("musl");
  } catch (error) {
    const err = error as Error;
    core.warning(
      `Failed to determine glibc or musl. Falling back to glibc. Error: ${err.message}`,
    );
    return false;
  }
}
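Usage sketch (import path assumed): getPlatform() is now async because it may shell out to ldd --version to tell musl from glibc on Linux.

import { getArch, getPlatform } from "./src/utils/platforms";

(async () => {
  const platform = await getPlatform(); // e.g. "unknown-linux-gnu" or "unknown-linux-musl"
  const arch = getArch(); // e.g. "x86_64", "aarch64", "s390x", "powerpc64le"
  console.log(`release artifact name would be uv-${arch}-${platform}`);
})();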
46 src/utils/pyproject.ts Normal file

@@ -0,0 +1,46 @@
import fs from "node:fs";
import * as core from "@actions/core";
import * as toml from "smol-toml";

export function getUvVersionFromConfigFile(
  filePath: string,
): string | undefined {
  core.debug(`Trying to find required-version for uv in: ${filePath}`);
  if (!fs.existsSync(filePath)) {
    core.warning(`Could not find file: ${filePath}`);
    return undefined;
  }
  let requiredVersion: string | undefined;
  try {
    requiredVersion = getRequiredVersion(filePath);
  } catch (err) {
    const message = (err as Error).message;
    core.warning(`Error while parsing ${filePath}: ${message}`);
    return undefined;
  }

  if (requiredVersion?.startsWith("==")) {
    requiredVersion = requiredVersion.slice(2);
  }
  if (requiredVersion !== undefined) {
    core.info(
      `Found required-version for uv in ${filePath}: ${requiredVersion}`,
    );
  }
  return requiredVersion;
}

function getRequiredVersion(filePath: string): string | undefined {
  const fileContent = fs.readFileSync(filePath, "utf-8");

  if (filePath.endsWith("pyproject.toml")) {
    const tomlContent = toml.parse(fileContent) as {
      tool?: { uv?: { "required-version"?: string } };
    };
    return tomlContent?.tool?.uv?.["required-version"];
  }
  const tomlContent = toml.parse(fileContent) as {
    "required-version"?: string;
  };
  return tomlContent["required-version"];
}
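A usage sketch against the fixtures added earlier in this changeset (import path assumed):

import { getUvVersionFromConfigFile } from "./src/utils/pyproject";

const fromUvToml = getUvVersionFromConfigFile(
  "__tests__/fixtures/uv-toml-project/uv.toml",
); // "0.5.15" (the "==" prefix is stripped)
const fromPyproject = getUvVersionFromConfigFile(
  "__tests__/fixtures/uv-toml-project/pyproject.toml",
); // "0.5.14", read from [tool.uv] required-version
console.log(fromUvToml, fromPyproject);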
@@ -1,6 +1,6 @@
{
  "compilerOptions": {
    "target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
    "target": "ES2022" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
    "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
    "outDir": "./lib" /* Redirect output structure to the directory. */,
    "rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,