Mirror of https://github.com/astral-sh/setup-uv.git, synced 2025-12-15 11:07:14 +00:00
Compare commits
18 Commits
- 38f3f10444
- 8bdd012be5
- 5f42d5af6c
- 26ddfef6e1
- ee4fa33003
- 420915557e
- 9839fa9fb5
- 196fe5f098
- 49d8a3d9a8
- d8db0a86d3
- ed171c292b
- 691a091485
- 9b71657bb2
- caf0cab7a6
- 7c238111e6
- 3eca4c2715
- aee2e918ee
- 4ffb6d766c
.github/workflows/test-cache-windows.yml (10 lines changed, vendored)

@@ -11,10 +11,7 @@ concurrency:
jobs:
test-setup-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
runs-on: windows-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache

@@ -25,10 +22,7 @@ jobs:
- run: uv sync
working-directory: __tests__\fixtures\uv-project
test-restore-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
runs-on: windows-latest
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
.github/workflows/test-cache.yml (69 lines changed, vendored)

@@ -11,24 +11,18 @@ concurrency:
jobs:
test-setup-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
runs-on: ubuntu-latest
needs: test-setup-cache
steps:
- uses: actions/checkout@v4

@@ -37,7 +31,7 @@ jobs:
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then

@@ -59,7 +53,7 @@ jobs:
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-dependency-glob:

@@ -79,7 +73,8 @@ jobs:
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
ignore-nothing-to-cache: true
- name: Cache was not hit
run: |
if [ "$CACHE_HIT" == "true" ]; then

@@ -96,7 +91,7 @@ jobs:
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-local-path: /tmp/uv-cache
- run: uv sync
working-directory: __tests__/fixtures/uv-project

@@ -110,7 +105,7 @@ jobs:
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-local-path: /tmp/uv-cache
- name: Cache was hit
run: |

@@ -121,3 +116,49 @@ jobs:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project

test-tilde-expansion-cache-local-path:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Setup with cache
uses: ./
with:
cache-local-path: ~/uv-cache/cache-local-path
- run: uv sync
working-directory: __tests__/fixtures/uv-project

test-tilde-expansion-cache-dependency-glob:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Create cache dependency glob file
run: touch ~/uv-cache.glob
shell: bash
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-local-path: ~/uv-cache/cache-dependency-glob
cache-dependency-glob: "~/uv-cache.glob"
- run: uv sync
working-directory: __tests__/fixtures/uv-project

cleanup-tilde-expansion-tests:
needs:
- test-tilde-expansion-cache-local-path
- test-tilde-expansion-cache-dependency-glob
runs-on: selfhosted-ubuntu-arm64
steps:
- name: Remove cache directory
run: rm -rf ~/uv-cache
shell: bash
- name: Remove cache dependency glob file
run: rm -f ~/uv-cache.glob
shell: bash
.github/workflows/test.yml (60 lines changed, vendored)

@@ -28,19 +28,18 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14, selfhosted-ubuntu-arm64]
os: [ubuntu-latest, macos-latest, macos-14]
steps:
- uses: actions/checkout@v4
- name: Install default version
- name: Install latest version
uses: ./
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-specific-version:
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14, selfhosted-ubuntu-arm64]
uv-version: ["latest", "0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
uv-version: ["0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
steps:
- uses: actions/checkout@v4
- name: Install version ${{ matrix.uv-version }}

@@ -50,10 +49,7 @@ jobs:
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-semver-range:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, selfhosted-ubuntu-arm64]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install version 0.3

@@ -72,15 +68,15 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, selfhosted-ubuntu-arm64]
os: [ubuntu-latest, macos-latest]
checksum:
["4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"]
exclude:
- os: selfhosted-ubuntu-arm64
- os: macos-latest
checksum: "4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"
include:
- os: selfhosted-ubuntu-arm64
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
- os: macos-latest
checksum: "a70cbfbf3bb5c08b2f84963b4f12c94e08fbb2468ba418a3bfe1066fbe9e7218"
steps:
- uses: actions/checkout@v4
- name: Checksum matches expected

@@ -115,7 +111,6 @@ jobs:
macos-latest,
macos-14,
windows-latest,
selfhosted-ubuntu-arm64,
]
steps:
- uses: actions/checkout@v4

@@ -123,3 +118,40 @@ jobs:
uses: ./
- run: uv tool install ruff
- run: ruff --version

test-tilde-expansion-tool-dirs:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
tool-bin-dir: "~/tool-bin-dir"
tool-dir: "~/tool-dir"
- name: "Check if tool dirs are expanded"
run: |
if ! echo "$PATH" | grep -q "/home/ubuntu/tool-bin-dir"; then
echo "PATH does not contain /home/ubuntu/tool-bin-dir: $PATH"
exit 1
fi
if [ "$UV_TOOL_DIR" != "/home/ubuntu/tool-dir" ]; then
echo "UV_TOOL_DIR does not contain /home/ubuntu/tool-dir: $UV_TOOL_DIR"
exit 1
fi

test-python-version:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.12", "3.13t"]
steps:
- uses: actions/checkout@v4
- name: Install latest version
uses: ./
with:
python-version: ${{ matrix.python-version }}
- name: Verify UV_PYTHON is set to correct version
run: |
if [ "$UV_PYTHON" != "${{ matrix.python-version }}" ]; then
exit 1
fi
- run: uv sync
working-directory: __tests__/fixtures/uv-project
README.md (134 lines changed)

@@ -14,13 +14,17 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
- [Install the latest version (default)](#install-the-latest-version-default)
- [Install a specific version](#install-a-specific-version)
- [Install a version by supplying a semver range](#install-a-version-by-supplying-a-semver-range)
- [Python version](#python-version)
- [Validate checksum](#validate-checksum)
- [Enable Caching](#enable-caching)
- [Cache dependency glob](#cache-dependency-glob)
- [Local cache path](#local-cache-path)
- [Disable cache pruning](#disable-cache-pruning)
- [Ignore nothing to cache](#ignore-nothing-to-cache)
- [GitHub authentication token](#github-authentication-token)
- [UV_TOOL_DIR](#uv_tool_dir)
- [UV_TOOL_BIN_DIR](#uv_tool_bin_dir)
- [Tilde Expansion](#tilde-expansion)
- [How it works](#how-it-works)
- [FAQ](#faq)

@@ -30,7 +34,7 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs

```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
version: "latest"
```

@@ -38,49 +42,75 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
For an example workflow, see
[here](https://github.com/charliermarsh/autobot/blob/e42c66659bf97b90ca9ff305a19cc99952d0d43f/.github/workflows/ci.yaml).

> [!TIP]
>
> Using `latest` requires that uv download the executable on every run, which incurs a cost
> (especially on self-hosted runners). As a best practice, consider pinning the version to a
> specific release.

### Install a specific version

```yaml
- name: Install a specific version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
version: "0.4.4"
```

### Install a version by supplying a semver range

You can also specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
to install the latest version that satisfies the range.

```yaml
- name: Install a semver range of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
version: ">=0.4.0"
```

```yaml
- name: Pinning a minor version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
version: "0.4.x"
```

### Python version

You can use the input `python-version` to set the environment variable `UV_PYTHON` for the rest
of your workflow.
This will override any python version specifications in `pyproject.toml` and `.python-version`

```yaml
- name: Install the latest version of uv and set the python version to 3.12
uses: astral-sh/setup-uv@v4
with:
python-version: "3.12"
```

You can combine this with a matrix to test multiple python versions:

```yaml
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- name: Install the latest version of uv and set the python version
uses: astral-sh/setup-uv@v4
with:
python-version: ${{ matrix.python-version }}
- name: Test with python ${{ matrix.python-version }}
run: uv run --frozen pytest
```

### Validate checksum

You can also specify a checksum to validate the downloaded file. Checksums up to the default version
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
are automatically verified by this action. The sha256 hashes can be found on the
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.

```yaml
- name: Install a specific version and validate the checksum
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
version: "0.3.1"
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"

@@ -88,8 +118,8 @@ are automatically verified by this action. The sha256 hashes can be found on the

### Enable caching

If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be cached to
the GitHub Actions Cache. This can speed up runs that reuse the cache by several minutes.
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.

> [!TIP]
>

@@ -101,7 +131,7 @@ You can optionally define a custom cache key suffix.

```yaml
- name: Enable caching and define a custom cache key suffix
id: setup-uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-suffix: "optional-suffix"

@@ -118,9 +148,9 @@ use it in subsequent steps. For example, to use the cache in the above case:

#### Cache dependency glob

If you want to control when the cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The cache will be invalidated if any file matching the glob pattern
changes. The glob matches files relative to the repository root.
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
changes. If you use relative paths, they are relative to the repository root.

> [!NOTE]
>

@@ -128,7 +158,7 @@ changes. The glob matches files relative to the repository root.

```yaml
- name: Define a cache dependency glob
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "**/requirements*.txt"

@@ -136,7 +166,7 @@ changes. The glob matches files relative to the repository root.

```yaml
- name: Define a list of cache dependency globs
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: |

@@ -144,9 +174,17 @@ changes. The glob matches files relative to the repository root.
**/pyproject.toml
```

```yaml
- name: Define an absolute cache dependency glob
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
```

```yaml
- name: Never invalidate the cache
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: ""

@@ -161,7 +199,7 @@ It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\uv-tool-dir` on Wi

```yaml
- name: Define a custom uv cache path
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
cache-local-path: "/path/to/cache"
```

@@ -172,20 +210,33 @@ By default, the uv cache is pruned after every run, removing pre-built wheels, b
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
pre-built wheels from the cache (and instead re-download them from the registry on each run).
However, on self-hosted or local runners, preserving the cache may be more efficient. See
the[documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more.
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more information.

If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
input.

```yaml
- name: Don't prune the cache before saving it
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
prune-cache: false
```

### Ignore nothing to cache

By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.

```yaml
- name: Ignore nothing to cache
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
ignore-nothing-to-cache: true
```

### GitHub authentication token

This action uses the GitHub API to fetch the uv release artifacts. To avoid hitting the GitHub API

@@ -198,7 +249,7 @@ are not sufficient, you can provide a custom GitHub token with the necessary per

```yaml
- name: Install the latest version of uv with a custom GitHub token
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
```

@@ -216,7 +267,7 @@ input:

```yaml
- name: Install the latest version of uv with a custom tool dir
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
tool-dir: "/path/to/tool/dir"
```

@@ -235,11 +286,30 @@ If you want to change this behaviour (especially on self-hosted runners) you can

```yaml
- name: Install the latest version of uv with a custom tool bin dir
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
tool-bin-dir: "/path/to/tool-bin/dir"
```

### Tilde Expansion

This action supports expanding the `~` character to the user's home directory for the following inputs:

- `cache-local-path`
- `tool-dir`
- `tool-bin-dir`
- `cache-dependency-glob`

```yaml
- name: Expand the tilde character
uses: astral-sh/setup-uv@v4
with:
cache-local-path: "~/path/to/cache"
tool-dir: "~/path/to/tool/dir"
tool-bin-dir: "~/path/to/tool-bin/dir"
cache-dependency-glob: "~/my-cache-buster"
```

## How it works

This action downloads uv from the uv repo's official

@@ -264,7 +334,7 @@ For example:
- name: Checkout the repository
uses: actions/checkout@main
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
- name: Test

@@ -276,7 +346,7 @@ To install a specific version of Python, use

```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
- name: Install Python 3.12

@@ -295,7 +365,7 @@ output:
uses: actions/checkout@main
- name: Install the default version of uv
id: setup-uv
uses: astral-sh/setup-uv@v3
uses: astral-sh/setup-uv@v4
- name: Print the installed version
run: echo "Installed uv version is ${{ steps.setup-uv.outputs.uv-version }}"
```
@@ -6,6 +6,9 @@ inputs:
version:
description: "The version of uv to install"
default: "latest"
python-version:
description: "The version of Python to set UV_PYTHON to"
required: false
checksum:
description: "The checksum of the uv version to install"
required: false

@@ -31,7 +34,10 @@ inputs:
default: ""
prune-cache:
description: "Prune cache before saving."
default: true
default: "true"
ignore-nothing-to-cache:
description: "Ignore when nothing is found to cache."
default: "false"
tool-dir:
description: "Custom path to set UV_TOOL_DIR to."
required: false
dist/save-cache/index.js (173 lines changed, generated, vendored)

@@ -82304,10 +82304,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.STATE_CACHE_MATCHED_KEY = exports.STATE_CACHE_KEY = void 0;
exports.restoreCache = restoreCache;
const cache = __importStar(__nccwpck_require__(5116));
const glob = __importStar(__nccwpck_require__(7206));
const core = __importStar(__nccwpck_require__(7484));
const inputs_1 = __nccwpck_require__(9612);
const platforms_1 = __nccwpck_require__(8361);
const hash_files_1 = __nccwpck_require__(9660);
exports.STATE_CACHE_KEY = "cache-key";
exports.STATE_CACHE_MATCHED_KEY = "cache-matched-key";
const CACHE_VERSION = "1";

@@ -82334,9 +82334,9 @@ function computeKeys(version) {
let cacheDependencyPathHash = "-";
if (inputs_1.cacheDependencyGlob !== "") {
core.info(`Searching files using cache dependency glob: ${inputs_1.cacheDependencyGlob.split("\n").join(",")}`);
cacheDependencyPathHash += yield glob.hashFiles(inputs_1.cacheDependencyGlob, undefined, undefined, true);
cacheDependencyPathHash += yield (0, hash_files_1.hashFiles)(inputs_1.cacheDependencyGlob, true);
if (cacheDependencyPathHash === "-") {
throw new Error(`No file in ${process.cwd()} matched to [${inputs_1.cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`);
throw new Error(`No file matched to [${inputs_1.cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`);
}
}
else {

@@ -82358,6 +82358,114 @@ function handleMatchResult(matchedKey, primaryKey) {
}

/***/ }),

/***/ 9660:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {

"use strict";

var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = hashFiles;
const crypto = __importStar(__nccwpck_require__(7598));
const core = __importStar(__nccwpck_require__(7484));
const fs = __importStar(__nccwpck_require__(3024));
const stream = __importStar(__nccwpck_require__(7075));
const util = __importStar(__nccwpck_require__(7975));
const glob_1 = __nccwpck_require__(7206);
/**
* Hashes files matching the given glob pattern.
*
* Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
* But supports hashing files outside the GITHUB_WORKSPACE.
* @param pattern The glob pattern to match files.
* @param verbose Whether to log the files being hashed.
*/
function hashFiles(pattern_1) {
return __awaiter(this, arguments, void 0, function* (pattern, verbose = false) {
var _a, e_1, _b, _c;
const globber = yield (0, glob_1.create)(pattern);
let hasMatch = false;
const writeDelegate = verbose ? core.info : core.debug;
const result = crypto.createHash("sha256");
let count = 0;
try {
for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
_c = _f.value;
_d = false;
const file = _c;
writeDelegate(file);
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash("sha256");
const pipeline = util.promisify(stream.pipeline);
yield pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
}
finally { if (e_1) throw e_1.error; }
}
result.end();
if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest("hex");
}
writeDelegate("No matches found for glob");
return "";
});
}

/***/ }),

/***/ 1653:

@@ -82402,6 +82510,7 @@ exports.run = run;
const cache = __importStar(__nccwpck_require__(5116));
const core = __importStar(__nccwpck_require__(7484));
const exec = __importStar(__nccwpck_require__(5236));
const fs = __importStar(__nccwpck_require__(3024));
const restore_cache_1 = __nccwpck_require__(5391);
const inputs_1 = __nccwpck_require__(9612);
function run() {

@@ -82409,13 +82518,18 @@ function run() {
try {
if (inputs_1.enableCache) {
yield saveCache();
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
process.exit(0);
}
}
catch (error) {
const err = error;
core.setFailed(err.message);
}
process.exit(0);
});
}
function saveCache() {

@@ -82434,8 +82548,23 @@ function saveCache() {
yield pruneCache();
}
core.info(`Saving cache path: ${inputs_1.cacheLocalPath}`);
yield cache.saveCache([inputs_1.cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
if (!fs.existsSync(inputs_1.cacheLocalPath) && !inputs_1.ignoreNothingToCache) {
throw new Error(`Cache path ${inputs_1.cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`);
}
try {
yield cache.saveCache([inputs_1.cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
}
catch (e) {
if (e instanceof Error &&
e.message ===
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.") {
core.info("No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.");
}
else {
throw e;
}
}
});
}
function pruneCache() {

@@ -82485,23 +82614,25 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.githubToken = exports.toolDir = exports.toolBinDir = exports.pruneCache = exports.cacheDependencyGlob = exports.cacheLocalPath = exports.cacheSuffix = exports.enableCache = exports.checkSum = exports.version = void 0;
exports.githubToken = exports.toolDir = exports.toolBinDir = exports.ignoreNothingToCache = exports.pruneCache = exports.cacheDependencyGlob = exports.cacheLocalPath = exports.cacheSuffix = exports.enableCache = exports.checkSum = exports.pythonVersion = exports.version = void 0;
const core = __importStar(__nccwpck_require__(7484));
const node_path_1 = __importDefault(__nccwpck_require__(6760));
exports.version = core.getInput("version");
exports.pythonVersion = core.getInput("python-version");
exports.checkSum = core.getInput("checksum");
exports.enableCache = core.getInput("enable-cache") === "true";
exports.cacheSuffix = core.getInput("cache-suffix") || "";
exports.cacheLocalPath = getCacheLocalPath();
exports.cacheDependencyGlob = core.getInput("cache-dependency-glob");
exports.pruneCache = core.getInput("prune-cache") === "true";
exports.ignoreNothingToCache = core.getInput("ignore-nothing-to-cache") === "true";
exports.toolBinDir = getToolBinDir();
exports.toolDir = getToolDir();
exports.githubToken = core.getInput("github-token");
function getToolBinDir() {
const toolBinDirInput = core.getInput("tool-bin-dir");
if (toolBinDirInput !== "") {
return toolBinDirInput;
return expandTilde(toolBinDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {

@@ -82514,7 +82645,7 @@ function getToolBinDir() {
function getToolDir() {
const toolDirInput = core.getInput("tool-dir");
if (toolDirInput !== "") {
return toolDirInput;
return expandTilde(toolDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {

@@ -82527,13 +82658,19 @@ function getToolDir() {
function getCacheLocalPath() {
const cacheLocalPathInput = core.getInput("cache-local-path");
if (cacheLocalPathInput !== "") {
return cacheLocalPathInput;
return expandTilde(cacheLocalPathInput);
}
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${node_path_1.default.sep}setup-uv-cache`;
}
throw Error("Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input");
}
function expandTilde(input) {
if (input.startsWith("~")) {
return `${process.env.HOME}${input.substring(1)}`;
}
return input;
}

/***/ }),

@@ -82684,6 +82821,14 @@ module.exports = require("net");

/***/ }),

/***/ 7598:
/***/ ((module) => {

"use strict";
module.exports = require("node:crypto");

/***/ }),

/***/ 8474:
/***/ ((module) => {

@@ -82692,6 +82837,14 @@ module.exports = require("node:events");

/***/ }),

/***/ 3024:
/***/ ((module) => {

"use strict";
module.exports = require("node:fs");

/***/ }),

/***/ 6760:
/***/ ((module) => {
dist/setup/index.js (470 lines changed, generated, vendored): file diff suppressed because it is too large.

dist/update-known-checksums/index.js (2141 lines changed, generated, vendored): file diff suppressed because it is too large.
package-lock.json (28 lines changed, generated)

@@ -20,9 +20,9 @@
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^22.9.0",
"@types/node": "^22.9.1",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.2",
"@vercel/ncc": "^0.38.3",
"jest": "^29.7.0",
"js-yaml": "^4.1.0",
"ts-jest": "^29.2.5",

@@ -2025,9 +2025,9 @@
}
},
"node_modules/@types/node": {
"version": "22.9.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.0.tgz",
"integrity": "sha512-vuyHg81vvWA1Z1ELfvLko2c8f34gyA0zaic0+Rllc5lbCnbSyuvb2Oxpm6TAUAC/2xZN3QGqxBNggD1nNR2AfQ==",
"version": "22.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.1.tgz",
"integrity": "sha512-p8Yy/8sw1caA8CdRIQBG5tiLHmxtQKObCijiAa9Ez+d4+PRffM4054xbju0msf+cvhJpnFEeNjxmVT/0ipktrg==",
"dependencies": {
"undici-types": "~6.19.8"
}

@@ -2090,9 +2090,9 @@
"dev": true
},
"node_modules/@vercel/ncc": {
"version": "0.38.2",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.2.tgz",
"integrity": "sha512-3yel3jaxUg9pHBv4+KeC9qlbdZPug+UMtUOlhvpDYCMSgcNSrS2Hv1LoqMsOV7hf2lYscx+BESfJOIla1WsmMQ==",
"version": "0.38.3",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.3.tgz",
"integrity": "sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==",
"dev": true,
"bin": {
"ncc": "dist/ncc/cli.js"

@@ -6464,9 +6464,9 @@
}
},
"@types/node": {
"version": "22.9.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.0.tgz",
"integrity": "sha512-vuyHg81vvWA1Z1ELfvLko2c8f34gyA0zaic0+Rllc5lbCnbSyuvb2Oxpm6TAUAC/2xZN3QGqxBNggD1nNR2AfQ==",
"version": "22.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.1.tgz",
"integrity": "sha512-p8Yy/8sw1caA8CdRIQBG5tiLHmxtQKObCijiAa9Ez+d4+PRffM4054xbju0msf+cvhJpnFEeNjxmVT/0ipktrg==",
"requires": {
"undici-types": "~6.19.8"
}

@@ -6528,9 +6528,9 @@
"dev": true
},
"@vercel/ncc": {
"version": "0.38.2",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.2.tgz",
"integrity": "sha512-3yel3jaxUg9pHBv4+KeC9qlbdZPug+UMtUOlhvpDYCMSgcNSrS2Hv1LoqMsOV7hf2lYscx+BESfJOIla1WsmMQ==",
"version": "0.38.3",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.3.tgz",
"integrity": "sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==",
"dev": true
},
"abort-controller": {
@@ -12,7 +12,7 @@
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-checksums src/update-known-checksums.ts",
"test": "jest",
"act": "act pull_request -W .github/workflows/test.yml --container-architecture linux/amd64 -s GITHUB_TOKEN=\"$(gh auth token)\"",
"update-known-checksums": "node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"update-known-checksums": "RUNNER_TEMP=known_checksums node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"all": "npm run build && npm run format && npm run lint && npm run package && npm test"
},
"repository": {

@@ -34,9 +34,9 @@
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^22.9.0",
"@types/node": "^22.9.1",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.2",
"@vercel/ncc": "^0.38.3",
"jest": "^29.7.0",
"js-yaml": "^4.1.0",
"ts-jest": "^29.2.5",
src/cache/restore-cache.ts (11 lines changed, vendored)

@@ -1,5 +1,4 @@
import * as cache from "@actions/cache";
import * as glob from "@actions/glob";
import * as core from "@actions/core";
import {
cacheDependencyGlob,

@@ -7,6 +6,7 @@ import {
cacheSuffix,
} from "../utils/inputs";
import { getArch, getPlatform } from "../utils/platforms";
import { hashFiles } from "../hash/hash-files";

export const STATE_CACHE_KEY = "cache-key";
export const STATE_CACHE_MATCHED_KEY = "cache-matched-key";

@@ -39,15 +39,10 @@ async function computeKeys(version: string): Promise<string> {
core.info(
`Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
);
cacheDependencyPathHash += await glob.hashFiles(
cacheDependencyGlob,
undefined,
undefined,
true,
);
cacheDependencyPathHash += await hashFiles(cacheDependencyGlob, true);
if (cacheDependencyPathHash === "-") {
throw new Error(
`No file in ${process.cwd()} matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
`No file matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
);
}
} else {
File diff suppressed because it is too large
@@ -1,5 +1,6 @@
import { promises as fs } from "node:fs";
import * as tc from "@actions/tool-cache";
import { KNOWN_CHECKSUMS } from "./known-checksums";
export async function updateChecksums(
filePath: string,
downloadUrls: string[],

@@ -7,31 +8,50 @@ export async function updateChecksums(
await fs.rm(filePath);
await fs.appendFile(
filePath,
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: {[key: string]: string} = {\n",
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: { [key: string]: string } = {\n",
);
let firstLine = true;
for (const downloadUrl of downloadUrls) {
const content = await downloadAssetContent(downloadUrl);
const checksum = content.split(" ")[0].trim();
const key = getKey(downloadUrl);
if (key === undefined) {
continue;
}
const checksum = await getOrDownloadChecksum(key, downloadUrl);
if (!firstLine) {
await fs.appendFile(filePath, ",\n");
}
await fs.appendFile(filePath, ` '${key}':\n '${checksum}'`);
await fs.appendFile(filePath, ` "${key}":\n "${checksum}"`);
firstLine = false;
}
await fs.appendFile(filePath, "}\n");
await fs.appendFile(filePath, ",\n};\n");
}

function getKey(downloadUrl: string): string {
function getKey(downloadUrl: string): string | undefined {
// https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256
const parts = downloadUrl.split("/");
const fileName = parts[parts.length - 1];
if (fileName.startsWith("source")) {
return undefined;
}
const name = fileName.split(".")[0].split("uv-")[1];
const version = parts[parts.length - 2];
return `${name}-${version}`;
}

async function getOrDownloadChecksum(
key: string,
downloadUrl: string,
): Promise<string> {
let checksum = "";
if (key in KNOWN_CHECKSUMS) {
checksum = KNOWN_CHECKSUMS[key];
} else {
const content = await downloadAssetContent(downloadUrl);
checksum = content.split(" ")[0].trim();
}
return checksum;
}

async function downloadAssetContent(downloadUrl: string): Promise<string> {
const downloadPath = await tc.downloadTool(downloadUrl);
const content = await fs.readFile(downloadPath, "utf8");
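As a quick orientation for the reworked key handling above, here is an illustrative sketch (not part of the diff) of what `getKey` returns for the example URL quoted in its own comment, and how checksum assets for source tarballs are now skipped:

```typescript
// Illustrative only, based on the getKey implementation shown above.
getKey(
  "https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256",
);
// -> "aarch64-apple-darwin-0.3.2"

// A checksum asset whose file name starts with "source" (hypothetical URL) is skipped:
getKey(
  "https://github.com/astral-sh/uv/releases/download/0.3.2/source.tar.gz.sha256",
);
// -> undefined, so updateChecksums continues without downloading it
```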
@@ -1,73 +0,0 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as exec from "@actions/exec";
import * as path from "node:path";
import { promises as fs } from "node:fs";
import type { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";

export async function downloadLatest(
platform: Platform,
arch: Architecture,
checkSum: string | undefined,
githubToken: string | undefined,
): Promise<{ cachedToolDir: string; version: string }> {
const artifact = `uv-${arch}-${platform}`;
let extension = ".tar.gz";
if (platform === "pc-windows-msvc") {
extension = ".zip";
}
const downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/latest/download/${artifact}${extension}`;
core.info(`Downloading uv from "${downloadUrl}" ...`);

const downloadPath = await tc.downloadTool(
downloadUrl,
undefined,
githubToken,
);
let uvExecutablePath: string;
let uvDir: string;
if (platform === "pc-windows-msvc") {
const fullPathWithExtension = `${downloadPath}${extension}`;
await fs.copyFile(downloadPath, fullPathWithExtension);
uvDir = await tc.extractZip(fullPathWithExtension);
// On windows extracting the zip does not create an intermediate directory
uvExecutablePath = path.join(uvDir, "uv.exe");
} else {
const extractedDir = await tc.extractTar(downloadPath);
uvDir = path.join(extractedDir, artifact);
uvExecutablePath = path.join(uvDir, "uv");
}
const version = await getVersion(uvExecutablePath);
await validateChecksum(checkSum, downloadPath, arch, platform, version);
const cachedToolDir = await tc.cacheDir(
uvDir,
TOOL_CACHE_NAME,
version,
arch,
);

return { cachedToolDir, version };
}

async function getVersion(uvExecutablePath: string): Promise<string> {
// Parse the output of `uv --version` to get the version
// The output looks like
// uv 0.3.1 (be17d132a 2024-08-21)

const options: exec.ExecOptions = {
silent: !core.isDebug(),
};
const execArgs = ["--version"];

let output = "";
options.listeners = {
stdout: (data: Buffer) => {
output += data.toString();
},
};
await exec.exec(uvExecutablePath, execArgs, options);
const parts = output.split(" ");
return parts[1].trim();
}
@@ -70,10 +70,14 @@ export async function downloadVersion(
return { version: resolvedVersion, cachedToolDir };
}

async function resolveVersion(
version: string,
export async function resolveVersion(
versionInput: string,
githubToken: string,
): Promise<string> {
const version =
versionInput === "latest"
? await getLatestVersion(githubToken)
: versionInput;
if (tc.isExplicitVersion(version)) {
core.debug(`Version ${version} is an explicit version.`);
return version;

@@ -95,3 +99,17 @@ async function getAvailableVersions(githubToken: string): Promise<string[]> {
});
return response.map((release) => release.tag_name);
}

async function getLatestVersion(githubToken: string) {
const octokit = github.getOctokit(githubToken);

const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
owner: OWNER,
repo: REPO,
});

if (!latestRelease) {
throw new Error("Could not determine latest release.");
}
return latestRelease.tag_name;
}
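To summarize the new entry point, a hedged sketch of how the exported `resolveVersion` treats different inputs; the token argument is a placeholder, and the range case relies on the release-list matching that sits outside this hunk:

```typescript
// Illustrative only; "ghp_example" is a placeholder, not a real token.
await resolveVersion("0.4.4", "ghp_example");
// -> "0.4.4" (an explicit version is returned as-is)

await resolveVersion("latest", "ghp_example");
// -> the tag name of the repository's latest release

await resolveVersion("0.4.x", "ghp_example");
// -> the newest released version satisfying the semver range (per the README's semver-range behavior)
```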
src/hash/hash-files.ts (48 lines, new file)

@@ -0,0 +1,48 @@
import * as crypto from "node:crypto";
import * as core from "@actions/core";
import * as fs from "node:fs";
import * as stream from "node:stream";
import * as util from "node:util";
import { create } from "@actions/glob";

/**
* Hashes files matching the given glob pattern.
*
* Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
* But supports hashing files outside the GITHUB_WORKSPACE.
* @param pattern The glob pattern to match files.
* @param verbose Whether to log the files being hashed.
*/
export async function hashFiles(
pattern: string,
verbose = false,
): Promise<string> {
const globber = await create(pattern);
let hasMatch = false;
const writeDelegate = verbose ? core.info : core.debug;
const result = crypto.createHash("sha256");
let count = 0;
for await (const file of globber.globGenerator()) {
writeDelegate(file);
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash("sha256");
const pipeline = util.promisify(stream.pipeline);
await pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
result.end();

if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest("hex");
}
writeDelegate("No matches found for glob");
return "";
}
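For context, a minimal sketch of how this helper is intended to be called; the multi-line pattern mirrors the `cache-dependency-glob` input, and the wrapper function is illustrative rather than part of the diff:

```typescript
import { hashFiles } from "./hash-files";

// Illustrative only: compute one combined sha256 over all matched files,
// including files outside GITHUB_WORKSPACE (e.g. tilde-expanded globs).
async function demoHash(): Promise<void> {
  const digest = await hashFiles("**/uv.lock\n**/pyproject.toml", true);
  if (digest === "") {
    console.log("No files matched the glob");
  } else {
    console.log(`Combined hash: ${digest}`);
  }
}
```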
@@ -1,6 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as fs from "node:fs";
import {
STATE_CACHE_MATCHED_KEY,
STATE_CACHE_KEY,

@@ -8,6 +9,7 @@ import {
import {
cacheLocalPath,
enableCache,
ignoreNothingToCache,
pruneCache as shouldPruneCache,
} from "./utils/inputs";

@@ -15,12 +17,17 @@ export async function run(): Promise<void> {
try {
if (enableCache) {
await saveCache();
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
process.exit(0);
}
} catch (error) {
const err = error as Error;
core.setFailed(err.message);
}
process.exit(0);
}

async function saveCache(): Promise<void> {

@@ -41,9 +48,27 @@ async function saveCache(): Promise<void> {
}

core.info(`Saving cache path: ${cacheLocalPath}`);
await cache.saveCache([cacheLocalPath], cacheKey);

core.info(`cache saved with the key: ${cacheKey}`);
if (!fs.existsSync(cacheLocalPath) && !ignoreNothingToCache) {
throw new Error(
`Cache path ${cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
);
}
try {
await cache.saveCache([cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
} catch (e) {
if (
e instanceof Error &&
e.message ===
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved."
) {
core.info(
"No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.",
);
} else {
throw e;
}
}
}

async function pruneCache(): Promise<void> {
@@ -3,10 +3,10 @@ import * as path from "node:path";
import {
downloadVersion,
tryGetFromToolCache,
resolveVersion,
} from "./download/download-version";
import { restoreCache } from "./cache/restore-cache";

import { downloadLatest } from "./download/download-latest";
import {
type Architecture,
getArch,

@@ -18,6 +18,7 @@ import {
checkSum,
enableCache,
githubToken,
pythonVersion,
toolBinDir,
toolDir,
version,

@@ -45,12 +46,13 @@ async function run(): Promise<void> {
addUvToPath(setupResult.uvDir);
addToolBinToPath();
setToolDir();
core.setOutput("uv-version", setupResult.version);
core.info(`Successfully installed uv version ${setupResult.version}`);

setupPython();
addMatchers();
setCacheDir(cacheLocalPath);

core.setOutput("uv-version", setupResult.version);
core.info(`Successfully installed uv version ${setupResult.version}`);

if (enableCache) {
await restoreCache(setupResult.version);
}

@@ -67,38 +69,28 @@ async function setupUv(
checkSum: string | undefined,
githubToken: string,
): Promise<{ uvDir: string; version: string }> {
let installedPath: string | undefined;
let cachedToolDir: string;
let version: string;
if (versionInput === "latest") {
const latestResult = await downloadLatest(
platform,
arch,
checkSum,
githubToken,
);
version = latestResult.version;
cachedToolDir = latestResult.cachedToolDir;
} else {
const toolCacheResult = tryGetFromToolCache(arch, versionInput);
version = toolCacheResult.version;
installedPath = toolCacheResult.installedPath;
if (installedPath) {
core.info(`Found uv in tool-cache for ${versionInput}`);
return { uvDir: installedPath, version };
}
const versionResult = await downloadVersion(
platform,
arch,
versionInput,
checkSum,
githubToken,
);
cachedToolDir = versionResult.cachedToolDir;
version = versionResult.version;
const resolvedVersion = await resolveVersion(versionInput, githubToken);
const toolCacheResult = tryGetFromToolCache(arch, resolvedVersion);
if (toolCacheResult.installedPath) {
core.info(`Found uv in tool-cache for ${toolCacheResult.version}`);
return {
uvDir: toolCacheResult.installedPath,
version: toolCacheResult.version,
};
}

return { uvDir: cachedToolDir, version };
const downloadVersionResult = await downloadVersion(
platform,
arch,
resolvedVersion,
checkSum,
githubToken,
);

return {
uvDir: downloadVersionResult.cachedToolDir,
version: downloadVersionResult.version,
};
}

function addUvToPath(cachedPath: string): void {

@@ -133,6 +125,13 @@ function setToolDir(): void {
}
}

function setupPython(): void {
if (pythonVersion !== "") {
core.exportVariable("UV_PYTHON", pythonVersion);
core.info(`Set UV_PYTHON to ${pythonVersion}`);
}
}

function setCacheDir(cacheLocalPath: string): void {
core.exportVariable("UV_CACHE_DIR", cacheLocalPath);
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`);
@@ -2,12 +2,15 @@ import * as core from "@actions/core";
import path from "node:path";

export const version = core.getInput("version");
export const pythonVersion = core.getInput("python-version");
export const checkSum = core.getInput("checksum");
export const enableCache = core.getInput("enable-cache") === "true";
export const cacheSuffix = core.getInput("cache-suffix") || "";
export const cacheLocalPath = getCacheLocalPath();
export const cacheDependencyGlob = core.getInput("cache-dependency-glob");
export const pruneCache = core.getInput("prune-cache") === "true";
export const ignoreNothingToCache =
core.getInput("ignore-nothing-to-cache") === "true";
export const toolBinDir = getToolBinDir();
export const toolDir = getToolDir();
export const githubToken = core.getInput("github-token");

@@ -15,7 +18,7 @@ export const githubToken = core.getInput("github-token");
function getToolBinDir(): string | undefined {
const toolBinDirInput = core.getInput("tool-bin-dir");
if (toolBinDirInput !== "") {
return toolBinDirInput;
return expandTilde(toolBinDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {

@@ -31,7 +34,7 @@ function getToolBinDir(): string | undefined {
function getToolDir(): string | undefined {
const toolDirInput = core.getInput("tool-dir");
if (toolDirInput !== "") {
return toolDirInput;
return expandTilde(toolDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {

@@ -47,7 +50,7 @@ function getToolDir(): string | undefined {
function getCacheLocalPath(): string {
const cacheLocalPathInput = core.getInput("cache-local-path");
if (cacheLocalPathInput !== "") {
return cacheLocalPathInput;
return expandTilde(cacheLocalPathInput);
}
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`;

@@ -56,3 +59,10 @@ function getCacheLocalPath(): string {
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
);
}

function expandTilde(input: string): string {
if (input.startsWith("~")) {
return `${process.env.HOME}${input.substring(1)}`;
}
return input;
}
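A short illustration of the new `expandTilde` helper, assuming the runner's `HOME` is `/home/ubuntu` (the same assumption the tilde-expansion test jobs above make):

```typescript
// Illustrative only: expandTilde as defined above, with HOME=/home/ubuntu.
expandTilde("~/uv-cache");       // -> "/home/ubuntu/uv-cache"
expandTilde("/tmp/uv-cache");    // -> "/tmp/uv-cache" (no leading "~", returned unchanged)
expandTilde("~/uv-cache.glob");  // -> "/home/ubuntu/uv-cache.glob"
```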