mirror of https://github.com/astral-sh/setup-uv.git synced 2025-12-19 11:04:08 +00:00

Compare commits


15 Commits

Author              SHA1        Message              Date
Kevin Stillhammer   3e46ac5a37  test-system-install  2024-10-25 15:33:23 +02:00
Kevin Stillhammer   1bbfe9c09c  test-system-install  2024-10-25 15:29:06 +02:00
Kevin Stillhammer   55c849c79e  test-system-install  2024-10-25 15:26:58 +02:00
Kevin Stillhammer   e21d14e35c  test-system-install  2024-10-25 15:26:18 +02:00
Kevin Stillhammer   0821df821a  test-system-install  2024-10-25 15:25:02 +02:00
Kevin Stillhammer   a616599fe8  test-system-install  2024-10-25 15:22:19 +02:00
Kevin Stillhammer   37312cc09e  test-system-install  2024-10-25 15:21:02 +02:00
Kevin Stillhammer   d235065f16  test-system-install  2024-10-25 15:18:02 +02:00
Kevin Stillhammer   fae33879a4  test-system-install  2024-10-25 15:17:01 +02:00
Kevin Stillhammer   0a33b01b5f  test-system-install  2024-10-25 15:13:50 +02:00
Kevin Stillhammer   4c68dba969  test-system-install  2024-10-25 15:11:57 +02:00
Kevin Stillhammer   11faf45e8f  test-system-install  2024-10-25 15:11:01 +02:00
Kevin Stillhammer   75ab242331  test-system-install  2024-10-25 15:07:10 +02:00
Kevin Stillhammer   9f6b27e651  test-system-install  2024-10-25 15:03:07 +02:00
Kevin Stillhammer   fdba019944  test-system-install  2024-10-25 15:02:25 +02:00
20 changed files with 886 additions and 3333 deletions

View File

@@ -47,3 +47,10 @@ jobs:
with:
name: dist
path: dist/
test-sudo-system-install:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./
- run: uv run --python=3.13t -i https://pypi.anaconda.org/scientific-python-nightly-wheels/simple --with cython cython
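All fifteen commits in this compare iterate on a system-install test job. For orientation, a minimal job exercising `uv pip install --system` with the local action, in the shape of the `test-system-install` job that appears later in this diff, would look roughly like this (job layout is a sketch, not the committed workflow):

```yaml
# Sketch of a minimal system-install test job; mirrors the
# test-system-install job shown further down in this compare view.
test-system-install:
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v4
    - uses: ./                             # the setup-uv action from this checkout
    - run: uv pip install --system black   # install into the system environment
```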

View File

@@ -11,7 +11,10 @@ concurrency:
jobs:
test-setup-cache:
runs-on: windows-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
@@ -22,7 +25,10 @@ jobs:
- run: uv sync
working-directory: __tests__\fixtures\uv-project
test-restore-cache:
runs-on: windows-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
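Both hunks in this file swap a hard-coded runner for one selected through a single-entry matrix. Consolidated out of the diff context, the matrix form of the job header reads roughly as below (the other side of each hunk simply pins `runs-on: windows-latest`):

```yaml
# Matrix-based variant of the job header from the hunks above; a single
# entry today, but easy to extend with more operating systems later.
test-setup-cache:
  runs-on: ${{ matrix.os }}
  strategy:
    matrix:
      os: [windows-latest]
  steps:
    - uses: actions/checkout@v4
```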

View File

@@ -11,18 +11,24 @@ concurrency:
jobs:
test-setup-cache:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
@@ -31,7 +37,7 @@ jobs:
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
@@ -53,7 +59,7 @@ jobs:
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-dependency-glob:
@@ -73,8 +79,7 @@ jobs:
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
ignore-nothing-to-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was not hit
run: |
if [ "$CACHE_HIT" == "true" ]; then
@@ -91,7 +96,7 @@ jobs:
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-local-path: /tmp/uv-cache
- run: uv sync
working-directory: __tests__/fixtures/uv-project
@@ -105,7 +110,7 @@ jobs:
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-local-path: /tmp/uv-cache
- name: Cache was hit
run: |
@@ -116,49 +121,3 @@ jobs:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-tilde-expansion-cache-local-path:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Setup with cache
uses: ./
with:
cache-local-path: ~/uv-cache/cache-local-path
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-tilde-expansion-cache-dependency-glob:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Create cache dependency glob file
run: touch ~/uv-cache.glob
shell: bash
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-local-path: ~/uv-cache/cache-dependency-glob
cache-dependency-glob: "~/uv-cache.glob"
- run: uv sync
working-directory: __tests__/fixtures/uv-project
cleanup-tilde-expansion-tests:
needs:
- test-tilde-expansion-cache-local-path
- test-tilde-expansion-cache-dependency-glob
runs-on: selfhosted-ubuntu-arm64
steps:
- name: Remove cache directory
run: rm -rf ~/uv-cache
shell: bash
- name: Remove cache dependency glob file
run: rm -f ~/uv-cache.glob
shell: bash
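The `Cache was hit` steps in this file assert on the action's `cache-hit` output. Extracted from the hunks above, the pattern is essentially the following sketch (the step id `restore` matches the env reference in the diff; the echo message is illustrative):

```yaml
# Restore the cache with the local action, then fail the job
# if the restore step did not report a cache hit.
- name: Restore cache
  id: restore
  uses: ./
  with:
    enable-cache: true
- name: Cache was hit
  run: |
    if [ "$CACHE_HIT" != "true" ]; then
      echo "Expected a cache hit but got: $CACHE_HIT"
      exit 1
    fi
  env:
    CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
```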

View File

@@ -28,18 +28,19 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
os: [ubuntu-latest, macos-latest, macos-14, selfhosted-ubuntu-arm64]
steps:
- uses: actions/checkout@v4
- name: Install latest version
- name: Install default version
uses: ./
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-specific-version:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
uv-version: ["0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
os: [ubuntu-latest, macos-latest, macos-14, selfhosted-ubuntu-arm64]
uv-version: ["latest", "0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
steps:
- uses: actions/checkout@v4
- name: Install version ${{ matrix.uv-version }}
@@ -49,7 +50,10 @@ jobs:
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-semver-range:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, selfhosted-ubuntu-arm64]
steps:
- uses: actions/checkout@v4
- name: Install version 0.3
@@ -68,15 +72,15 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
os: [ubuntu-latest, selfhosted-ubuntu-arm64]
checksum:
["4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"]
exclude:
- os: macos-latest
- os: selfhosted-ubuntu-arm64
checksum: "4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"
include:
- os: macos-latest
checksum: "a70cbfbf3bb5c08b2f84963b4f12c94e08fbb2468ba418a3bfe1066fbe9e7218"
- os: selfhosted-ubuntu-arm64
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
steps:
- uses: actions/checkout@v4
- name: Checksum matches expected
@@ -111,6 +115,7 @@ jobs:
macos-latest,
macos-14,
windows-latest,
selfhosted-ubuntu-arm64,
]
steps:
- uses: actions/checkout@v4
@@ -119,23 +124,9 @@ jobs:
- run: uv tool install ruff
- run: ruff --version
test-tilde-expansion-tool-dirs:
runs-on: selfhosted-ubuntu-arm64
test-system-install:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
tool-bin-dir: "~/tool-bin-dir"
tool-dir: "~/tool-dir"
- name: "Check if tool dirs are expanded"
run: |
if ! echo "$PATH" | grep -q "/home/ubuntu/tool-bin-dir"; then
echo "PATH does not contain /home/ubuntu/tool-bin-dir: $PATH"
exit 1
fi
if [ "$UV_TOOL_DIR" != "/home/ubuntu/tool-dir" ]; then
echo "UV_TOOL_DIR does not contain /home/ubuntu/tool-dir: $UV_TOOL_DIR"
exit 1
fi
- uses: ./
- run: uv pip install --system black
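The `test-specific-version` job above crosses an OS matrix with a uv version matrix. As a standalone sketch (runner list trimmed, and the `with: version:` wiring assumed from the action's documented `version` input), the pattern is:

```yaml
# Sketch of the combined OS x uv-version matrix from test-specific-version.
test-specific-version:
  runs-on: ${{ matrix.os }}
  strategy:
    matrix:
      os: [ubuntu-latest, macos-latest]
      uv-version: ["latest", "0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
  steps:
    - uses: actions/checkout@v4
    - name: Install version ${{ matrix.uv-version }}
      uses: ./
      with:
        version: ${{ matrix.uv-version }}
    - run: uv sync
      working-directory: __tests__/fixtures/uv-project
```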

README.md (130 changed lines)
View File

@@ -14,17 +14,13 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
- [Install the latest version (default)](#install-the-latest-version-default)
- [Install a specific version](#install-a-specific-version)
- [Install a version by supplying a semver range](#install-a-version-by-supplying-a-semver-range)
- [Python version](#python-version)
- [Validate checksum](#validate-checksum)
- [Enable Caching](#enable-caching)
- [Cache dependency glob](#cache-dependency-glob)
- [Local cache path](#local-cache-path)
- [Disable cache pruning](#disable-cache-pruning)
- [Ignore nothing to cache](#ignore-nothing-to-cache)
- [GitHub authentication token](#github-authentication-token)
- [UV_TOOL_DIR](#uv_tool_dir)
- [UV_TOOL_BIN_DIR](#uv_tool_bin_dir)
- [Tilde Expansion](#tilde-expansion)
- [How it works](#how-it-works)
- [FAQ](#faq)
@@ -34,7 +30,7 @@ Set up your GitHub Actions workflow with a specific version of [uv](https://docs
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
version: "latest"
```
@@ -44,7 +40,7 @@ For an example workflow, see
> [!TIP]
>
> Using `latest` requires to download the uv executable on every run, which incurs a cost
> Using `latest` requires that uv download the executable on every run, which incurs a cost
> (especially on self-hosted runners). As a best practice, consider pinning the version to a
> specific release.
@@ -52,71 +48,39 @@ For an example workflow, see
```yaml
- name: Install a specific version of uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
version: "0.4.4"
```
### Install a version by supplying a semver range
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
You can also specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
to install the latest version that satisfies the range.
```yaml
- name: Install a semver range of uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
version: ">=0.4.0"
```
```yaml
- name: Pinning a minor version of uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
version: "0.4.x"
```
### Python version
You can use the input `python-version` to set the environment variable `UV_PYTHON` for the rest
of your workflow.
This will override any python version specifications in `pyproject.toml` and `.python-version`
```yaml
- name: Install the latest version of uv and set the python version to 3.12
uses: astral-sh/setup-uv@v4
with:
python-version: "3.12"
```
You can combine this with a matrix to test multiple python versions:
```yaml
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- name: Install the latest version of uv and set the python version
uses: astral-sh/setup-uv@v4
with:
python-version: ${{ matrix.python-version }}
- name: Test with python ${{ matrix.python-version }}
run: uv run --frozen pytest
```
### Validate checksum
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
You can also specify a checksum to validate the downloaded file. Checksums up to the default version
are automatically verified by this action. The sha256 hashes can be found on the
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.
```yaml
- name: Install a specific version and validate the checksum
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
version: "0.3.1"
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
@@ -124,8 +88,8 @@ are automatically verified by this action. The sha256 hashes can be found on the
### Enable caching
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be cached to
the GitHub Actions Cache. This can speed up runs that reuse the cache by several minutes.
> [!TIP]
>
@@ -137,7 +101,7 @@ You can optionally define a custom cache key suffix.
```yaml
- name: Enable caching and define a custom cache key suffix
id: setup-uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
cache-suffix: "optional-suffix"
@@ -154,9 +118,9 @@ use it in subsequent steps. For example, to use the cache in the above case:
#### Cache dependency glob
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
changes. If you use relative paths, they are relative to the repository root.
If you want to control when the cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The cache will be invalidated if any file matching the glob pattern
changes. The glob matches files relative to the repository root.
> [!NOTE]
>
@@ -164,7 +128,7 @@ changes. If you use relative paths, they are relative to the repository root.
```yaml
- name: Define a cache dependency glob
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
cache-dependency-glob: "**/requirements*.txt"
@@ -172,7 +136,7 @@ changes. If you use relative paths, they are relative to the repository root.
```yaml
- name: Define a list of cache dependency globs
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
cache-dependency-glob: |
@@ -180,17 +144,9 @@ changes. If you use relative paths, they are relative to the repository root.
**/pyproject.toml
```
```yaml
- name: Define an absolute cache dependency glob
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
```
```yaml
- name: Never invalidate the cache
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
cache-dependency-glob: ""
@@ -205,7 +161,7 @@ It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\uv-tool-dir` on Wi
```yaml
- name: Define a custom uv cache path
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
cache-local-path: "/path/to/cache"
```
@@ -216,33 +172,20 @@ By default, the uv cache is pruned after every run, removing pre-built wheels, b
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
pre-built wheels from the cache (and instead re-download them from the registry on each run).
However, on self-hosted or local runners, preserving the cache may be more efficient. See
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more information.
the[documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more.
If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
input.
```yaml
- name: Don't prune the cache before saving it
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
prune-cache: false
```
### Ignore nothing to cache
By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.
```yaml
- name: Ignore nothing to cache
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
ignore-nothing-to-cache: true
```
### GitHub authentication token
This action uses the GitHub API to fetch the uv release artifacts. To avoid hitting the GitHub API
@@ -255,7 +198,7 @@ are not sufficient, you can provide a custom GitHub token with the necessary per
```yaml
- name: Install the latest version of uv with a custom GitHub token
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
```
@@ -273,7 +216,7 @@ input:
```yaml
- name: Install the latest version of uv with a custom tool dir
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
tool-dir: "/path/to/tool/dir"
```
@@ -292,30 +235,11 @@ If you want to change this behaviour (especially on self-hosted runners) you can
```yaml
- name: Install the latest version of uv with a custom tool bin dir
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
tool-bin-dir: "/path/to/tool-bin/dir"
```
### Tilde Expansion
This action supports expanding the `~` character to the user's home directory for the following inputs:
- `cache-local-path`
- `tool-dir`
- `tool-bin-dir`
- `cache-dependency-glob`
```yaml
- name: Expand the tilde character
uses: astral-sh/setup-uv@v4
with:
cache-local-path: "~/path/to/cache"
tool-dir: "~/path/to/tool/dir"
tool-bin-dir: "~/path/to/tool-bin/dir"
cache-dependency-glob: "~/my-cache-buster"
```
## How it works
This action downloads uv from the uv repo's official
@@ -340,7 +264,7 @@ For example:
- name: Checkout the repository
uses: actions/checkout@main
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Test
@@ -352,7 +276,7 @@ To install a specific version of Python, use
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Install Python 3.12
@@ -371,7 +295,7 @@ output:
uses: actions/checkout@main
- name: Install the default version of uv
id: setup-uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@v3
- name: Print the installed version
run: echo "Installed uv version is ${{ steps.setup-uv.outputs.uv-version }}"
```
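One caching paragraph above ends with "to use the cache in the above case:" but the hunk cuts off before the example itself. Based on the `cache-hit` output exercised by the workflows earlier in this diff, the referenced usage is along these lines (a sketch, not the exact README text; the version tag mirrors the ones used elsewhere in this README):

```yaml
- name: Enable caching
  id: setup-uv
  uses: astral-sh/setup-uv@v4
  with:
    enable-cache: true
- name: Do something if the cache was restored
  if: steps.setup-uv.outputs.cache-hit == 'true'
  run: echo "Cache was restored"
```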

View File

@@ -6,9 +6,6 @@ inputs:
version:
description: "The version of uv to install"
default: "latest"
python-version:
description: "The version of Python to set UV_PYTHON to"
required: false
checksum:
description: "The checksum of the uv version to install"
required: false
@@ -34,10 +31,7 @@ inputs:
default: ""
prune-cache:
description: "Prune cache before saving."
default: "true"
ignore-nothing-to-cache:
description: "Ignore when nothing is found to cache."
default: "false"
default: true
tool-dir:
description: "Custom path to set UV_TOOL_DIR to."
required: false
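For orientation, the inputs touched in this `action.yml` hunk map one-to-one onto `with:` keys in a workflow. A consolidated, hypothetical invocation using several of them (values borrowed from the README examples above) might look like:

```yaml
- name: Install uv, pinning several of the inputs declared in action.yml
  uses: astral-sh/setup-uv@v4   # tag illustrative; both v3 and v4 appear in this diff
  with:
    version: "0.3.1"
    checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
    enable-cache: true
    prune-cache: false
    tool-dir: "/path/to/tool/dir"
```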

dist/save-cache/index.js (generated, vendored; 392 changed lines)
View File

@@ -562,11 +562,11 @@ const core = __importStar(__nccwpck_require__(7484));
const exec = __importStar(__nccwpck_require__(5236));
const glob = __importStar(__nccwpck_require__(9688));
const io = __importStar(__nccwpck_require__(4994));
const crypto = __importStar(__nccwpck_require__(6982));
const fs = __importStar(__nccwpck_require__(9896));
const path = __importStar(__nccwpck_require__(6928));
const semver = __importStar(__nccwpck_require__(9318));
const util = __importStar(__nccwpck_require__(9023));
const uuid_1 = __nccwpck_require__(7723);
const constants_1 = __nccwpck_require__(8287);
// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23
function createTempDirectory() {
@@ -589,7 +589,7 @@ function createTempDirectory() {
}
tempDirectory = path.join(baseLocation, 'actions', 'temp');
}
const dest = path.join(tempDirectory, crypto.randomUUID());
const dest = path.join(tempDirectory, (0, uuid_1.v4)());
yield io.mkdirP(dest);
return dest;
});
@@ -44599,7 +44599,7 @@ exports.ContextAPI = ContextAPI;
*/
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.DiagAPI = void 0;
const ComponentLogger_1 = __nccwpck_require__(7723);
const ComponentLogger_1 = __nccwpck_require__(104);
const logLevelLogger_1 = __nccwpck_require__(3514);
const types_1 = __nccwpck_require__(2573);
const global_utils_1 = __nccwpck_require__(9923);
@@ -45321,7 +45321,7 @@ exports.diag = diag_1.DiagAPI.instance();
/***/ }),
/***/ 7723:
/***/ 104:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
@@ -75293,6 +75293,221 @@ module.exports = {
}
/***/ }),
/***/ 7723:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var v1 = __nccwpck_require__(6626);
var v4 = __nccwpck_require__(9021);
var uuid = v4;
uuid.v1 = v1;
uuid.v4 = v4;
module.exports = uuid;
/***/ }),
/***/ 8682:
/***/ ((module) => {
/**
* Convert array of 16 byte values to UUID string format of the form:
* XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
*/
var byteToHex = [];
for (var i = 0; i < 256; ++i) {
byteToHex[i] = (i + 0x100).toString(16).substr(1);
}
function bytesToUuid(buf, offset) {
var i = offset || 0;
var bth = byteToHex;
// join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4
return ([
bth[buf[i++]], bth[buf[i++]],
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]], '-',
bth[buf[i++]], bth[buf[i++]],
bth[buf[i++]], bth[buf[i++]],
bth[buf[i++]], bth[buf[i++]]
]).join('');
}
module.exports = bytesToUuid;
/***/ }),
/***/ 1694:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
// Unique ID creation requires a high quality random # generator. In node.js
// this is pretty straight-forward - we use the crypto API.
var crypto = __nccwpck_require__(6982);
module.exports = function nodeRNG() {
return crypto.randomBytes(16);
};
/***/ }),
/***/ 6626:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var rng = __nccwpck_require__(1694);
var bytesToUuid = __nccwpck_require__(8682);
// **`v1()` - Generate time-based UUID**
//
// Inspired by https://github.com/LiosK/UUID.js
// and http://docs.python.org/library/uuid.html
var _nodeId;
var _clockseq;
// Previous uuid creation time
var _lastMSecs = 0;
var _lastNSecs = 0;
// See https://github.com/uuidjs/uuid for API details
function v1(options, buf, offset) {
var i = buf && offset || 0;
var b = buf || [];
options = options || {};
var node = options.node || _nodeId;
var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq;
// node and clockseq need to be initialized to random values if they're not
// specified. We do this lazily to minimize issues related to insufficient
// system entropy. See #189
if (node == null || clockseq == null) {
var seedBytes = rng();
if (node == null) {
// Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)
node = _nodeId = [
seedBytes[0] | 0x01,
seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]
];
}
if (clockseq == null) {
// Per 4.2.2, randomize (14 bit) clockseq
clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;
}
}
// UUID timestamps are 100 nano-second units since the Gregorian epoch,
// (1582-10-15 00:00). JSNumbers aren't precise enough for this, so
// time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'
// (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.
var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime();
// Per 4.2.1.2, use count of uuid's generated during the current clock
// cycle to simulate higher resolution clock
var nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1;
// Time since last uuid creation (in msecs)
var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000;
// Per 4.2.1.2, Bump clockseq on clock regression
if (dt < 0 && options.clockseq === undefined) {
clockseq = clockseq + 1 & 0x3fff;
}
// Reset nsecs if clock regresses (new clockseq) or we've moved onto a new
// time interval
if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {
nsecs = 0;
}
// Per 4.2.1.2 Throw error if too many uuids are requested
if (nsecs >= 10000) {
throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec');
}
_lastMSecs = msecs;
_lastNSecs = nsecs;
_clockseq = clockseq;
// Per 4.1.4 - Convert from unix epoch to Gregorian epoch
msecs += 12219292800000;
// `time_low`
var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;
b[i++] = tl >>> 24 & 0xff;
b[i++] = tl >>> 16 & 0xff;
b[i++] = tl >>> 8 & 0xff;
b[i++] = tl & 0xff;
// `time_mid`
var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff;
b[i++] = tmh >>> 8 & 0xff;
b[i++] = tmh & 0xff;
// `time_high_and_version`
b[i++] = tmh >>> 24 & 0xf | 0x10; // include version
b[i++] = tmh >>> 16 & 0xff;
// `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)
b[i++] = clockseq >>> 8 | 0x80;
// `clock_seq_low`
b[i++] = clockseq & 0xff;
// `node`
for (var n = 0; n < 6; ++n) {
b[i + n] = node[n];
}
return buf ? buf : bytesToUuid(b);
}
module.exports = v1;
/***/ }),
/***/ 9021:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var rng = __nccwpck_require__(1694);
var bytesToUuid = __nccwpck_require__(8682);
function v4(options, buf, offset) {
var i = buf && offset || 0;
if (typeof(options) == 'string') {
buf = options === 'binary' ? new Array(16) : null;
options = null;
}
options = options || {};
var rnds = options.random || (options.rng || rng)();
// Per 4.4, set bits for version and `clock_seq_hi_and_reserved`
rnds[6] = (rnds[6] & 0x0f) | 0x40;
rnds[8] = (rnds[8] & 0x3f) | 0x80;
// Copy bytes to buffer, if provided
if (buf) {
for (var ii = 0; ii < 16; ++ii) {
buf[i + ii] = rnds[ii];
}
}
return buf || bytesToUuid(rnds);
}
module.exports = v4;
/***/ }),
/***/ 7125:
@@ -82304,10 +82519,10 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.STATE_CACHE_MATCHED_KEY = exports.STATE_CACHE_KEY = void 0;
exports.restoreCache = restoreCache;
const cache = __importStar(__nccwpck_require__(5116));
const glob = __importStar(__nccwpck_require__(7206));
const core = __importStar(__nccwpck_require__(7484));
const inputs_1 = __nccwpck_require__(9612);
const platforms_1 = __nccwpck_require__(8361);
const hash_files_1 = __nccwpck_require__(9660);
exports.STATE_CACHE_KEY = "cache-key";
exports.STATE_CACHE_MATCHED_KEY = "cache-matched-key";
const CACHE_VERSION = "1";
@@ -82334,9 +82549,9 @@ function computeKeys(version) {
let cacheDependencyPathHash = "-";
if (inputs_1.cacheDependencyGlob !== "") {
core.info(`Searching files using cache dependency glob: ${inputs_1.cacheDependencyGlob.split("\n").join(",")}`);
cacheDependencyPathHash += yield (0, hash_files_1.hashFiles)(inputs_1.cacheDependencyGlob, true);
cacheDependencyPathHash += yield glob.hashFiles(inputs_1.cacheDependencyGlob, undefined, undefined, true);
if (cacheDependencyPathHash === "-") {
throw new Error(`No file matched to [${inputs_1.cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`);
throw new Error(`No file in ${process.cwd()} matched to [${inputs_1.cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`);
}
}
else {
@@ -82358,114 +82573,6 @@ function handleMatchResult(matchedKey, primaryKey) {
}
/***/ }),
/***/ 9660:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
var m = o[Symbol.asyncIterator], i;
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = hashFiles;
const crypto = __importStar(__nccwpck_require__(7598));
const core = __importStar(__nccwpck_require__(7484));
const fs = __importStar(__nccwpck_require__(3024));
const stream = __importStar(__nccwpck_require__(7075));
const util = __importStar(__nccwpck_require__(7975));
const glob_1 = __nccwpck_require__(7206);
/**
* Hashes files matching the given glob pattern.
*
* Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
* But supports hashing files outside the GITHUB_WORKSPACE.
* @param pattern The glob pattern to match files.
* @param verbose Whether to log the files being hashed.
*/
function hashFiles(pattern_1) {
return __awaiter(this, arguments, void 0, function* (pattern, verbose = false) {
var _a, e_1, _b, _c;
const globber = yield (0, glob_1.create)(pattern);
let hasMatch = false;
const writeDelegate = verbose ? core.info : core.debug;
const result = crypto.createHash("sha256");
let count = 0;
try {
for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
_c = _f.value;
_d = false;
const file = _c;
writeDelegate(file);
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash("sha256");
const pipeline = util.promisify(stream.pipeline);
yield pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
}
finally { if (e_1) throw e_1.error; }
}
result.end();
if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest("hex");
}
writeDelegate("No matches found for glob");
return "";
});
}
/***/ }),
/***/ 1653:
@@ -82510,7 +82617,6 @@ exports.run = run;
const cache = __importStar(__nccwpck_require__(5116));
const core = __importStar(__nccwpck_require__(7484));
const exec = __importStar(__nccwpck_require__(5236));
const fs = __importStar(__nccwpck_require__(3024));
const restore_cache_1 = __nccwpck_require__(5391);
const inputs_1 = __nccwpck_require__(9612);
function run() {
@@ -82518,18 +82624,13 @@ function run() {
try {
if (inputs_1.enableCache) {
yield saveCache();
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
process.exit(0);
}
}
catch (error) {
const err = error;
core.setFailed(err.message);
}
process.exit(0);
});
}
function saveCache() {
@@ -82548,23 +82649,8 @@ function saveCache() {
yield pruneCache();
}
core.info(`Saving cache path: ${inputs_1.cacheLocalPath}`);
if (!fs.existsSync(inputs_1.cacheLocalPath) && !inputs_1.ignoreNothingToCache) {
throw new Error(`Cache path ${inputs_1.cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`);
}
try {
yield cache.saveCache([inputs_1.cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
}
catch (e) {
if (e instanceof Error &&
e.message ===
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.") {
core.info("No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.");
}
else {
throw e;
}
}
});
}
function pruneCache() {
@@ -82614,25 +82700,23 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.githubToken = exports.toolDir = exports.toolBinDir = exports.ignoreNothingToCache = exports.pruneCache = exports.cacheDependencyGlob = exports.cacheLocalPath = exports.cacheSuffix = exports.enableCache = exports.checkSum = exports.pythonVersion = exports.version = void 0;
exports.githubToken = exports.toolDir = exports.toolBinDir = exports.pruneCache = exports.cacheDependencyGlob = exports.cacheLocalPath = exports.cacheSuffix = exports.enableCache = exports.checkSum = exports.version = void 0;
const core = __importStar(__nccwpck_require__(7484));
const node_path_1 = __importDefault(__nccwpck_require__(6760));
exports.version = core.getInput("version");
exports.pythonVersion = core.getInput("python-version");
exports.checkSum = core.getInput("checksum");
exports.enableCache = core.getInput("enable-cache") === "true";
exports.cacheSuffix = core.getInput("cache-suffix") || "";
exports.cacheLocalPath = getCacheLocalPath();
exports.cacheDependencyGlob = core.getInput("cache-dependency-glob");
exports.pruneCache = core.getInput("prune-cache") === "true";
exports.ignoreNothingToCache = core.getInput("ignore-nothing-to-cache") === "true";
exports.toolBinDir = getToolBinDir();
exports.toolDir = getToolDir();
exports.githubToken = core.getInput("github-token");
function getToolBinDir() {
const toolBinDirInput = core.getInput("tool-bin-dir");
if (toolBinDirInput !== "") {
return expandTilde(toolBinDirInput);
return toolBinDirInput;
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
@@ -82645,7 +82729,7 @@ function getToolBinDir() {
function getToolDir() {
const toolDirInput = core.getInput("tool-dir");
if (toolDirInput !== "") {
return expandTilde(toolDirInput);
return toolDirInput;
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
@@ -82658,19 +82742,13 @@ function getToolDir() {
function getCacheLocalPath() {
const cacheLocalPathInput = core.getInput("cache-local-path");
if (cacheLocalPathInput !== "") {
return expandTilde(cacheLocalPathInput);
return cacheLocalPathInput;
}
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${node_path_1.default.sep}setup-uv-cache`;
}
throw Error("Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input");
}
function expandTilde(input) {
if (input.startsWith("~")) {
return `${process.env.HOME}${input.substring(1)}`;
}
return input;
}
/***/ }),
@@ -82821,14 +82899,6 @@ module.exports = require("net");
/***/ }),
/***/ 7598:
/***/ ((module) => {
"use strict";
module.exports = require("node:crypto");
/***/ }),
/***/ 8474:
/***/ ((module) => {
@@ -82837,14 +82907,6 @@ module.exports = require("node:events");
/***/ }),
/***/ 3024:
/***/ ((module) => {
"use strict";
module.exports = require("node:fs");
/***/ }),
/***/ 6760:
/***/ ((module) => {

dist/setup/index.js (generated, vendored; 594 changed lines)

File diff suppressed because it is too large.

dist/update-known-checksums/index.js (generated, vendored; 2141 changed lines)

File diff suppressed because it is too large.

package-lock.json (generated; 56 changed lines)
View File

@@ -9,7 +9,7 @@
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.3.0",
"@actions/cache": "^3.2.4",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
@@ -20,9 +20,9 @@
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^22.9.1",
"@types/node": "^22.7.9",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.3",
"@vercel/ncc": "^0.38.2",
"jest": "^29.7.0",
"js-yaml": "^4.1.0",
"ts-jest": "^29.2.5",
@@ -30,11 +30,11 @@
}
},
"node_modules/@actions/cache": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.3.0.tgz",
"integrity": "sha512-+eCsMTIZUEm+QA9GqjollOhCdvRrZ1JV8d9Rp34zVNizBkYITO8dhKczP5Xps1dFzc5n59p7vYVtZrGt18bb5Q==",
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz",
"integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==",
"dependencies": {
"@actions/core": "^1.11.1",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^2.1.1",
@@ -42,7 +42,8 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"semver": "^6.3.1"
"semver": "^6.3.1",
"uuid": "^3.3.3"
}
},
"node_modules/@actions/cache/node_modules/@actions/glob": {
@@ -2025,11 +2026,11 @@
}
},
"node_modules/@types/node": {
"version": "22.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.1.tgz",
"integrity": "sha512-p8Yy/8sw1caA8CdRIQBG5tiLHmxtQKObCijiAa9Ez+d4+PRffM4054xbju0msf+cvhJpnFEeNjxmVT/0ipktrg==",
"version": "22.7.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.9.tgz",
"integrity": "sha512-jrTfRC7FM6nChvU7X2KqcrgquofrWLFDeYC1hKfwNWomVvrn7JIksqf344WN2X/y8xrgqBd2dJATZV4GbatBfg==",
"dependencies": {
"undici-types": "~6.19.8"
"undici-types": "~6.19.2"
}
},
"node_modules/@types/node-fetch": {
@@ -2090,9 +2091,9 @@
"dev": true
},
"node_modules/@vercel/ncc": {
"version": "0.38.3",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.3.tgz",
"integrity": "sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==",
"version": "0.38.2",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.2.tgz",
"integrity": "sha512-3yel3jaxUg9pHBv4+KeC9qlbdZPug+UMtUOlhvpDYCMSgcNSrS2Hv1LoqMsOV7hf2lYscx+BESfJOIla1WsmMQ==",
"dev": true,
"bin": {
"ncc": "dist/ncc/cli.js"
@@ -4886,11 +4887,11 @@
},
"dependencies": {
"@actions/cache": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.3.0.tgz",
"integrity": "sha512-+eCsMTIZUEm+QA9GqjollOhCdvRrZ1JV8d9Rp34zVNizBkYITO8dhKczP5Xps1dFzc5n59p7vYVtZrGt18bb5Q==",
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz",
"integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==",
"requires": {
"@actions/core": "^1.11.1",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^2.1.1",
@@ -4898,7 +4899,8 @@
"@azure/abort-controller": "^1.1.0",
"@azure/ms-rest-js": "^2.6.0",
"@azure/storage-blob": "^12.13.0",
"semver": "^6.3.1"
"semver": "^6.3.1",
"uuid": "^3.3.3"
},
"dependencies": {
"@actions/glob": {
@@ -6464,11 +6466,11 @@
}
},
"@types/node": {
"version": "22.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.1.tgz",
"integrity": "sha512-p8Yy/8sw1caA8CdRIQBG5tiLHmxtQKObCijiAa9Ez+d4+PRffM4054xbju0msf+cvhJpnFEeNjxmVT/0ipktrg==",
"version": "22.7.9",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.7.9.tgz",
"integrity": "sha512-jrTfRC7FM6nChvU7X2KqcrgquofrWLFDeYC1hKfwNWomVvrn7JIksqf344WN2X/y8xrgqBd2dJATZV4GbatBfg==",
"requires": {
"undici-types": "~6.19.8"
"undici-types": "~6.19.2"
}
},
"@types/node-fetch": {
@@ -6528,9 +6530,9 @@
"dev": true
},
"@vercel/ncc": {
"version": "0.38.3",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.3.tgz",
"integrity": "sha512-rnK6hJBS6mwc+Bkab+PGPs9OiS0i/3kdTO+CkI8V0/VrW3vmz7O2Pxjw/owOlmo6PKEIxRSeZKv/kuL9itnpYA==",
"version": "0.38.2",
"resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.38.2.tgz",
"integrity": "sha512-3yel3jaxUg9pHBv4+KeC9qlbdZPug+UMtUOlhvpDYCMSgcNSrS2Hv1LoqMsOV7hf2lYscx+BESfJOIla1WsmMQ==",
"dev": true
},
"abort-controller": {

View File

@@ -12,7 +12,7 @@
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-checksums src/update-known-checksums.ts",
"test": "jest",
"act": "act pull_request -W .github/workflows/test.yml --container-architecture linux/amd64 -s GITHUB_TOKEN=\"$(gh auth token)\"",
"update-known-checksums": "RUNNER_TEMP=known_checksums node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"update-known-checksums": "node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"all": "npm run build && npm run format && npm run lint && npm run package && npm test"
},
"repository": {
@@ -23,7 +23,7 @@
"author": "@eifinger",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.3.0",
"@actions/cache": "^3.2.4",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
@@ -34,9 +34,9 @@
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^22.9.1",
"@types/node": "^22.7.9",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.3",
"@vercel/ncc": "^0.38.2",
"jest": "^29.7.0",
"js-yaml": "^4.1.0",
"ts-jest": "^29.2.5",

View File

@@ -1,4 +1,5 @@
import * as cache from "@actions/cache";
import * as glob from "@actions/glob";
import * as core from "@actions/core";
import {
cacheDependencyGlob,
@@ -6,7 +7,6 @@ import {
cacheSuffix,
} from "../utils/inputs";
import { getArch, getPlatform } from "../utils/platforms";
import { hashFiles } from "../hash/hash-files";
export const STATE_CACHE_KEY = "cache-key";
export const STATE_CACHE_MATCHED_KEY = "cache-matched-key";
@@ -39,10 +39,15 @@ async function computeKeys(version: string): Promise<string> {
core.info(
`Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
);
cacheDependencyPathHash += await hashFiles(cacheDependencyGlob, true);
cacheDependencyPathHash += await glob.hashFiles(
cacheDependencyGlob,
undefined,
undefined,
true,
);
if (cacheDependencyPathHash === "-") {
throw new Error(
`No file matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
`No file in ${process.cwd()} matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
);
}
} else {

File diff suppressed because it is too large.

View File

@@ -1,6 +1,5 @@
import { promises as fs } from "node:fs";
import * as tc from "@actions/tool-cache";
import { KNOWN_CHECKSUMS } from "./known-checksums";
export async function updateChecksums(
filePath: string,
downloadUrls: string[],
@@ -8,50 +7,31 @@ export async function updateChecksums(
await fs.rm(filePath);
await fs.appendFile(
filePath,
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: { [key: string]: string } = {\n",
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: {[key: string]: string} = {\n",
);
let firstLine = true;
for (const downloadUrl of downloadUrls) {
const content = await downloadAssetContent(downloadUrl);
const checksum = content.split(" ")[0].trim();
const key = getKey(downloadUrl);
if (key === undefined) {
continue;
}
const checksum = await getOrDownloadChecksum(key, downloadUrl);
if (!firstLine) {
await fs.appendFile(filePath, ",\n");
}
await fs.appendFile(filePath, ` "${key}":\n "${checksum}"`);
await fs.appendFile(filePath, ` '${key}':\n '${checksum}'`);
firstLine = false;
}
await fs.appendFile(filePath, ",\n};\n");
await fs.appendFile(filePath, "}\n");
}
function getKey(downloadUrl: string): string | undefined {
function getKey(downloadUrl: string): string {
// https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256
const parts = downloadUrl.split("/");
const fileName = parts[parts.length - 1];
if (fileName.startsWith("source")) {
return undefined;
}
const name = fileName.split(".")[0].split("uv-")[1];
const version = parts[parts.length - 2];
return `${name}-${version}`;
}
async function getOrDownloadChecksum(
key: string,
downloadUrl: string,
): Promise<string> {
let checksum = "";
if (key in KNOWN_CHECKSUMS) {
checksum = KNOWN_CHECKSUMS[key];
} else {
const content = await downloadAssetContent(downloadUrl);
checksum = content.split(" ")[0].trim();
}
return checksum;
}
async function downloadAssetContent(downloadUrl: string): Promise<string> {
const downloadPath = await tc.downloadTool(downloadUrl);
const content = await fs.readFile(downloadPath, "utf8");

View File

@@ -2,7 +2,6 @@ import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as exec from "@actions/exec";
import * as path from "node:path";
import { promises as fs } from "node:fs";
import type { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";
@@ -14,11 +13,12 @@ export async function downloadLatest(
githubToken: string | undefined,
): Promise<{ cachedToolDir: string; version: string }> {
const artifact = `uv-${arch}-${platform}`;
let extension = ".tar.gz";
let downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/latest/download/${artifact}`;
if (platform === "pc-windows-msvc") {
extension = ".zip";
downloadUrl += ".zip";
} else {
downloadUrl += ".tar.gz";
}
const downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/latest/download/${artifact}${extension}`;
core.info(`Downloading uv from "${downloadUrl}" ...`);
const downloadPath = await tc.downloadTool(
@@ -29,9 +29,7 @@ export async function downloadLatest(
let uvExecutablePath: string;
let uvDir: string;
if (platform === "pc-windows-msvc") {
const fullPathWithExtension = `${downloadPath}${extension}`;
await fs.copyFile(downloadPath, fullPathWithExtension);
uvDir = await tc.extractZip(fullPathWithExtension);
uvDir = await tc.extractZip(downloadPath);
// On windows extracting the zip does not create an intermediate directory
uvExecutablePath = path.join(uvDir, "uv.exe");
} else {

View File

@@ -1,7 +1,6 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as path from "node:path";
import { promises as fs } from "node:fs";
import { OWNER, REPO, TOOL_CACHE_NAME } from "../utils/constants";
import type { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
@@ -31,11 +30,12 @@ export async function downloadVersion(
): Promise<{ version: string; cachedToolDir: string }> {
const resolvedVersion = await resolveVersion(version, githubToken);
const artifact = `uv-${arch}-${platform}`;
let extension = ".tar.gz";
let downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/download/${resolvedVersion}/${artifact}`;
if (platform === "pc-windows-msvc") {
extension = ".zip";
downloadUrl += ".zip";
} else {
downloadUrl += ".tar.gz";
}
const downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/download/${resolvedVersion}/${artifact}${extension}`;
core.info(`Downloading uv from "${downloadUrl}" ...`);
const downloadPath = await tc.downloadTool(
@@ -53,9 +53,7 @@ export async function downloadVersion(
let uvDir: string;
if (platform === "pc-windows-msvc") {
const fullPathWithExtension = `${downloadPath}${extension}`;
await fs.copyFile(downloadPath, fullPathWithExtension);
uvDir = await tc.extractZip(fullPathWithExtension);
uvDir = await tc.extractZip(downloadPath);
// On windows extracting the zip does not create an intermediate directory
} else {
const extractedDir = await tc.extractTar(downloadPath);

View File

@@ -1,48 +0,0 @@
import * as crypto from "node:crypto";
import * as core from "@actions/core";
import * as fs from "node:fs";
import * as stream from "node:stream";
import * as util from "node:util";
import { create } from "@actions/glob";
/**
* Hashes files matching the given glob pattern.
*
* Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
* But supports hashing files outside the GITHUB_WORKSPACE.
* @param pattern The glob pattern to match files.
* @param verbose Whether to log the files being hashed.
*/
export async function hashFiles(
pattern: string,
verbose = false,
): Promise<string> {
const globber = await create(pattern);
let hasMatch = false;
const writeDelegate = verbose ? core.info : core.debug;
const result = crypto.createHash("sha256");
let count = 0;
for await (const file of globber.globGenerator()) {
writeDelegate(file);
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash("sha256");
const pipeline = util.promisify(stream.pipeline);
await pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
result.end();
if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest("hex");
}
writeDelegate("No matches found for glob");
return "";
}
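The doc comment in this file explains why a custom `hashFiles` exists at all: unlike the stock `@actions/glob` helper, it can hash files outside `GITHUB_WORKSPACE`. In workflow terms, that is what makes an absolute `cache-dependency-glob` such as the README example above work:

```yaml
# Relies on hashing outside the repository checkout: files under /tmp
# can still invalidate the cache key.
- name: Define an absolute cache dependency glob
  uses: astral-sh/setup-uv@v4
  with:
    enable-cache: true
    cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
```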

View File

@@ -1,7 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as fs from "node:fs";
import {
STATE_CACHE_MATCHED_KEY,
STATE_CACHE_KEY,
@@ -9,7 +8,6 @@ import {
import {
cacheLocalPath,
enableCache,
ignoreNothingToCache,
pruneCache as shouldPruneCache,
} from "./utils/inputs";
@@ -17,17 +15,12 @@ export async function run(): Promise<void> {
try {
if (enableCache) {
await saveCache();
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
process.exit(0);
}
} catch (error) {
const err = error as Error;
core.setFailed(err.message);
}
process.exit(0);
}
async function saveCache(): Promise<void> {
@@ -48,27 +41,9 @@ async function saveCache(): Promise<void> {
}
core.info(`Saving cache path: ${cacheLocalPath}`);
if (!fs.existsSync(cacheLocalPath) && !ignoreNothingToCache) {
throw new Error(
`Cache path ${cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
);
}
try {
await cache.saveCache([cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
} catch (e) {
if (
e instanceof Error &&
e.message ===
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved."
) {
core.info(
"No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.",
);
} else {
throw e;
}
}
}
async function pruneCache(): Promise<void> {
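The existence check in `saveCache` above is exactly what the `ignore-nothing-to-cache` input bypasses: if the uv cache directory was never created, the save step fails unless the input is set. The corresponding README usage (also part of this diff) is:

```yaml
- name: Ignore nothing to cache
  uses: astral-sh/setup-uv@v4
  with:
    enable-cache: true
    ignore-nothing-to-cache: true
```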

View File

@@ -18,7 +18,6 @@ import {
checkSum,
enableCache,
githubToken,
pythonVersion,
toolBinDir,
toolDir,
version,
@@ -46,13 +45,12 @@ async function run(): Promise<void> {
addUvToPath(setupResult.uvDir);
addToolBinToPath();
setToolDir();
setupPython();
addMatchers();
setCacheDir(cacheLocalPath);
core.setOutput("uv-version", setupResult.version);
core.info(`Successfully installed uv version ${setupResult.version}`);
addMatchers();
setCacheDir(cacheLocalPath);
if (enableCache) {
await restoreCache(setupResult.version);
}
@@ -135,13 +133,6 @@ function setToolDir(): void {
}
}
function setupPython(): void {
if (pythonVersion !== "") {
core.exportVariable("UV_PYTHON", pythonVersion);
core.info(`Set UV_PYTHON to ${pythonVersion}`);
}
}
function setCacheDir(cacheLocalPath: string): void {
core.exportVariable("UV_CACHE_DIR", cacheLocalPath);
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`);

View File

@@ -2,15 +2,12 @@ import * as core from "@actions/core";
import path from "node:path";
export const version = core.getInput("version");
export const pythonVersion = core.getInput("python-version");
export const checkSum = core.getInput("checksum");
export const enableCache = core.getInput("enable-cache") === "true";
export const cacheSuffix = core.getInput("cache-suffix") || "";
export const cacheLocalPath = getCacheLocalPath();
export const cacheDependencyGlob = core.getInput("cache-dependency-glob");
export const pruneCache = core.getInput("prune-cache") === "true";
export const ignoreNothingToCache =
core.getInput("ignore-nothing-to-cache") === "true";
export const toolBinDir = getToolBinDir();
export const toolDir = getToolDir();
export const githubToken = core.getInput("github-token");
@@ -18,7 +15,7 @@ export const githubToken = core.getInput("github-token");
function getToolBinDir(): string | undefined {
const toolBinDirInput = core.getInput("tool-bin-dir");
if (toolBinDirInput !== "") {
return expandTilde(toolBinDirInput);
return toolBinDirInput;
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
@@ -34,7 +31,7 @@ function getToolBinDir(): string | undefined {
function getToolDir(): string | undefined {
const toolDirInput = core.getInput("tool-dir");
if (toolDirInput !== "") {
return expandTilde(toolDirInput);
return toolDirInput;
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
@@ -50,7 +47,7 @@ function getToolDir(): string | undefined {
function getCacheLocalPath(): string {
const cacheLocalPathInput = core.getInput("cache-local-path");
if (cacheLocalPathInput !== "") {
return expandTilde(cacheLocalPathInput);
return cacheLocalPathInput;
}
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`;
@@ -59,10 +56,3 @@ function getCacheLocalPath(): string {
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
);
}
function expandTilde(input: string): string {
if (input.startsWith("~")) {
return `${process.env.HOME}${input.substring(1)}`;
}
return input;
}
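`expandTilde` above backs the tilde-expansion behaviour documented in the README portion of this diff: a leading `~` in `cache-local-path`, `tool-dir`, or `tool-bin-dir` is rewritten to `$HOME` before the value is used. The README illustrates the feature like this:

```yaml
- name: Expand the tilde character
  uses: astral-sh/setup-uv@v4
  with:
    cache-local-path: "~/path/to/cache"
    tool-dir: "~/path/to/tool/dir"
    tool-bin-dir: "~/path/to/tool-bin/dir"
    cache-dependency-glob: "~/my-cache-buster"
```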