mirror of https://github.com/astral-sh/setup-uv.git synced 2025-12-15 11:07:14 +00:00

Compare commits


1 commit

Author              SHA1        Message                       Date
Kevin Stillhammer   dc92027fb3  Add link to example workflow  2024-08-25 16:18:25 +02:00

66 changed files with 53584 additions and 39121 deletions


@@ -1,20 +0,0 @@
# Check http://editorconfig.org for more information
# This is the main config file for this project:
root = true
[*]
charset = utf-8
trim_trailing_whitespace = true
end_of_line = lf
indent_style = space
insert_final_newline = true
indent_size = 2
[*.{rs,py,pyi}]
indent_size = 4
[*.snap]
trim_trailing_whitespace = false
[*.md]
max_line_length = 100

4
.eslintignore Normal file

@@ -0,0 +1,4 @@
dist/
lib/
node_modules/
jest.config.js

57
.eslintrc.json Normal file

@@ -0,0 +1,57 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"no-shadow": "off",
"@typescript-eslint/no-shadow": ["error"],
"i18n-text/no-en": "off",
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-comment": "error",
"camelcase": "off",
"@typescript-eslint/consistent-type-assertions": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

2
.gitattributes vendored

@@ -1,2 +1,2 @@
* text=auto eol=lf
dist/** -diff linguist-generated=true
dist/** -diff linguist-generated=true

30
.github/python.json vendored

@@ -1,18 +1,18 @@
{
"problemMatcher": [
{
"owner": "python",
"pattern": [
"problemMatcher": [
{
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
"file": 1,
"line": 2
},
{
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
"message": 2
"owner": "python",
"pattern": [
{
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
"file": 1,
"line": 2
},
{
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
"message": 2
}
]
}
]
}
]
}
]
}
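
For illustration only (this snippet is not part of the repository), the two regular expressions in the problem matcher above pull the file path, line number and error message out of a typical Python traceback; the sample file name and error text below are made up:

```typescript
// Hypothetical sanity check: apply the matcher regexes from .github/python.json
// to a sample traceback and print what the matcher's file/line/message fields would capture.
const fileRegex = /^\s*File\s"(.*)",\sline\s(\d+),\sin\s(.*)$/
const raiseRegex = /^\s*raise\s(.*)\('(.*)'\)$/

const traceback = [
  '  File "src/example.py", line 42, in main',
  "    raise ValueError('something went wrong')"
]

const fileMatch = traceback[0].match(fileRegex)
const raiseMatch = traceback[1].match(raiseRegex)

console.log(fileMatch?.[1], fileMatch?.[2]) // "src/example.py" and "42" -> file and line
console.log(raiseMatch?.[2]) // "something went wrong" -> message
```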


@@ -1,5 +1,5 @@
name-template: "v$RESOLVED_VERSION 🌈"
tag-template: "v$RESOLVED_VERSION"
name-template: 'v$RESOLVED_VERSION 🌈'
tag-template: 'v$RESOLVED_VERSION'
categories:
- title: "🚨 Breaking changes"
labels:
@@ -19,32 +19,30 @@ categories:
labels:
- "maintenance"
- "ci"
- "update-known-checksums"
- title: "📚 Documentation"
labels:
- "documentation"
- title: "⬆️ Dependency updates"
labels:
- "dependencies"
change-template: "- $TITLE @$AUTHOR (#$NUMBER)"
change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
version-resolver:
major:
labels:
- "major"
- "breaking-change"
- 'major'
- 'breaking-change'
minor:
labels:
- "minor"
- "new-feature"
- "enhancement"
- 'minor'
- 'new-feature'
- 'enhancement'
patch:
labels:
- "patch"
- "bugfix"
- "default-version-update"
- 'patch'
- 'bugfix'
default: patch
template: |
## Changes
$CHANGES
$CHANGES


@@ -9,7 +9,11 @@ on:
push:
branches:
- main
paths-ignore:
- '**.md'
pull_request:
paths-ignore:
- '**.md'
workflow_dispatch:
jobs:


@@ -13,12 +13,12 @@ name: "CodeQL"
on:
push:
branches: [main]
branches: [ main ]
pull_request:
# The branches below must be a subset of the branches above
branches: [main]
branches: [ main ]
schedule:
- cron: "31 7 * * 3"
- cron: '31 7 * * 3'
jobs:
analyze:
@@ -32,40 +32,40 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ["TypeScript"]
language: [ 'TypeScript' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://git.io/codeql-language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Checkout repository
uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
source-root: src
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
source-root: src
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# queries: ./path/to/local/query, your-org/your-repo/queries@main
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3


@@ -16,4 +16,4 @@ jobs:
- name: 🚀 Run Release Drafter
uses: release-drafter/release-drafter@v6.0.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,4 +1,4 @@
name: "test-cache-windows"
name: 'test-cache-windows'
on:
pull_request:
push:
@@ -11,7 +11,10 @@ concurrency:
jobs:
test-setup-cache:
runs-on: windows-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
@@ -22,7 +25,10 @@ jobs:
- run: uv sync
working-directory: __tests__\fixtures\uv-project
test-restore-cache:
runs-on: windows-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [windows-latest]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
@@ -30,8 +36,8 @@ jobs:
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was hit
run: |
if ($env:CACHE_HIT -ne "true") {


@@ -1,4 +1,4 @@
name: "test-cache"
name: 'test-cache'
on:
pull_request:
push:
@@ -14,23 +14,21 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
enable-cache: [ "true", "false", "auto" ]
os: ["ubuntu-latest", "selfhosted-ubuntu-arm64"]
os: [ubuntu-latest, macos-latest, macos-14]
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: ${{ matrix.enable-cache }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-${{ matrix.os }}-${{ matrix.enable-cache }}
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache:
runs-on: ${{ matrix.os }}
strategy:
matrix:
enable-cache: [ "true", "false", "auto" ]
os: [ "ubuntu-latest", "selfhosted-ubuntu-arm64" ]
os: [ubuntu-latest, macos-latest, macos-14]
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
@@ -38,50 +36,8 @@ jobs:
id: restore
uses: ./
with:
enable-cache: ${{ matrix.enable-cache }}
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-${{ matrix.os }}-${{ matrix.enable-cache }}
- name: Cache was hit
if: ${{ matrix.enable-cache == 'true' || (matrix.enable-cache == 'auto' && matrix.os == 'ubuntu-latest') }}
run: |
if [ "$CACHE_HIT" != "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- name: Cache was not hit
if: ${{ matrix.enable-cache == 'false' || (matrix.enable-cache == 'auto' && matrix.os == 'selfhosted-ubuntu-arm64') }}
run: |
if [ "$CACHE_HIT" == "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-setup-cache-requirements-txt:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-requirements-txt
- run: |
uv venv
uv pip install -r requirements.txt
working-directory: __tests__/fixtures/requirements-txt-project
test-restore-cache-requirements-txt:
runs-on: ubuntu-latest
needs: test-setup-cache
steps:
- uses: actions/checkout@v4
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-requirements-txt
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
@@ -89,66 +45,23 @@ jobs:
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: |
uv venv
uv pip install -r requirements.txt
working-directory: __tests__/fixtures/requirements-txt-project
test-setup-cache-dependency-glob:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-dependency-glob:
runs-on: ubuntu-latest
needs: test-setup-cache-dependency-glob
steps:
- uses: actions/checkout@v4
- name: Change pyproject.toml
run: |
echo '[tool.uv]' >> __tests__/fixtures/uv-project/pyproject.toml
echo 'dev-dependencies = []' >> __tests__/fixtures/uv-project/pyproject.toml
- name: Restore with cache
id: restore
uses: ./
with:
enable-cache: true
cache-dependency-glob: |
__tests__/fixtures/uv-project/uv.lock
**/pyproject.toml
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-dependency-glob
ignore-nothing-to-cache: true
- name: Cache was not hit
run: |
if [ "$CACHE_HIT" == "true" ]; then
exit 1
fi
env:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
test-setup-cache-local:
runs-on: selfhosted-ubuntu-arm64
runs-on: oracle-aarch64
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-local-path: /tmp/uv-cache
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-restore-cache-local:
runs-on: selfhosted-ubuntu-arm64
runs-on: oracle-aarch64
needs: test-setup-cache-local
steps:
- uses: actions/checkout@v4
@@ -156,9 +69,9 @@ jobs:
id: restore
uses: ./
with:
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}-test-setup-cache-local
cache-local-path: /tmp/uv-cache
enable-cache: true
cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
cache-local-path: /tmp/uv-cache
- name: Cache was hit
run: |
if [ "$CACHE_HIT" != "true" ]; then
@@ -168,62 +81,3 @@ jobs:
CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-tilde-expansion-cache-local-path:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Setup with cache
uses: ./
with:
cache-local-path: ~/uv-cache/cache-local-path
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-tilde-expansion-cache-dependency-glob:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Create cache directory
run: mkdir -p ~/uv-cache
shell: bash
- name: Create cache dependency glob file
run: touch ~/uv-cache.glob
shell: bash
- name: Setup with cache
uses: ./
with:
enable-cache: true
cache-local-path: ~/uv-cache/cache-dependency-glob
cache-dependency-glob: "~/uv-cache.glob"
- run: uv sync
working-directory: __tests__/fixtures/uv-project
cleanup-tilde-expansion-tests:
needs:
- test-tilde-expansion-cache-local-path
- test-tilde-expansion-cache-dependency-glob
runs-on: selfhosted-ubuntu-arm64
steps:
- name: Remove cache directory
run: rm -rf ~/uv-cache
shell: bash
- name: Remove cache dependency glob file
run: rm -f ~/uv-cache.glob
shell: bash
test-no-python-version:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Fake pyproject.toml at root
run: cp __tests__/fixtures/old-python-constraint-project/pyproject.toml pyproject.toml
- name: Setup with cache
uses: ./
with:
enable-cache: true
- run: uv sync
working-directory: __tests__/fixtures/old-python-constraint-project


@@ -1,4 +1,4 @@
name: "test-windows"
name: 'test-windows'
on:
pull_request:
push:
@@ -23,5 +23,7 @@ jobs:
}
- name: Setup uv
uses: ./
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- run: uv sync
working-directory: __tests__\fixtures\uv-project


@@ -1,4 +1,4 @@
name: "test"
name: 'test'
on:
pull_request:
push:
@@ -11,83 +11,24 @@ concurrency:
jobs:
build:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14, oracle-aarch64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
node-version: '20'
- run: |
npm install
- run: |
npm run all
- name: Make sure no changes from linters are detected
run: |
git diff --exit-code
test-default-version:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14]
steps:
- uses: actions/checkout@v4
- name: Install latest version
uses: ./
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-specific-version:
runs-on: ubuntu-latest
strategy:
matrix:
uv-version: ["0.3.0", "0.3.2", "0.3", "0.3.x", ">=0.3.0"]
steps:
- uses: actions/checkout@v4
- name: Install version ${{ matrix.uv-version }}
uses: ./
with:
version: ${{ matrix.uv-version }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-semver-range:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install version 0.3
id: setup-uv
uses: ./
with:
version: "0.3"
- name: Correct version gets installed
run: |
if [ "$UV_VERSION" != "0.3.5" ]; then
exit 1
fi
env:
UV_VERSION: ${{ steps.setup-uv.outputs.uv-version }}
test-checksum:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest]
checksum:
["4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"]
exclude:
- os: macos-latest
checksum: "4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd"
include:
- os: macos-latest
checksum: "a70cbfbf3bb5c08b2f84963b4f12c94e08fbb2468ba418a3bfe1066fbe9e7218"
steps:
- uses: actions/checkout@v4
- name: Checksum matches expected
uses: ./
with:
version: "0.3.2"
checksum: ${{ matrix.checksum }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-with-explicit-token:
runs-on: ubuntu-latest
os: [ubuntu-latest, macos-latest, macos-14, oracle-aarch64]
steps:
- uses: actions/checkout@v4
- name: Install default version
@@ -96,6 +37,50 @@ jobs:
github-token: ${{ secrets.GITHUB_TOKEN }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-specific-version:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, macos-14, oracle-aarch64]
uv-version: ['latest','0.3.0','0.3.2']
steps:
- uses: actions/checkout@v4
- name: Install version ${{ matrix.uv-version }}
uses: ./
with:
version: ${{ matrix.uv-version }}
github-token: ${{ secrets.GITHUB_TOKEN }}
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-checksum:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, oracle-aarch64]
checksum: ['4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd']
exclude:
- os: oracle-aarch64
checksum: '4d9279ad5ca596b1e2d703901d508430eb07564dc4d8837de9e2fca9c90f8ecd'
include:
- os: oracle-aarch64
checksum: 'e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8'
steps:
- uses: actions/checkout@v4
- name: Checksum matches expected
uses: ./
with:
version: '0.3.2'
checksum: ${{ matrix.checksum }}
github-token: ${{ secrets.GITHUB_TOKEN }}
test-without-github-token:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install default version
uses: ./
- run: uv sync
working-directory: __tests__/fixtures/uv-project
test-uvx:
runs-on: ubuntu-latest
steps:
@@ -103,67 +88,3 @@ jobs:
- name: Install default version
uses: ./
- run: uvx ruff --version
test-tool-install:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os:
[
ubuntu-latest,
macos-latest,
macos-14,
windows-latest,
]
steps:
- uses: actions/checkout@v4
- name: Install default version
uses: ./
- run: uv tool install ruff
- run: ruff --version
test-tilde-expansion-tool-dirs:
runs-on: selfhosted-ubuntu-arm64
steps:
- uses: actions/checkout@v4
- name: Setup with cache
uses: ./
with:
tool-bin-dir: "~/tool-bin-dir"
tool-dir: "~/tool-dir"
- name: "Check if tool dirs are expanded"
run: |
if ! echo "$PATH" | grep -q "/home/ubuntu/tool-bin-dir"; then
echo "PATH does not contain /home/ubuntu/tool-bin-dir: $PATH"
exit 1
fi
if [ "$UV_TOOL_DIR" != "/home/ubuntu/tool-dir" ]; then
echo "UV_TOOL_DIR does not contain /home/ubuntu/tool-dir: $UV_TOOL_DIR"
exit 1
fi
test-python-version:
runs-on: ubuntu-latest
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
steps:
- uses: actions/checkout@v4
- name: Install latest version
uses: ./
with:
python-version: 3.13.1t
- name: Verify UV_PYTHON is set to correct version
run: |
echo "$UV_PYTHON"
if [ "$UV_PYTHON" != "3.13.1t" ]; then
exit 1
fi
shell: bash
- name: Verify packages can be installed
run: uv pip install --python=3.13.1t pip
shell: bash
- name: Verify python version is correct
run: |
python --version
if [ "$(python --version)" != "Python 3.13.1" ]; then
exit 1
fi
shell: bash


@@ -0,0 +1,26 @@
name: 'Update default version and checksums'
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '20'
- name: Update default version and checksums
id: update-default-version
run: node dist/update-default-version/index.js src/download/checksum/known-checksums.ts action.yml ${{ secrets.GITHUB_TOKEN }}
- run: npm install && npm run all
- name: Create Pull Request
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6.1.0
with:
commit-message: "chore: update checksums"
title: "chore: update default version to ${{ steps.update-default-version.outputs.latest-version }}"
body: "chore: update default version to ${{ steps.update-default-version.outputs.latest-version }}"
base: main
labels: "automated-pr,bugfix"
branch: update-default-version-pr
delete-branch: true


@@ -1,32 +0,0 @@
name: "Update known checksums"
on:
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "20"
- name: Update known checksums
id: update-known-checksums
run:
node dist/update-known-checksums/index.js
src/download/checksum/known-checksums.ts ${{ secrets.GITHUB_TOKEN }}
- run: npm install && npm run all
- name: Create Pull Request
uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5
with:
commit-message: "chore: update known checksums"
title:
"chore: update known checksums for ${{
steps.update-known-checksums.outputs.latest-version }}"
body:
"chore: update known checksums for ${{
steps.update-known-checksums.outputs.latest-version }}"
base: main
labels: "automated-pr,update-known-checksums"
branch: update-known-checksums-pr
delete-branch: true


@@ -5,9 +5,9 @@ name: Update Major Minor Tags
on:
push:
branches-ignore:
- "**"
- '**'
tags:
- "v*.*.*"
- 'v*.*.*'
jobs:
update_major_minor_tags:
@@ -16,4 +16,4 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Run Update semver
uses: haya14busa/action-update-semver@v1.2.1
uses: haya14busa/action-update-semver@v1.2.1

5
.gitignore vendored

@@ -96,7 +96,4 @@ Thumbs.db
# Ignore built ts files
__tests__/runner/*
lib/**/*
# Idea IDEs (PyCharm, WebStorm, IntelliJ, etc)
.idea/
lib/**/*

3
.prettierignore Normal file

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

10
.prettierrc.json Normal file

@@ -0,0 +1,10 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid"
}

375
README.md

@@ -2,148 +2,92 @@
Set up your GitHub Actions workflow with a specific version of [uv](https://docs.astral.sh/uv/).
- Install a version of uv and add it to PATH
- Cache the installed version of uv to speed up consecutive runs on self-hosted runners
- Register problem matchers for error output
- (Optional) Persist the uv's cache in the GitHub Actions Cache
- (Optional) Verify the checksum of the downloaded uv executable
* Install a version of uv and add it to the path
* Cache the installed version of uv to speed up consecutive runs on self-hosted runners
* Register problem matchers for error output
* Optional: Cache the uv cache
* Optional: Verify the checksum of the downloaded uv executable
## Contents
- [Usage](#usage)
- [Install the latest version (default)](#install-the-latest-version-default)
- [Install a specific version](#install-a-specific-version)
- [Install a version by supplying a semver range](#install-a-version-by-supplying-a-semver-range)
- [Python version](#python-version)
- [Validate checksum](#validate-checksum)
- [Enable Caching](#enable-caching)
- [Cache dependency glob](#cache-dependency-glob)
- [Local cache path](#local-cache-path)
- [Disable cache pruning](#disable-cache-pruning)
- [Ignore nothing to cache](#ignore-nothing-to-cache)
- [GitHub authentication token](#github-authentication-token)
- [UV_TOOL_DIR](#uv_tool_dir)
- [UV_TOOL_BIN_DIR](#uv_tool_bin_dir)
- [Tilde Expansion](#tilde-expansion)
- [How it works](#how-it-works)
- [FAQ](#faq)
* [Usage](#usage)
* [Install specific version](#install-specific-version)
* [Install latest version](#install-latest-version)
* [Validate checksum](#validate-checksum)
* [Enable Caching](#enable-caching)
* [Local cache path](#local-cache-path)
* [Cache dependency glob](#cache-dependency-glob)
* [API rate limit](#api-rate-limit)
* [How it works](#how-it-works)
* [FAQ](#faq)
## Usage
### Install the latest version (default)
An example workflow in a real-world project can be found [here](https://github.com/eifinger/hass-weenect/blob/main/.github/workflows/ci.yml)
### Install specific version
You can also specify a specific version of uv
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
- name: Install a specific version
uses: eifinger/setup-uv@v1
with:
version: "latest"
version: '0.3.0'
```
For an example workflow, see
[here](https://github.com/charliermarsh/autobot/blob/e42c66659bf97b90ca9ff305a19cc99952d0d43f/.github/workflows/ci.yaml).
### Install latest version
### Install a specific version
By default this action installs the version defined as `default` in `action.yml`.
This gets automatically updated in a new release of this action when a new version of uv is released.
If you don't want to wait for a new release of this action, you can use `version: latest`.
> [!WARNING]
> Using the `latest` version means that the uv executable gets downloaded every single time instead of being loaded from the tools cache.
> This can take up to 20s depending on the download speed.
> This does not affect the uv cache.
```yaml
- name: Install a specific version of uv
uses: astral-sh/setup-uv@v4
- name: Install a specific version
uses: eifinger/setup-uv@v1
with:
version: "0.4.4"
```
### Install a version by supplying a semver range
You can specify a [semver range](https://github.com/npm/node-semver?tab=readme-ov-file#ranges)
to install the latest version that satisfies the range.
```yaml
- name: Install a semver range of uv
uses: astral-sh/setup-uv@v4
with:
version: ">=0.4.0"
```
```yaml
- name: Pinning a minor version of uv
uses: astral-sh/setup-uv@v4
with:
version: "0.4.x"
```
### Python version
You can use the input `python-version` to
- set the environment variable `UV_PYTHON` for the rest of your workflow
- create a new virtual environment with the specified python version
- activate the virtual environment for the rest of your workflow
This will override any python version specifications in `pyproject.toml` and `.python-version`
```yaml
- name: Install the latest version of uv and set the python version to 3.13t
uses: astral-sh/setup-uv@v4
with:
python-version: 3.13t
- run: uv pip install --python=3.13t pip
```
You can combine this with a matrix to test multiple python versions:
```yaml
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
- name: Install the latest version of uv and set the python version
uses: astral-sh/setup-uv@v4
with:
python-version: ${{ matrix.python-version }}
- name: Test with python ${{ matrix.python-version }}
run: uv run --frozen pytest
version: 'latest'
```
### Validate checksum
You can specify a checksum to validate the downloaded executable. Checksums up to the default version
are automatically verified by this action. The sha256 hashes can be found on the
[releases page](https://github.com/astral-sh/uv/releases) of the uv repo.
You can also specify a checksum to validate the downloaded file.
Checksums up to the default version are automatically verified by this action.
The sha256 hashes can be found on the [releases page](https://github.com/astral-sh/uv/releases)
of the uv repo.
```yaml
- name: Install a specific version and validate the checksum
uses: astral-sh/setup-uv@v4
uses: eifinger/setup-uv@v1
with:
version: "0.3.1"
checksum: "e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8"
version: '0.3.1'
checksum: 'e11b01402ab645392c7ad6044db63d37e4fd1e745e015306993b07695ea5f9f8'
```
### Enable caching
If you enable caching, the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will be uploaded to
the GitHub Actions cache. This can speed up runs that reuse the cache by several minutes.
> [!TIP]
>
> On self-hosted runners this is usually not needed since the cache generated by uv on the runner's
> filesystem is not removed after a run. For more details see [Local cache path](#local-cache-path).
If you enable caching the [uv cache](https://docs.astral.sh/uv/concepts/cache/) will
be cached to the GitHub Actions Cache. This can speed up runs which can reuse the cache
by several minutes. The cache will always be reused on self-hosted runners.
You can optionally define a custom cache key suffix.
```yaml
- name: Enable caching and define a custom cache key suffix
id: setup-uv
uses: astral-sh/setup-uv@v4
uses: eifinger/setup-uv@v1
with:
enable-cache: true
cache-suffix: "optional-suffix"
cache-suffix: 'optional-suffix'
```
When the cache was successfully restored, the output `cache-hit` will be set to `true` and you can
use it in subsequent steps. For example, to use the cache in the above case:
When the cache was successfully restored the output `cache-hit` will be set to `true` and you can use it in subsequent steps.
For the example above you can use it like this:
```yaml
- name: Do something if the cache was restored
@@ -151,248 +95,95 @@ use it in subsequent steps. For example, to use the cache in the above case:
run: echo "Cache was restored"
```
#### Cache dependency glob
#### Local cache path
If you want to control when the GitHub Actions cache is invalidated, specify a glob pattern with the
`cache-dependency-glob` input. The GitHub Actions cache will be invalidated if any file matching the glob pattern
changes. If you use relative paths, they are relative to the repository root.
> [!NOTE]
>
> The default is
> ```yaml
> cache-dependency-glob: |
> **/requirements*.txt
> **/uv.lock
> ```
```yaml
- name: Define a cache dependency glob
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "**/pyproject.toml"
```
```yaml
- name: Define a list of cache dependency globs
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: |
**/requirements*.txt
**/pyproject.toml
```
```yaml
- name: Define an absolute cache dependency glob
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: "/tmp/my-folder/requirements*.txt"
```
```yaml
- name: Never invalidate the cache
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
cache-dependency-glob: ""
```
### Local cache path
This action controls where uv stores its cache on the runner's filesystem by setting `UV_CACHE_DIR`.
It defaults to `setup-uv-cache` in the `TMP` dir, `D:\a\_temp\setup-uv-cache` on Windows and
`/tmp/setup-uv-cache` on Linux/macOS. You can change the default by specifying the path with the
`cache-local-path` input.
If you want to save the cache to a local path other than the default path (`/tmp/setup-uv-cache`)
you can specify the path with the `cache-local-path` input.
```yaml
- name: Define a custom uv cache path
uses: astral-sh/setup-uv@v4
with:
cache-local-path: "/path/to/cache"
```
### Disable cache pruning
By default, the uv cache is pruned after every run, removing pre-built wheels, but retaining any
wheels that were built from source. On GitHub-hosted runners, it's typically faster to omit those
pre-built wheels from the cache (and instead re-download them from the registry on each run).
However, on self-hosted or local runners, preserving the cache may be more efficient. See
the [documentation](https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration) for
more information.
If you want to persist the entire cache across runs, disable cache pruning with the `prune-cache`
input.
```yaml
- name: Don't prune the cache before saving it
uses: astral-sh/setup-uv@v4
uses: eifinger/setup-uv@v1
with:
enable-cache: true
prune-cache: false
cache-local-path: '/path/to/cache'
```
### Ignore nothing to cache
#### Cache dependency glob
By default, the action will fail if caching is enabled but there is nothing to upload (the uv cache directory does not exist).
If you want to ignore this, set the `ignore-nothing-to-cache` input to `true`.
If you want to control when the cache is invalidated you can specify a glob pattern with the `cache-dependency-glob` input.
The cache will be invalidated if any file matching the glob pattern changes.
The glob matches files relative to the repository root.
```yaml
- name: Ignore nothing to cache
uses: astral-sh/setup-uv@v4
- name: Define a cache dependency glob
uses: eifinger/setup-uv@v1
with:
enable-cache: true
ignore-nothing-to-cache: true
cache-dependency-glob: 'uv.lock'
```
### GitHub authentication token
This action uses the GitHub API to fetch the uv release artifacts. To avoid hitting the GitHub API
rate limit too quickly, an authentication token can be provided via the `github-token` input. By
default, the `GITHUB_TOKEN` secret is used, which is automatically provided by GitHub Actions.
If the default
[permissions for the GitHub token](https://docs.github.com/en/actions/security-for-github-actions/security-guides/automatic-token-authentication#permissions-for-the-github_token)
are not sufficient, you can provide a custom GitHub token with the necessary permissions.
```yaml
- name: Install the latest version of uv with a custom GitHub token
uses: astral-sh/setup-uv@v4
- name: Define a cache dependency glob
uses: eifinger/setup-uv@v1
with:
github-token: ${{ secrets.CUSTOM_GITHUB_TOKEN }}
enable-cache: true
cache-dependency-glob: '**requirements*.txt'
```
### UV_TOOL_DIR
### API rate limit
On Windows `UV_TOOL_DIR` is set to `uv-tool-dir` in the `TMP` dir (e.g. `D:\a\_temp\uv-tool-dir`).
On GitHub hosted runners this is on the much faster `D:` drive.
On all other platforms the tool environments are placed in the
[default location](https://docs.astral.sh/uv/concepts/tools/#tools-directory).
If you want to change this behaviour (especially on self-hosted runners) you can use the `tool-dir`
input:
To avoid hitting the error `API rate limit exceeded` you can supply a GitHub token with the `github-token` input.
```yaml
- name: Install the latest version of uv with a custom tool dir
uses: astral-sh/setup-uv@v4
- name: Install uv and supply a GitHub token
uses: eifinger/setup-uv@v1
with:
tool-dir: "/path/to/tool/dir"
```
### UV_TOOL_BIN_DIR
On Windows `UV_TOOL_BIN_DIR` is set to `uv-tool-bin-dir` in the `TMP` dir (e.g.
`D:\a\_temp\uv-tool-bin-dir`). On GitHub hosted runners this is on the much faster `D:` drive. This
path is also automatically added to the PATH.
On all other platforms the tool binaries get installed to the
[default location](https://docs.astral.sh/uv/concepts/tools/#the-bin-directory).
If you want to change this behaviour (especially on self-hosted runners) you can use the
`tool-bin-dir` input:
```yaml
- name: Install the latest version of uv with a custom tool bin dir
uses: astral-sh/setup-uv@v4
with:
tool-bin-dir: "/path/to/tool-bin/dir"
```
### Tilde Expansion
This action supports expanding the `~` character to the user's home directory for the following inputs:
- `cache-local-path`
- `tool-dir`
- `tool-bin-dir`
- `cache-dependency-glob`
```yaml
- name: Expand the tilde character
uses: astral-sh/setup-uv@v4
with:
cache-local-path: "~/path/to/cache"
tool-dir: "~/path/to/tool/dir"
tool-bin-dir: "~/path/to/tool-bin/dir"
cache-dependency-glob: "~/my-cache-buster"
github-token: ${{ secrets.GITHUB_TOKEN }}
```
## How it works
This action downloads uv from the uv repo's official
[GitHub Releases](https://github.com/astral-sh/uv) and uses the
[GitHub Actions Toolkit](https://github.com/actions/toolkit) to cache it as a tool to speed up
consecutive runs on self-hosted runners.
This action downloads uv from the releases of the [uv repo](https://github.com/astral-sh/uv) and uses the [GitHub Actions Toolkit](https://github.com/actions/toolkit) to cache it as a tool to speed up consecutive runs on self-hosted runners.
The installed version of uv is then added to the runner PATH, enabling subsequent steps to invoke it
by name (`uv`).
The installed version of uv is then added to the runner path so other steps can just use it by calling `uv`.
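
As a rough sketch of that flow (assumed, not the action's actual source; the asset name and release URL below are illustrative), a setup step built on the GitHub Actions Toolkit could look like this:

```typescript
import * as core from '@actions/core'
import * as tc from '@actions/tool-cache'

// Illustrative only: download uv from GitHub Releases, cache it as a tool,
// and put it on PATH for later steps. The asset name and URL are assumptions.
async function setupUv(version: string): Promise<void> {
  let toolPath = tc.find('uv', version) // reuse the tool cache on self-hosted runners
  if (toolPath === '') {
    const url = `https://github.com/astral-sh/uv/releases/download/${version}/uv-x86_64-unknown-linux-gnu.tar.gz`
    const archive = await tc.downloadTool(url)
    const extracted = await tc.extractTar(archive)
    toolPath = await tc.cacheDir(extracted, 'uv', version)
  }
  core.addPath(toolPath) // subsequent steps can now invoke `uv` by name
}
```

On a self-hosted runner, `tc.find` returns the previously cached copy, which is what makes consecutive runs fast.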
## FAQ
### Do I still need `actions/setup-python` alongside `setup-uv`?
### Do I still need actions/setup-python when using this action?
No. This action is modelled as a drop-in replacement for `actions/setup-python` when using uv. With
`setup-uv`, you can install a specific version of Python using `uv python install` rather than
relying on `actions/setup-python`.
No! This action was modelled as a drop-in replacement for `actions/setup-python` when using uv.
For example:
A simple example workflow could look like this:
```yaml
- name: Checkout the repository
uses: actions/checkout@main
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
uses: eifinger/setup-uv@v1
with:
enable-cache: true
- name: Test
run: uv run --frozen pytest
```
To install a specific version of Python, use
[`uv python install`](https://docs.astral.sh/uv/guides/install-python/):
```yaml
- name: Install the latest version of uv
uses: astral-sh/setup-uv@v4
with:
enable-cache: true
- name: Install Python 3.12
run: uv python install 3.12
```
### What is the default version?
By default, this action installs the latest version of uv.
By default this action installs the version defined as `default` in `action.yml`.
When a new release of uv is published this triggers an automatic release of this action with the new version as `default`.
If you require the installed version in subsequent steps of your workflow, use the `uv-version`
output:
If you have to know the version installed for other steps of your workflow you can use the `uv-version` output:
```yaml
- name: Checkout the repository
uses: actions/checkout@main
- name: Install the default version of uv
id: setup-uv
uses: astral-sh/setup-uv@v4
uses: eifinger/setup-uv@v1
- name: Print the installed version
run: echo "Installed uv version is ${{ steps.setup-uv.outputs.uv-version }}"
```
## Acknowledgements
---
`setup-uv` was initially written and published by [Kevin Stillhammer](https://github.com/eifinger)
before moving under the official [Astral](https://github.com/astral-sh) GitHub organization. You can
support Kevin's work in open source on [Buy me a coffee](https://www.buymeacoffee.com/eifinger) or
[PayPal](https://paypal.me/kevinstillhammer).
## License
MIT
<div align="center">
<a target="_blank" href="https://astral.sh" style="background:none">
<img src="https://raw.githubusercontent.com/astral-sh/uv/main/assets/svg/Astral.svg" alt="Made by Astral">
</a>
</div>
[<img src="https://raw.githubusercontent.com/eifinger/setup-uv/main/docs/images/bmc-button.svg" width=150 height=40 style="margin: 5px"/>](https://www.buymeacoffee.com/eifinger)
[<img src="https://raw.githubusercontent.com/eifinger/setup-uv/main/docs/images/paypal-button.svg" width=150 height=40 style="margin: 5px"/>](https://paypal.me/kevinstillhammer)


@@ -0,0 +1,37 @@
import {expect, test, it} from '@jest/globals'
import {
isknownVersion,
validateChecksum
} from '../../../src/download/checksum/checksum'
test('checksum should match', async () => {
const validChecksum =
'f3da96ec7e995debee7f5d52ecd034dfb7074309a1da42f76429ecb814d813a3'
const filePath = '__tests__/fixtures/checksumfile'
// string params don't matter only test the checksum mechanism, not known checksums
await validateChecksum(
validChecksum,
filePath,
'aarch64',
'pc-windows-msvc',
'1.2.3'
)
})
type KnownVersionFixture = {version: string; known: boolean}
it.each<KnownVersionFixture>([
{
version: '0.3.0',
known: true
},
{
version: '0.0.15',
known: false
}
])(
'isknownVersion should return $known for version $version',
({version, known}) => {
expect(isknownVersion(version)).toBe(known)
}
)


@@ -1,37 +0,0 @@
import { expect, test, it } from "@jest/globals";
import {
isknownVersion,
validateChecksum,
} from "../../../src/download/checksum/checksum";
test("checksum should match", async () => {
const validChecksum =
"f3da96ec7e995debee7f5d52ecd034dfb7074309a1da42f76429ecb814d813a3";
const filePath = "__tests__/fixtures/checksumfile";
// string params don't matter only test the checksum mechanism, not known checksums
await validateChecksum(
validChecksum,
filePath,
"aarch64",
"pc-windows-msvc",
"1.2.3",
);
});
type KnownVersionFixture = { version: string; known: boolean };
it.each<KnownVersionFixture>([
{
version: "0.3.0",
known: true,
},
{
version: "0.0.15",
known: false,
},
])(
"isknownVersion should return $known for version $version",
({ version, known }) => {
expect(isknownVersion(version)).toBe(known);
},
);
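
A hedged sketch of what a SHA-256 check like the `validateChecksum` call exercised above might boil down to (the helper below is illustrative, not the action's implementation):

```typescript
import {createHash} from 'node:crypto'
import {readFile} from 'node:fs/promises'

// Illustrative only: compare a downloaded file's SHA-256 digest with the expected value.
async function sha256Matches(filePath: string, expected: string): Promise<boolean> {
  const digest = createHash('sha256').update(await readFile(filePath)).digest('hex')
  return digest === expected
}
```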


@@ -1,13 +0,0 @@
[project]
name = "old-python-constraint-project"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.8,<=3.9"
dependencies = [
"ruff>=0.6.2",
]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"


@@ -1,2 +0,0 @@
def hello() -> str:
return "Hello from uv-project!"


@@ -1,38 +0,0 @@
version = 1
requires-python = ">=3.12"
[[package]]
name = "ruff"
version = "0.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/23/f4/279d044f66b79261fd37df76bf72b64471afab5d3b7906a01499c4451910/ruff-0.6.2.tar.gz", hash = "sha256:239ee6beb9e91feb8e0ec384204a763f36cb53fb895a1a364618c6abb076b3be", size = 2460281 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/72/4b/47dd7a69287afb4069fa42c198e899463605460a58120196711bfcf0446b/ruff-0.6.2-py3-none-linux_armv6l.whl", hash = "sha256:5c8cbc6252deb3ea840ad6a20b0f8583caab0c5ef4f9cca21adc5a92b8f79f3c", size = 9695871 },
{ url = "https://files.pythonhosted.org/packages/ae/c3/8aac62ac4638c14a740ee76a755a925f2d0d04580ab790a9887accb729f6/ruff-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:17002fe241e76544448a8e1e6118abecbe8cd10cf68fde635dad480dba594570", size = 9459354 },
{ url = "https://files.pythonhosted.org/packages/2f/cf/77fbd8d4617b9b9c503f9bffb8552c4e3ea1a58dc36975e7a9104ffb0f85/ruff-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3dbeac76ed13456f8158b8f4fe087bf87882e645c8e8b606dd17b0b66c2c1158", size = 9163871 },
{ url = "https://files.pythonhosted.org/packages/05/1c/765192bab32b79efbb498b06f0b9dcb3629112b53b8777ae1d19b8209e09/ruff-0.6.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:094600ee88cda325988d3f54e3588c46de5c18dae09d683ace278b11f9d4d534", size = 10096250 },
{ url = "https://files.pythonhosted.org/packages/08/d0/86f3cb0f6934c99f759c232984a5204d67a26745cad2d9edff6248adf7d2/ruff-0.6.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:316d418fe258c036ba05fbf7dfc1f7d3d4096db63431546163b472285668132b", size = 9475376 },
{ url = "https://files.pythonhosted.org/packages/cd/cc/4c8d0e225b559a3fae6092ec310d7150d3b02b4669e9223f783ef64d82c0/ruff-0.6.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d72b8b3abf8a2d51b7b9944a41307d2f442558ccb3859bbd87e6ae9be1694a5d", size = 10295634 },
{ url = "https://files.pythonhosted.org/packages/db/96/d2699cfb1bb5a01c68122af43454c76c31331e1c8a9bd97d653d7c82524b/ruff-0.6.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2aed7e243be68487aa8982e91c6e260982d00da3f38955873aecd5a9204b1d66", size = 11024941 },
{ url = "https://files.pythonhosted.org/packages/8b/a9/6ecd66af8929e0f2a1ed308a4137f3521789f28f0eb97d32c2ca3aa7000c/ruff-0.6.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d371f7fc9cec83497fe7cf5eaf5b76e22a8efce463de5f775a1826197feb9df8", size = 10606894 },
{ url = "https://files.pythonhosted.org/packages/e4/73/2ee4cd19f44992fedac1cc6db9e3d825966072f6dcbd4032f21cbd063170/ruff-0.6.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8f310d63af08f583363dfb844ba8f9417b558199c58a5999215082036d795a1", size = 11552886 },
{ url = "https://files.pythonhosted.org/packages/60/4c/c0f1cd35ce4a93c54a6bb1ee6934a3a205fa02198dd076678193853ceea1/ruff-0.6.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db6880c53c56addb8638fe444818183385ec85eeada1d48fc5abe045301b2f1", size = 10264945 },
{ url = "https://files.pythonhosted.org/packages/c4/89/e45c9359b9cdd4245512ea2b9f2bb128a997feaa5f726fc9e8c7a66afadf/ruff-0.6.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1175d39faadd9a50718f478d23bfc1d4da5743f1ab56af81a2b6caf0a2394f23", size = 10100007 },
{ url = "https://files.pythonhosted.org/packages/06/74/0bd4e0a7ed5f6908df87892f9bf60a2356c0fd74102d8097298bd9b4f346/ruff-0.6.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939f9c86d51635fe486585389f54582f0d65b8238e08c327c1534844b3bb9a", size = 9559267 },
{ url = "https://files.pythonhosted.org/packages/54/03/3dc6dc9419f276f05805bf888c279e3e0b631284abd548d9e87cebb93aec/ruff-0.6.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d0d62ca91219f906caf9b187dea50d17353f15ec9bb15aae4a606cd697b49b4c", size = 9905304 },
{ url = "https://files.pythonhosted.org/packages/5c/5b/d6a72a6a6bbf097c09de468326ef5fa1c9e7aa5e6e45979bc0d984b0dbe7/ruff-0.6.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7438a7288f9d67ed3c8ce4d059e67f7ed65e9fe3aa2ab6f5b4b3610e57e3cb56", size = 10341480 },
{ url = "https://files.pythonhosted.org/packages/79/a9/0f2f21fe15ba537c46598f96aa9ae4a3d4b9ec64926664617ca6a8c772f4/ruff-0.6.2-py3-none-win32.whl", hash = "sha256:279d5f7d86696df5f9549b56b9b6a7f6c72961b619022b5b7999b15db392a4da", size = 7961901 },
{ url = "https://files.pythonhosted.org/packages/b0/80/fff12ffe11853d9f4ea3e5221e6dd2e93640a161c05c9579833e09ad40a7/ruff-0.6.2-py3-none-win_amd64.whl", hash = "sha256:d9f3469c7dd43cd22eb1c3fc16926fb8258d50cb1b216658a07be95dd117b0f2", size = 8783320 },
{ url = "https://files.pythonhosted.org/packages/56/91/577cdd64cce5e74d3f8b5ecb93f29566def569c741eb008aed4f331ef821/ruff-0.6.2-py3-none-win_arm64.whl", hash = "sha256:f28fcd2cd0e02bdf739297516d5643a945cc7caf09bd9bcb4d932540a5ea4fa9", size = 8225886 },
]
[[package]]
name = "uv-project"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "ruff" },
]
[package.metadata]
requires-dist = [{ name = "ruff" }]


@@ -1 +0,0 @@
print("Hello world")


@@ -1 +0,0 @@
ruff>=0.6.2


@@ -1,60 +1,38 @@
name: "astral-sh/setup-uv"
description:
"Set up your GitHub Actions workflow with a specific version of uv."
author: "astral-sh"
name: 'Python setup uv'
description: 'Set up your GitHub Actions workflow with a specific version of uv'
author: 'eifinger'
inputs:
version:
description: "The version of uv to install"
default: "latest"
python-version:
description: "The version of Python to set UV_PYTHON to"
required: false
version:
description: 'The version of uv to install'
default: '0.3.3'
checksum:
description: "The checksum of the uv version to install"
description: 'The checksum of the uv version to install'
required: false
github-token:
description:
"Used to increase the rate limit when retrieving versions and downloading uv."
description: 'Used to increase the rate limit when retrieving versions and downloading uv.'
required: false
default: ${{ github.token }}
enable-cache:
description: "Enable caching of the uv cache"
default: "auto"
description: 'Enable caching of the uv cache'
default: 'false'
cache-dependency-glob:
description:
"Glob pattern to match files relative to the repository root to control
the cache."
default: |
**/uv.lock
**/requirements*.txt
description: 'Glob pattern to match files relative to the repository root to control the cache. e.g. "uv.lock"'
required: false
cache-suffix:
description: "Suffix for the cache key"
description: 'Suffix for the cache key'
required: false
cache-local-path:
description: "Local path to store the cache."
default: ""
prune-cache:
description: "Prune cache before saving."
default: "true"
ignore-nothing-to-cache:
description: "Ignore when nothing is found to cache."
default: "false"
tool-dir:
description: "Custom path to set UV_TOOL_DIR to."
required: false
tool-bin-dir:
description: "Custom path to set UV_TOOL_BIN_DIR to."
required: false
description: 'Local path to store the cache.'
default: '/tmp/setup-uv-cache'
outputs:
uv-version:
description: "The installed uv version. Useful when using latest."
cache-hit:
description: "A boolean value to indicate a cache entry was found"
runs:
using: "node20"
main: "dist/setup/index.js"
post: "dist/save-cache/index.js"
using: 'node20'
main: 'dist/setup/index.js'
post: 'dist/save-cache/index.js'
post-if: success()
branding:
icon: "package"
color: "black"
icon: 'package'
color: 'blue'
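
For orientation (a hypothetical sketch, not the action's entrypoint), the inputs and outputs declared in this manifest are read and written inside the Node entrypoints roughly like this:

```typescript
import * as core from '@actions/core'

// Illustrative only: consume inputs declared in action.yml and publish an output.
const version = core.getInput('version')
const checksum = core.getInput('checksum')
const enableCache = core.getInput('enable-cache')

core.info(`Installing uv ${version} (enable-cache: ${enableCache}, checksum: ${checksum || 'none'})`)
core.setOutput('uv-version', version)
```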


@@ -1,31 +0,0 @@
{
"$schema": "https://biomejs.dev/schemas/1.9.2/schema.json",
"vcs": {
"enabled": true,
"clientKind": "git",
"useIgnoreFile": false
},
"files": {
"ignoreUnknown": false,
"ignore": ["dist", "lib", "node_modules"]
},
"formatter": {
"enabled": true,
"indentStyle": "space"
},
"organizeImports": {
"enabled": true
},
"linter": {
"enabled": true,
"rules": {
"recommended": true
}
},
"javascript": {
"formatter": {
"quoteStyle": "double",
"trailingCommas": "all"
}
}
}

13578
dist/save-cache/index.js generated vendored

File diff suppressed because one or more lines are too long

1
dist/save-cache/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

1
dist/save-cache/sourcemap-register.js generated vendored Normal file

File diff suppressed because one or more lines are too long

453
dist/setup/37.index.js generated vendored Normal file

@@ -0,0 +1,453 @@
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {
/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// boyer-moore derrived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even so it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
/***/ })
};
;
//# sourceMappingURL=37.index.js.map
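To make the flow above concrete, here is a minimal usage sketch of the toFormData helper; the boundary, field name, and body are illustrative, and it assumes the parser's opening-boundary handling from the earlier part of this bundle that is not shown here.
(async () => {
  // Illustrative only: feed a tiny multipart body through toFormData.
  const boundary = "----example-boundary";
  const contentType = `multipart/form-data; boundary=${boundary}`;
  const rawBody =
    `--${boundary}\r\n` +
    `Content-Disposition: form-data; name="field1"\r\n` +
    `\r\n` +
    `value1\r\n` +
    `--${boundary}--\r\n`;
  // toFormData iterates the body with `for await`, so any async iterable of
  // byte chunks works as the first argument.
  async function* chunks() {
    yield new TextEncoder().encode(rawBody);
  }
  const formData = await toFormData(chunks(), contentType);
  console.log(formData.get("field1")); // expected: "value1"
})();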

dist/setup/37.index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

dist/setup/index.js generated vendored

File diff suppressed because one or more lines are too long

dist/setup/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

dist/setup/licenses.txt generated vendored Normal file

File diff suppressed because it is too large

dist/setup/sourcemap-register.js generated vendored Normal file

File diff suppressed because one or more lines are too long

dist/update-checksums/index.js generated vendored Normal file

File diff suppressed because one or more lines are too long

dist/update-checksums/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

dist/update-checksums/sourcemap-register.js generated vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/update-default-version/index.js.map generated vendored Normal file

File diff suppressed because one or more lines are too long

dist/update-default-version/sourcemap-register.js generated vendored Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@@ -0,0 +1,15 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" width="545" height="153" viewBox="0 0 545 153">
<defs>
<style>
.cls-1{fill:#009ee3;}.cls-1,.cls-2,.cls-3{fill-rule:evenodd;}.cls-2{fill:#113984;}.cls-3{fill:#172c70;}</style>
</defs>
<title>paypal-seeklogo.com</title>
<path transform="scale(1, 1)" d="M0 24.48C0 10.9601 10.9601 0 24.48 0H520.2C533.72 0 544.68 10.9601 544.68 24.48V128.52C544.68 142.04 533.72 153 520.2 153H24.48C10.9601 153 0 142.04 0 128.52V24.48Z" fill="#ebf2ff"/>
<g transform="scale(0.8, 0.8) translate(45, 25)">
<path class="cls-1" d="M192.95,386.87h38.74c20.8,0,28.63,10.53,27.42,26-2,25.54-17.44,39.67-37.92,39.67H210.85c-2.81,0-4.7,1.86-5.46,6.9L201,488.74c-0.29,1.9-1.29,3-2.79,3.15H173.87c-2.29,0-3.1-1.75-2.5-5.54l14.84-93.93C186.79,388.66,188.85,386.87,192.95,386.87Z" transform="translate(-143.48 -354.54)"/>
<path class="cls-2" d="M361.14,385.13c13.07,0,25.13,7.09,23.48,24.76-2,21-13.25,32.62-31,32.67H338.11c-2.23,0-3.31,1.82-3.89,5.55l-3,19.07c-0.45,2.88-1.93,4.3-4.11,4.3H312.68c-2.3,0-3.1-1.47-2.59-4.76L322,390.29c0.59-3.76,2-5.16,4.57-5.16h34.54Zm-23.5,40.92h11.75c7.35-.28,12.23-5.37,12.72-14.55,0.3-5.67-3.53-9.73-9.62-9.7l-11.06.05-3.79,24.2h0Zm86.21,39.58c1.32-1.2,2.66-1.82,2.47-.34l-0.47,3.54c-0.24,1.85.49,2.83,2.21,2.83h12.82c2.16,0,3.21-.87,3.74-4.21l7.9-49.58c0.4-2.49-.21-3.71-2.1-3.71H436.32c-1.27,0-1.89.71-2.22,2.65l-0.52,3.05c-0.27,1.59-1,1.87-1.68.27-2.39-5.66-8.49-8.2-17-8-19.77.41-33.1,15.42-34.53,34.66-1.1,14.88,9.56,26.57,23.62,26.57,10.2,0,14.76-3,19.9-7.7h0ZM413.11,458c-8.51,0-14.44-6.79-13.21-15.11s9.19-15.11,17.7-15.11,14.44,6.79,13.21,15.11S421.63,458,413.11,458h0Zm64.5-44h-13c-2.68,0-3.77,2-2.92,4.46l16.14,47.26L462,488.21c-1.33,1.88-.3,3.59,1.57,3.59h14.61a4.47,4.47,0,0,0,4.34-2.13l49.64-71.2c1.53-2.19.81-4.49-1.7-4.49H516.63c-2.37,0-3.32.94-4.68,2.91l-20.7,30L482,416.82C481.46,415,480.11,414,477.62,414Z" transform="translate(-143.48 -354.54)"/>
<path class="cls-1" d="M583.8,385.13c13.07,0,25.13,7.09,23.48,24.76-2,21-13.25,32.62-31,32.67H560.78c-2.23,0-3.31,1.82-3.89,5.55l-3,19.07c-0.45,2.88-1.93,4.3-4.11,4.3H535.35c-2.3,0-3.1-1.47-2.59-4.76l11.93-76.45c0.59-3.76,2-5.16,4.57-5.16H583.8Zm-23.5,40.92h11.75c7.35-.28,12.23-5.37,12.72-14.55,0.3-5.67-3.53-9.73-9.62-9.7l-11.06.05-3.79,24.2h0Zm86.21,39.58c1.32-1.2,2.66-1.82,2.47-.34l-0.47,3.54c-0.24,1.85.49,2.83,2.21,2.83h12.82c2.16,0,3.21-.87,3.74-4.21l7.9-49.58c0.4-2.49-.21-3.71-2.1-3.71H659c-1.27,0-1.89.71-2.22,2.65l-0.52,3.05c-0.27,1.59-1,1.87-1.68.27-2.39-5.66-8.49-8.2-17-8-19.77.41-33.1,15.42-34.53,34.66-1.1,14.88,9.56,26.57,23.62,26.57,10.2,0,14.76-3,19.9-7.7h0ZM635.78,458c-8.51,0-14.44-6.79-13.21-15.11s9.19-15.11,17.7-15.11,14.44,6.79,13.21,15.11S644.29,458,635.78,458h0Zm59.13,13.74h-14.8a1.75,1.75,0,0,1-1.81-2l13-82.36a2.55,2.55,0,0,1,2.46-2h14.8a1.75,1.75,0,0,1,1.81,2l-13,82.36A2.55,2.55,0,0,1,694.91,471.76Z" transform="translate(-143.48 -354.54)"/>
<path class="cls-2" d="M168.72,354.54h38.78c10.92,0,23.88.35,32.54,8,5.79,5.11,8.83,13.24,8.13,22-2.38,29.61-20.09,46.2-43.85,46.2H185.2c-3.26,0-5.41,2.16-6.33,8l-5.34,34c-0.35,2.2-1.3,3.5-3,3.66H146.6c-2.65,0-3.59-2-2.9-6.42L160.9,361C161.59,356.62,164,354.54,168.72,354.54Z" transform="translate(-143.48 -354.54)"/>
<path class="cls-3" d="M179.43,435.29l6.77-42.87c0.59-3.76,2.65-5.56,6.75-5.56h38.74c6.41,0,11.6,1,15.66,2.85-3.89,26.36-20.94,41-43.26,41H185C182.44,430.72,180.56,432,179.43,435.29Z" transform="translate(-143.48 -354.54)"/>
</g>
</svg>


@@ -1,9 +1,9 @@
module.exports = {
clearMocks: true,
moduleFileExtensions: ["js", "ts"],
testMatch: ["**/*.test.ts"],
moduleFileExtensions: ['js', 'ts'],
testMatch: ['**/*.test.ts'],
transform: {
"^.+\\.ts$": "ts-jest",
'^.+\\.ts$': 'ts-jest'
},
verbose: true,
};
verbose: true
}

package-lock.json generated

File diff suppressed because it is too large


@@ -6,40 +6,52 @@
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"format": "biome format --fix",
"format-check": "biome format",
"lint": "biome lint --fix",
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-known-checksums src/update-known-checksums.ts",
"format": "prettier --write '**/*.ts'",
"format-check": "prettier --check '**/*.ts'",
"lint": "eslint src/**/*.ts --fix",
"package": "ncc build -o dist/setup src/setup-uv.ts && ncc build -o dist/save-cache src/save-cache.ts && ncc build -o dist/update-default-version src/update-default-version.ts",
"test": "jest",
"act": "act pull_request -W .github/workflows/test.yml --container-architecture linux/amd64 -s GITHUB_TOKEN=\"$(gh auth token)\"",
"update-known-checksums": "RUNNER_TEMP=known_checksums node dist/update-known-checksums/index.js src/download/checksum/known-checksums.ts \"$(gh auth token)\"",
"update-default-version": "node dist/update-default-version/index.js src/download/checksum/known-checksums.ts action.yml \"$(gh auth token)\"",
"all": "npm run build && npm run format && npm run lint && npm run package && npm test"
},
"repository": {
"type": "git",
"url": "git+https://github.com/astral-sh/setup-uv.git"
"url": "git+https://github.com/eifinger/setup-uv.git"
},
"keywords": ["actions", "python", "setup", "uv"],
"keywords": [
"actions",
"python",
"setup",
"uv"
],
"author": "@eifinger",
"license": "MIT",
"dependencies": {
"@actions/cache": "^4.0.0",
"@actions/core": "^1.11.1",
"@actions/cache": "^3.2.4",
"@actions/core": "^1.10.1",
"@actions/exec": "^1.1.1",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.5.0",
"@actions/glob": "^0.4.0",
"@actions/io": "^1.1.3",
"@actions/tool-cache": "^2.0.1",
"@octokit/rest": "^21.0.2"
},
"devDependencies": {
"@biomejs/biome": "1.9.4",
"@types/node": "^22.10.2",
"@types/node": "^20.14.9",
"@types/semver": "^7.5.8",
"@vercel/ncc": "^0.38.3",
"@typescript-eslint/eslint-plugin": "^7.15.0",
"@typescript-eslint/parser": "^7.15.0",
"@vercel/ncc": "^0.38.1",
"eslint": "^8.57.0",
"eslint-plugin-github": "^5.0.1",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-jest": "^28.6.0",
"eslint-plugin-prettier": "^5.2.1",
"jest": "^29.7.0",
"js-yaml": "^4.1.0",
"prettier": "^3.3.3",
"ts-jest": "^29.2.5",
"typescript": "^5.7.2"
"typescript": "^5.4.5"
}
}


@@ -1,105 +1,65 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import {
cacheDependencyGlob,
cacheLocalPath,
cacheSuffix,
pythonVersion as pythonVersionInput,
} from "../utils/inputs";
import { getArch, getPlatform } from "../utils/platforms";
import { hashFiles } from "../hash/hash-files";
import * as exec from "@actions/exec";
import * as cache from '@actions/cache'
import * as glob from '@actions/glob'
import * as core from '@actions/core'
import path from 'path'
import {cacheDependencyGlob, cacheLocalPath, cacheSuffix} from '../utils/inputs'
import {getArch, getPlatform} from '../utils/platforms'
export const STATE_CACHE_KEY = "cache-key";
export const STATE_CACHE_MATCHED_KEY = "cache-matched-key";
const CACHE_VERSION = "1";
export const STATE_CACHE_KEY = 'cache-key'
export const STATE_CACHE_MATCHED_KEY = 'cache-matched-key'
const CACHE_VERSION = '1'
export async function restoreCache(version: string): Promise<void> {
const cacheKey = await computeKeys(version);
const cacheKey = await computeKeys(version)
let matchedKey: string | undefined;
let matchedKey: string | undefined
core.info(
`Trying to restore uv cache from GitHub Actions cache with key: ${cacheKey}`,
);
`Trying to restore uv cache from GitHub Actions cache with key: ${cacheKey}`
)
try {
matchedKey = await cache.restoreCache([cacheLocalPath], cacheKey);
matchedKey = await cache.restoreCache([cacheLocalPath], cacheKey)
} catch (err) {
const message = (err as Error).message;
core.warning(message);
core.setOutput("cache-hit", false);
return;
const message = (err as Error).message
core.warning(message)
core.setOutput('cache-hit', false)
return
}
core.saveState(STATE_CACHE_KEY, cacheKey);
core.saveState(STATE_CACHE_KEY, cacheKey)
handleMatchResult(matchedKey, cacheKey);
handleMatchResult(matchedKey, cacheKey)
}
async function computeKeys(version: string): Promise<string> {
let cacheDependencyPathHash = "-";
if (cacheDependencyGlob !== "") {
core.info(
`Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
);
cacheDependencyPathHash += await hashFiles(cacheDependencyGlob, true);
if (cacheDependencyPathHash === "-") {
core.warning(
`No file matched to [${cacheDependencyGlob.split("\n").join(",")}]. The cache will never get invalidated. Make sure you have checked out the target repository and configured the cache-dependency-glob input correctly.`,
);
let cacheDependencyPathHash = '-'
if (cacheDependencyGlob !== '') {
const fullCacheDependencyGlob = `${process.env['GITHUB_WORKSPACE']}${path.sep}${cacheDependencyGlob}`
cacheDependencyPathHash += await glob.hashFiles(fullCacheDependencyGlob)
if (cacheDependencyPathHash === '-') {
throw new Error(
`No file in ${process.cwd()} matched to [${cacheDependencyGlob}], make sure you have checked out the target repository`
)
}
} else {
cacheDependencyPathHash += 'no-dependency-glob'
}
if (cacheDependencyPathHash === "-") {
cacheDependencyPathHash = "-no-dependency-glob";
}
const suffix = cacheSuffix ? `-${cacheSuffix}` : "";
const pythonVersion = await getPythonVersion();
return `setup-uv-${CACHE_VERSION}-${getArch()}-${getPlatform()}-${version}-${pythonVersion}${cacheDependencyPathHash}${suffix}`;
}
async function getPythonVersion(): Promise<string> {
if (pythonVersionInput !== "") {
return pythonVersionInput;
}
let output = "";
const options: exec.ExecOptions = {
silent: !core.isDebug(),
listeners: {
stdout: (data: Buffer) => {
output += data.toString();
},
},
};
try {
const execArgs = ["python", "find"];
await exec.exec("uv", execArgs, options);
const pythonPath = output.trim();
output = "";
await exec.exec(pythonPath, ["--version"], options);
// output is like "Python 3.8.10"
return output.split(" ")[1].trim();
} catch (error) {
const err = error as Error;
core.debug(`Failed to get python version from uv. Error: ${err.message}`);
return "unknown";
}
const suffix = cacheSuffix ? `-${cacheSuffix}` : ''
return `setup-uv-${CACHE_VERSION}-${getArch()}-${getPlatform()}-${version}${cacheDependencyPathHash}${suffix}`
}
function handleMatchResult(
matchedKey: string | undefined,
primaryKey: string,
primaryKey: string
): void {
if (!matchedKey) {
core.info(`No GitHub Actions cache found for key: ${primaryKey}`);
core.setOutput("cache-hit", false);
return;
core.info(`No GitHub Actions cache found for key: ${primaryKey}`)
core.setOutput('cache-hit', false)
return
}
core.saveState(STATE_CACHE_MATCHED_KEY, matchedKey);
core.saveState(STATE_CACHE_MATCHED_KEY, matchedKey)
core.info(
`uv cache restored from GitHub Actions cache with key: ${matchedKey}`,
);
core.setOutput("cache-hit", true);
`uv cache restored from GitHub Actions cache with key: ${matchedKey}`
)
core.setOutput('cache-hit', true)
}
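For orientation, the cache key that computeKeys builds in the newer version of this file has the following shape; every concrete value below is made up.
// Illustrative only:
// setup-uv-<CACHE_VERSION>-<arch>-<platform>-<uv version>-<python version><dep hash><suffix>
const exampleCacheKey =
  "setup-uv-1-x86_64-unknown-linux-gnu-0.5.4-3.12.3" + // CACHE_VERSION, arch, platform, uv version, python version
  "-9f86d081884c7d65..." + // "-" + sha256 over files matched by cache-dependency-glob ("-no-dependency-glob" when the glob is empty or matches nothing)
  "-my-suffix"; // "-" + cache-suffix, only when that input is set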


@@ -1,57 +1,55 @@
import * as fs from "node:fs";
import * as crypto from "node:crypto";
import * as fs from 'fs'
import * as crypto from 'crypto'
import * as core from "@actions/core";
import { KNOWN_CHECKSUMS } from "./known-checksums";
import type { Architecture, Platform } from "../../utils/platforms";
import * as core from '@actions/core'
import {KNOWN_CHECKSUMS} from './known-checksums'
import {Architecture, Platform} from '../../utils/platforms'
export async function validateChecksum(
checkSum: string | undefined,
downloadPath: string,
arch: Architecture,
platform: Platform,
version: string,
version: string
): Promise<void> {
let isValid: boolean | undefined = undefined;
if (checkSum !== undefined && checkSum !== "") {
isValid = await validateFileCheckSum(downloadPath, checkSum);
let isValid = true
if (checkSum !== undefined && checkSum !== '') {
isValid = await validateFileCheckSum(downloadPath, checkSum)
} else {
core.debug("Checksum not provided. Checking known checksums.");
const key = `${arch}-${platform}-${version}`;
core.debug(`Checksum not provided. Checking known checksums.`)
const key = `${arch}-${platform}-${version}`
if (key in KNOWN_CHECKSUMS) {
const knownChecksum = KNOWN_CHECKSUMS[`${arch}-${platform}-${version}`];
core.debug(`Checking checksum for ${arch}-${platform}-${version}.`);
isValid = await validateFileCheckSum(downloadPath, knownChecksum);
const knownChecksum = KNOWN_CHECKSUMS[`${arch}-${platform}-${version}`]
core.debug(`Checking checksum for ${arch}-${platform}-${version}.`)
isValid = await validateFileCheckSum(downloadPath, knownChecksum)
} else {
core.debug(`No known checksum found for ${key}.`);
core.debug(`No known checksum found for ${key}.`)
}
}
if (isValid === false) {
throw new Error(`Checksum for ${downloadPath} did not match ${checkSum}.`);
}
if (isValid === true) {
core.debug(`Checksum for ${downloadPath} is valid.`);
if (!isValid) {
throw new Error(`Checksum for ${downloadPath} did not match ${checkSum}.`)
}
core.debug(`Checksum for ${downloadPath} is valid.`)
}
async function validateFileCheckSum(
filePath: string,
expected: string,
expected: string
): Promise<boolean> {
return new Promise((resolve, reject) => {
const hash = crypto.createHash("sha256");
const stream = fs.createReadStream(filePath);
stream.on("error", (err) => reject(err));
stream.on("data", (chunk) => hash.update(chunk));
stream.on("end", () => {
const actual = hash.digest("hex");
resolve(actual === expected);
});
});
const hash = crypto.createHash('sha256')
const stream = fs.createReadStream(filePath)
stream.on('error', err => reject(err))
stream.on('data', chunk => hash.update(chunk))
stream.on('end', () => {
const actual = hash.digest('hex')
resolve(actual === expected)
})
})
}
export function isknownVersion(version: string): boolean {
const pattern = new RegExp(`^.*-.*-${version}$`);
return Object.keys(KNOWN_CHECKSUMS).some((key) => pattern.test(key));
const pattern = new RegExp(`^.*-.*-${version}$`)
return Object.keys(KNOWN_CHECKSUMS).some(key => pattern.test(key))
}

File diff suppressed because it is too large


@@ -1,59 +1,39 @@
import { promises as fs } from "node:fs";
import * as tc from "@actions/tool-cache";
import { KNOWN_CHECKSUMS } from "./known-checksums";
import {promises as fs} from 'fs'
import * as tc from '@actions/tool-cache'
export async function updateChecksums(
filePath: string,
downloadUrls: string[],
downloadUrls: string[]
): Promise<void> {
await fs.rm(filePath);
await fs.rm(filePath)
await fs.appendFile(
filePath,
"// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: { [key: string]: string } = {\n",
);
let firstLine = true;
'// AUTOGENERATED_DO_NOT_EDIT\nexport const KNOWN_CHECKSUMS: {[key: string]: string} = {\n'
)
let firstLine = true
for (const downloadUrl of downloadUrls) {
const key = getKey(downloadUrl);
if (key === undefined) {
continue;
}
const checksum = await getOrDownloadChecksum(key, downloadUrl);
const content = await downloadAssetContent(downloadUrl)
const checksum = content.split(' ')[0].trim()
const key = getKey(downloadUrl)
if (!firstLine) {
await fs.appendFile(filePath, ",\n");
await fs.appendFile(filePath, ',\n')
}
await fs.appendFile(filePath, ` "${key}":\n "${checksum}"`);
firstLine = false;
await fs.appendFile(filePath, ` '${key}':\n '${checksum}'`)
firstLine = false
}
await fs.appendFile(filePath, ",\n};\n");
await fs.appendFile(filePath, '}\n')
}
function getKey(downloadUrl: string): string | undefined {
function getKey(downloadUrl: string): string {
// https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256
const parts = downloadUrl.split("/");
const fileName = parts[parts.length - 1];
if (fileName.startsWith("source")) {
return undefined;
}
const name = fileName.split(".")[0].split("uv-")[1];
const version = parts[parts.length - 2];
return `${name}-${version}`;
}
async function getOrDownloadChecksum(
key: string,
downloadUrl: string,
): Promise<string> {
let checksum = "";
if (key in KNOWN_CHECKSUMS) {
checksum = KNOWN_CHECKSUMS[key];
} else {
const content = await downloadAssetContent(downloadUrl);
checksum = content.split(" ")[0].trim();
}
return checksum;
const parts = downloadUrl.split('/')
const fileName = parts[parts.length - 1]
const name = fileName.split('.')[0].split('uv-')[1]
const version = parts[parts.length - 2]
return `${name}-${version}`
}
async function downloadAssetContent(downloadUrl: string): Promise<string> {
const downloadPath = await tc.downloadTool(downloadUrl);
const content = await fs.readFile(downloadPath, "utf8");
return content;
const downloadPath = await tc.downloadTool(downloadUrl)
const content = await fs.readFile(downloadPath, 'utf8')
return content
}
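A worked trace of getKey for the URL quoted in the source comment may help; it is a trace only, since getKey is not exported from this module.
// downloadUrl: "https://github.com/astral-sh/uv/releases/download/0.3.2/uv-aarch64-apple-darwin.tar.gz.sha256"
// fileName   : "uv-aarch64-apple-darwin.tar.gz.sha256"   (last path segment)
// name       : fileName.split(".")[0].split("uv-")[1]    -> "aarch64-apple-darwin"
// version    : parts[parts.length - 2]                   -> "0.3.2"
// key        : `${name}-${version}`                      -> "aarch64-apple-darwin-0.3.2"
// In the newer version, assets whose file name starts with "source" return undefined and are skipped.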


@@ -0,0 +1,66 @@
import * as core from '@actions/core'
import * as tc from '@actions/tool-cache'
import * as exec from '@actions/exec'
import * as path from 'path'
import {Architecture, Platform} from '../utils/platforms'
import {validateChecksum} from './checksum/checksum'
import {OWNER, REPO, TOOL_CACHE_NAME} from '../utils/utils'
export async function downloadLatest(
platform: Platform,
arch: Architecture,
checkSum: string | undefined,
githubToken: string | undefined
): Promise<{cachedToolDir: string; version: string}> {
const artifact = `uv-${arch}-${platform}`
let downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/latest/download/${artifact}`
if (platform === 'pc-windows-msvc') {
downloadUrl += '.zip'
} else {
downloadUrl += '.tar.gz'
}
core.info(`Downloading uv from "${downloadUrl}" ...`)
const downloadPath = await tc.downloadTool(
downloadUrl,
undefined,
githubToken
)
let uvExecutablePath: string
let uvDir: string
if (platform === 'pc-windows-msvc') {
uvDir = await tc.extractZip(downloadPath)
// On windows extracting the zip does not create an intermediate directory
uvExecutablePath = path.join(uvDir, 'uv.exe')
} else {
const extractedDir = await tc.extractTar(downloadPath)
uvDir = path.join(extractedDir, artifact)
uvExecutablePath = path.join(uvDir, 'uv')
}
const version = await getVersion(uvExecutablePath)
await validateChecksum(checkSum, downloadPath, arch, platform, version)
const cachedToolDir = await tc.cacheDir(uvDir, TOOL_CACHE_NAME, version, arch)
return {cachedToolDir, version}
}
async function getVersion(uvExecutablePath: string): Promise<string> {
// Parse the output of `uv --version` to get the version
// The output looks like
// uv 0.3.1 (be17d132a 2024-08-21)
const options: exec.ExecOptions = {
silent: !core.isDebug()
}
const execArgs = ['--version']
let output = ''
options.listeners = {
stdout: (data: Buffer) => {
output += data.toString()
}
}
await exec.exec(uvExecutablePath, execArgs, options)
const parts = output.split(' ')
return parts[1]
}
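As a quick check of the parsing in getVersion, using the sample output quoted in its comment:
const sampleOutput = "uv 0.3.1 (be17d132a 2024-08-21)";
const parsedVersion = sampleOutput.split(" ")[1]; // "0.3.1"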


@@ -1,30 +1,18 @@
import * as core from "@actions/core";
import * as tc from "@actions/tool-cache";
import * as path from "node:path";
import { promises as fs } from "node:fs";
import {
GITHUB_COM_API,
OWNER,
REPO,
TOOL_CACHE_NAME,
} from "../utils/constants";
import type { Architecture, Platform } from "../utils/platforms";
import { validateChecksum } from "./checksum/checksum";
import * as github from "@actions/github";
import * as core from '@actions/core'
import * as tc from '@actions/tool-cache'
import * as path from 'path'
import {OWNER, REPO, TOOL_CACHE_NAME} from '../utils/utils'
import {Architecture, Platform} from '../utils/platforms'
import {validateChecksum} from './checksum/checksum'
export function tryGetFromToolCache(
arch: Architecture,
version: string,
): { version: string; installedPath: string | undefined } {
core.debug(`Trying to get uv from tool cache for ${version}...`);
const cachedVersions = tc.findAllVersions(TOOL_CACHE_NAME, arch);
core.debug(`Cached versions: ${cachedVersions}`);
let resolvedVersion = tc.evaluateVersions(cachedVersions, version);
if (resolvedVersion === "") {
resolvedVersion = version;
}
const installedPath = tc.find(TOOL_CACHE_NAME, resolvedVersion, arch);
return { version: resolvedVersion, installedPath };
version: string
): string | undefined {
core.debug(`Trying to get uv from tool cache for ${version}...`)
const cachedVersions = tc.findAllVersions(TOOL_CACHE_NAME, arch)
core.debug(`Cached versions: ${cachedVersions}`)
return tc.find(TOOL_CACHE_NAME, version, arch)
}
export async function downloadVersion(
@@ -32,89 +20,32 @@ export async function downloadVersion(
arch: Architecture,
version: string,
checkSum: string | undefined,
githubToken: string,
): Promise<{ version: string; cachedToolDir: string }> {
const resolvedVersion = await resolveVersion(version, githubToken);
const artifact = `uv-${arch}-${platform}`;
let extension = ".tar.gz";
if (platform === "pc-windows-msvc") {
extension = ".zip";
githubToken: string | undefined
): Promise<string> {
const artifact = `uv-${arch}-${platform}`
let downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/download/${version}/${artifact}`
if (platform === 'pc-windows-msvc') {
downloadUrl += '.zip'
} else {
downloadUrl += '.tar.gz'
}
const downloadUrl = `https://github.com/${OWNER}/${REPO}/releases/download/${resolvedVersion}/${artifact}${extension}`;
core.info(`Downloading uv from "${downloadUrl}" ...`);
core.info(`Downloading uv from "${downloadUrl}" ...`)
const downloadPath = await tc.downloadTool(
downloadUrl,
undefined,
githubToken,
);
await validateChecksum(
checkSum,
downloadPath,
arch,
platform,
resolvedVersion,
);
githubToken
)
await validateChecksum(checkSum, downloadPath, arch, platform, version)
let uvDir: string;
if (platform === "pc-windows-msvc") {
const fullPathWithExtension = `${downloadPath}${extension}`;
await fs.copyFile(downloadPath, fullPathWithExtension);
uvDir = await tc.extractZip(fullPathWithExtension);
let uvDir: string
if (platform === 'pc-windows-msvc') {
uvDir = await tc.extractZip(downloadPath)
// On windows extracting the zip does not create an intermediate directory
} else {
const extractedDir = await tc.extractTar(downloadPath);
uvDir = path.join(extractedDir, artifact);
const extractedDir = await tc.extractTar(downloadPath)
uvDir = path.join(extractedDir, artifact)
}
const cachedToolDir = await tc.cacheDir(
uvDir,
TOOL_CACHE_NAME,
resolvedVersion,
arch,
);
return { version: resolvedVersion, cachedToolDir };
}
export async function resolveVersion(
versionInput: string,
githubToken: string,
): Promise<string> {
const version =
versionInput === "latest"
? await getLatestVersion(githubToken)
: versionInput;
if (tc.isExplicitVersion(version)) {
core.debug(`Version ${version} is an explicit version.`);
return version;
}
const availableVersions = await getAvailableVersions(githubToken);
const resolvedVersion = tc.evaluateVersions(availableVersions, version);
if (resolvedVersion === "") {
throw new Error(`No version found for ${version}`);
}
return resolvedVersion;
}
async function getAvailableVersions(githubToken: string): Promise<string[]> {
const octokit = github.getOctokit(githubToken, { baseUrl: GITHUB_COM_API });
const response = await octokit.paginate(octokit.rest.repos.listReleases, {
owner: OWNER,
repo: REPO,
});
return response.map((release) => release.tag_name);
}
async function getLatestVersion(githubToken: string) {
const octokit = github.getOctokit(githubToken, { baseUrl: GITHUB_COM_API });
const { data: latestRelease } = await octokit.rest.repos.getLatestRelease({
owner: OWNER,
repo: REPO,
});
if (!latestRelease) {
throw new Error("Could not determine latest release.");
}
return latestRelease.tag_name;
return await tc.cacheDir(uvDir, TOOL_CACHE_NAME, version, arch)
}
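For reference, the release asset URL that downloadVersion assembles looks like the following; the version is made up, and the extension is ".zip" only on pc-windows-msvc.
// Illustrative only:
const exampleDownloadUrl =
  "https://github.com/astral-sh/uv/releases/download/0.4.0/uv-x86_64-unknown-linux-gnu.tar.gz";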


@@ -1,48 +0,0 @@
import * as crypto from "node:crypto";
import * as core from "@actions/core";
import * as fs from "node:fs";
import * as stream from "node:stream";
import * as util from "node:util";
import { create } from "@actions/glob";
/**
* Hashes files matching the given glob pattern.
*
* Copied from https://github.com/actions/toolkit/blob/20ed2908f19538e9dfb66d8083f1171c0a50a87c/packages/glob/src/internal-hash-files.ts#L9-L49
* But supports hashing files outside the GITHUB_WORKSPACE.
* @param pattern The glob pattern to match files.
* @param verbose Whether to log the files being hashed.
*/
export async function hashFiles(
pattern: string,
verbose = false,
): Promise<string> {
const globber = await create(pattern);
let hasMatch = false;
const writeDelegate = verbose ? core.info : core.debug;
const result = crypto.createHash("sha256");
let count = 0;
for await (const file of globber.globGenerator()) {
writeDelegate(file);
if (fs.statSync(file).isDirectory()) {
writeDelegate(`Skip directory '${file}'.`);
continue;
}
const hash = crypto.createHash("sha256");
const pipeline = util.promisify(stream.pipeline);
await pipeline(fs.createReadStream(file), hash);
result.write(hash.digest());
count++;
if (!hasMatch) {
hasMatch = true;
}
}
result.end();
if (hasMatch) {
writeDelegate(`Found ${count} files to hash.`);
return result.digest("hex");
}
writeDelegate("No matches found for glob");
return "";
}
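A minimal usage sketch for hashFiles above; the glob patterns are illustrative and it assumes an async context.
// Patterns can be newline-separated, mirroring how cache-dependency-glob is passed in.
const depHash = await hashFiles("**/uv.lock\n**/requirements*.txt", true);
// depHash is "" when nothing matched, otherwise a hex sha256 over the per-file digests.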


@@ -1,84 +1,49 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as fs from "node:fs";
import {
STATE_CACHE_MATCHED_KEY,
STATE_CACHE_KEY,
} from "./cache/restore-cache";
import {
cacheLocalPath,
enableCache,
ignoreNothingToCache,
pruneCache as shouldPruneCache,
} from "./utils/inputs";
import * as cache from '@actions/cache'
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import {STATE_CACHE_MATCHED_KEY, STATE_CACHE_KEY} from './cache/restore-cache'
import {cacheLocalPath, enableCache} from './utils/inputs'
export async function run(): Promise<void> {
try {
if (enableCache) {
await saveCache();
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
process.exit(0);
await saveCache()
}
} catch (error) {
const err = error as Error;
core.setFailed(err.message);
const err = error as Error
core.setFailed(err.message)
}
process.exit(0)
}
async function saveCache(): Promise<void> {
const cacheKey = core.getState(STATE_CACHE_KEY);
const matchedKey = core.getState(STATE_CACHE_MATCHED_KEY);
const cacheKey = core.getState(STATE_CACHE_KEY)
const matchedKey = core.getState(STATE_CACHE_MATCHED_KEY)
if (!cacheKey) {
core.warning("Error retrieving cache key from state.");
return;
}
if (matchedKey === cacheKey) {
core.info(`Cache hit occurred on key ${cacheKey}, not saving cache.`);
return;
core.warning('Error retrieving cache key from state.')
return
} else if (matchedKey === cacheKey) {
core.info(`Cache hit occurred on key ${cacheKey}, not saving cache.`)
return
}
if (shouldPruneCache) {
await pruneCache();
}
await pruneCache()
core.info(`Saving cache path: ${cacheLocalPath}`);
if (!fs.existsSync(cacheLocalPath) && !ignoreNothingToCache) {
throw new Error(
`Cache path ${cacheLocalPath} does not exist on disk. This likely indicates that there are no dependencies to cache. Consider disabling the cache input if it is not needed.`,
);
}
try {
await cache.saveCache([cacheLocalPath], cacheKey);
core.info(`cache saved with the key: ${cacheKey}`);
} catch (e) {
if (
e instanceof Error &&
e.message ===
"Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved."
) {
core.info(
"No cacheable paths were found. Ignoring because ignore-nothing-to-save is enabled.",
);
} else {
throw e;
}
}
core.info(`Saving cache path: ${cacheLocalPath}`)
await cache.saveCache([cacheLocalPath], cacheKey)
core.info(`cache saved with the key: ${cacheKey}`)
}
async function pruneCache(): Promise<void> {
const options: exec.ExecOptions = {
silent: !core.isDebug(),
};
const execArgs = ["cache", "prune", "--ci"];
silent: !core.isDebug()
}
const execArgs = ['cache', 'prune', '--ci']
core.info("Pruning cache...");
await exec.exec("uv", execArgs, options);
core.info('Pruning cache...')
await exec.exec('uv', execArgs, options)
}
run();
run()


@@ -1,66 +1,51 @@
import * as core from "@actions/core";
import * as path from "node:path";
import {
downloadVersion,
tryGetFromToolCache,
resolveVersion,
} from "./download/download-version";
import { restoreCache } from "./cache/restore-cache";
import * as core from '@actions/core'
import * as path from 'path'
import {downloadVersion, tryGetFromToolCache} from './download/download-version'
import {restoreCache} from './cache/restore-cache'
import {
type Architecture,
getArch,
getPlatform,
type Platform,
} from "./utils/platforms";
import {downloadLatest} from './download/download-latest'
import {Architecture, getArch, getPlatform, Platform} from './utils/platforms'
import {
cacheLocalPath,
checkSum,
enableCache,
githubToken,
pythonVersion,
toolBinDir,
toolDir,
version,
} from "./utils/inputs";
import * as exec from "@actions/exec";
version
} from './utils/inputs'
async function run(): Promise<void> {
const platform = getPlatform();
const arch = getArch();
const platform = getPlatform()
const arch = getArch()
try {
if (platform === undefined) {
throw new Error(`Unsupported platform: ${process.platform}`);
throw new Error(`Unsupported platform: ${process.platform}`)
}
if (arch === undefined) {
throw new Error(`Unsupported architecture: ${process.arch}`);
throw new Error(`Unsupported architecture: ${process.arch}`)
}
const setupResult = await setupUv(
platform,
arch,
version,
checkSum,
githubToken,
);
githubToken
)
addUvToPath(setupResult.uvDir);
addToolBinToPath();
setToolDir();
await setupPython();
addMatchers();
setCacheDir(cacheLocalPath);
addUvToPath(setupResult.uvDir)
core.setOutput('uv-version', version)
core.info(`Successfully installed uv version ${version}`)
core.setOutput("uv-version", setupResult.version);
core.info(`Successfully installed uv version ${setupResult.version}`);
addMatchers()
setCacheDir(cacheLocalPath)
if (enableCache) {
await restoreCache(setupResult.version);
await restoreCache(setupResult.version)
}
process.exit(0);
} catch (err) {
core.setFailed((err as Error).message);
core.setFailed((err as Error).message)
}
process.exit(0)
}
async function setupUv(
@@ -68,93 +53,47 @@ async function setupUv(
arch: Architecture,
versionInput: string,
checkSum: string | undefined,
githubToken: string,
): Promise<{ uvDir: string; version: string }> {
const resolvedVersion = await resolveVersion(versionInput, githubToken);
const toolCacheResult = tryGetFromToolCache(arch, resolvedVersion);
if (toolCacheResult.installedPath) {
core.info(`Found uv in tool-cache for ${toolCacheResult.version}`);
return {
uvDir: toolCacheResult.installedPath,
version: toolCacheResult.version,
};
githubToken: string | undefined
): Promise<{uvDir: string; version: string}> {
let installedPath: string | undefined
let cachedToolDir: string
let version: string
if (versionInput === 'latest') {
const result = await downloadLatest(platform, arch, checkSum, githubToken)
version = result.version
cachedToolDir = result.cachedToolDir
} else {
version = versionInput
installedPath = tryGetFromToolCache(arch, versionInput)
if (installedPath) {
core.info(`Found uv in tool-cache for ${versionInput}`)
return {uvDir: installedPath, version}
}
cachedToolDir = await downloadVersion(
platform,
arch,
versionInput,
checkSum,
githubToken
)
}
const downloadVersionResult = await downloadVersion(
platform,
arch,
resolvedVersion,
checkSum,
githubToken,
);
return {
uvDir: downloadVersionResult.cachedToolDir,
version: downloadVersionResult.version,
};
return {uvDir: cachedToolDir, version}
}
function addUvToPath(cachedPath: string): void {
core.addPath(cachedPath);
core.info(`Added ${cachedPath} to the path`);
}
function addToolBinToPath(): void {
if (toolBinDir !== undefined) {
core.exportVariable("UV_TOOL_BIN_DIR", toolBinDir);
core.info(`Set UV_TOOL_BIN_DIR to ${toolBinDir}`);
core.addPath(toolBinDir);
core.info(`Added ${toolBinDir} to the path`);
} else {
if (process.env.XDG_BIN_HOME !== undefined) {
core.addPath(process.env.XDG_BIN_HOME);
core.info(`Added ${process.env.XDG_BIN_HOME} to the path`);
} else if (process.env.XDG_DATA_HOME !== undefined) {
core.addPath(`${process.env.XDG_DATA_HOME}/../bin`);
core.info(`Added ${process.env.XDG_DATA_HOME}/../bin to the path`);
} else {
core.addPath(`${process.env.HOME}/.local/bin`);
core.info(`Added ${process.env.HOME}/.local/bin to the path`);
}
}
}
function setToolDir(): void {
if (toolDir !== undefined) {
core.exportVariable("UV_TOOL_DIR", toolDir);
core.info(`Set UV_TOOL_DIR to ${toolDir}`);
}
}
async function setupPython(): Promise<void> {
if (pythonVersion !== "") {
core.exportVariable("UV_PYTHON", pythonVersion);
core.info(`Set UV_PYTHON to ${pythonVersion}`);
const options: exec.ExecOptions = {
silent: !core.isDebug(),
};
const execArgs = ["venv", "--python", pythonVersion];
core.info("Activating python venv...");
await exec.exec("uv", execArgs, options);
let venvBinPath = ".venv/bin";
if (process.platform === "win32") {
venvBinPath = ".venv/Scripts";
}
core.addPath(venvBinPath);
core.exportVariable("VIRTUAL_ENV", venvBinPath);
}
core.addPath(cachedPath)
core.info(`Added ${cachedPath} to the path`)
}
function setCacheDir(cacheLocalPath: string): void {
core.exportVariable("UV_CACHE_DIR", cacheLocalPath);
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`);
core.exportVariable('UV_CACHE_DIR', cacheLocalPath)
core.info(`Set UV_CACHE_DIR to ${cacheLocalPath}`)
}
function addMatchers(): void {
const matchersPath = path.join(__dirname, `..${path.sep}..`, ".github");
core.info(`##[add-matcher]${path.join(matchersPath, "python.json")}`);
const matchersPath = path.join(__dirname, `..${path.sep}..`, '.github')
core.info(`##[add-matcher]${path.join(matchersPath, 'python.json')}`)
}
run();
run()


@@ -0,0 +1,65 @@
import * as github from '@actions/github'
import * as core from '@actions/core'
import {OWNER, REPO} from './utils/utils'
import {createReadStream, promises as fs} from 'fs'
import * as readline from 'readline'
import * as semver from 'semver'
import {updateChecksums} from './download/checksum/update-known-checksums'
async function run(): Promise<void> {
const checksumFilePath = process.argv.slice(2)[0]
const defaultVersionFilePath = process.argv.slice(2)[1]
const github_token = process.argv.slice(2)[2]
const octokit = github.getOctokit(github_token)
const response = await octokit.paginate(octokit.rest.repos.listReleases, {
owner: OWNER,
repo: REPO
})
const downloadUrls: string[] = response.flatMap(release =>
release.assets
.filter(asset => asset.name.endsWith('.sha256'))
.map(asset => asset.browser_download_url)
)
await updateChecksums(checksumFilePath, downloadUrls)
const latestVersion = response
.map(release => release.tag_name)
.sort(semver.rcompare)[0]
core.setOutput('latest-version', latestVersion)
await updateDefaultVersion(defaultVersionFilePath, latestVersion)
}
async function updateDefaultVersion(
filePath: string,
latestVersion: string
): Promise<void> {
const fileStream = createReadStream(filePath)
const rl = readline.createInterface({
input: fileStream
})
let foundDescription = false
const lines = []
for await (let line of rl) {
if (
!foundDescription &&
line.includes("description: 'The version of uv to install'")
) {
foundDescription = true
} else if (foundDescription && line.includes('default: ')) {
line = line.replace(/'[^']*'/, `'${latestVersion}'`)
foundDescription = false
}
lines.push(line)
}
await fs.writeFile(filePath, lines.join('\n'))
}
run()


@@ -1,32 +0,0 @@
import * as github from "@actions/github";
import * as core from "@actions/core";
import { GITHUB_COM_API, OWNER, REPO } from "./utils/constants";
import * as semver from "semver";
import { updateChecksums } from "./download/checksum/update-known-checksums";
async function run(): Promise<void> {
const checksumFilePath = process.argv.slice(2)[0];
const github_token = process.argv.slice(2)[1];
const octokit = github.getOctokit(github_token, { baseUrl: GITHUB_COM_API });
const response = await octokit.paginate(octokit.rest.repos.listReleases, {
owner: OWNER,
repo: REPO,
});
const downloadUrls: string[] = response.flatMap((release) =>
release.assets
.filter((asset) => asset.name.endsWith(".sha256"))
.map((asset) => asset.browser_download_url),
);
await updateChecksums(checksumFilePath, downloadUrls);
const latestVersion = response
.map((release) => release.tag_name)
.sort(semver.rcompare)[0];
core.setOutput("latest-version", latestVersion);
}
run();


@@ -1,4 +0,0 @@
export const REPO = "uv";
export const OWNER = "astral-sh";
export const TOOL_CACHE_NAME = "uv";
export const GITHUB_COM_API = "https://api.github.com";


@@ -1,76 +1,9 @@
import * as core from "@actions/core";
import path from "node:path";
import * as core from '@actions/core'
export const version = core.getInput("version");
export const pythonVersion = core.getInput("python-version");
export const checkSum = core.getInput("checksum");
export const enableCache = getEnableCache();
export const cacheSuffix = core.getInput("cache-suffix") || "";
export const cacheLocalPath = getCacheLocalPath();
export const cacheDependencyGlob = core.getInput("cache-dependency-glob");
export const pruneCache = core.getInput("prune-cache") === "true";
export const ignoreNothingToCache =
core.getInput("ignore-nothing-to-cache") === "true";
export const toolBinDir = getToolBinDir();
export const toolDir = getToolDir();
export const githubToken = core.getInput("github-token");
function getEnableCache(): boolean {
const enableCacheInput = core.getInput("enable-cache");
if (enableCacheInput === "auto") {
return process.env.RUNNER_ENVIRONMENT === "github-hosted";
}
return enableCacheInput === "true";
}
function getToolBinDir(): string | undefined {
const toolBinDirInput = core.getInput("tool-bin-dir");
if (toolBinDirInput !== "") {
return expandTilde(toolBinDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}uv-tool-bin-dir`;
}
throw Error(
"Could not determine UV_TOOL_BIN_DIR. Please make sure RUNNER_TEMP is set or provide the tool-bin-dir input",
);
}
return undefined;
}
function getToolDir(): string | undefined {
const toolDirInput = core.getInput("tool-dir");
if (toolDirInput !== "") {
return expandTilde(toolDirInput);
}
if (process.platform === "win32") {
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}uv-tool-dir`;
}
throw Error(
"Could not determine UV_TOOL_DIR. Please make sure RUNNER_TEMP is set or provide the tool-dir input",
);
}
return undefined;
}
function getCacheLocalPath(): string {
const cacheLocalPathInput = core.getInput("cache-local-path");
if (cacheLocalPathInput !== "") {
return expandTilde(cacheLocalPathInput);
}
if (process.env.RUNNER_TEMP !== undefined) {
return `${process.env.RUNNER_TEMP}${path.sep}setup-uv-cache`;
}
throw Error(
"Could not determine UV_CACHE_DIR. Please make sure RUNNER_TEMP is set or provide the cache-local-path input",
);
}
function expandTilde(input: string): string {
if (input.startsWith("~")) {
return `${process.env.HOME}${input.substring(1)}`;
}
return input;
}
export const version = core.getInput('version')
export const checkSum = core.getInput('checksum')
export const enableCache = core.getInput('enable-cache') === 'true'
export const cacheSuffix = core.getInput('cache-suffix') || ''
export const cacheLocalPath = core.getInput('cache-local-path')
export const githubToken = core.getInput('github-token')
export const cacheDependencyGlob = core.getInput('cache-dependency-glob')
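A short worked example for expandTilde in the newer version of this file (the HOME value is illustrative):
// expandTilde("~/.cache/uv")   -> `${process.env.HOME}/.cache/uv`, e.g. "/home/runner/.cache/uv"
// expandTilde("/tmp/uv-cache") -> "/tmp/uv-cache" (returned unchanged)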


@@ -1,33 +1,33 @@
export type Platform =
| "unknown-linux-gnu"
| "unknown-linux-musl"
| "unknown-linux-musleabihf"
| "apple-darwin"
| "pc-windows-msvc";
export type Architecture = "i686" | "x86_64" | "aarch64";
| 'unknown-linux-gnu'
| 'unknown-linux-musl'
| 'unknown-linux-musleabihf'
| 'apple-darwin'
| 'pc-windows-msvc'
export type Architecture = 'i686' | 'x86_64' | 'aarch64'
export function getArch(): Architecture | undefined {
const arch = process.arch;
const archMapping: { [key: string]: Architecture } = {
ia32: "i686",
x64: "x86_64",
arm64: "aarch64",
};
const arch = process.arch
const archMapping: {[key: string]: Architecture} = {
ia32: 'i686',
x64: 'x86_64',
arm64: 'aarch64'
}
if (arch in archMapping) {
return archMapping[arch];
return archMapping[arch]
}
}
export function getPlatform(): Platform | undefined {
const platform = process.platform;
const platformMapping: { [key: string]: Platform } = {
linux: "unknown-linux-gnu",
darwin: "apple-darwin",
win32: "pc-windows-msvc",
};
const platform = process.platform
const platformMapping: {[key: string]: Platform} = {
linux: 'unknown-linux-gnu',
darwin: 'apple-darwin',
win32: 'pc-windows-msvc'
}
if (platform in platformMapping) {
return platformMapping[platform];
return platformMapping[platform]
}
}

src/utils/utils.ts Normal file

@@ -0,0 +1,3 @@
export const REPO = 'uv'
export const OWNER = 'astral-sh'
export const TOOL_CACHE_NAME = 'uv'


@@ -1,12 +1,12 @@
{
"compilerOptions": {
"target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
"outDir": "./lib" /* Redirect output structure to the directory. */,
"rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,
"strict": true /* Enable all strict type-checking options. */,
"noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}