Mirror of https://gitea.com/actions/cache.git, synced 2025-10-29 07:47:12 +00:00

Compare commits: joshmgross ... v1.0.1 (3 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 86dff562ab |  |
|  | 0f810ad45a |  |
|  | 9d8c7b4041 |  |
ESLint config (16 changes, deleted file)
| @@ -1,16 +0,0 @@ | ||||
| { | ||||
|   "env": { "node": true, "jest": true }, | ||||
|   "parser": "@typescript-eslint/parser", | ||||
|   "parserOptions": { "ecmaVersion": 2020, "sourceType": "module" }, | ||||
|   "extends": [ | ||||
|     "eslint:recommended", | ||||
|     "plugin:@typescript-eslint/eslint-recommended", | ||||
|     "plugin:@typescript-eslint/recommended", | ||||
|     "plugin:import/errors", | ||||
|     "plugin:import/warnings", | ||||
|     "plugin:import/typescript", | ||||
|     "plugin:prettier/recommended", | ||||
|     "prettier/@typescript-eslint" | ||||
|   ], | ||||
|   "plugins": ["@typescript-eslint", "jest"] | ||||
| } | ||||
							
								
								
									
.github/workflows/workflow.yml (26 changes, vendored)
| @@ -1,11 +1,6 @@ | ||||
| name: Tests | ||||
|  | ||||
| on: | ||||
|   pull_request: | ||||
|     branches: | ||||
|       - master | ||||
|     paths-ignore: | ||||
|       - '**.md' | ||||
|   push: | ||||
|     branches: | ||||
|       - master | ||||
| @@ -15,39 +10,22 @@ on: | ||||
| jobs: | ||||
|   test: | ||||
|     name: Test on ${{ matrix.os }} | ||||
|  | ||||
|     strategy: | ||||
|       matrix: | ||||
|         os: [ubuntu-latest, windows-latest, macOS-latest] | ||||
|  | ||||
|     runs-on: ${{ matrix.os }} | ||||
|  | ||||
|      | ||||
|     steps: | ||||
|     - uses: actions/checkout@v1 | ||||
|  | ||||
|        | ||||
|     - uses: actions/setup-node@v1 | ||||
|       with: | ||||
|         node-version: '12.x' | ||||
|  | ||||
|     - name: Get npm cache directory | ||||
|       id: npm-cache | ||||
|       run: | | ||||
|         echo "::set-output name=dir::$(npm config get cache)" | ||||
|  | ||||
|     - uses: actions/cache@v1 | ||||
|       with: | ||||
|         path: ${{ steps.npm-cache.outputs.dir }} | ||||
|         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} | ||||
|         restore-keys: | | ||||
|           ${{ runner.os }}-node- | ||||
|  | ||||
|     - run: npm ci | ||||
|  | ||||
|     - name: Prettier Format Check | ||||
|       run: npm run format-check | ||||
|  | ||||
|     - name: ESLint Check | ||||
|       run: npm run lint | ||||
|  | ||||
|     - name: Build & Test | ||||
|       run: npm run test | ||||
|   | ||||
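The removed `Get npm cache directory` step above illustrates the output-passing pattern: `::set-output` exposes a value as `steps.npm-cache.outputs.dir`, which the cache step then consumes as its `path`. For reference, a minimal TypeScript equivalent using `@actions/core` (the `dir` output name mirrors the workflow; `execSync` is used here only for brevity):

```typescript
import * as core from "@actions/core";
import { execSync } from "child_process";

// Equivalent of: echo "::set-output name=dir::$(npm config get cache)"
// Assumes npm is on PATH; the value becomes steps.<step-id>.outputs.dir.
const npmCacheDir = execSync("npm config get cache").toString().trim();
core.setOutput("dir", npmCacheDir);
```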
							
								
								
									
README.md (48 changes)
| @@ -2,7 +2,7 @@ | ||||
|  | ||||
| This GitHub Action allows caching dependencies and build outputs to improve workflow execution time. | ||||
|  | ||||
| <a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a> | ||||
| <a href="https://github.com/actions/cache"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a> | ||||
|  | ||||
| ## Documentation | ||||
|  | ||||
| @@ -28,7 +28,7 @@ Create a workflow `.yml` file in your repository's `.github/workflows` directory | ||||
| ### Example workflow | ||||
|  | ||||
| ```yaml | ||||
| name: Caching Primes | ||||
| name: Example Caching with npm | ||||
|  | ||||
| on: push | ||||
|  | ||||
| @@ -39,41 +39,27 @@ jobs: | ||||
|     steps: | ||||
|     - uses: actions/checkout@v1 | ||||
|  | ||||
|     - name: Cache Primes | ||||
|       id: cache-primes | ||||
|     - name: Cache node modules | ||||
|       uses: actions/cache@v1 | ||||
|       with: | ||||
|         path: prime-numbers | ||||
|         key: ${{ runner.os }}-primes | ||||
|         path: node_modules | ||||
|         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} | ||||
|         restore-keys: | | ||||
|           ${{ runner.os }}-node- | ||||
|  | ||||
|     - name: Generate Prime Numbers | ||||
|       if: steps.cache-primes.outputs.cache-hit != 'true' | ||||
|       run: /generate-primes.sh -d prime-numbers | ||||
|     - name: Install Dependencies | ||||
|       run: npm install | ||||
|      | ||||
|     - name: Use Prime Numbers | ||||
|       run: /primes.sh -d prime-numbers | ||||
|     - name: Build | ||||
|       run: npm run build | ||||
|  | ||||
|     - name: Test | ||||
|       run: npm run test | ||||
| ``` | ||||
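In this example, `key` changes whenever `package-lock.json` changes, while `restore-keys` provides ordered prefix fallbacks to the most recent older cache. A rough sketch of that candidate ordering and matching (a hypothetical helper, not the action's actual lookup code):

```typescript
// Hypothetical sketch: exact match on the primary key first, then a prefix
// match for each restore key, in order.
function findMatch(storedKeys: string[], key: string, restoreKeys: string[]): string | undefined {
    if (storedKeys.includes(key)) {
        return key; // exact hit: the cache-hit output will be "true"
    }
    for (const restoreKey of restoreKeys) {
        const hit = storedKeys.find(stored => stored.startsWith(restoreKey));
        if (hit) {
            return hit; // partial hit: cache restored, but cache-hit stays "false"
        }
    }
    return undefined;
}
```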
|  | ||||
| ## Implementation Examples | ||||
|  | ||||
| Every programming language and framework has its own way of caching. | ||||
|  | ||||
| See [Examples](examples.md) for a list of `actions/cache` implementations for use with: | ||||
|  | ||||
| - [C# - Nuget](./examples.md#c---nuget) | ||||
| - [Elixir - Mix](./examples.md#elixir---mix) | ||||
| - [Go - Modules](./examples.md#go---modules) | ||||
| - [Java - Gradle](./examples.md#java---gradle) | ||||
| - [Java - Maven](./examples.md#java---maven) | ||||
| - [Node - npm](./examples.md#node---npm) | ||||
| - [Node - Yarn](./examples.md#node---yarn) | ||||
| - [PHP - Composer](./examples.md#php---composer) | ||||
| - [Python - pip](./examples.md#python---pip) | ||||
| - [Ruby - Gem](./examples.md#ruby---gem) | ||||
| - [Rust - Cargo](./examples.md#rust---cargo) | ||||
| - [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage) | ||||
| - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods) | ||||
| ## Ecosystem Examples | ||||
|  | ||||
| See [Examples](examples.md) | ||||
|  | ||||
| ## Cache Limits | ||||
|  | ||||
| @@ -92,7 +78,7 @@ steps: | ||||
|     id: cache | ||||
|     with: | ||||
|       path: path/to/dependencies | ||||
|       key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} | ||||
|       key: ${{ runner.os }}-${{ hashFiles('**/lockfiles')}} | ||||
|    | ||||
|   - name: Install Dependencies | ||||
|     if: steps.cache.outputs.cache-hit != 'true' | ||||
|   | ||||
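The `cache-hit` output gating the install step above is only `'true'` on an exact primary-key match, as the tests further down also assert. A minimal sketch of how the action would report it with `@actions/core` (assumed shape; compare `setOutputAndState` in the tests below):

```typescript
import * as core from "@actions/core";

// Assumed helper; see the isExactKeyMatch tests below for its contract.
declare function isExactKeyMatch(key: string, entry?: { cacheKey?: string }): boolean;

// Sketch: report cache-hit as the string "true"/"false", consistent with the
// setOutputAndState tests below.
function setCacheHitOutput(key: string, entry?: { cacheKey?: string }): void {
    core.setOutput("cache-hit", isExactKeyMatch(key, entry).toString());
}
```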
__tests__/__fixtures__/helloWorld.txt (1 change, deleted file)
| @@ -1 +0,0 @@ | ||||
| hello world | ||||
Tests for src/utils/actionUtils (236 changes, deleted file)
| @@ -1,236 +0,0 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as os from "os"; | ||||
| import * as path from "path"; | ||||
|  | ||||
| import { Events, Outputs, State } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
|  | ||||
| jest.mock("@actions/core"); | ||||
| jest.mock("os"); | ||||
|  | ||||
| afterEach(() => { | ||||
|     delete process.env[Events.Key]; | ||||
| }); | ||||
|  | ||||
| test("getArchiveFileSize returns file size", () => { | ||||
|     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt"); | ||||
|  | ||||
|     const size = actionUtils.getArchiveFileSize(filePath); | ||||
|  | ||||
|     expect(size).toBe(11); | ||||
| }); | ||||
|  | ||||
| test("isExactKeyMatch with undefined cache entry returns false", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry = undefined; | ||||
|  | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
| }); | ||||
|  | ||||
| test("isExactKeyMatch with empty cache entry returns false", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = {}; | ||||
|  | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
| }); | ||||
|  | ||||
| test("isExactKeyMatch with different keys returns false", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-" | ||||
|     }; | ||||
|  | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
| }); | ||||
|  | ||||
| test("isExactKeyMatch with different key accents returns false", () => { | ||||
|     const key = "linux-áccent"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-accent" | ||||
|     }; | ||||
|  | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); | ||||
| }); | ||||
|  | ||||
| test("isExactKeyMatch with same key returns true", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-rust" | ||||
|     }; | ||||
|  | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true); | ||||
| }); | ||||
|  | ||||
| test("isExactKeyMatch with same key and different casing returns true", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "LINUX-RUST" | ||||
|     }; | ||||
|  | ||||
|     expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true); | ||||
| }); | ||||
|  | ||||
| test("setOutputAndState with undefined entry to set cache-hit output", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry = undefined; | ||||
|  | ||||
|     const setOutputMock = jest.spyOn(core, "setOutput"); | ||||
|     const saveStateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     actionUtils.setOutputAndState(key, cacheEntry); | ||||
|  | ||||
|     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(1); | ||||
|  | ||||
|     expect(saveStateMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("setOutputAndState with exact match to set cache-hit output and state", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-rust" | ||||
|     }; | ||||
|  | ||||
|     const setOutputMock = jest.spyOn(core, "setOutput"); | ||||
|     const saveStateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     actionUtils.setOutputAndState(key, cacheEntry); | ||||
|  | ||||
|     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true"); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(1); | ||||
|  | ||||
|     expect(saveStateMock).toHaveBeenCalledWith( | ||||
|         State.CacheResult, | ||||
|         JSON.stringify(cacheEntry) | ||||
|     ); | ||||
|     expect(saveStateMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
|  | ||||
| test("setOutputAndState with no exact match to set cache-hit output and state", () => { | ||||
|     const key = "linux-rust"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43" | ||||
|     }; | ||||
|  | ||||
|     const setOutputMock = jest.spyOn(core, "setOutput"); | ||||
|     const saveStateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     actionUtils.setOutputAndState(key, cacheEntry); | ||||
|  | ||||
|     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(1); | ||||
|  | ||||
|     expect(saveStateMock).toHaveBeenCalledWith( | ||||
|         State.CacheResult, | ||||
|         JSON.stringify(cacheEntry) | ||||
|     ); | ||||
|     expect(saveStateMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
|  | ||||
| test("getCacheState with no state returns undefined", () => { | ||||
|     const getStateMock = jest.spyOn(core, "getState"); | ||||
|     getStateMock.mockImplementation(() => { | ||||
|         return ""; | ||||
|     }); | ||||
|  | ||||
|     const state = actionUtils.getCacheState(); | ||||
|  | ||||
|     expect(state).toBe(undefined); | ||||
|  | ||||
|     expect(getStateMock).toHaveBeenCalledWith(State.CacheResult); | ||||
|     expect(getStateMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
|  | ||||
| test("getCacheState with valid state", () => { | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getStateMock = jest.spyOn(core, "getState"); | ||||
|     getStateMock.mockImplementation(() => { | ||||
|         return JSON.stringify(cacheEntry); | ||||
|     }); | ||||
|  | ||||
|     const state = actionUtils.getCacheState(); | ||||
|  | ||||
|     expect(state).toEqual(cacheEntry); | ||||
|  | ||||
|     expect(getStateMock).toHaveBeenCalledWith(State.CacheResult); | ||||
|     expect(getStateMock).toHaveBeenCalledTimes(1); | ||||
| }); | ||||
|  | ||||
| test("logWarning logs a message with a warning prefix", () => { | ||||
|     const message = "A warning occurred."; | ||||
|  | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|  | ||||
|     actionUtils.logWarning(message); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`); | ||||
| }); | ||||
|  | ||||
| test("isValidEvent returns false for unknown event", () => { | ||||
|     const event = "foo"; | ||||
|     process.env[Events.Key] = event; | ||||
|  | ||||
|     const isValidEvent = actionUtils.isValidEvent(); | ||||
|  | ||||
|     expect(isValidEvent).toBe(false); | ||||
| }); | ||||
|  | ||||
| test("resolvePath with no ~ in path", () => { | ||||
|     const filePath = ".cache/yarn"; | ||||
|  | ||||
|     const resolvedPath = actionUtils.resolvePath(filePath); | ||||
|  | ||||
|     const expectedPath = path.resolve(filePath); | ||||
|     expect(resolvedPath).toBe(expectedPath); | ||||
| }); | ||||
|  | ||||
| test("resolvePath with ~ in path", () => { | ||||
|     const filePath = "~/.cache/yarn"; | ||||
|  | ||||
|     const homedir = jest.requireActual("os").homedir(); | ||||
|     const homedirMock = jest.spyOn(os, "homedir"); | ||||
|     homedirMock.mockImplementation(() => { | ||||
|         return homedir; | ||||
|     }); | ||||
|  | ||||
|     const resolvedPath = actionUtils.resolvePath(filePath); | ||||
|  | ||||
|     const expectedPath = path.join(homedir, ".cache/yarn"); | ||||
|     expect(resolvedPath).toBe(expectedPath); | ||||
| }); | ||||
|  | ||||
| test("resolvePath with home not found", () => { | ||||
|     const filePath = "~/.cache/yarn"; | ||||
|     const homedirMock = jest.spyOn(os, "homedir"); | ||||
|     homedirMock.mockImplementation(() => { | ||||
|         return ""; | ||||
|     }); | ||||
|  | ||||
|     expect(() => actionUtils.resolvePath(filePath)).toThrow( | ||||
|         "Unable to resolve `~` to HOME" | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("isValidEvent returns true for push event", () => { | ||||
|     const event = Events.Push; | ||||
|     process.env[Events.Key] = event; | ||||
|  | ||||
|     const isValidEvent = actionUtils.isValidEvent(); | ||||
|  | ||||
|     expect(isValidEvent).toBe(true); | ||||
| }); | ||||
|  | ||||
| test("isValidEvent returns true for pull request event", () => { | ||||
|     const event = Events.PullRequest; | ||||
|     process.env[Events.Key] = event; | ||||
|  | ||||
|     const isValidEvent = actionUtils.isValidEvent(); | ||||
|  | ||||
|     expect(isValidEvent).toBe(true); | ||||
| }); | ||||
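For orientation, here is a sketch of `isExactKeyMatch`, `resolvePath`, and `logWarning` consistent with the deleted tests above (assumed implementations derived from the assertions, not necessarily the action's exact source):

```typescript
import * as core from "@actions/core";
import * as os from "os";
import * as path from "path";

// Case-insensitive but accent-sensitive, matching the "different casing
// returns true" and "different key accents returns false" tests.
export function isExactKeyMatch(key: string, cacheEntry?: { cacheKey?: string }): boolean {
    const cacheKey = cacheEntry?.cacheKey;
    return (
        cacheKey !== undefined &&
        cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0
    );
}

// Expands a leading `~` via os.homedir(), matching the resolvePath tests.
export function resolvePath(filePath: string): string {
    if (filePath[0] === "~") {
        const home = os.homedir();
        if (!home) {
            throw new Error("Unable to resolve `~` to HOME");
        }
        return path.join(home, filePath.slice(1));
    }
    return path.resolve(filePath);
}

// Matches the "[warning]" prefix asserted in the logWarning test.
export function logWarning(message: string): void {
    core.info(`[warning]${message}`);
}
```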
							
								
								
									
__tests__/main.test.ts (22 changes, new file)
| @@ -0,0 +1,22 @@ | ||||
| import * as core from "@actions/core"; | ||||
|  | ||||
| import { Inputs } from "../src/constants"; | ||||
| import run from "../src/restore"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
|  | ||||
| test("restore with no path", async () => { | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: path" | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with no key", async () => { | ||||
|     testUtils.setInput(Inputs.Path, "node_modules"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: key" | ||||
|     ); | ||||
| }); | ||||
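Both failures come from `@actions/core`'s required-input handling: `getInput(name, { required: true })` throws `Input required and not supplied: <name>`, and the entry point is assumed to catch that and route it to `setFailed`. A minimal sketch:

```typescript
import * as core from "@actions/core";

// Assumed shape of the restore entry point under test: required inputs throw,
// and run() reports the error via core.setFailed rather than rethrowing.
async function run(): Promise<void> {
    try {
        const cachePath = core.getInput("path", { required: true }); // "Input required and not supplied: path"
        const key = core.getInput("key", { required: true });        // "Input required and not supplied: key"
        // ... restore the cache for cachePath/key ...
    } catch (error) {
        core.setFailed((error as Error).message);
    }
}
```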
| @@ -1,451 +0,0 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||
| import { Events, Inputs } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import run from "../src/restore"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
|  | ||||
| jest.mock("@actions/exec"); | ||||
| jest.mock("@actions/io"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
| jest.mock("../src/cacheHttpClient"); | ||||
|  | ||||
| beforeAll(() => { | ||||
|     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { | ||||
|         return path.resolve(filePath); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation( | ||||
|         (key, cacheResult) => { | ||||
|             const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|             return actualUtils.isExactKeyMatch(key, cacheResult); | ||||
|         } | ||||
|     ); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.isValidEvent(); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.getSupportedEvents(); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | ||||
|         return Promise.resolve(tool); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| beforeEach(() => { | ||||
|     process.env[Events.Key] = Events.Push; | ||||
| }); | ||||
|  | ||||
| afterEach(() => { | ||||
|     testUtils.clearInputs(); | ||||
|     delete process.env[Events.Key]; | ||||
| }); | ||||
|  | ||||
| test("restore with invalid event outputs warning", async () => { | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const invalidEvent = "commit_comment"; | ||||
|     process.env[Events.Key] = invalidEvent; | ||||
|     await run(); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.` | ||||
|     ); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("restore with no path should fail", async () => { | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: path" | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with no key", async () => { | ||||
|     testUtils.setInput(Inputs.Path, "node_modules"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: key" | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with too many keys should fail", async () => { | ||||
|     const key = "node-test"; | ||||
|     const restoreKeys = [...Array(20).keys()].map(x => x.toString()); | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key, | ||||
|         restoreKeys | ||||
|     }); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         `Key Validation Error: Keys are limited to a maximum of 10.` | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with large key should fail", async () => { | ||||
|     const key = "foo".repeat(512); // Over the 512 character limit | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         `Key Validation Error: ${key} cannot be larger than 512 characters.` | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with invalid key should fail", async () => { | ||||
|     const key = "comma,comma"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     await run(); | ||||
|     expect(failedMock).toHaveBeenCalledWith( | ||||
|         `Key Validation Error: ${key} cannot contain commas.` | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with no cache found", async () => { | ||||
|     const key = "node-test"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
|  | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     clientMock.mockImplementation(() => { | ||||
|         return Promise.resolve(null); | ||||
|     }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith( | ||||
|         `Cache not found for input keys: ${key}.` | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with server error should fail", async () => { | ||||
|     const key = "node-test"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
|  | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     clientMock.mockImplementation(() => { | ||||
|         throw new Error("HTTP Error Occurred"); | ||||
|     }); | ||||
|  | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("restore with restore keys and no cache found", async () => { | ||||
|     const key = "node-test"; | ||||
|     const restoreKey = "node-"; | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key, | ||||
|         restoreKeys: [restoreKey] | ||||
|     }); | ||||
|  | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     clientMock.mockImplementation(() => { | ||||
|         return Promise.resolve(null); | ||||
|     }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith( | ||||
|         `Cache not found for input keys: ${key}, ${restoreKey}.` | ||||
|     ); | ||||
| }); | ||||
|  | ||||
| test("restore with cache found", async () => { | ||||
|     const key = "node-test"; | ||||
|     const cachePath = path.resolve("node_modules"); | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
|  | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: key, | ||||
|         scope: "refs/heads/master", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     getCacheMock.mockImplementation(() => { | ||||
|         return Promise.resolve(cacheEntry); | ||||
|     }); | ||||
|     const tempPath = "/foo/bar"; | ||||
|  | ||||
|     const createTempDirectoryMock = jest.spyOn( | ||||
|         actionUtils, | ||||
|         "createTempDirectory" | ||||
|     ); | ||||
|     createTempDirectoryMock.mockImplementation(() => { | ||||
|         return Promise.resolve(tempPath); | ||||
|     }); | ||||
|  | ||||
|     const archivePath = path.join(tempPath, "cache.tgz"); | ||||
|     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); | ||||
|     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); | ||||
|  | ||||
|     const fileSize = 142; | ||||
|     const getArchiveFileSizeMock = jest | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
|  | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key]); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|               "-xz", | ||||
|               "--force-local", | ||||
|               "-f", | ||||
|               archivePath.replace(/\\/g, "/"), | ||||
|               "-C", | ||||
|               cachePath.replace(/\\/g, "/") | ||||
|           ] | ||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("restore with a pull request event and cache found", async () => { | ||||
|     const key = "node-test"; | ||||
|     const cachePath = path.resolve("node_modules"); | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key | ||||
|     }); | ||||
|  | ||||
|     process.env[Events.Key] = Events.PullRequest; | ||||
|  | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: key, | ||||
|         scope: "refs/heads/master", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     getCacheMock.mockImplementation(() => { | ||||
|         return Promise.resolve(cacheEntry); | ||||
|     }); | ||||
|     const tempPath = "/foo/bar"; | ||||
|  | ||||
|     const createTempDirectoryMock = jest.spyOn( | ||||
|         actionUtils, | ||||
|         "createTempDirectory" | ||||
|     ); | ||||
|     createTempDirectoryMock.mockImplementation(() => { | ||||
|         return Promise.resolve(tempPath); | ||||
|     }); | ||||
|  | ||||
|     const archivePath = path.join(tempPath, "cache.tgz"); | ||||
|     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); | ||||
|     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); | ||||
|  | ||||
|     const fileSize = 62915000; | ||||
|     const getArchiveFileSizeMock = jest | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
|  | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key]); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|               "-xz", | ||||
|               "--force-local", | ||||
|               "-f", | ||||
|               archivePath.replace(/\\/g, "/"), | ||||
|               "-C", | ||||
|               cachePath.replace(/\\/g, "/") | ||||
|           ] | ||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("restore with cache found for restore key", async () => { | ||||
|     const key = "node-test"; | ||||
|     const restoreKey = "node-"; | ||||
|     const cachePath = path.resolve("node_modules"); | ||||
|     testUtils.setInputs({ | ||||
|         path: "node_modules", | ||||
|         key, | ||||
|         restoreKeys: [restoreKey] | ||||
|     }); | ||||
|  | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const stateMock = jest.spyOn(core, "saveState"); | ||||
|  | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: restoreKey, | ||||
|         scope: "refs/heads/master", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|     const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); | ||||
|     getCacheMock.mockImplementation(() => { | ||||
|         return Promise.resolve(cacheEntry); | ||||
|     }); | ||||
|     const tempPath = "/foo/bar"; | ||||
|  | ||||
|     const createTempDirectoryMock = jest.spyOn( | ||||
|         actionUtils, | ||||
|         "createTempDirectory" | ||||
|     ); | ||||
|     createTempDirectoryMock.mockImplementation(() => { | ||||
|         return Promise.resolve(tempPath); | ||||
|     }); | ||||
|  | ||||
|     const archivePath = path.join(tempPath, "cache.tgz"); | ||||
|     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); | ||||
|     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); | ||||
|  | ||||
|     const fileSize = 142; | ||||
|     const getArchiveFileSizeMock = jest | ||||
|         .spyOn(actionUtils, "getArchiveFileSize") | ||||
|         .mockReturnValue(fileSize); | ||||
|  | ||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||
|     expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); | ||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||
|     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||
|         cacheEntry.archiveLocation, | ||||
|         archivePath | ||||
|     ); | ||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); | ||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|               "-xz", | ||||
|               "--force-local", | ||||
|               "-f", | ||||
|               archivePath.replace(/\\/g, "/"), | ||||
|               "-C", | ||||
|               cachePath.replace(/\\/g, "/") | ||||
|           ] | ||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|  | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith( | ||||
|         `Cache restored from key: ${restoreKey}` | ||||
|     ); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
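The extraction step these tests assert on boils down to a `tar -xz` invocation resolved through `@actions/io`. A condensed sketch pieced together from the expected arguments (function name assumed):

```typescript
import * as exec from "@actions/exec";
import * as io from "@actions/io";

// Condensed from the expected exec arguments in the tests above.
async function extractTar(archivePath: string, targetDirectory: string): Promise<void> {
    await io.mkdirP(targetDirectory);
    const IS_WINDOWS = process.platform === "win32";
    const args = IS_WINDOWS
        ? ["-xz", "--force-local", "-f", archivePath.replace(/\\/g, "/"), "-C", targetDirectory.replace(/\\/g, "/")]
        : ["-xz", "-f", archivePath, "-C", targetDirectory];
    const tarPath = await io.which("tar", true); // the tests mock io.which to echo back "tar"
    await exec.exec(`"${tarPath}"`, args);
}
```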
Tests for src/save (368 changes, deleted file)
| @@ -1,368 +0,0 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as path from "path"; | ||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||
| import { Events, Inputs } from "../src/constants"; | ||||
| import { ArtifactCacheEntry } from "../src/contracts"; | ||||
| import run from "../src/save"; | ||||
| import * as actionUtils from "../src/utils/actionUtils"; | ||||
| import * as testUtils from "../src/utils/testUtils"; | ||||
|  | ||||
| jest.mock("@actions/core"); | ||||
| jest.mock("@actions/exec"); | ||||
| jest.mock("@actions/io"); | ||||
| jest.mock("../src/utils/actionUtils"); | ||||
| jest.mock("../src/cacheHttpClient"); | ||||
|  | ||||
| beforeAll(() => { | ||||
|     jest.spyOn(core, "getInput").mockImplementation((name, options) => { | ||||
|         return jest.requireActual("@actions/core").getInput(name, options); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => { | ||||
|         return jest.requireActual("../src/utils/actionUtils").getCacheState(); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation( | ||||
|         (key, cacheResult) => { | ||||
|             return jest | ||||
|                 .requireActual("../src/utils/actionUtils") | ||||
|                 .isExactKeyMatch(key, cacheResult); | ||||
|         } | ||||
|     ); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.isValidEvent(); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => { | ||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||
|         return actualUtils.getSupportedEvents(); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { | ||||
|         return path.resolve(filePath); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { | ||||
|         return Promise.resolve("/foo/bar"); | ||||
|     }); | ||||
|  | ||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | ||||
|         return Promise.resolve(tool); | ||||
|     }); | ||||
| }); | ||||
|  | ||||
| beforeEach(() => { | ||||
|     process.env[Events.Key] = Events.Push; | ||||
| }); | ||||
|  | ||||
| afterEach(() => { | ||||
|     testUtils.clearInputs(); | ||||
|     delete process.env[Events.Key]; | ||||
| }); | ||||
|  | ||||
| test("save with invalid event outputs warning", async () => { | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|     const invalidEvent = "commit_comment"; | ||||
|     process.env[Events.Key] = invalidEvent; | ||||
|     await run(); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.` | ||||
|     ); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save with no primary key in state outputs warning", async () => { | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|  | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|  | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|         }) | ||||
|         // Cache Key State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return ""; | ||||
|         }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         `Error retrieving key from state.` | ||||
|     ); | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save with exact match returns early", async () => { | ||||
|     const infoMock = jest.spyOn(core, "info"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|  | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: primaryKey, | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|  | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|         }) | ||||
|         // Cache Key State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(infoMock).toHaveBeenCalledWith( | ||||
|         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` | ||||
|     ); | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(0); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save with missing input outputs warning", async () => { | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|  | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|  | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|         }) | ||||
|         // Cache Key State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         "Input required and not supplied: path" | ||||
|     ); | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save with large cache outputs warning", async () => { | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|  | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|  | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|         }) | ||||
|         // Cache Key State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|  | ||||
|     const inputPath = "node_modules"; | ||||
|     const cachePath = path.resolve(inputPath); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit | ||||
|     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { | ||||
|         return cacheSize; | ||||
|     }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|             "-cz", | ||||
|             "--force-local", | ||||
|             "-f", | ||||
|             archivePath.replace(/\\/g, "/"), | ||||
|             "-C", | ||||
|             cachePath.replace(/\\/g, "/"), | ||||
|             "." | ||||
|         ] | ||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith( | ||||
|         "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache." | ||||
|     ); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save with server error outputs warning", async () => { | ||||
|     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|  | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|  | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|         }) | ||||
|         // Cache Key State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|  | ||||
|     const inputPath = "node_modules"; | ||||
|     const cachePath = path.resolve(inputPath); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|  | ||||
|     const cacheId = 4; | ||||
|     const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => { | ||||
|         return Promise.resolve(cacheId); | ||||
|     }); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cacheHttpClient, "saveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             throw new Error("HTTP Error Occurred"); | ||||
|         }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||
|  | ||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|             "-cz", | ||||
|             "--force-local", | ||||
|             "-f", | ||||
|             archivePath.replace(/\\/g, "/"), | ||||
|             "-C", | ||||
|             cachePath.replace(/\\/g, "/"), | ||||
|             "." | ||||
|         ] | ||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|  | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); | ||||
|  | ||||
|     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save with valid inputs uploads a cache", async () => { | ||||
|     const failedMock = jest.spyOn(core, "setFailed"); | ||||
|  | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|     const cacheEntry: ArtifactCacheEntry = { | ||||
|         cacheKey: "Linux-node-", | ||||
|         scope: "refs/heads/master", | ||||
|         creationTime: "2019-11-13T19:18:02+00:00", | ||||
|         archiveLocation: "www.actionscache.test/download" | ||||
|     }; | ||||
|  | ||||
|     jest.spyOn(core, "getState") | ||||
|         // Cache Entry State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return JSON.stringify(cacheEntry); | ||||
|         }) | ||||
|         // Cache Key State | ||||
|         .mockImplementationOnce(() => { | ||||
|             return primaryKey; | ||||
|         }); | ||||
|  | ||||
|     const inputPath = "node_modules"; | ||||
|     const cachePath = path.resolve(inputPath); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|  | ||||
|     const cacheId = 4; | ||||
|     const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => { | ||||
|         return Promise.resolve(cacheId); | ||||
|     }); | ||||
|  | ||||
|     const execMock = jest.spyOn(exec, "exec"); | ||||
|  | ||||
|     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||
|  | ||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||
|  | ||||
|     const IS_WINDOWS = process.platform === "win32"; | ||||
|     const args = IS_WINDOWS | ||||
|         ? [ | ||||
|             "-cz", | ||||
|             "--force-local", | ||||
|             "-f", | ||||
|             archivePath.replace(/\\/g, "/"), | ||||
|             "-C", | ||||
|             cachePath.replace(/\\/g, "/"), | ||||
|             "." | ||||
|         ] | ||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|  | ||||
|     expect(execMock).toHaveBeenCalledTimes(1); | ||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); | ||||
|  | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
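One detail worth noting from the "large cache" test: the archive is created first, and only then is its size checked against a 2GB cap before upload. A sketch of that check (constant and function names assumed):

```typescript
const FILE_SIZE_LIMIT = 2 * 1024 * 1024 * 1024; // 2 GB cap, per the warning text

// Returns true when the archive may be uploaded; otherwise the caller logs the
// warning asserted in the test and skips saveCache.
function withinSizeLimit(archiveFileSize: number): boolean {
    return archiveFileSize <= FILE_SIZE_LIMIT;
}

// Example from the test: a ~4 GB archive is rejected.
// withinSizeLimit(4 * 1024 * 1024 * 1024) === false
// -> logWarning("Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache.")
```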
							
								
								
									
dist/restore/index.js (287 changes, vendored)
| @@ -1496,61 +1496,41 @@ const fs = __importStar(__webpack_require__(747)); | ||||
| const Handlers_1 = __webpack_require__(941); | ||||
| const HttpClient_1 = __webpack_require__(874); | ||||
| const RestClient_1 = __webpack_require__(105); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function isSuccessStatusCode(statusCode) { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| function getCacheApiUrl() { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     const baseUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "").replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||
|     } | ||||
|     core.debug(`Cache Url: ${baseUrl}`); | ||||
|     return `${baseUrl}_apis/artifactcache/`; | ||||
| } | ||||
| function createAcceptHeader(type, apiVersion) { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| function getRequestOptions() { | ||||
|     const requestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|     }; | ||||
|     return requestOptions; | ||||
| } | ||||
| function createRestClient() { | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|     return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
| } | ||||
| function getCacheEntry(keys) { | ||||
|     var _a; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|         const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
|         const response = yield restClient.get(resource, getRequestOptions()); | ||||
|         if (response.statusCode === 204) { | ||||
|             return null; | ||||
|         } | ||||
|         if (!isSuccessStatusCode(response.statusCode)) { | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|             throw new Error("Cache not found."); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
|         core.debug(`Cache Result:`); | ||||
|         core.debug(JSON.stringify(cacheResult)); | ||||
|         if (!cacheResult || !cacheResult.archiveLocation) { | ||||
|             throw new Error("Cache not found."); | ||||
|         } | ||||
|         return cacheResult; | ||||
|     }); | ||||
| } | ||||
| exports.getCacheEntry = getCacheEntry; | ||||
| function downloadCache(cacheEntry, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const stream = fs.createWriteStream(archivePath); | ||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||
|         const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); | ||||
|         yield pipeResponseToStream(downloadResponse, stream); | ||||
|     }); | ||||
| } | ||||
| exports.downloadCache = downloadCache; | ||||
| function pipeResponseToStream(response, stream) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         return new Promise(resolve => { | ||||
| @@ -1560,107 +1540,48 @@ function pipeResponseToStream(response, stream) { | ||||
|         }); | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath) { | ||||
| function saveCache(stream, key) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const stream = fs.createWriteStream(archivePath); | ||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||
|         const downloadResponse = yield httpClient.get(archiveLocation); | ||||
|         yield pipeResponseToStream(downloadResponse, stream); | ||||
|     }); | ||||
| } | ||||
| exports.downloadCache = downloadCache; | ||||
| // Reserve Cache | ||||
| function reserveCache(key) { | ||||
|     var _a, _b, _c; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const reserveCacheRequest = { | ||||
|             key | ||||
|         }; | ||||
|         const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); | ||||
|         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1); | ||||
|     }); | ||||
| } | ||||
| exports.reserveCache = reserveCache; | ||||
| function getContentRange(start, end) { | ||||
|     // Format: `bytes start-end/filesize` | ||||
|     // start and end are inclusive | ||||
|     // filesize can be * | ||||
|     // For a 200 byte chunk starting at byte 0: | ||||
|     // Content-Range: bytes 0-199/* | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
| // function bufferToStream(buffer: Buffer): NodeJS.ReadableStream { | ||||
| //     const stream = new Duplex(); | ||||
| //     stream.push(buffer); | ||||
| //     stream.push(null); | ||||
| //     return stream; | ||||
| // } | ||||
| function uploadChunk(restClient, resourceUrl, data, start, end) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|         const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||
|         const postUrl = cacheUrl + resource; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         requestOptions.additionalHeaders = { | ||||
|             "Content-Type": "application/octet-stream", | ||||
|             "Content-Range": getContentRange(start, end) | ||||
|             "Content-Type": "application/octet-stream" | ||||
|         }; | ||||
|         return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function commitCache(restClient, cacheId, filesize) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         const commitCacheRequest = { size: filesize }; | ||||
|         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function uploadFile(restClient, cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Upload Chunks | ||||
|         const fileSize = fs.statSync(archivePath).size; | ||||
|         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||
|         const responses = []; | ||||
|         const fd = fs.openSync(archivePath, "r"); | ||||
|         const concurrency = 16; // # of HTTP requests in parallel | ||||
|         const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks | ||||
|         core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
|         const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|         core.debug("Awaiting all uploads"); | ||||
|         let offset = 0; | ||||
|         yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { | ||||
|             while (offset < fileSize) { | ||||
|                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; | ||||
|                 const start = offset; | ||||
|                 const end = offset + chunkSize - 1; | ||||
|                 offset += MAX_CHUNK_SIZE; | ||||
|                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false }); | ||||
|                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end)); | ||||
|             } | ||||
|         }))); | ||||
|         fs.closeSync(fd); | ||||
|         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode)); | ||||
|         if (failedResponse) { | ||||
|             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`); | ||||
|         } | ||||
|         return; | ||||
|     }); | ||||
| } | ||||
| function saveCache(cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         core.debug("Upload cache"); | ||||
|         yield uploadFile(restClient, cacheId, archivePath); | ||||
|         core.debug("Commiting cache"); | ||||
|         // Commit Cache | ||||
|         const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); | ||||
|         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); | ||||
|         const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         core.info("Cache saved successfully"); | ||||
|     }); | ||||
| } | ||||
| exports.saveCache = saveCache; | ||||
| function getRequestOptions() { | ||||
|     const requestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||
|     }; | ||||
|     return requestOptions; | ||||
| } | ||||
| function createAcceptHeader(type, apiVersion) { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| function getCacheUrl() { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     let cacheUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "").replace("pipelines", "artifactcache"); | ||||
|     if (!cacheUrl) { | ||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||
|     } | ||||
|     core.debug(`Cache Url: ${cacheUrl}`); | ||||
|     return cacheUrl; | ||||
| } | ||||
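
For orientation, the post-diff client reduces to three entry points: `getCacheEntry`, `downloadCache`, and `saveCache`. A minimal TypeScript sketch of how a caller chains them; the import path is assumed from the un-bundled source tree, and the wrapper function is purely illustrative:

```typescript
import * as fs from "fs";
// Import path assumed from the TypeScript source; in this bundled dist
// file the same functions live in webpack module 154.
import { getCacheEntry, downloadCache, saveCache } from "./cacheHttpClient";

// Illustrative wrapper, not part of the action itself.
async function restoreThenSave(keys: string[], archivePath: string): Promise<void> {
    const entry = await getCacheEntry(keys); // resolves to null on a 204 (cache miss)
    if (entry) {
        // Streams entry.archiveLocation to archivePath on disk
        await downloadCache(entry, archivePath);
    }
    // v1.0.1 uploads the whole archive in a single POST, keyed by the primary key
    await saveCache(fs.createReadStream(archivePath), keys[0]);
}
```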
|  | ||||
|  | ||||
| /***/ }), | ||||
| @@ -2218,7 +2139,6 @@ var __importStar = (this && this.__importStar) || function (mod) { | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const fs = __importStar(__webpack_require__(747)); | ||||
| const os = __importStar(__webpack_require__(87)); | ||||
| const path = __importStar(__webpack_require__(622)); | ||||
| const uuidV4 = __importStar(__webpack_require__(826)); | ||||
| @@ -2250,10 +2170,6 @@ function createTempDirectory() { | ||||
|     }); | ||||
| } | ||||
| exports.createTempDirectory = createTempDirectory; | ||||
| function getArchiveFileSize(path) { | ||||
|     return fs.statSync(path).size; | ||||
| } | ||||
| exports.getArchiveFileSize = getArchiveFileSize; | ||||
| function isExactKeyMatch(key, cacheResult) { | ||||
|     return !!(cacheResult && | ||||
|         cacheResult.cacheKey && | ||||
| @@ -2262,14 +2178,6 @@ function isExactKeyMatch(key, cacheResult) { | ||||
|         }) === 0); | ||||
| } | ||||
| exports.isExactKeyMatch = isExactKeyMatch; | ||||
| function setCacheState(state) { | ||||
|     core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); | ||||
| } | ||||
| exports.setCacheState = setCacheState; | ||||
| function setCacheHitOutput(isCacheHit) { | ||||
|     core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
| exports.setCacheHitOutput = setCacheHitOutput; | ||||
| function setOutputAndState(key, cacheResult) { | ||||
|     setCacheHitOutput(isExactKeyMatch(key, cacheResult)); | ||||
|     // Store the cache result if it exists | ||||
| @@ -2279,17 +2187,17 @@ exports.setOutputAndState = setOutputAndState; | ||||
| function getCacheState() { | ||||
|     const stateData = core.getState(constants_1.State.CacheResult); | ||||
|     core.debug(`State: ${stateData}`); | ||||
|     if (stateData) { | ||||
|         return JSON.parse(stateData); | ||||
|     } | ||||
|     return undefined; | ||||
|     return (stateData && JSON.parse(stateData)); | ||||
| } | ||||
| exports.getCacheState = getCacheState; | ||||
| function logWarning(message) { | ||||
|     const warningPrefix = "[warning]"; | ||||
|     core.info(`${warningPrefix}${message}`); | ||||
| function setCacheState(state) { | ||||
|     core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); | ||||
| } | ||||
| exports.logWarning = logWarning; | ||||
| exports.setCacheState = setCacheState; | ||||
| function setCacheHitOutput(isCacheHit) { | ||||
|     core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
| exports.setCacheHitOutput = setCacheHitOutput; | ||||
| function resolvePath(filePath) { | ||||
|     if (filePath[0] === "~") { | ||||
|         const home = os.homedir(); | ||||
| @@ -2301,18 +2209,6 @@ function resolvePath(filePath) { | ||||
|     return path.resolve(filePath); | ||||
| } | ||||
| exports.resolvePath = resolvePath; | ||||
| function getSupportedEvents() { | ||||
|     return [constants_1.Events.Push, constants_1.Events.PullRequest]; | ||||
| } | ||||
| exports.getSupportedEvents = getSupportedEvents; | ||||
| // Currently the cache token is only authorized for push and pull_request events | ||||
| // All other events will fail when reading and saving the cache | ||||
| // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context | ||||
| function isValidEvent() { | ||||
|     const githubEvent = process.env[constants_1.Events.Key] || ""; | ||||
|     return getSupportedEvents().includes(githubEvent); | ||||
| } | ||||
| exports.isValidEvent = isValidEvent; | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
| @@ -2940,25 +2836,19 @@ function isUnixExecutable(stats) { | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| var Inputs; | ||||
| (function (Inputs) { | ||||
|     Inputs["Key"] = "key"; | ||||
|     Inputs["Path"] = "path"; | ||||
|     Inputs["RestoreKeys"] = "restore-keys"; | ||||
|     Inputs.Key = "key"; | ||||
|     Inputs.Path = "path"; | ||||
|     Inputs.RestoreKeys = "restore-keys"; | ||||
| })(Inputs = exports.Inputs || (exports.Inputs = {})); | ||||
| var Outputs; | ||||
| (function (Outputs) { | ||||
|     Outputs["CacheHit"] = "cache-hit"; | ||||
|     Outputs.CacheHit = "cache-hit"; | ||||
| })(Outputs = exports.Outputs || (exports.Outputs = {})); | ||||
| var State; | ||||
| (function (State) { | ||||
|     State["CacheKey"] = "CACHE_KEY"; | ||||
|     State["CacheResult"] = "CACHE_RESULT"; | ||||
|     State.CacheKey = "CACHE_KEY"; | ||||
|     State.CacheResult = "CACHE_RESULT"; | ||||
| })(State = exports.State || (exports.State = {})); | ||||
| var Events; | ||||
| (function (Events) { | ||||
|     Events["Key"] = "GITHUB_EVENT_NAME"; | ||||
|     Events["Push"] = "push"; | ||||
|     Events["PullRequest"] = "pull_request"; | ||||
| })(Events = exports.Events || (exports.Events = {})); | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
| @@ -3069,22 +2959,16 @@ Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const exec_1 = __webpack_require__(986); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const fs = __importStar(__webpack_require__(747)); | ||||
| const path = __importStar(__webpack_require__(622)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||
| const constants_1 = __webpack_require__(694); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function run() { | ||||
|     var _a, _b; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             // Validate inputs, this can cause task failure | ||||
|             if (!utils.isValidEvent()) { | ||||
|                 utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils | ||||
|                     .getSupportedEvents() | ||||
|                     .join(", ")} events are supported at this time.`); | ||||
|                 return; | ||||
|             } | ||||
|             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||
|             let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||
|             core.debug(`Cache Path: ${cachePath}`); | ||||
|             const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true }); | ||||
|             core.saveState(constants_1.State.CacheKey, primaryKey); | ||||
| @@ -3112,43 +2996,38 @@ function run() { | ||||
|             } | ||||
|             try { | ||||
|                 const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); | ||||
|                 if (!cacheEntry || !((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) { | ||||
|                 if (!cacheEntry) { | ||||
|                     core.info(`Cache not found for input keys: ${keys.join(", ")}.`); | ||||
|                     return; | ||||
|                 } | ||||
|                 const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||
|                 let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||
|                 core.debug(`Archive Path: ${archivePath}`); | ||||
|                 // Store the cache result | ||||
|                 utils.setCacheState(cacheEntry); | ||||
|                 // Download the cache from the cache entry | ||||
|                 yield cacheHttpClient.downloadCache((_b = cacheEntry) === null || _b === void 0 ? void 0 : _b.archiveLocation, archivePath); | ||||
|                 yield exec_1.exec(`md5sum`, [archivePath]); | ||||
|                 const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|                 core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); | ||||
|                 // Create directory to extract tar into | ||||
|                 yield io.mkdirP(cachePath); | ||||
|                 yield cacheHttpClient.downloadCache(cacheEntry, archivePath); | ||||
|                 io.mkdirP(cachePath); | ||||
|                 // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|                 // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|                 const args = ["-xz"]; | ||||
|                 const IS_WINDOWS = process.platform === "win32"; | ||||
|                 const args = IS_WINDOWS | ||||
|                     ? [ | ||||
|                         "-xz", | ||||
|                         "--force-local", | ||||
|                         "-f", | ||||
|                         archivePath.replace(/\\/g, "/"), | ||||
|                         "-C", | ||||
|                         cachePath.replace(/\\/g, "/") | ||||
|                     ] | ||||
|                     : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|                 if (IS_WINDOWS) { | ||||
|                     args.push("--force-local"); | ||||
|                     archivePath = archivePath.replace(/\\/g, "/"); | ||||
|                     cachePath = cachePath.replace(/\\/g, "/"); | ||||
|                 } | ||||
|                 args.push(...["-f", archivePath, "-C", cachePath]); | ||||
|                 const tarPath = yield io.which("tar", true); | ||||
|                 core.debug(`Tar Path: ${tarPath}`); | ||||
|                 const archiveFileSize = fs.statSync(archivePath).size; | ||||
|                 core.debug(`File Size: ${archiveFileSize}`); | ||||
|                 yield exec_1.exec(`"${tarPath}"`, args); | ||||
|                 const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); | ||||
|                 utils.setCacheHitOutput(isExactKeyMatch); | ||||
|                 core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); | ||||
|             } | ||||
|             catch (error) { | ||||
|                 utils.logWarning(error.message); | ||||
|                 core.warning(error.message); | ||||
|                 utils.setCacheHitOutput(false); | ||||
|             } | ||||
|         } | ||||
|   | ||||
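
The extraction hunk above replaces a per-platform ternary with a single mutable argument list. A standalone sketch of the resulting v1.0.1 behavior, with hypothetical paths, showing why Windows needs `--force-local` and forward slashes:

```typescript
// Hypothetical paths for illustration.
let archivePath = "C:\\Users\\runner\\temp\\cache.tgz";
let cachePath = "C:\\project\\node_modules";

const args = ["-xz"];
if (process.platform === "win32") {
    // Without --force-local, tar interprets "C:" as a remote host name.
    args.push("--force-local");
    archivePath = archivePath.replace(/\\/g, "/");
    cachePath = cachePath.replace(/\\/g, "/");
}
args.push("-f", archivePath, "-C", cachePath);
// On Windows this yields:
// tar -xz --force-local -f C:/Users/runner/temp/cache.tgz -C C:/project/node_modules
```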
							
								
								
									
dist/save/index.js (vendored): 295 lines changed
							| @@ -1496,61 +1496,41 @@ const fs = __importStar(__webpack_require__(747)); | ||||
| const Handlers_1 = __webpack_require__(941); | ||||
| const HttpClient_1 = __webpack_require__(874); | ||||
| const RestClient_1 = __webpack_require__(105); | ||||
| const utils = __importStar(__webpack_require__(443)); | ||||
| function isSuccessStatusCode(statusCode) { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| function getCacheApiUrl() { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     const baseUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "").replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||
|     } | ||||
|     core.debug(`Cache Url: ${baseUrl}`); | ||||
|     return `${baseUrl}_apis/artifactcache/`; | ||||
| } | ||||
| function createAcceptHeader(type, apiVersion) { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| function getRequestOptions() { | ||||
|     const requestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|     }; | ||||
|     return requestOptions; | ||||
| } | ||||
| function createRestClient() { | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|     return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
| } | ||||
| function getCacheEntry(keys) { | ||||
|     var _a; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|         const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
|         const response = yield restClient.get(resource, getRequestOptions()); | ||||
|         if (response.statusCode === 204) { | ||||
|             return null; | ||||
|         } | ||||
|         if (!isSuccessStatusCode(response.statusCode)) { | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|             throw new Error("Cache not found."); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
|         core.debug(`Cache Result:`); | ||||
|         core.debug(JSON.stringify(cacheResult)); | ||||
|         if (!cacheResult || !cacheResult.archiveLocation) { | ||||
|             throw new Error("Cache not found."); | ||||
|         } | ||||
|         return cacheResult; | ||||
|     }); | ||||
| } | ||||
| exports.getCacheEntry = getCacheEntry; | ||||
| function downloadCache(cacheEntry, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const stream = fs.createWriteStream(archivePath); | ||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||
|         const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); | ||||
|         yield pipeResponseToStream(downloadResponse, stream); | ||||
|     }); | ||||
| } | ||||
| exports.downloadCache = downloadCache; | ||||
| function pipeResponseToStream(response, stream) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         return new Promise(resolve => { | ||||
| @@ -1560,107 +1540,48 @@ function pipeResponseToStream(response, stream) { | ||||
|         }); | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath) { | ||||
| function saveCache(stream, key) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const stream = fs.createWriteStream(archivePath); | ||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||
|         const downloadResponse = yield httpClient.get(archiveLocation); | ||||
|         yield pipeResponseToStream(downloadResponse, stream); | ||||
|     }); | ||||
| } | ||||
| exports.downloadCache = downloadCache; | ||||
| // Reserve Cache | ||||
| function reserveCache(key) { | ||||
|     var _a, _b, _c; | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         const reserveCacheRequest = { | ||||
|             key | ||||
|         }; | ||||
|         const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); | ||||
|         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1); | ||||
|     }); | ||||
| } | ||||
| exports.reserveCache = reserveCache; | ||||
| function getContentRange(start, end) { | ||||
|     // Format: `bytes start-end/filesize` | ||||
|     // start and end are inclusive | ||||
|     // filesize can be * | ||||
|     // For a 200 byte chunk starting at byte 0: | ||||
|     // Content-Range: bytes 0-199/* | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
| // function bufferToStream(buffer: Buffer): NodeJS.ReadableStream { | ||||
| //     const stream = new Duplex(); | ||||
| //     stream.push(buffer); | ||||
| //     stream.push(null); | ||||
| //     return stream; | ||||
| // } | ||||
| function uploadChunk(restClient, resourceUrl, data, start, end) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||
|         const cacheUrl = getCacheUrl(); | ||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||
|         const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||
|         const postUrl = cacheUrl + resource; | ||||
|         const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ | ||||
|             bearerCredentialHandler | ||||
|         ]); | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         requestOptions.additionalHeaders = { | ||||
|             "Content-Type": "application/octet-stream", | ||||
|             "Content-Range": getContentRange(start, end) | ||||
|             "Content-Type": "application/octet-stream" | ||||
|         }; | ||||
|         return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function commitCache(restClient, cacheId, filesize) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const requestOptions = getRequestOptions(); | ||||
|         const commitCacheRequest = { size: filesize }; | ||||
|         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); | ||||
|     }); | ||||
| } | ||||
| function uploadFile(restClient, cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Upload Chunks | ||||
|         const fileSize = fs.statSync(archivePath).size; | ||||
|         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||
|         const responses = []; | ||||
|         const fd = fs.openSync(archivePath, "r"); | ||||
|         const concurrency = 16; // # of HTTP requests in parallel | ||||
|         const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks | ||||
|         core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
|         const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|         core.debug("Awaiting all uploads"); | ||||
|         let offset = 0; | ||||
|         yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { | ||||
|             while (offset < fileSize) { | ||||
|                 const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE; | ||||
|                 const start = offset; | ||||
|                 const end = offset + chunkSize - 1; | ||||
|                 offset += MAX_CHUNK_SIZE; | ||||
|                 const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false }); | ||||
|                 responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end)); | ||||
|             } | ||||
|         }))); | ||||
|         fs.closeSync(fd); | ||||
|         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode)); | ||||
|         if (failedResponse) { | ||||
|             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`); | ||||
|         } | ||||
|         return; | ||||
|     }); | ||||
| } | ||||
| function saveCache(cacheId, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const restClient = createRestClient(); | ||||
|         core.debug("Upload cache"); | ||||
|         yield uploadFile(restClient, cacheId, archivePath); | ||||
|         core.debug("Commiting cache"); | ||||
|         // Commit Cache | ||||
|         const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); | ||||
|         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); | ||||
|         const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); | ||||
|         if (response.statusCode !== 200) { | ||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|         } | ||||
|         core.info("Cache saved successfully"); | ||||
|     }); | ||||
| } | ||||
| exports.saveCache = saveCache; | ||||
| function getRequestOptions() { | ||||
|     const requestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||
|     }; | ||||
|     return requestOptions; | ||||
| } | ||||
| function createAcceptHeader(type, apiVersion) { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
| function getCacheUrl() { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     let cacheUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "").replace("pipelines", "artifactcache"); | ||||
|     if (!cacheUrl) { | ||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||
|     } | ||||
|     core.debug(`Cache Url: ${cacheUrl}`); | ||||
|     return cacheUrl; | ||||
| } | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
| @@ -2218,7 +2139,6 @@ var __importStar = (this && this.__importStar) || function (mod) { | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const fs = __importStar(__webpack_require__(747)); | ||||
| const os = __importStar(__webpack_require__(87)); | ||||
| const path = __importStar(__webpack_require__(622)); | ||||
| const uuidV4 = __importStar(__webpack_require__(826)); | ||||
| @@ -2250,10 +2170,6 @@ function createTempDirectory() { | ||||
|     }); | ||||
| } | ||||
| exports.createTempDirectory = createTempDirectory; | ||||
| function getArchiveFileSize(path) { | ||||
|     return fs.statSync(path).size; | ||||
| } | ||||
| exports.getArchiveFileSize = getArchiveFileSize; | ||||
| function isExactKeyMatch(key, cacheResult) { | ||||
|     return !!(cacheResult && | ||||
|         cacheResult.cacheKey && | ||||
| @@ -2262,14 +2178,6 @@ function isExactKeyMatch(key, cacheResult) { | ||||
|         }) === 0); | ||||
| } | ||||
| exports.isExactKeyMatch = isExactKeyMatch; | ||||
| function setCacheState(state) { | ||||
|     core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); | ||||
| } | ||||
| exports.setCacheState = setCacheState; | ||||
| function setCacheHitOutput(isCacheHit) { | ||||
|     core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
| exports.setCacheHitOutput = setCacheHitOutput; | ||||
| function setOutputAndState(key, cacheResult) { | ||||
|     setCacheHitOutput(isExactKeyMatch(key, cacheResult)); | ||||
|     // Store the cache result if it exists | ||||
| @@ -2279,17 +2187,17 @@ exports.setOutputAndState = setOutputAndState; | ||||
| function getCacheState() { | ||||
|     const stateData = core.getState(constants_1.State.CacheResult); | ||||
|     core.debug(`State: ${stateData}`); | ||||
|     if (stateData) { | ||||
|         return JSON.parse(stateData); | ||||
|     } | ||||
|     return undefined; | ||||
|     return (stateData && JSON.parse(stateData)); | ||||
| } | ||||
| exports.getCacheState = getCacheState; | ||||
| function logWarning(message) { | ||||
|     const warningPrefix = "[warning]"; | ||||
|     core.info(`${warningPrefix}${message}`); | ||||
| function setCacheState(state) { | ||||
|     core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); | ||||
| } | ||||
| exports.logWarning = logWarning; | ||||
| exports.setCacheState = setCacheState; | ||||
| function setCacheHitOutput(isCacheHit) { | ||||
|     core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
| exports.setCacheHitOutput = setCacheHitOutput; | ||||
| function resolvePath(filePath) { | ||||
|     if (filePath[0] === "~") { | ||||
|         const home = os.homedir(); | ||||
| @@ -2301,18 +2209,6 @@ function resolvePath(filePath) { | ||||
|     return path.resolve(filePath); | ||||
| } | ||||
| exports.resolvePath = resolvePath; | ||||
| function getSupportedEvents() { | ||||
|     return [constants_1.Events.Push, constants_1.Events.PullRequest]; | ||||
| } | ||||
| exports.getSupportedEvents = getSupportedEvents; | ||||
| // Currently the cache token is only authorized for push and pull_request events | ||||
| // All other events will fail when reading and saving the cache | ||||
| // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context | ||||
| function isValidEvent() { | ||||
|     const githubEvent = process.env[constants_1.Events.Key] || ""; | ||||
|     return getSupportedEvents().includes(githubEvent); | ||||
| } | ||||
| exports.isValidEvent = isValidEvent; | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
| @@ -2957,6 +2853,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const exec_1 = __webpack_require__(986); | ||||
| const io = __importStar(__webpack_require__(1)); | ||||
| const fs = __importStar(__webpack_require__(747)); | ||||
| const path = __importStar(__webpack_require__(622)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||
| const constants_1 = __webpack_require__(694); | ||||
| @@ -2964,64 +2861,46 @@ const utils = __importStar(__webpack_require__(443)); | ||||
| function run() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             if (!utils.isValidEvent()) { | ||||
|                 utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils | ||||
|                     .getSupportedEvents() | ||||
|                     .join(", ")} events are supported at this time.`); | ||||
|                 return; | ||||
|             } | ||||
|             const state = utils.getCacheState(); | ||||
|             // Inputs are re-evaluted before the post action, so we want the original key used for restore | ||||
|             const primaryKey = core.getState(constants_1.State.CacheKey); | ||||
|             if (!primaryKey) { | ||||
|                 utils.logWarning(`Error retrieving key from state.`); | ||||
|                 core.warning(`Error retrieving key from state.`); | ||||
|                 return; | ||||
|             } | ||||
|             if (utils.isExactKeyMatch(primaryKey, state)) { | ||||
|                 core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); | ||||
|                 return; | ||||
|             } | ||||
|             core.debug("Reserving Cache"); | ||||
|             const cacheId = yield cacheHttpClient.reserveCache(primaryKey); | ||||
|             if (cacheId < 0) { | ||||
|                 core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`); | ||||
|                 return; | ||||
|             } | ||||
|             core.debug(`Cache ID: ${cacheId}`); | ||||
|             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||
|             let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||
|             core.debug(`Cache Path: ${cachePath}`); | ||||
|             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||
|             let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|             // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|             const args = ["-cz"]; | ||||
|             const IS_WINDOWS = process.platform === "win32"; | ||||
|             const args = IS_WINDOWS | ||||
|                 ? [ | ||||
|                     "-cz", | ||||
|                     "--force-local", | ||||
|                     "-f", | ||||
|                     archivePath.replace(/\\/g, "/"), | ||||
|                     "-C", | ||||
|                     cachePath.replace(/\\/g, "/"), | ||||
|                     "." | ||||
|                 ] | ||||
|                 : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|             if (IS_WINDOWS) { | ||||
|                 args.push("--force-local"); | ||||
|                 archivePath = archivePath.replace(/\\/g, "/"); | ||||
|                 cachePath = cachePath.replace(/\\/g, "/"); | ||||
|             } | ||||
|             args.push(...["-f", archivePath, "-C", cachePath, "."]); | ||||
|             const tarPath = yield io.which("tar", true); | ||||
|             core.debug(`Tar Path: ${tarPath}`); | ||||
|             yield exec_1.exec(`"${tarPath}"`, args); | ||||
|             const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit | ||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|             const fileSizeLimit = 400 * 1024 * 1024; // 400MB | ||||
|             const archiveFileSize = fs.statSync(archivePath).size; | ||||
|             core.debug(`File Size: ${archiveFileSize}`); | ||||
|             if (archiveFileSize > fileSizeLimit) { | ||||
|                 utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024 * 1024))} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`); | ||||
|                 core.warning(`Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`); | ||||
|                 return; | ||||
|             } | ||||
|             yield exec_1.exec(`md5sum`, [archivePath]); | ||||
|             core.debug("Saving Cache"); | ||||
|             yield cacheHttpClient.saveCache(cacheId, archivePath); | ||||
|             const stream = fs.createReadStream(archivePath); | ||||
|             yield cacheHttpClient.saveCache(stream, primaryKey); | ||||
|         } | ||||
|         catch (error) { | ||||
|             utils.logWarning(error.message); | ||||
|             core.warning(error.message); | ||||
|         } | ||||
|     }); | ||||
| } | ||||
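
Condensed from the hunk above, the v1.0.1 save path stats the archive, enforces the 400 MB cap, and streams a single POST. A minimal sketch with a placeholder archive path; the import path for `saveCache` is assumed from the source tree:

```typescript
import * as fs from "fs";
import { saveCache } from "./cacheHttpClient"; // import path assumed

async function save(primaryKey: string, archivePath: string): Promise<void> {
    const fileSizeLimit = 400 * 1024 * 1024; // 400 MB per-cache cap in v1.0.1
    const archiveFileSize = fs.statSync(archivePath).size;
    if (archiveFileSize > fileSizeLimit) {
        console.warn(`Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`);
        return;
    }
    // Single streaming POST; no reserve/commit handshake in v1.0.1.
    await saveCache(fs.createReadStream(archivePath), primaryKey);
}
```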
| @@ -3039,25 +2918,19 @@ exports.default = run; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| var Inputs; | ||||
| (function (Inputs) { | ||||
|     Inputs["Key"] = "key"; | ||||
|     Inputs["Path"] = "path"; | ||||
|     Inputs["RestoreKeys"] = "restore-keys"; | ||||
|     Inputs.Key = "key"; | ||||
|     Inputs.Path = "path"; | ||||
|     Inputs.RestoreKeys = "restore-keys"; | ||||
| })(Inputs = exports.Inputs || (exports.Inputs = {})); | ||||
| var Outputs; | ||||
| (function (Outputs) { | ||||
|     Outputs["CacheHit"] = "cache-hit"; | ||||
|     Outputs.CacheHit = "cache-hit"; | ||||
| })(Outputs = exports.Outputs || (exports.Outputs = {})); | ||||
| var State; | ||||
| (function (State) { | ||||
|     State["CacheKey"] = "CACHE_KEY"; | ||||
|     State["CacheResult"] = "CACHE_RESULT"; | ||||
|     State.CacheKey = "CACHE_KEY"; | ||||
|     State.CacheResult = "CACHE_RESULT"; | ||||
| })(State = exports.State || (exports.State = {})); | ||||
| var Events; | ||||
| (function (Events) { | ||||
|     Events["Key"] = "GITHUB_EVENT_NAME"; | ||||
|     Events["Push"] = "push"; | ||||
|     Events["PullRequest"] = "pull_request"; | ||||
| })(Events = exports.Events || (exports.Events = {})); | ||||
|  | ||||
|  | ||||
| /***/ }), | ||||
|   | ||||
							
								
								
									
examples.md: 134 lines changed
							| @@ -7,8 +7,6 @@ | ||||
| - [Java - Maven](#java---maven) | ||||
| - [Node - npm](#node---npm) | ||||
| - [Node - Yarn](#node---yarn) | ||||
| - [PHP - Composer](#php---composer) | ||||
| - [Python - pip](#python---pip) | ||||
| - [Ruby - Gem](#ruby---gem) | ||||
| - [Rust - Cargo](#rust---cargo) | ||||
| - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) | ||||
| @@ -71,144 +69,26 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa | ||||
|  | ||||
| ## Node - npm | ||||
|  | ||||
| For npm, cache files are stored in `~/.npm` on POSIX systems, or `%AppData%/npm-cache` on Windows. See https://docs.npmjs.com/cli/cache#cache | ||||
|  | ||||
| > Note: It is not recommended to cache `node_modules`, as it can break across Node versions and won't work with `npm ci`. | ||||
|  | ||||
| ### macOS and Ubuntu | ||||
|  | ||||
| ```yaml | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ~/.npm | ||||
|     key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-node- | ||||
| ``` | ||||
|  | ||||
| ### Windows | ||||
|  | ||||
| ```yaml | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ~\AppData\Roaming\npm-cache | ||||
|     key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-node- | ||||
| ``` | ||||
|  | ||||
| ### Using multiple systems and `npm config` | ||||
|  | ||||
| ```yaml   | ||||
| - name: Get npm cache directory | ||||
|   id: npm-cache | ||||
|   run: | | ||||
|     echo "::set-output name=dir::$(npm config get cache)" | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ${{ steps.npm-cache.outputs.dir }} | ||||
|     path: node_modules | ||||
|     key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-node- | ||||
| ``` | ||||
|  | ||||
| ## Node - Yarn | ||||
| The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info. | ||||
|  | ||||
| ```yaml | ||||
| - name: Get yarn cache | ||||
|   id: yarn-cache | ||||
|   run: echo "::set-output name=dir::$(yarn cache dir)" | ||||
|  | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ${{ steps.yarn-cache.outputs.dir }} | ||||
|     key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} | ||||
|     path: ~/.cache/yarn | ||||
|     key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-yarn- | ||||
| ``` | ||||
|  | ||||
| ## PHP - Composer | ||||
|  | ||||
| ```yaml   | ||||
| - name: Get Composer Cache Directory | ||||
|   id: composer-cache | ||||
|   run: | | ||||
|     echo "::set-output name=dir::$(composer config cache-files-dir)" | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ${{ steps.composer-cache.outputs.dir }} | ||||
|     key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-composer- | ||||
| ``` | ||||
|  | ||||
| ## Python - pip | ||||
|  | ||||
| For pip, the cache directory will vary by OS. See https://pip.pypa.io/en/stable/reference/pip_install/#caching | ||||
|  | ||||
| Locations: | ||||
|  - Ubuntu: `~/.cache/pip` | ||||
|  - Windows: `~\AppData\Local\pip\Cache` | ||||
|  - macOS: `~/Library/Caches/pip` | ||||
|  | ||||
| ### Simple example | ||||
| ```yaml | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ~/.cache/pip | ||||
|     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-pip- | ||||
| ``` | ||||
|  | ||||
| Replace `~/.cache/pip` with the correct `path` if not using Ubuntu. | ||||
|  | ||||
| ### Multiple OSes in a workflow | ||||
|  | ||||
| ```yaml | ||||
| - uses: actions/cache@v1 | ||||
|   if: startsWith(runner.os, 'Linux') | ||||
|   with: | ||||
|     path: ~/.cache/pip | ||||
|     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-pip- | ||||
|  | ||||
| - uses: actions/cache@v1 | ||||
|   if: startsWith(runner.os, 'macOS') | ||||
|   with: | ||||
|     path: ~/Library/Caches/pip | ||||
|     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-pip- | ||||
|  | ||||
| - uses: actions/cache@v1 | ||||
|   if: startsWith(runner.os, 'Windows') | ||||
|   with: | ||||
|     path: ~\AppData\Local\pip\Cache | ||||
|     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-pip- | ||||
| ``` | ||||
|  | ||||
| ### Using a script to get cache location | ||||
|  | ||||
| > Note: This uses an internal pip API and may not always work | ||||
| ```yaml | ||||
| - name: Get pip cache | ||||
|   id: pip-cache | ||||
|   run: | | ||||
|     python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)" | ||||
|  | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ${{ steps.pip-cache.outputs.dir }} | ||||
|     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-pip- | ||||
| ``` | ||||
|  | ||||
| ## Ruby - Gem | ||||
|  | ||||
| ```yaml | ||||
| @@ -219,14 +99,6 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu. | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-gem- | ||||
| ``` | ||||
| When dependencies are installed later in the workflow, we must specify the same path for the bundler. | ||||
|  | ||||
| ```yaml | ||||
| - name: Bundle install | ||||
|   run: | | ||||
|     bundle config path vendor/bundle | ||||
|     bundle install --jobs 4 --retry 3 | ||||
| ``` | ||||
|  | ||||
| ## Rust - Cargo | ||||
|  | ||||
|   | ||||
jest.config.js
| @@ -1,23 +1,20 @@ | ||||
| require("nock").disableNetConnect(); | ||||
|  | ||||
| module.exports = { | ||||
|     clearMocks: true, | ||||
|     moduleFileExtensions: ["js", "ts"], | ||||
|     testEnvironment: "node", | ||||
|     testMatch: ["**/*.test.ts"], | ||||
|     testRunner: "jest-circus/runner", | ||||
|     transform: { | ||||
|         "^.+\\.ts$": "ts-jest" | ||||
|     }, | ||||
|     verbose: true | ||||
| }; | ||||
|   clearMocks: true, | ||||
|   moduleFileExtensions: ['js', 'ts'], | ||||
|   testEnvironment: 'node', | ||||
|   testMatch: ['**/*.test.ts'], | ||||
|   testRunner: 'jest-circus/runner', | ||||
|   transform: { | ||||
|     '^.+\\.ts$': 'ts-jest' | ||||
|   }, | ||||
|   verbose: true | ||||
| } | ||||
|  | ||||
| const processStdoutWrite = process.stdout.write.bind(process.stdout); | ||||
| // eslint-disable-next-line @typescript-eslint/explicit-function-return-type | ||||
| const processStdoutWrite = process.stdout.write.bind(process.stdout) | ||||
| process.stdout.write = (str, encoding, cb) => { | ||||
|     // Core library will directly call process.stdout.write for commands | ||||
|     // We don't want :: commands to be executed by the runner during tests | ||||
|     if (!str.match(/^::/)) { | ||||
|         return processStdoutWrite(str, encoding, cb); | ||||
|     } | ||||
| }; | ||||
|   // Core library will directly call process.stdout.write for commands | ||||
|   // We don't want :: commands to be executed by the runner during tests | ||||
|   if (!str.match(/^::/)) { | ||||
|     return processStdoutWrite(str, encoding, cb); | ||||
|   } | ||||
| } | ||||
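
The guard above keeps `::` workflow commands emitted by `@actions/core` from reaching the runner during tests. A two-line illustration, with invented sample strings:

```typescript
// Strings beginning with "::" are workflow commands; the guard swallows them.
process.stdout.write("::set-output name=dir::/home/runner/.npm\n"); // suppressed
process.stdout.write("ordinary test log line\n");                   // passes through
```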
							
								
								
									
package-lock.json (generated): 1037 lines changed (diff suppressed: file too large)
package.json: 16 lines changed
							| @@ -1,13 +1,12 @@ | ||||
| { | ||||
|   "name": "cache", | ||||
|   "version": "1.1.0", | ||||
|   "version": "1.0.1", | ||||
|   "private": true, | ||||
|   "description": "Cache dependencies and build outputs", | ||||
|   "main": "dist/restore/index.js", | ||||
|   "scripts": { | ||||
|     "build": "tsc", | ||||
|     "test": "tsc --noEmit && jest --coverage", | ||||
|     "lint": "eslint **/*.ts --cache", | ||||
|     "format": "prettier --write **/*.ts", | ||||
|     "format-check": "prettier --check **/*.ts", | ||||
|     "release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/" | ||||
| @@ -32,22 +31,13 @@ | ||||
|   }, | ||||
|   "devDependencies": { | ||||
|     "@types/jest": "^24.0.13", | ||||
|     "@types/nock": "^11.1.0", | ||||
|     "@types/node": "^12.0.4", | ||||
|     "@types/uuid": "^3.4.5", | ||||
|     "@typescript-eslint/eslint-plugin": "^2.7.0", | ||||
|     "@typescript-eslint/parser": "^2.7.0", | ||||
|     "@zeit/ncc": "^0.20.5", | ||||
|     "eslint": "^6.6.0", | ||||
|     "eslint-config-prettier": "^6.5.0", | ||||
|     "eslint-plugin-import": "^2.18.2", | ||||
|     "eslint-plugin-jest": "^23.0.3", | ||||
|     "eslint-plugin-prettier": "^3.1.1", | ||||
|     "jest": "^24.8.0", | ||||
|     "jest-circus": "^24.7.1", | ||||
|     "nock": "^11.7.0", | ||||
|     "prettier": "^1.19.1", | ||||
|     "prettier": "1.18.2", | ||||
|     "ts-jest": "^24.0.2", | ||||
|     "typescript": "^3.7.3" | ||||
|     "typescript": "^3.6.4" | ||||
|   } | ||||
| } | ||||
|   | ||||
src/cacheHttpClient.ts
| @@ -1,67 +1,27 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as fs from "fs"; | ||||
|  | ||||
| import { BearerCredentialHandler } from "typed-rest-client/Handlers"; | ||||
| import { HttpClient } from "typed-rest-client/HttpClient"; | ||||
| import { IHttpClientResponse } from "typed-rest-client/Interfaces"; | ||||
| import { | ||||
|     IRequestOptions, | ||||
|     RestClient, | ||||
|     IRestResponse | ||||
| } from "typed-rest-client/RestClient"; | ||||
| import { | ||||
|     ArtifactCacheEntry, | ||||
|     CommitCacheRequest, | ||||
|     ReserveCacheRequest, | ||||
|     ReserverCacheResponse | ||||
| } from "./contracts"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| import { RestClient, IRequestOptions } from "typed-rest-client/RestClient"; | ||||
|  | ||||
| function isSuccessStatusCode(statusCode: number): boolean { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
| function getCacheApiUrl(): string { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     const baseUrl: string = ( | ||||
|         process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "" | ||||
|     ).replace("pipelines", "artifactcache"); | ||||
|     if (!baseUrl) { | ||||
|         throw new Error( | ||||
|             "Cache Service Url not found, unable to restore cache." | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     core.debug(`Cache Url: ${baseUrl}`); | ||||
|     return `${baseUrl}_apis/artifactcache/`; | ||||
| } | ||||
|  | ||||
| function createAcceptHeader(type: string, apiVersion: string): string { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
|  | ||||
| function getRequestOptions(): IRequestOptions { | ||||
|     const requestOptions: IRequestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||
|     }; | ||||
|  | ||||
|     return requestOptions; | ||||
| } | ||||
|  | ||||
| function createRestClient(): RestClient { | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
|  | ||||
|     return new RestClient("actions/cache", getCacheApiUrl(), [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
| } | ||||
| import { ArtifactCacheEntry } from "./contracts"; | ||||
|  | ||||
| export async function getCacheEntry( | ||||
|     keys: string[] | ||||
| ): Promise<ArtifactCacheEntry | null> { | ||||
|     const restClient = createRestClient(); | ||||
|     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|     const cacheUrl = getCacheUrl(); | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
|  | ||||
|     const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent( | ||||
|         keys.join(",") | ||||
|     )}`; | ||||
|  | ||||
|     const restClient = new RestClient("actions/cache", cacheUrl, [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
|  | ||||
|     const response = await restClient.get<ArtifactCacheEntry>( | ||||
|         resource, | ||||
| @@ -70,21 +30,29 @@ export async function getCacheEntry( | ||||
|     if (response.statusCode === 204) { | ||||
|         return null; | ||||
|     } | ||||
|     if (!isSuccessStatusCode(response.statusCode)) { | ||||
|     if (response.statusCode !== 200) { | ||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|     } | ||||
|     const cacheResult = response.result; | ||||
|     const cacheDownloadUrl = cacheResult?.archiveLocation; | ||||
|     if (!cacheDownloadUrl) { | ||||
|         throw new Error("Cache not found."); | ||||
|     } | ||||
|     core.setSecret(cacheDownloadUrl); | ||||
|     core.debug(`Cache Result:`); | ||||
|     core.debug(JSON.stringify(cacheResult)); | ||||
|     if (!cacheResult || !cacheResult.archiveLocation) { | ||||
|         throw new Error("Cache not found."); | ||||
|     } | ||||
|  | ||||
|     return cacheResult; | ||||
| } | ||||
|  | ||||
| export async function downloadCache( | ||||
|     cacheEntry: ArtifactCacheEntry, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const stream = fs.createWriteStream(archivePath); | ||||
|     const httpClient = new HttpClient("actions/cache"); | ||||
|     const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); | ||||
|     await pipeResponseToStream(downloadResponse, stream); | ||||
| } | ||||
|  | ||||
| async function pipeResponseToStream( | ||||
|     response: IHttpClientResponse, | ||||
|     stream: NodeJS.WritableStream | ||||
| @@ -96,167 +64,61 @@ async function pipeResponseToStream( | ||||
|     }); | ||||
| } | ||||
|  | ||||
| export async function downloadCache( | ||||
|     archiveLocation: string, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const stream = fs.createWriteStream(archivePath); | ||||
|     const httpClient = new HttpClient("actions/cache"); | ||||
|     const downloadResponse = await httpClient.get(archiveLocation); | ||||
|     await pipeResponseToStream(downloadResponse, stream); | ||||
| } | ||||
| export async function saveCache(stream: NodeJS.ReadableStream, key: string) { | ||||
|     const cacheUrl = getCacheUrl(); | ||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||
|  | ||||
| // Reserve Cache | ||||
| export async function reserveCache(key: string): Promise<number> { | ||||
|     const restClient = createRestClient(); | ||||
|     const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; | ||||
|     const postUrl = cacheUrl + resource; | ||||
|  | ||||
|     const reserveCacheRequest: ReserveCacheRequest = { | ||||
|         key | ||||
|     }; | ||||
|     const response = await restClient.create<ReserverCacheResponse>( | ||||
|         "caches", | ||||
|         reserveCacheRequest, | ||||
|         getRequestOptions() | ||||
|     ); | ||||
|     const restClient = new RestClient("actions/cache", undefined, [ | ||||
|         bearerCredentialHandler | ||||
|     ]); | ||||
|  | ||||
|     return response?.result?.cacheId ?? -1; | ||||
| } | ||||
|  | ||||
| function getContentRange(start: number, end: number): string { | ||||
|     // Format: `bytes start-end/filesize` | ||||
|     // start and end are inclusive | ||||
|     // filesize can be * | ||||
|     // For a 200 byte chunk starting at byte 0: | ||||
|     // Content-Range: bytes 0-199/* | ||||
|     return `bytes ${start}-${end}/*`; | ||||
| } | ||||
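
Since start and end are inclusive and the removed uploader slices the archive into 32,000,000-byte chunks, the ranges for a hypothetical 70,000,000-byte archive work out as in this sketch:

```typescript
const MAX_CHUNK_SIZE = 32000000;   // chunk size used by the removed uploader
const fileSize = 70000000;         // hypothetical archive size

for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
    const end = Math.min(offset + MAX_CHUNK_SIZE, fileSize) - 1; // inclusive end byte
    console.log(`bytes ${offset}-${end}/*`);
}
// Prints: bytes 0-31999999/*, bytes 32000000-63999999/*, bytes 64000000-69999999/*
```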
|  | ||||
| async function uploadChunk( | ||||
|     restClient: RestClient, | ||||
|     resourceUrl: string, | ||||
|     data: NodeJS.ReadableStream, | ||||
|     start: number, | ||||
|     end: number | ||||
| ): Promise<IRestResponse<void>> { | ||||
|     core.debug( | ||||
|         `Uploading chunk of size ${end - | ||||
|             start + | ||||
|             1} bytes at offset ${start} with content range: ${getContentRange( | ||||
|             start, | ||||
|             end | ||||
|         )}` | ||||
|     ); | ||||
|     const requestOptions = getRequestOptions(); | ||||
|     requestOptions.additionalHeaders = { | ||||
|         "Content-Type": "application/octet-stream", | ||||
|         "Content-Range": getContentRange(start, end) | ||||
|         "Content-Type": "application/octet-stream" | ||||
|     }; | ||||
|  | ||||
|     return await restClient.uploadStream<void>( | ||||
|         "PATCH", | ||||
|         resourceUrl, | ||||
|         data, | ||||
|     const response = await restClient.uploadStream<void>( | ||||
|         "POST", | ||||
|         postUrl, | ||||
|         stream, | ||||
|         requestOptions | ||||
|     ); | ||||
| } | ||||
|  | ||||
| async function uploadFile( | ||||
|     restClient: RestClient, | ||||
|     cacheId: number, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     // Upload Chunks | ||||
|     const fileSize = fs.statSync(archivePath).size; | ||||
|     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||
|     const responses: IRestResponse<void>[] = []; | ||||
|     const fd = fs.openSync(archivePath, "r"); | ||||
|  | ||||
|     const concurrency = 4; // # of HTTP requests in parallel | ||||
|     const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks | ||||
|     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||
|  | ||||
|     const parallelUploads = [...new Array(concurrency).keys()]; | ||||
|     core.debug("Awaiting all uploads"); | ||||
|     let offset = 0; | ||||
|     await Promise.all( | ||||
|         parallelUploads.map(async () => { | ||||
|             while (offset < fileSize) { | ||||
|                 const chunkSize = | ||||
|                     offset + MAX_CHUNK_SIZE > fileSize | ||||
|                         ? fileSize - offset | ||||
|                         : MAX_CHUNK_SIZE; | ||||
|                 const start = offset; | ||||
|                 const end = offset + chunkSize - 1; | ||||
|                 offset += MAX_CHUNK_SIZE; | ||||
|                 const chunk = fs.createReadStream(archivePath, { | ||||
|                     fd, | ||||
|                     start, | ||||
|                     end, | ||||
|                     autoClose: false | ||||
|                 }); | ||||
|                 responses.push( | ||||
|                     await uploadChunk( | ||||
|                         restClient, | ||||
|                         resourceUrl, | ||||
|                         chunk, | ||||
|                         start, | ||||
|                         end | ||||
|                     ) | ||||
|                 ); | ||||
|             } | ||||
|         }) | ||||
|     ); | ||||
|  | ||||
|     fs.closeSync(fd); | ||||
|  | ||||
|     const failedResponse = responses.find( | ||||
|         x => !isSuccessStatusCode(x.statusCode) | ||||
|     ); | ||||
|     if (failedResponse) { | ||||
|         throw new Error( | ||||
|             `Cache service responded with ${failedResponse.statusCode} during chunk upload.` | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     return; | ||||
| } | ||||
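A note on the worker pool in `uploadFile`: Node runs these callbacks on a single thread, and each worker reads and advances the shared `offset` synchronously, before its first `await`, so no two workers can claim overlapping byte ranges. The bound computation is equivalent to this restatement (illustrative only):

```typescript
// Same bounds as uploadFile computes, restated with Math.min:
function chunkBounds(offset: number, fileSize: number, maxChunk = 32000000) {
    const chunkSize = Math.min(maxChunk, fileSize - offset); // final chunk may be short
    return { start: offset, end: offset + chunkSize - 1 };   // both bounds inclusive
}

chunkBounds(64000000, 70000000); // => { start: 64000000, end: 69999999 }
```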
|  | ||||
| async function commitCache( | ||||
|     restClient: RestClient, | ||||
|     cacheId: number, | ||||
|     filesize: number | ||||
| ): Promise<IRestResponse<void>> { | ||||
|     const requestOptions = getRequestOptions(); | ||||
|     const commitCacheRequest: CommitCacheRequest = { size: filesize }; | ||||
|     return await restClient.create( | ||||
|         `caches/${cacheId.toString()}`, | ||||
|         commitCacheRequest, | ||||
|         requestOptions | ||||
|     ); | ||||
| } | ||||
|  | ||||
| export async function saveCache( | ||||
|     cacheId: number, | ||||
|     archivePath: string | ||||
| ): Promise<void> { | ||||
|     const restClient = createRestClient(); | ||||
|  | ||||
|     core.debug("Upload cache"); | ||||
|     await uploadFile(restClient, cacheId, archivePath); | ||||
|  | ||||
|     // Commit Cache | ||||
|     core.debug("Commiting cache"); | ||||
|     const cacheSize = utils.getArchiveFileSize(archivePath); | ||||
|     const commitCacheResponse = await commitCache( | ||||
|         restClient, | ||||
|         cacheId, | ||||
|         cacheSize | ||||
|     ); | ||||
|     if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||
|         throw new Error( | ||||
|             `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` | ||||
|         ); | ||||
|     if (response.statusCode !== 200) { | ||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||
|     } | ||||
|  | ||||
|     core.info("Cache saved successfully"); | ||||
| } | ||||
|  | ||||
| function getRequestOptions(): IRequestOptions { | ||||
|     const requestOptions: IRequestOptions = { | ||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") | ||||
|     }; | ||||
|  | ||||
|     return requestOptions; | ||||
| } | ||||
|  | ||||
| function createAcceptHeader(type: string, apiVersion: string): string { | ||||
|     return `${type};api-version=${apiVersion}`; | ||||
| } | ||||
|  | ||||
| function getCacheUrl(): string { | ||||
|     // Ideally we just use ACTIONS_CACHE_URL | ||||
|     let cacheUrl: string = ( | ||||
|         process.env["ACTIONS_CACHE_URL"] || | ||||
|         process.env["ACTIONS_RUNTIME_URL"] || | ||||
|         "" | ||||
|     ).replace("pipelines", "artifactcache"); | ||||
|     if (!cacheUrl) { | ||||
|         throw new Error( | ||||
|             "Cache Service Url not found, unable to restore cache." | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     core.debug(`Cache Url: ${cacheUrl}`); | ||||
|     return cacheUrl; | ||||
| } | ||||
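An illustrative trace of `getCacheUrl` — the host shape is an assumption; real runtime URLs vary:

```typescript
// process.env["ACTIONS_RUNTIME_URL"] = "https://pipelines.actions.githubusercontent.com/AbCdE12345/"
// String#replace with a string pattern rewrites only the first occurrence:
getCacheUrl(); // => "https://artifactcache.actions.githubusercontent.com/AbCdE12345/"
```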
|   | ||||
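src/constants.ts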
| @@ -1,20 +1,14 @@ | ||||
| export enum Inputs { | ||||
|     Key = "key", | ||||
|     Path = "path", | ||||
|     RestoreKeys = "restore-keys" | ||||
| export namespace Inputs { | ||||
|     export const Key = "key"; | ||||
|     export const Path = "path"; | ||||
|     export const RestoreKeys = "restore-keys"; | ||||
| } | ||||
|  | ||||
| export enum Outputs { | ||||
|     CacheHit = "cache-hit" | ||||
| export namespace Outputs { | ||||
|     export const CacheHit = "cache-hit"; | ||||
| } | ||||
|  | ||||
| export enum State { | ||||
|     CacheKey = "CACHE_KEY", | ||||
|     CacheResult = "CACHE_RESULT" | ||||
| } | ||||
|  | ||||
| export enum Events { | ||||
|     Key = "GITHUB_EVENT_NAME", | ||||
|     Push = "push", | ||||
|     PullRequest = "pull_request" | ||||
| export namespace State { | ||||
|     export const CacheKey = "CACHE_KEY"; | ||||
|     export const CacheResult = "CACHE_RESULT"; | ||||
| } | ||||
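Both declaration styles above are interchangeable at the call site; a minimal sketch assuming `@actions/core`:

```typescript
import * as core from "@actions/core";
import { Inputs } from "./constants";

// Inputs.Key evaluates to the string "key" under either declaration;
// with the enum it is typed as Inputs, with the namespace as string.
const key = core.getInput(Inputs.Key, { required: true });
```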
|   | ||||
src/contracts.d.ts (13 changed lines, vendored)
							| @@ -4,16 +4,3 @@ export interface ArtifactCacheEntry { | ||||
|     creationTime?: string; | ||||
|     archiveLocation?: string; | ||||
| } | ||||
|  | ||||
| export interface CommitCacheRequest { | ||||
|     size: number; | ||||
| } | ||||
|  | ||||
| export interface ReserveCacheRequest { | ||||
|     key: string; | ||||
|     version?: string; | ||||
| } | ||||
|  | ||||
| export interface ReserveCacheResponse { | ||||
|     cacheId: number; | ||||
| } | ||||
|   | ||||
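src/restore.ts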
| @@ -1,26 +1,18 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import { exec } from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
|  | ||||
| import * as fs from "fs"; | ||||
| import * as path from "path"; | ||||
|  | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { Inputs, State } from "./constants"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
|  | ||||
| async function run(): Promise<void> { | ||||
| async function run() { | ||||
|     try { | ||||
|         // Validate inputs; this can cause task failure | ||||
|         if (!utils.isValidEvent()) { | ||||
|             utils.logWarning( | ||||
|                 `Event Validation Error: The event type ${ | ||||
|                     process.env[Events.Key] | ||||
|                 } is not supported. Only ${utils | ||||
|                     .getSupportedEvents() | ||||
|                     .join(", ")} events are supported at this time.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         const cachePath = utils.resolvePath( | ||||
|         let cachePath = utils.resolvePath( | ||||
|             core.getInput(Inputs.Path, { required: true }) | ||||
|         ); | ||||
|         core.debug(`Cache Path: ${cachePath}`); | ||||
| @@ -61,14 +53,14 @@ async function run(): Promise<void> { | ||||
|  | ||||
|         try { | ||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); | ||||
|             if (!cacheEntry || !cacheEntry?.archiveLocation) { | ||||
|             if (!cacheEntry) { | ||||
|                 core.info( | ||||
|                     `Cache not found for input keys: ${keys.join(", ")}.` | ||||
|                 ); | ||||
|                 return; | ||||
|             } | ||||
|  | ||||
|             const archivePath = path.join( | ||||
|             let archivePath = path.join( | ||||
|                 await utils.createTempDirectory(), | ||||
|                 "cache.tgz" | ||||
|             ); | ||||
| @@ -78,38 +70,28 @@ async function run(): Promise<void> { | ||||
|             utils.setCacheState(cacheEntry); | ||||
|  | ||||
|             // Download the cache from the cache entry | ||||
|             await cacheHttpClient.downloadCache( | ||||
|                 cacheEntry?.archiveLocation, | ||||
|                 archivePath | ||||
|             ); | ||||
|             await cacheHttpClient.downloadCache(cacheEntry, archivePath); | ||||
|  | ||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|             core.info( | ||||
|                 `Cache Size: ~${Math.round( | ||||
|                     archiveFileSize / (1024 * 1024) | ||||
|                 )} MB (${archiveFileSize} B)` | ||||
|             ); | ||||
|  | ||||
|             // Create directory to extract tar into | ||||
|             await io.mkdirP(cachePath); | ||||
|             io.mkdirP(cachePath); | ||||
|  | ||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|             // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|             const args = ["-xz"]; | ||||
|  | ||||
|             const IS_WINDOWS = process.platform === "win32"; | ||||
|             const args = IS_WINDOWS | ||||
|                 ? [ | ||||
|                       "-xz", | ||||
|                       "--force-local", | ||||
|                       "-f", | ||||
|                       archivePath.replace(/\\/g, "/"), | ||||
|                       "-C", | ||||
|                       cachePath.replace(/\\/g, "/") | ||||
|                   ] | ||||
|                 : ["-xz", "-f", archivePath, "-C", cachePath]; | ||||
|             if (IS_WINDOWS) { | ||||
|                 args.push("--force-local"); | ||||
|                 archivePath = archivePath.replace(/\\/g, "/"); | ||||
|                 cachePath = cachePath.replace(/\\/g, "/"); | ||||
|             } | ||||
|             args.push(...["-f", archivePath, "-C", cachePath]); | ||||
|  | ||||
|             const tarPath = await io.which("tar", true); | ||||
|             core.debug(`Tar Path: ${tarPath}`); | ||||
|  | ||||
|             const archiveFileSize = fs.statSync(archivePath).size; | ||||
|             core.debug(`File Size: ${archiveFileSize}`); | ||||
|  | ||||
|             await exec(`"${tarPath}"`, args); | ||||
|  | ||||
|             const isExactKeyMatch = utils.isExactKeyMatch( | ||||
| @@ -122,7 +104,7 @@ async function run(): Promise<void> { | ||||
|                 `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}` | ||||
|             ); | ||||
|         } catch (error) { | ||||
|             utils.logWarning(error.message); | ||||
|             core.warning(error.message); | ||||
|             utils.setCacheHitOutput(false); | ||||
|         } | ||||
|     } catch (error) { | ||||
|   | ||||
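For reference, the incremental argument building above (and its `-cz` counterpart in `save.ts` below) yields command lines like the following; the paths are hypothetical:

```
# Linux/macOS extract
tar -xz -f /tmp/tmpdir/cache.tgz -C /home/runner/work/repo/node_modules

# Windows extract: backslashes are rewritten to forward slashes, and
# --force-local stops GNU tar from parsing the "C:" drive prefix as a remote host
tar -xz --force-local -f C:/Users/runner/tmpdir/cache.tgz -C C:/work/repo/node_modules
```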
src/save.ts (68 changed lines)
							| @@ -1,30 +1,22 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import { exec } from "@actions/exec"; | ||||
|  | ||||
| import * as io from "@actions/io"; | ||||
| import * as fs from "fs"; | ||||
| import * as path from "path"; | ||||
|  | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { Inputs, State } from "./constants"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
|  | ||||
| async function run(): Promise<void> { | ||||
| async function run() { | ||||
|     try { | ||||
|         if (!utils.isValidEvent()) { | ||||
|             utils.logWarning( | ||||
|                 `Event Validation Error: The event type ${ | ||||
|                     process.env[Events.Key] | ||||
|                 } is not supported. Only ${utils | ||||
|                     .getSupportedEvents() | ||||
|                     .join(", ")} events are supported at this time.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         const state = utils.getCacheState(); | ||||
|  | ||||
|         // Inputs are re-evaluated before the post action, so we want the original key used for restore | ||||
|         const primaryKey = core.getState(State.CacheKey); | ||||
|         if (!primaryKey) { | ||||
|             utils.logWarning(`Error retrieving key from state.`); | ||||
|             core.warning(`Error retrieving key from state.`); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
| @@ -35,21 +27,12 @@ async function run(): Promise<void> { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         core.debug("Reserving Cache"); | ||||
|         const cacheId = await cacheHttpClient.reserveCache(primaryKey); | ||||
|         if (cacheId < 0) { | ||||
|             core.info( | ||||
|                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|         core.debug(`Cache ID: ${cacheId}`); | ||||
|         const cachePath = utils.resolvePath( | ||||
|         let cachePath = utils.resolvePath( | ||||
|             core.getInput(Inputs.Path, { required: true }) | ||||
|         ); | ||||
|         core.debug(`Cache Path: ${cachePath}`); | ||||
|  | ||||
|         const archivePath = path.join( | ||||
|         let archivePath = path.join( | ||||
|             await utils.createTempDirectory(), | ||||
|             "cache.tgz" | ||||
|         ); | ||||
| @@ -57,39 +40,34 @@ async function run(): Promise<void> { | ||||
|  | ||||
|         // http://man7.org/linux/man-pages/man1/tar.1.html | ||||
|         // tar [-options] <name of the tar archive> [files or directories which to add into archive] | ||||
|         const args = ["-cz"]; | ||||
|         const IS_WINDOWS = process.platform === "win32"; | ||||
|         const args = IS_WINDOWS | ||||
|             ? [ | ||||
|                   "-cz", | ||||
|                   "--force-local", | ||||
|                   "-f", | ||||
|                   archivePath.replace(/\\/g, "/"), | ||||
|                   "-C", | ||||
|                   cachePath.replace(/\\/g, "/"), | ||||
|                   "." | ||||
|               ] | ||||
|             : ["-cz", "-f", archivePath, "-C", cachePath, "."]; | ||||
|         if (IS_WINDOWS) { | ||||
|             args.push("--force-local"); | ||||
|             archivePath = archivePath.replace(/\\/g, "/"); | ||||
|             cachePath = cachePath.replace(/\\/g, "/"); | ||||
|         } | ||||
|  | ||||
|         args.push(...["-f", archivePath, "-C", cachePath, "."]); | ||||
|  | ||||
|         const tarPath = await io.which("tar", true); | ||||
|         core.debug(`Tar Path: ${tarPath}`); | ||||
|         await exec(`"${tarPath}"`, args); | ||||
|  | ||||
|         const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit | ||||
|         const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|         const fileSizeLimit = 400 * 1024 * 1024; // 400MB | ||||
|         const archiveFileSize = fs.statSync(archivePath).size; | ||||
|         core.debug(`File Size: ${archiveFileSize}`); | ||||
|         if (archiveFileSize > fileSizeLimit) { | ||||
|             utils.logWarning( | ||||
|                 `Cache size of ~${Math.round( | ||||
|                     archiveFileSize / (1024 * 1024 * 1024) | ||||
|                 )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.` | ||||
|             core.warning( | ||||
|                 `Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.` | ||||
|             ); | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         core.debug("Saving Cache"); | ||||
|         await cacheHttpClient.saveCache(cacheId, archivePath); | ||||
|         const stream = fs.createReadStream(archivePath); | ||||
|         await cacheHttpClient.saveCache(stream, primaryKey); | ||||
|     } catch (error) { | ||||
|         utils.logWarning(error.message); | ||||
|         core.warning(error.message); | ||||
|     } | ||||
| } | ||||
|  | ||||
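For reference, the two size guards seen in this hunk work out to:

```typescript
const v101Limit = 400 * 1024 * 1024;       // 419430400 bytes (400 MiB), kept in v1.0.1
const laterLimit = 2 * 1024 * 1024 * 1024; // 2147483648 bytes (2 GiB), the removed per-repo limit
```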
|   | ||||
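src/utils/actionUtils.ts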
| @@ -1,11 +1,10 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as fs from "fs"; | ||||
| import * as os from "os"; | ||||
| import * as path from "path"; | ||||
| import * as uuidV4 from "uuid/v4"; | ||||
|  | ||||
| import { Events, Outputs, State } from "../constants"; | ||||
| import { Outputs, State } from "../constants"; | ||||
| import { ArtifactCacheEntry } from "../contracts"; | ||||
|  | ||||
| // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 | ||||
| @@ -33,10 +32,6 @@ export async function createTempDirectory(): Promise<string> { | ||||
|     return dest; | ||||
| } | ||||
|  | ||||
| export function getArchiveFileSize(path: string): number { | ||||
|     return fs.statSync(path).size; | ||||
| } | ||||
|  | ||||
| export function isExactKeyMatch( | ||||
|     key: string, | ||||
|     cacheResult?: ArtifactCacheEntry | ||||
| @@ -50,18 +45,10 @@ export function isExactKeyMatch( | ||||
|     ); | ||||
| } | ||||
|  | ||||
| export function setCacheState(state: ArtifactCacheEntry): void { | ||||
|     core.saveState(State.CacheResult, JSON.stringify(state)); | ||||
| } | ||||
|  | ||||
| export function setCacheHitOutput(isCacheHit: boolean): void { | ||||
|     core.setOutput(Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
|  | ||||
| export function setOutputAndState( | ||||
|     key: string, | ||||
|     cacheResult?: ArtifactCacheEntry | ||||
| ): void { | ||||
| ) { | ||||
|     setCacheHitOutput(isExactKeyMatch(key, cacheResult)); | ||||
|     // Store the cache result if it exists | ||||
|     cacheResult && setCacheState(cacheResult); | ||||
| @@ -70,16 +57,15 @@ export function setOutputAndState( | ||||
| export function getCacheState(): ArtifactCacheEntry | undefined { | ||||
|     const stateData = core.getState(State.CacheResult); | ||||
|     core.debug(`State: ${stateData}`); | ||||
|     if (stateData) { | ||||
|         return JSON.parse(stateData) as ArtifactCacheEntry; | ||||
|     } | ||||
|  | ||||
|     return undefined; | ||||
|     return (stateData && JSON.parse(stateData)) as ArtifactCacheEntry; | ||||
| } | ||||
|  | ||||
| export function logWarning(message: string): void { | ||||
|     const warningPrefix = "[warning]"; | ||||
|     core.info(`${warningPrefix}${message}`); | ||||
| export function setCacheState(state: ArtifactCacheEntry) { | ||||
|     core.saveState(State.CacheResult, JSON.stringify(state)); | ||||
| } | ||||
|  | ||||
| export function setCacheHitOutput(isCacheHit: boolean) { | ||||
|     core.setOutput(Outputs.CacheHit, isCacheHit.toString()); | ||||
| } | ||||
|  | ||||
| export function resolvePath(filePath: string): string { | ||||
| @@ -93,15 +79,3 @@ export function resolvePath(filePath: string): string { | ||||
|  | ||||
|     return path.resolve(filePath); | ||||
| } | ||||
|  | ||||
| export function getSupportedEvents(): string[] { | ||||
|     return [Events.Push, Events.PullRequest]; | ||||
| } | ||||
|  | ||||
| // Currently the cache token is only authorized for push and pull_request events | ||||
| // All other events will fail when reading and saving the cache | ||||
| // See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context | ||||
| export function isValidEvent(): boolean { | ||||
|     const githubEvent = process.env[Events.Key] || ""; | ||||
|     return getSupportedEvents().includes(githubEvent); | ||||
| } | ||||
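The removed guard above gated both the restore and save entry points on the triggering event. A usage sketch (the environment value is hypothetical):

```typescript
process.env["GITHUB_EVENT_NAME"] = "schedule";
isValidEvent(); // => false: callers log a warning and skip all cache work

process.env["GITHUB_EVENT_NAME"] = "push";
isValidEvent(); // => true
```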
|   | ||||
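src/utils/testUtils.ts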
| @@ -1,29 +1,7 @@ | ||||
| import { Inputs } from "../constants"; | ||||
|  | ||||
| // See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67 | ||||
| function getInputName(name: string): string { | ||||
|     return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`; | ||||
| } | ||||
|  | ||||
| export function setInput(name: string, value: string): void { | ||||
| export function setInput(name: string, value: string) { | ||||
|     process.env[getInputName(name)] = value; | ||||
| } | ||||
|  | ||||
| interface CacheInput { | ||||
|     path: string; | ||||
|     key: string; | ||||
|     restoreKeys?: string[]; | ||||
| } | ||||
|  | ||||
| export function setInputs(input: CacheInput): void { | ||||
|     setInput(Inputs.Path, input.path); | ||||
|     setInput(Inputs.Key, input.key); | ||||
|     input.restoreKeys && | ||||
|         setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n")); | ||||
| } | ||||
|  | ||||
| export function clearInputs(): void { | ||||
|     delete process.env[getInputName(Inputs.Path)]; | ||||
|     delete process.env[getInputName(Inputs.Key)]; | ||||
|     delete process.env[getInputName(Inputs.RestoreKeys)]; | ||||
| } | ||||
|   | ||||
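The remaining helpers mirror how `@actions/core` maps workflow inputs to environment variables; for example (values hypothetical):

```typescript
getInputName("restore-keys");  // => "INPUT_RESTORE-KEYS"
setInput("key", "npm-abc123"); // sets process.env["INPUT_KEY"] = "npm-abc123"
// core.getInput("key") now returns "npm-abc123" inside a test
```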