mirror of
				https://gitea.com/actions/cache.git
				synced 2025-10-31 07:47:07 +00:00 
			
		
		
		
	Compare commits
	
		
			43 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | a505c2e7a6 | ||
|   | c262ac0154 | ||
|   | 10a14413e7 | ||
|   | cf4f44db70 | ||
|   | 4c4974aff1 | ||
|   | 1da52de10f | ||
|   | b45d91cc4b | ||
|   | a631fadf14 | ||
|   | e223b0a12d | ||
|   | decbafc350 | ||
|   | 3854a40aee | ||
|   | 0188dffc5a | ||
|   | 002d3a77f4 | ||
|   | 4809f4ada4 | ||
|   | 3d01b4eb53 | ||
|   | cffae9552b | ||
|   | 95c1798369 | ||
|   | 639f9d8b81 | ||
|   | d9fe1b81f9 | ||
|   | 92ae3b63f8 | ||
|   | 84b3b283f0 | ||
|   | 8d14a2150b | ||
|   | c0584c42d1 | ||
|   | bb828da54c | ||
|   | 7e7aef2963 | ||
|   | b7d83b4095 | ||
|   | 50a2fdee6f | ||
|   | f0cbadd748 | ||
|   | 4657a5f525 | ||
|   | fb50aa45ec | ||
|   | 31508256ff | ||
|   | bc821d0c12 | ||
|   | bde557aefd | ||
|   | 4b0709a0d5 | ||
|   | ecf6eea708 | ||
|   | eb10706a9d | ||
|   | 30524a6fbd | ||
|   | b034b26a44 | ||
|   | e1ed41a9c9 | ||
|   | 5f4d4d4555 | ||
|   | 5d3ad75a2b | ||
|   | d8c5e69fe2 | ||
|   | f66a56e59e | 
							
								
								
									
										29
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										29
									
								
								README.md
									
									
									
									
									
								
							| @@ -2,7 +2,7 @@ | |||||||
|  |  | ||||||
| This GitHub Action allows caching dependencies and build outputs to improve workflow execution time. | This GitHub Action allows caching dependencies and build outputs to improve workflow execution time. | ||||||
|  |  | ||||||
| <a href="https://github.com/actions/cache"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a> | <a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a> | ||||||
|  |  | ||||||
| ## Documentation | ## Documentation | ||||||
|  |  | ||||||
| @@ -35,7 +35,7 @@ on: push | |||||||
| jobs: | jobs: | ||||||
|   build: |   build: | ||||||
|     runs-on: ubuntu-latest |     runs-on: ubuntu-latest | ||||||
|      |  | ||||||
|     steps: |     steps: | ||||||
|     - uses: actions/checkout@v1 |     - uses: actions/checkout@v1 | ||||||
|  |  | ||||||
| @@ -49,14 +49,31 @@ jobs: | |||||||
|     - name: Generate Prime Numbers |     - name: Generate Prime Numbers | ||||||
|       if: steps.cache-primes.outputs.cache-hit != 'true' |       if: steps.cache-primes.outputs.cache-hit != 'true' | ||||||
|       run: /generate-primes.sh -d prime-numbers |       run: /generate-primes.sh -d prime-numbers | ||||||
|      |  | ||||||
|     - name: Use Prime Numbers |     - name: Use Prime Numbers | ||||||
|       run: /primes.sh -d prime-numbers |       run: /primes.sh -d prime-numbers | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
| ## Ecosystem Examples | ## Implementation Examples | ||||||
|  |  | ||||||
|  | Every programming language and framework has its own way of caching. | ||||||
|  |  | ||||||
|  | See [Examples](examples.md) for a list of `actions/cache` implementations for use with: | ||||||
|  |  | ||||||
|  | - [C# - Nuget](./examples.md#c---nuget) | ||||||
|  | - [Elixir - Mix](./examples.md#elixir---mix) | ||||||
|  | - [Go - Modules](./examples.md#go---modules) | ||||||
|  | - [Java - Gradle](./examples.md#java---gradle) | ||||||
|  | - [Java - Maven](./examples.md#java---maven) | ||||||
|  | - [Node - npm](./examples.md#node---npm) | ||||||
|  | - [Node - Yarn](./examples.md#node---yarn) | ||||||
|  | - [PHP - Composer](./examples.md#php---composer) | ||||||
|  | - [Python - pip](./examples.md#python---pip) | ||||||
|  | - [Ruby - Gem](./examples.md#ruby---gem) | ||||||
|  | - [Rust - Cargo](./examples.md#rust---cargo) | ||||||
|  | - [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage) | ||||||
|  | - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods) | ||||||
|  |  | ||||||
| See [Examples](examples.md) |  | ||||||
|  |  | ||||||
| ## Cache Limits | ## Cache Limits | ||||||
|  |  | ||||||
| @@ -76,7 +93,7 @@ steps: | |||||||
|     with: |     with: | ||||||
|       path: path/to/dependencies |       path: path/to/dependencies | ||||||
|       key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} |       key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} | ||||||
|    |  | ||||||
|   - name: Install Dependencies |   - name: Install Dependencies | ||||||
|     if: steps.cache.outputs.cache-hit != 'true' |     if: steps.cache.outputs.cache-hit != 'true' | ||||||
|     run: /install.sh |     run: /install.sh | ||||||
|   | |||||||
| @@ -162,6 +162,16 @@ test("getCacheState with valid state", () => { | |||||||
|     expect(getStateMock).toHaveBeenCalledTimes(1); |     expect(getStateMock).toHaveBeenCalledTimes(1); | ||||||
| }); | }); | ||||||
|  |  | ||||||
|  | test("logWarning logs a message with a warning prefix", () => { | ||||||
|  |     const message = "A warning occurred."; | ||||||
|  |  | ||||||
|  |     const infoMock = jest.spyOn(core, "info"); | ||||||
|  |  | ||||||
|  |     actionUtils.logWarning(message); | ||||||
|  |  | ||||||
|  |     expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`); | ||||||
|  | }); | ||||||
|  |  | ||||||
| test("isValidEvent returns false for unknown event", () => { | test("isValidEvent returns false for unknown event", () => { | ||||||
|     const event = "foo"; |     const event = "foo"; | ||||||
|     process.env[Events.Key] = event; |     process.env[Events.Key] = event; | ||||||
|   | |||||||
| @@ -1,18 +1,16 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import * as exec from "@actions/exec"; |  | ||||||
| import * as io from "@actions/io"; |  | ||||||
| import * as path from "path"; | import * as path from "path"; | ||||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||||
| import { Events, Inputs } from "../src/constants"; | import { Events, Inputs } from "../src/constants"; | ||||||
| import { ArtifactCacheEntry } from "../src/contracts"; | import { ArtifactCacheEntry } from "../src/contracts"; | ||||||
| import run from "../src/restore"; | import run from "../src/restore"; | ||||||
|  | import * as tar from "../src/tar"; | ||||||
| import * as actionUtils from "../src/utils/actionUtils"; | import * as actionUtils from "../src/utils/actionUtils"; | ||||||
| import * as testUtils from "../src/utils/testUtils"; | import * as testUtils from "../src/utils/testUtils"; | ||||||
|  |  | ||||||
| jest.mock("@actions/exec"); |  | ||||||
| jest.mock("@actions/io"); |  | ||||||
| jest.mock("../src/utils/actionUtils"); |  | ||||||
| jest.mock("../src/cacheHttpClient"); | jest.mock("../src/cacheHttpClient"); | ||||||
|  | jest.mock("../src/tar"); | ||||||
|  | jest.mock("../src/utils/actionUtils"); | ||||||
|  |  | ||||||
| beforeAll(() => { | beforeAll(() => { | ||||||
|     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { |     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { | ||||||
| @@ -35,10 +33,6 @@ beforeAll(() => { | |||||||
|         const actualUtils = jest.requireActual("../src/utils/actionUtils"); |         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||||
|         return actualUtils.getSupportedEvents(); |         return actualUtils.getSupportedEvents(); | ||||||
|     }); |     }); | ||||||
|  |  | ||||||
|     jest.spyOn(io, "which").mockImplementation(tool => { |  | ||||||
|         return Promise.resolve(tool); |  | ||||||
|     }); |  | ||||||
| }); | }); | ||||||
|  |  | ||||||
| beforeEach(() => { | beforeEach(() => { | ||||||
| @@ -50,14 +44,16 @@ afterEach(() => { | |||||||
|     delete process.env[Events.Key]; |     delete process.env[Events.Key]; | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("restore with invalid event", async () => { | test("restore with invalid event outputs warning", async () => { | ||||||
|  |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const invalidEvent = "commit_comment"; |     const invalidEvent = "commit_comment"; | ||||||
|     process.env[Events.Key] = invalidEvent; |     process.env[Events.Key] = invalidEvent; | ||||||
|     await run(); |     await run(); | ||||||
|     expect(failedMock).toHaveBeenCalledWith( |     expect(logWarningMock).toHaveBeenCalledWith( | ||||||
|         `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.` |         `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.` | ||||||
|     ); |     ); | ||||||
|  |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("restore with no path should fail", async () => { | test("restore with no path should fail", async () => { | ||||||
| @@ -126,7 +122,6 @@ test("restore with no cache found", async () => { | |||||||
|     }); |     }); | ||||||
|  |  | ||||||
|     const infoMock = jest.spyOn(core, "info"); |     const infoMock = jest.spyOn(core, "info"); | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const stateMock = jest.spyOn(core, "saveState"); |     const stateMock = jest.spyOn(core, "saveState"); | ||||||
|  |  | ||||||
| @@ -138,7 +133,6 @@ test("restore with no cache found", async () => { | |||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); |     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
|  |  | ||||||
|     expect(infoMock).toHaveBeenCalledWith( |     expect(infoMock).toHaveBeenCalledWith( | ||||||
| @@ -153,7 +147,7 @@ test("restore with server error should fail", async () => { | |||||||
|         key |         key | ||||||
|     }); |     }); | ||||||
|  |  | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const stateMock = jest.spyOn(core, "saveState"); |     const stateMock = jest.spyOn(core, "saveState"); | ||||||
|  |  | ||||||
| @@ -168,8 +162,8 @@ test("restore with server error should fail", async () => { | |||||||
|  |  | ||||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); |     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||||
|  |  | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(1); |     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(warningMock).toHaveBeenCalledWith("HTTP Error Occurred"); |     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); | ||||||
|  |  | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); |     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); |     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); | ||||||
| @@ -187,7 +181,6 @@ test("restore with restore keys and no cache found", async () => { | |||||||
|     }); |     }); | ||||||
|  |  | ||||||
|     const infoMock = jest.spyOn(core, "info"); |     const infoMock = jest.spyOn(core, "info"); | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const stateMock = jest.spyOn(core, "saveState"); |     const stateMock = jest.spyOn(core, "saveState"); | ||||||
|  |  | ||||||
| @@ -199,7 +192,6 @@ test("restore with restore keys and no cache found", async () => { | |||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
|     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); |     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
|  |  | ||||||
|     expect(infoMock).toHaveBeenCalledWith( |     expect(infoMock).toHaveBeenCalledWith( | ||||||
| @@ -216,7 +208,6 @@ test("restore with cache found", async () => { | |||||||
|     }); |     }); | ||||||
|  |  | ||||||
|     const infoMock = jest.spyOn(core, "info"); |     const infoMock = jest.spyOn(core, "info"); | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const stateMock = jest.spyOn(core, "saveState"); |     const stateMock = jest.spyOn(core, "saveState"); | ||||||
|  |  | ||||||
| @@ -248,8 +239,7 @@ test("restore with cache found", async () => { | |||||||
|         .spyOn(actionUtils, "getArchiveFileSize") |         .spyOn(actionUtils, "getArchiveFileSize") | ||||||
|         .mockReturnValue(fileSize); |         .mockReturnValue(fileSize); | ||||||
|  |  | ||||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); |     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |  | ||||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); |     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
| @@ -258,30 +248,19 @@ test("restore with cache found", async () => { | |||||||
|     expect(getCacheMock).toHaveBeenCalledWith([key]); |     expect(getCacheMock).toHaveBeenCalledWith([key]); | ||||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); |     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); |     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); |     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||||
|  |         cacheEntry.archiveLocation, | ||||||
|  |         archivePath | ||||||
|  |     ); | ||||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); |     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); |  | ||||||
|  |  | ||||||
|     const IS_WINDOWS = process.platform === "win32"; |     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||||
|     const args = IS_WINDOWS |     expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||||
|         ? [ |  | ||||||
|               "-xz", |  | ||||||
|               "--force-local", |  | ||||||
|               "-f", |  | ||||||
|               archivePath.replace(/\\/g, "/"), |  | ||||||
|               "-C", |  | ||||||
|               cachePath.replace(/\\/g, "/") |  | ||||||
|           ] |  | ||||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); |  | ||||||
|  |  | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); |     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); |     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||||
|  |  | ||||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); |     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| @@ -296,7 +275,6 @@ test("restore with a pull request event and cache found", async () => { | |||||||
|     process.env[Events.Key] = Events.PullRequest; |     process.env[Events.Key] = Events.PullRequest; | ||||||
|  |  | ||||||
|     const infoMock = jest.spyOn(core, "info"); |     const infoMock = jest.spyOn(core, "info"); | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const stateMock = jest.spyOn(core, "saveState"); |     const stateMock = jest.spyOn(core, "saveState"); | ||||||
|  |  | ||||||
| @@ -328,8 +306,7 @@ test("restore with a pull request event and cache found", async () => { | |||||||
|         .spyOn(actionUtils, "getArchiveFileSize") |         .spyOn(actionUtils, "getArchiveFileSize") | ||||||
|         .mockReturnValue(fileSize); |         .mockReturnValue(fileSize); | ||||||
|  |  | ||||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); |     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |  | ||||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); |     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
| @@ -338,31 +315,20 @@ test("restore with a pull request event and cache found", async () => { | |||||||
|     expect(getCacheMock).toHaveBeenCalledWith([key]); |     expect(getCacheMock).toHaveBeenCalledWith([key]); | ||||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); |     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); |     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); |     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||||
|  |         cacheEntry.archiveLocation, | ||||||
|  |         archivePath | ||||||
|  |     ); | ||||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); |     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); |     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); | ||||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); |  | ||||||
|  |  | ||||||
|     const IS_WINDOWS = process.platform === "win32"; |     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||||
|     const args = IS_WINDOWS |     expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||||
|         ? [ |  | ||||||
|               "-xz", |  | ||||||
|               "--force-local", |  | ||||||
|               "-f", |  | ||||||
|               archivePath.replace(/\\/g, "/"), |  | ||||||
|               "-C", |  | ||||||
|               cachePath.replace(/\\/g, "/") |  | ||||||
|           ] |  | ||||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); |  | ||||||
|  |  | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); |     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); |     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); | ||||||
|  |  | ||||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); |     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| @@ -377,7 +343,6 @@ test("restore with cache found for restore key", async () => { | |||||||
|     }); |     }); | ||||||
|  |  | ||||||
|     const infoMock = jest.spyOn(core, "info"); |     const infoMock = jest.spyOn(core, "info"); | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|     const stateMock = jest.spyOn(core, "saveState"); |     const stateMock = jest.spyOn(core, "saveState"); | ||||||
|  |  | ||||||
| @@ -409,8 +374,7 @@ test("restore with cache found for restore key", async () => { | |||||||
|         .spyOn(actionUtils, "getArchiveFileSize") |         .spyOn(actionUtils, "getArchiveFileSize") | ||||||
|         .mockReturnValue(fileSize); |         .mockReturnValue(fileSize); | ||||||
|  |  | ||||||
|     const mkdirMock = jest.spyOn(io, "mkdirP"); |     const extractTarMock = jest.spyOn(tar, "extractTar"); | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |  | ||||||
|     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); |     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); | ||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
| @@ -419,25 +383,15 @@ test("restore with cache found for restore key", async () => { | |||||||
|     expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); |     expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); | ||||||
|     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); |     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); | ||||||
|     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); |     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); |     expect(downloadCacheMock).toHaveBeenCalledWith( | ||||||
|  |         cacheEntry.archiveLocation, | ||||||
|  |         archivePath | ||||||
|  |     ); | ||||||
|     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); |     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); | ||||||
|     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); |     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); | ||||||
|     expect(mkdirMock).toHaveBeenCalledWith(cachePath); |  | ||||||
|  |  | ||||||
|     const IS_WINDOWS = process.platform === "win32"; |     expect(extractTarMock).toHaveBeenCalledTimes(1); | ||||||
|     const args = IS_WINDOWS |     expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||||
|         ? [ |  | ||||||
|               "-xz", |  | ||||||
|               "--force-local", |  | ||||||
|               "-f", |  | ||||||
|               archivePath.replace(/\\/g, "/"), |  | ||||||
|               "-C", |  | ||||||
|               cachePath.replace(/\\/g, "/") |  | ||||||
|           ] |  | ||||||
|         : ["-xz", "-f", archivePath, "-C", cachePath]; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); |  | ||||||
|  |  | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); |     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); |     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); | ||||||
| @@ -445,6 +399,5 @@ test("restore with cache found for restore key", async () => { | |||||||
|     expect(infoMock).toHaveBeenCalledWith( |     expect(infoMock).toHaveBeenCalledWith( | ||||||
|         `Cache restored from key: ${restoreKey}` |         `Cache restored from key: ${restoreKey}` | ||||||
|     ); |     ); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|   | |||||||
| @@ -1,19 +1,17 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import * as exec from "@actions/exec"; |  | ||||||
| import * as io from "@actions/io"; |  | ||||||
| import * as path from "path"; | import * as path from "path"; | ||||||
| import * as cacheHttpClient from "../src/cacheHttpClient"; | import * as cacheHttpClient from "../src/cacheHttpClient"; | ||||||
| import { Inputs } from "../src/constants"; | import { Events, Inputs } from "../src/constants"; | ||||||
| import { ArtifactCacheEntry } from "../src/contracts"; | import { ArtifactCacheEntry } from "../src/contracts"; | ||||||
| import run from "../src/save"; | import run from "../src/save"; | ||||||
|  | import * as tar from "../src/tar"; | ||||||
| import * as actionUtils from "../src/utils/actionUtils"; | import * as actionUtils from "../src/utils/actionUtils"; | ||||||
| import * as testUtils from "../src/utils/testUtils"; | import * as testUtils from "../src/utils/testUtils"; | ||||||
|  |  | ||||||
| jest.mock("@actions/core"); | jest.mock("@actions/core"); | ||||||
| jest.mock("@actions/exec"); |  | ||||||
| jest.mock("@actions/io"); |  | ||||||
| jest.mock("../src/utils/actionUtils"); |  | ||||||
| jest.mock("../src/cacheHttpClient"); | jest.mock("../src/cacheHttpClient"); | ||||||
|  | jest.mock("../src/tar"); | ||||||
|  | jest.mock("../src/utils/actionUtils"); | ||||||
|  |  | ||||||
| beforeAll(() => { | beforeAll(() => { | ||||||
|     jest.spyOn(core, "getInput").mockImplementation((name, options) => { |     jest.spyOn(core, "getInput").mockImplementation((name, options) => { | ||||||
| @@ -32,6 +30,16 @@ beforeAll(() => { | |||||||
|         } |         } | ||||||
|     ); |     ); | ||||||
|  |  | ||||||
|  |     jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { | ||||||
|  |         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||||
|  |         return actualUtils.isValidEvent(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|  |     jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => { | ||||||
|  |         const actualUtils = jest.requireActual("../src/utils/actionUtils"); | ||||||
|  |         return actualUtils.getSupportedEvents(); | ||||||
|  |     }); | ||||||
|  |  | ||||||
|     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { |     jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { | ||||||
|         return path.resolve(filePath); |         return path.resolve(filePath); | ||||||
|     }); |     }); | ||||||
| @@ -39,18 +47,31 @@ beforeAll(() => { | |||||||
|     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { |     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { | ||||||
|         return Promise.resolve("/foo/bar"); |         return Promise.resolve("/foo/bar"); | ||||||
|     }); |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|     jest.spyOn(io, "which").mockImplementation(tool => { | beforeEach(() => { | ||||||
|         return Promise.resolve(tool); |     process.env[Events.Key] = Events.Push; | ||||||
|     }); |  | ||||||
| }); | }); | ||||||
|  |  | ||||||
| afterEach(() => { | afterEach(() => { | ||||||
|     testUtils.clearInputs(); |     testUtils.clearInputs(); | ||||||
|  |     delete process.env[Events.Key]; | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test("save with invalid event outputs warning", async () => { | ||||||
|  |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|  |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |     const invalidEvent = "commit_comment"; | ||||||
|  |     process.env[Events.Key] = invalidEvent; | ||||||
|  |     await run(); | ||||||
|  |     expect(logWarningMock).toHaveBeenCalledWith( | ||||||
|  |         `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.` | ||||||
|  |     ); | ||||||
|  |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("save with no primary key in state outputs warning", async () => { | test("save with no primary key in state outputs warning", async () => { | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|     const cacheEntry: ArtifactCacheEntry = { |     const cacheEntry: ArtifactCacheEntry = { | ||||||
| @@ -72,16 +93,15 @@ test("save with no primary key in state outputs warning", async () => { | |||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
|     expect(warningMock).toHaveBeenCalledWith( |     expect(logWarningMock).toHaveBeenCalledWith( | ||||||
|         `Error retrieving key from state.` |         `Error retrieving key from state.` | ||||||
|     ); |     ); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(1); |     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("save with exact match returns early", async () => { | test("save with exact match returns early", async () => { | ||||||
|     const infoMock = jest.spyOn(core, "info"); |     const infoMock = jest.spyOn(core, "info"); | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; |     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||||
| @@ -102,7 +122,7 @@ test("save with exact match returns early", async () => { | |||||||
|             return primaryKey; |             return primaryKey; | ||||||
|         }); |         }); | ||||||
|  |  | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |     const createTarMock = jest.spyOn(tar, "createTar"); | ||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
| @@ -110,14 +130,13 @@ test("save with exact match returns early", async () => { | |||||||
|         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` |         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` | ||||||
|     ); |     ); | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(0); |     expect(createTarMock).toHaveBeenCalledTimes(0); | ||||||
|  |  | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("save with missing input outputs warning", async () => { | test("save with missing input outputs warning", async () => { | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; |     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||||
| @@ -140,15 +159,15 @@ test("save with missing input outputs warning", async () => { | |||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
|     expect(warningMock).toHaveBeenCalledWith( |     expect(logWarningMock).toHaveBeenCalledWith( | ||||||
|         "Input required and not supplied: path" |         "Input required and not supplied: path" | ||||||
|     ); |     ); | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(1); |     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("save with large cache outputs warning", async () => { | test("save with large cache outputs warning", async () => { | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; |     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||||
| @@ -173,9 +192,9 @@ test("save with large cache outputs warning", async () => { | |||||||
|     const cachePath = path.resolve(inputPath); |     const cachePath = path.resolve(inputPath); | ||||||
|     testUtils.setInput(Inputs.Path, inputPath); |     testUtils.setInput(Inputs.Path, inputPath); | ||||||
|  |  | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |     const createTarMock = jest.spyOn(tar, "createTar"); | ||||||
|  |  | ||||||
|     const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit |     const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit | ||||||
|     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { |     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { | ||||||
|         return cacheSize; |         return cacheSize; | ||||||
|     }); |     }); | ||||||
| @@ -184,32 +203,70 @@ test("save with large cache outputs warning", async () => { | |||||||
|  |  | ||||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); |     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||||
|  |  | ||||||
|     const IS_WINDOWS = process.platform === "win32"; |     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||||
|     const args = IS_WINDOWS |     expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||||
|         ? [ |  | ||||||
|               "-cz", |  | ||||||
|               "--force-local", |  | ||||||
|               "-f", |  | ||||||
|               archivePath.replace(/\\/g, "/"), |  | ||||||
|               "-C", |  | ||||||
|               cachePath.replace(/\\/g, "/"), |  | ||||||
|               "." |  | ||||||
|           ] |  | ||||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); |     expect(logWarningMock).toHaveBeenCalledWith( | ||||||
|  |         "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache." | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(warningMock).toHaveBeenCalledWith( |  | ||||||
|         "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache." |  | ||||||
|     ); |     ); | ||||||
|  |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
|  | test("save with reserve cache failure outputs warning", async () => { | ||||||
|  |     const infoMock = jest.spyOn(core, "info"); | ||||||
|  |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|  |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|  |     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||||
|  |     const cacheEntry: ArtifactCacheEntry = { | ||||||
|  |         cacheKey: "Linux-node-", | ||||||
|  |         scope: "refs/heads/master", | ||||||
|  |         creationTime: "2019-11-13T19:18:02+00:00", | ||||||
|  |         archiveLocation: "www.actionscache.test/download" | ||||||
|  |     }; | ||||||
|  |  | ||||||
|  |     jest.spyOn(core, "getState") | ||||||
|  |         // Cache Entry State | ||||||
|  |         .mockImplementationOnce(() => { | ||||||
|  |             return JSON.stringify(cacheEntry); | ||||||
|  |         }) | ||||||
|  |         // Cache Key State | ||||||
|  |         .mockImplementationOnce(() => { | ||||||
|  |             return primaryKey; | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |     const inputPath = "node_modules"; | ||||||
|  |     testUtils.setInput(Inputs.Path, inputPath); | ||||||
|  |  | ||||||
|  |     const reserveCacheMock = jest | ||||||
|  |         .spyOn(cacheHttpClient, "reserveCache") | ||||||
|  |         .mockImplementationOnce(() => { | ||||||
|  |             return Promise.resolve(-1); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |     const createTarMock = jest.spyOn(tar, "createTar"); | ||||||
|  |  | ||||||
|  |     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); | ||||||
|  |  | ||||||
|  |     await run(); | ||||||
|  |  | ||||||
|  |     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||||
|  |     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||||
|  |  | ||||||
|  |     expect(infoMock).toHaveBeenCalledWith( | ||||||
|  |         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||||
|  |     ); | ||||||
|  |  | ||||||
|  |     expect(createTarMock).toHaveBeenCalledTimes(0); | ||||||
|  |     expect(saveCacheMock).toHaveBeenCalledTimes(0); | ||||||
|  |     expect(logWarningMock).toHaveBeenCalledTimes(0); | ||||||
|  |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
|  | }); | ||||||
|  |  | ||||||
| test("save with server error outputs warning", async () => { | test("save with server error outputs warning", async () => { | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |     const logWarningMock = jest.spyOn(actionUtils, "logWarning"); | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; |     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||||
| @@ -234,7 +291,14 @@ test("save with server error outputs warning", async () => { | |||||||
|     const cachePath = path.resolve(inputPath); |     const cachePath = path.resolve(inputPath); | ||||||
|     testUtils.setInput(Inputs.Path, inputPath); |     testUtils.setInput(Inputs.Path, inputPath); | ||||||
|  |  | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |     const cacheId = 4; | ||||||
|  |     const reserveCacheMock = jest | ||||||
|  |         .spyOn(cacheHttpClient, "reserveCache") | ||||||
|  |         .mockImplementationOnce(() => { | ||||||
|  |             return Promise.resolve(cacheId); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |     const createTarMock = jest.spyOn(tar, "createTar"); | ||||||
|  |  | ||||||
|     const saveCacheMock = jest |     const saveCacheMock = jest | ||||||
|         .spyOn(cacheHttpClient, "saveCache") |         .spyOn(cacheHttpClient, "saveCache") | ||||||
| @@ -244,35 +308,24 @@ test("save with server error outputs warning", async () => { | |||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
|  |     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||||
|  |     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||||
|  |  | ||||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); |     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||||
|  |  | ||||||
|     const IS_WINDOWS = process.platform === "win32"; |     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||||
|     const args = IS_WINDOWS |     expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||||
|         ? [ |  | ||||||
|               "-cz", |  | ||||||
|               "--force-local", |  | ||||||
|               "-f", |  | ||||||
|               archivePath.replace(/\\/g, "/"), |  | ||||||
|               "-C", |  | ||||||
|               cachePath.replace(/\\/g, "/"), |  | ||||||
|               "." |  | ||||||
|           ] |  | ||||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); |  | ||||||
|  |  | ||||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); |     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); |     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); | ||||||
|  |  | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(1); |     expect(logWarningMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(warningMock).toHaveBeenCalledWith("HTTP Error Occurred"); |     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); | ||||||
|  |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|  |  | ||||||
| test("save with valid inputs uploads a cache", async () => { | test("save with valid inputs uploads a cache", async () => { | ||||||
|     const warningMock = jest.spyOn(core, "warning"); |  | ||||||
|     const failedMock = jest.spyOn(core, "setFailed"); |     const failedMock = jest.spyOn(core, "setFailed"); | ||||||
|  |  | ||||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; |     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||||
| @@ -297,33 +350,29 @@ test("save with valid inputs uploads a cache", async () => { | |||||||
|     const cachePath = path.resolve(inputPath); |     const cachePath = path.resolve(inputPath); | ||||||
|     testUtils.setInput(Inputs.Path, inputPath); |     testUtils.setInput(Inputs.Path, inputPath); | ||||||
|  |  | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |     const cacheId = 4; | ||||||
|  |     const reserveCacheMock = jest | ||||||
|  |         .spyOn(cacheHttpClient, "reserveCache") | ||||||
|  |         .mockImplementationOnce(() => { | ||||||
|  |             return Promise.resolve(cacheId); | ||||||
|  |         }); | ||||||
|  |  | ||||||
|  |     const createTarMock = jest.spyOn(tar, "createTar"); | ||||||
|  |  | ||||||
|     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); |     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); | ||||||
|  |  | ||||||
|     await run(); |     await run(); | ||||||
|  |  | ||||||
|  |     expect(reserveCacheMock).toHaveBeenCalledTimes(1); | ||||||
|  |     expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); | ||||||
|  |  | ||||||
|     const archivePath = path.join("/foo/bar", "cache.tgz"); |     const archivePath = path.join("/foo/bar", "cache.tgz"); | ||||||
|  |  | ||||||
|     const IS_WINDOWS = process.platform === "win32"; |     expect(createTarMock).toHaveBeenCalledTimes(1); | ||||||
|     const args = IS_WINDOWS |     expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); | ||||||
|         ? [ |  | ||||||
|               "-cz", |  | ||||||
|               "--force-local", |  | ||||||
|               "-f", |  | ||||||
|               archivePath.replace(/\\/g, "/"), |  | ||||||
|               "-C", |  | ||||||
|               cachePath.replace(/\\/g, "/"), |  | ||||||
|               "." |  | ||||||
|           ] |  | ||||||
|         : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(execMock).toHaveBeenCalledWith(`"tar"`, args); |  | ||||||
|  |  | ||||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); |     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); |     expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); | ||||||
|  |  | ||||||
|     expect(warningMock).toHaveBeenCalledTimes(0); |  | ||||||
|     expect(failedMock).toHaveBeenCalledTimes(0); |     expect(failedMock).toHaveBeenCalledTimes(0); | ||||||
| }); | }); | ||||||
|   | |||||||
							
								
								
									
										58
									
								
								__tests__/tar.test.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										58
									
								
								__tests__/tar.test.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,58 @@ | |||||||
|  | import * as exec from "@actions/exec"; | ||||||
|  | import * as io from "@actions/io"; | ||||||
|  | import * as tar from "../src/tar"; | ||||||
|  |  | ||||||
|  | jest.mock("@actions/exec"); | ||||||
|  | jest.mock("@actions/io"); | ||||||
|  |  | ||||||
|  | beforeAll(() => { | ||||||
|  |     jest.spyOn(io, "which").mockImplementation(tool => { | ||||||
|  |         return Promise.resolve(tool); | ||||||
|  |     }); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test("extract tar", async () => { | ||||||
|  |     const mkdirMock = jest.spyOn(io, "mkdirP"); | ||||||
|  |     const execMock = jest.spyOn(exec, "exec"); | ||||||
|  |  | ||||||
|  |     const archivePath = "cache.tar"; | ||||||
|  |     const targetDirectory = "~/.npm/cache"; | ||||||
|  |     await tar.extractTar(archivePath, targetDirectory); | ||||||
|  |  | ||||||
|  |     expect(mkdirMock).toHaveBeenCalledWith(targetDirectory); | ||||||
|  |  | ||||||
|  |     const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |     const tarPath = IS_WINDOWS | ||||||
|  |         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||||
|  |         : "tar"; | ||||||
|  |     expect(execMock).toHaveBeenCalledTimes(1); | ||||||
|  |     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ | ||||||
|  |         "-xz", | ||||||
|  |         "-f", | ||||||
|  |         archivePath, | ||||||
|  |         "-C", | ||||||
|  |         targetDirectory | ||||||
|  |     ]); | ||||||
|  | }); | ||||||
|  |  | ||||||
|  | test("create tar", async () => { | ||||||
|  |     const execMock = jest.spyOn(exec, "exec"); | ||||||
|  |  | ||||||
|  |     const archivePath = "cache.tar"; | ||||||
|  |     const sourceDirectory = "~/.npm/cache"; | ||||||
|  |     await tar.createTar(archivePath, sourceDirectory); | ||||||
|  |  | ||||||
|  |     const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |     const tarPath = IS_WINDOWS | ||||||
|  |         ? `${process.env["windir"]}\\System32\\tar.exe` | ||||||
|  |         : "tar"; | ||||||
|  |     expect(execMock).toHaveBeenCalledTimes(1); | ||||||
|  |     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ | ||||||
|  |         "-cz", | ||||||
|  |         "-f", | ||||||
|  |         archivePath, | ||||||
|  |         "-C", | ||||||
|  |         sourceDirectory, | ||||||
|  |         "." | ||||||
|  |     ]); | ||||||
|  | }); | ||||||
							
								
								
									
										285
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										285
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							| @@ -1496,48 +1496,65 @@ const fs = __importStar(__webpack_require__(747)); | |||||||
| const Handlers_1 = __webpack_require__(941); | const Handlers_1 = __webpack_require__(941); | ||||||
| const HttpClient_1 = __webpack_require__(874); | const HttpClient_1 = __webpack_require__(874); | ||||||
| const RestClient_1 = __webpack_require__(105); | const RestClient_1 = __webpack_require__(105); | ||||||
| function getCacheUrl() { | const utils = __importStar(__webpack_require__(443)); | ||||||
|  | function isSuccessStatusCode(statusCode) { | ||||||
|  |     return statusCode >= 200 && statusCode < 300; | ||||||
|  | } | ||||||
|  | function isRetryableStatusCode(statusCode) { | ||||||
|  |     const retryableStatusCodes = [ | ||||||
|  |         HttpClient_1.HttpCodes.BadGateway, | ||||||
|  |         HttpClient_1.HttpCodes.ServiceUnavailable, | ||||||
|  |         HttpClient_1.HttpCodes.GatewayTimeout | ||||||
|  |     ]; | ||||||
|  |     return retryableStatusCodes.includes(statusCode); | ||||||
|  | } | ||||||
|  | function getCacheApiUrl() { | ||||||
|     // Ideally we just use ACTIONS_CACHE_URL |     // Ideally we just use ACTIONS_CACHE_URL | ||||||
|     const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || |     const baseUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||||
|         process.env["ACTIONS_RUNTIME_URL"] || |         process.env["ACTIONS_RUNTIME_URL"] || | ||||||
|         "").replace("pipelines", "artifactcache"); |         "").replace("pipelines", "artifactcache"); | ||||||
|     if (!cacheUrl) { |     if (!baseUrl) { | ||||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); |         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||||
|     } |     } | ||||||
|     core.debug(`Cache Url: ${cacheUrl}`); |     core.debug(`Cache Url: ${baseUrl}`); | ||||||
|     return cacheUrl; |     return `${baseUrl}_apis/artifactcache/`; | ||||||
| } | } | ||||||
| function createAcceptHeader(type, apiVersion) { | function createAcceptHeader(type, apiVersion) { | ||||||
|     return `${type};api-version=${apiVersion}`; |     return `${type};api-version=${apiVersion}`; | ||||||
| } | } | ||||||
| function getRequestOptions() { | function getRequestOptions() { | ||||||
|     const requestOptions = { |     const requestOptions = { | ||||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") |         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||||
|     }; |     }; | ||||||
|     return requestOptions; |     return requestOptions; | ||||||
| } | } | ||||||
|  | function createRestClient() { | ||||||
|  |     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||||
|  |     const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||||
|  |     return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ | ||||||
|  |         bearerCredentialHandler | ||||||
|  |     ]); | ||||||
|  | } | ||||||
| function getCacheEntry(keys) { | function getCacheEntry(keys) { | ||||||
|  |     var _a; | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const cacheUrl = getCacheUrl(); |         const restClient = createRestClient(); | ||||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); |  | ||||||
|         const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; |  | ||||||
|         const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ |  | ||||||
|             bearerCredentialHandler |  | ||||||
|         ]); |  | ||||||
|         const response = yield restClient.get(resource, getRequestOptions()); |         const response = yield restClient.get(resource, getRequestOptions()); | ||||||
|         if (response.statusCode === 204) { |         if (response.statusCode === 204) { | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|         if (response.statusCode !== 200) { |         if (!isSuccessStatusCode(response.statusCode)) { | ||||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); |             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||||
|         } |         } | ||||||
|         const cacheResult = response.result; |         const cacheResult = response.result; | ||||||
|         core.debug(`Cache Result:`); |         const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation; | ||||||
|         core.debug(JSON.stringify(cacheResult)); |         if (!cacheDownloadUrl) { | ||||||
|         if (!cacheResult || !cacheResult.archiveLocation) { |  | ||||||
|             throw new Error("Cache not found."); |             throw new Error("Cache not found."); | ||||||
|         } |         } | ||||||
|  |         core.setSecret(cacheDownloadUrl); | ||||||
|  |         core.debug(`Cache Result:`); | ||||||
|  |         core.debug(JSON.stringify(cacheResult)); | ||||||
|         return cacheResult; |         return cacheResult; | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| @@ -1551,34 +1568,124 @@ function pipeResponseToStream(response, stream) { | |||||||
|         }); |         }); | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| function downloadCache(cacheEntry, archivePath) { | function downloadCache(archiveLocation, archivePath) { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const stream = fs.createWriteStream(archivePath); |         const stream = fs.createWriteStream(archivePath); | ||||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); |         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||||
|         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion |         const downloadResponse = yield httpClient.get(archiveLocation); | ||||||
|         const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); |  | ||||||
|         yield pipeResponseToStream(downloadResponse, stream); |         yield pipeResponseToStream(downloadResponse, stream); | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| exports.downloadCache = downloadCache; | exports.downloadCache = downloadCache; | ||||||
| function saveCache(key, archivePath) { | // Reserve Cache | ||||||
|  | function reserveCache(key) { | ||||||
|  |     var _a, _b, _c; | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const stream = fs.createReadStream(archivePath); |         const restClient = createRestClient(); | ||||||
|         const cacheUrl = getCacheUrl(); |         const reserveCacheRequest = { | ||||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |             key | ||||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); |         }; | ||||||
|         const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; |         const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); | ||||||
|         const postUrl = cacheUrl + resource; |         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1); | ||||||
|         const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ |     }); | ||||||
|             bearerCredentialHandler | } | ||||||
|         ]); | exports.reserveCache = reserveCache; | ||||||
|  | function getContentRange(start, end) { | ||||||
|  |     // Format: `bytes start-end/filesize | ||||||
|  |     // start and end are inclusive | ||||||
|  |     // filesize can be * | ||||||
|  |     // For a 200 byte chunk starting at byte 0: | ||||||
|  |     // Content-Range: bytes 0-199/* | ||||||
|  |     return `bytes ${start}-${end}/*`; | ||||||
|  | } | ||||||
|  | function uploadChunk(restClient, resourceUrl, data, start, end) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         core.debug(`Uploading chunk of size ${end - | ||||||
|  |             start + | ||||||
|  |             1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||||
|         const requestOptions = getRequestOptions(); |         const requestOptions = getRequestOptions(); | ||||||
|         requestOptions.additionalHeaders = { |         requestOptions.additionalHeaders = { | ||||||
|             "Content-Type": "application/octet-stream" |             "Content-Type": "application/octet-stream", | ||||||
|  |             "Content-Range": getContentRange(start, end) | ||||||
|         }; |         }; | ||||||
|         const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); |         const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { | ||||||
|         if (response.statusCode !== 200) { |             return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); | ||||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); |         }); | ||||||
|  |         const response = yield uploadChunkRequest(); | ||||||
|  |         if (isSuccessStatusCode(response.statusCode)) { | ||||||
|  |             return; | ||||||
|  |         } | ||||||
|  |         if (isRetryableStatusCode(response.statusCode)) { | ||||||
|  |             core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`); | ||||||
|  |             const retryResponse = yield uploadChunkRequest(); | ||||||
|  |             if (isSuccessStatusCode(retryResponse.statusCode)) { | ||||||
|  |                 return; | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function parseEnvNumber(key) { | ||||||
|  |     const value = Number(process.env[key]); | ||||||
|  |     if (Number.isNaN(value) || value < 0) { | ||||||
|  |         return undefined; | ||||||
|  |     } | ||||||
|  |     return value; | ||||||
|  | } | ||||||
|  | function uploadFile(restClient, cacheId, archivePath) { | ||||||
|  |     var _a, _b; | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Upload Chunks | ||||||
|  |         const fileSize = fs.statSync(archivePath).size; | ||||||
|  |         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||||
|  |         const fd = fs.openSync(archivePath, "r"); | ||||||
|  |         const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY"), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel | ||||||
|  |         const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE"), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks | ||||||
|  |         core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||||
|  |         const parallelUploads = [...new Array(concurrency).keys()]; | ||||||
|  |         core.debug("Awaiting all uploads"); | ||||||
|  |         let offset = 0; | ||||||
|  |         try { | ||||||
|  |             yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { | ||||||
|  |                 while (offset < fileSize) { | ||||||
|  |                     const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE); | ||||||
|  |                     const start = offset; | ||||||
|  |                     const end = offset + chunkSize - 1; | ||||||
|  |                     offset += MAX_CHUNK_SIZE; | ||||||
|  |                     const chunk = fs.createReadStream(archivePath, { | ||||||
|  |                         fd, | ||||||
|  |                         start, | ||||||
|  |                         end, | ||||||
|  |                         autoClose: false | ||||||
|  |                     }); | ||||||
|  |                     yield uploadChunk(restClient, resourceUrl, chunk, start, end); | ||||||
|  |                 } | ||||||
|  |             }))); | ||||||
|  |         } | ||||||
|  |         finally { | ||||||
|  |             fs.closeSync(fd); | ||||||
|  |         } | ||||||
|  |         return; | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function commitCache(restClient, cacheId, filesize) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         const requestOptions = getRequestOptions(); | ||||||
|  |         const commitCacheRequest = { size: filesize }; | ||||||
|  |         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function saveCache(cacheId, archivePath) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         const restClient = createRestClient(); | ||||||
|  |         core.debug("Upload cache"); | ||||||
|  |         yield uploadFile(restClient, cacheId, archivePath); | ||||||
|  |         // Commit Cache | ||||||
|  |         core.debug("Commiting cache"); | ||||||
|  |         const cacheSize = utils.getArchiveFileSize(archivePath); | ||||||
|  |         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); | ||||||
|  |         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||||
|  |             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); | ||||||
|         } |         } | ||||||
|         core.info("Cache saved successfully"); |         core.info("Cache saved successfully"); | ||||||
|     }); |     }); | ||||||
| @@ -2208,6 +2315,11 @@ function getCacheState() { | |||||||
|     return undefined; |     return undefined; | ||||||
| } | } | ||||||
| exports.getCacheState = getCacheState; | exports.getCacheState = getCacheState; | ||||||
|  | function logWarning(message) { | ||||||
|  |     const warningPrefix = "[warning]"; | ||||||
|  |     core.info(`${warningPrefix}${message}`); | ||||||
|  | } | ||||||
|  | exports.logWarning = logWarning; | ||||||
| function resolvePath(filePath) { | function resolvePath(filePath) { | ||||||
|     if (filePath[0] === "~") { |     if (filePath[0] === "~") { | ||||||
|         const home = os.homedir(); |         const home = os.homedir(); | ||||||
| @@ -2985,20 +3097,21 @@ var __importStar = (this && this.__importStar) || function (mod) { | |||||||
| }; | }; | ||||||
| Object.defineProperty(exports, "__esModule", { value: true }); | Object.defineProperty(exports, "__esModule", { value: true }); | ||||||
| const core = __importStar(__webpack_require__(470)); | const core = __importStar(__webpack_require__(470)); | ||||||
| const exec_1 = __webpack_require__(986); |  | ||||||
| const io = __importStar(__webpack_require__(1)); |  | ||||||
| const path = __importStar(__webpack_require__(622)); | const path = __importStar(__webpack_require__(622)); | ||||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||||
| const constants_1 = __webpack_require__(694); | const constants_1 = __webpack_require__(694); | ||||||
|  | const tar_1 = __webpack_require__(943); | ||||||
| const utils = __importStar(__webpack_require__(443)); | const utils = __importStar(__webpack_require__(443)); | ||||||
| function run() { | function run() { | ||||||
|  |     var _a; | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         try { |         try { | ||||||
|             // Validate inputs, this can cause task failure |             // Validate inputs, this can cause task failure | ||||||
|             if (!utils.isValidEvent()) { |             if (!utils.isValidEvent()) { | ||||||
|                 core.setFailed(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils |                 utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils | ||||||
|                     .getSupportedEvents() |                     .getSupportedEvents() | ||||||
|                     .join(", ")} events are supported at this time.`); |                     .join(", ")} events are supported at this time.`); | ||||||
|  |                 return; | ||||||
|             } |             } | ||||||
|             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); |             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||||
|             core.debug(`Cache Path: ${cachePath}`); |             core.debug(`Cache Path: ${cachePath}`); | ||||||
| @@ -3028,7 +3141,7 @@ function run() { | |||||||
|             } |             } | ||||||
|             try { |             try { | ||||||
|                 const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); |                 const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); | ||||||
|                 if (!cacheEntry) { |                 if (!((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) { | ||||||
|                     core.info(`Cache not found for input keys: ${keys.join(", ")}.`); |                     core.info(`Cache not found for input keys: ${keys.join(", ")}.`); | ||||||
|                     return; |                     return; | ||||||
|                 } |                 } | ||||||
| @@ -3037,33 +3150,16 @@ function run() { | |||||||
|                 // Store the cache result |                 // Store the cache result | ||||||
|                 utils.setCacheState(cacheEntry); |                 utils.setCacheState(cacheEntry); | ||||||
|                 // Download the cache from the cache entry |                 // Download the cache from the cache entry | ||||||
|                 yield cacheHttpClient.downloadCache(cacheEntry, archivePath); |                 yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath); | ||||||
|                 const archiveFileSize = utils.getArchiveFileSize(archivePath); |                 const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|                 core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); |                 core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); | ||||||
|                 // Create directory to extract tar into |                 yield tar_1.extractTar(archivePath, cachePath); | ||||||
|                 yield io.mkdirP(cachePath); |  | ||||||
|                 // http://man7.org/linux/man-pages/man1/tar.1.html |  | ||||||
|                 // tar [-options] <name of the tar archive> [files or directories which to add into archive] |  | ||||||
|                 const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|                 const args = IS_WINDOWS |  | ||||||
|                     ? [ |  | ||||||
|                         "-xz", |  | ||||||
|                         "--force-local", |  | ||||||
|                         "-f", |  | ||||||
|                         archivePath.replace(/\\/g, "/"), |  | ||||||
|                         "-C", |  | ||||||
|                         cachePath.replace(/\\/g, "/") |  | ||||||
|                     ] |  | ||||||
|                     : ["-xz", "-f", archivePath, "-C", cachePath]; |  | ||||||
|                 const tarPath = yield io.which("tar", true); |  | ||||||
|                 core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|                 yield exec_1.exec(`"${tarPath}"`, args); |  | ||||||
|                 const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); |                 const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); | ||||||
|                 utils.setCacheHitOutput(isExactKeyMatch); |                 utils.setCacheHitOutput(isExactKeyMatch); | ||||||
|                 core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); |                 core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); | ||||||
|             } |             } | ||||||
|             catch (error) { |             catch (error) { | ||||||
|                 core.warning(error.message); |                 utils.logWarning(error.message); | ||||||
|                 utils.setCacheHitOutput(false); |                 utils.setCacheHitOutput(false); | ||||||
|             } |             } | ||||||
|         } |         } | ||||||
| @@ -5153,6 +5249,79 @@ var personalaccesstoken_1 = __webpack_require__(327); | |||||||
| exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | /***/ }), | ||||||
|  |  | ||||||
|  | /***/ 943: | ||||||
|  | /***/ (function(__unusedmodule, exports, __webpack_require__) { | ||||||
|  |  | ||||||
|  | "use strict"; | ||||||
|  |  | ||||||
|  | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||||
|  |     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||||
|  |     return new (P || (P = Promise))(function (resolve, reject) { | ||||||
|  |         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||||||
|  |         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||||||
|  |         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||||||
|  |         step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||||||
|  |     }); | ||||||
|  | }; | ||||||
|  | var __importStar = (this && this.__importStar) || function (mod) { | ||||||
|  |     if (mod && mod.__esModule) return mod; | ||||||
|  |     var result = {}; | ||||||
|  |     if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; | ||||||
|  |     result["default"] = mod; | ||||||
|  |     return result; | ||||||
|  | }; | ||||||
|  | Object.defineProperty(exports, "__esModule", { value: true }); | ||||||
|  | const exec_1 = __webpack_require__(986); | ||||||
|  | const io = __importStar(__webpack_require__(1)); | ||||||
|  | const fs_1 = __webpack_require__(747); | ||||||
|  | function getTarPath() { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Explicitly use BSD Tar on Windows | ||||||
|  |         const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |         if (IS_WINDOWS) { | ||||||
|  |             const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||||
|  |             if (fs_1.existsSync(systemTar)) { | ||||||
|  |                 return systemTar; | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         return yield io.which("tar", true); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function execTar(args) { | ||||||
|  |     var _a, _b; | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         try { | ||||||
|  |             yield exec_1.exec(`"${yield getTarPath()}"`, args); | ||||||
|  |         } | ||||||
|  |         catch (error) { | ||||||
|  |             const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |             if (IS_WINDOWS) { | ||||||
|  |                 throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); | ||||||
|  |             } | ||||||
|  |             throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); | ||||||
|  |         } | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function extractTar(archivePath, targetDirectory) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Create directory to extract tar into | ||||||
|  |         yield io.mkdirP(targetDirectory); | ||||||
|  |         const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||||
|  |         yield execTar(args); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | exports.extractTar = extractTar; | ||||||
|  | function createTar(archivePath, sourceDirectory) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||||
|  |         yield execTar(args); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | exports.createTar = createTar; | ||||||
|  |  | ||||||
|  |  | ||||||
| /***/ }), | /***/ }), | ||||||
|  |  | ||||||
| /***/ 986: | /***/ 986: | ||||||
|   | |||||||
							
								
								
									
										298
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										298
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							| @@ -1496,48 +1496,65 @@ const fs = __importStar(__webpack_require__(747)); | |||||||
| const Handlers_1 = __webpack_require__(941); | const Handlers_1 = __webpack_require__(941); | ||||||
| const HttpClient_1 = __webpack_require__(874); | const HttpClient_1 = __webpack_require__(874); | ||||||
| const RestClient_1 = __webpack_require__(105); | const RestClient_1 = __webpack_require__(105); | ||||||
| function getCacheUrl() { | const utils = __importStar(__webpack_require__(443)); | ||||||
|  | function isSuccessStatusCode(statusCode) { | ||||||
|  |     return statusCode >= 200 && statusCode < 300; | ||||||
|  | } | ||||||
|  | function isRetryableStatusCode(statusCode) { | ||||||
|  |     const retryableStatusCodes = [ | ||||||
|  |         HttpClient_1.HttpCodes.BadGateway, | ||||||
|  |         HttpClient_1.HttpCodes.ServiceUnavailable, | ||||||
|  |         HttpClient_1.HttpCodes.GatewayTimeout | ||||||
|  |     ]; | ||||||
|  |     return retryableStatusCodes.includes(statusCode); | ||||||
|  | } | ||||||
|  | function getCacheApiUrl() { | ||||||
|     // Ideally we just use ACTIONS_CACHE_URL |     // Ideally we just use ACTIONS_CACHE_URL | ||||||
|     const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || |     const baseUrl = (process.env["ACTIONS_CACHE_URL"] || | ||||||
|         process.env["ACTIONS_RUNTIME_URL"] || |         process.env["ACTIONS_RUNTIME_URL"] || | ||||||
|         "").replace("pipelines", "artifactcache"); |         "").replace("pipelines", "artifactcache"); | ||||||
|     if (!cacheUrl) { |     if (!baseUrl) { | ||||||
|         throw new Error("Cache Service Url not found, unable to restore cache."); |         throw new Error("Cache Service Url not found, unable to restore cache."); | ||||||
|     } |     } | ||||||
|     core.debug(`Cache Url: ${cacheUrl}`); |     core.debug(`Cache Url: ${baseUrl}`); | ||||||
|     return cacheUrl; |     return `${baseUrl}_apis/artifactcache/`; | ||||||
| } | } | ||||||
| function createAcceptHeader(type, apiVersion) { | function createAcceptHeader(type, apiVersion) { | ||||||
|     return `${type};api-version=${apiVersion}`; |     return `${type};api-version=${apiVersion}`; | ||||||
| } | } | ||||||
| function getRequestOptions() { | function getRequestOptions() { | ||||||
|     const requestOptions = { |     const requestOptions = { | ||||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") |         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||||
|     }; |     }; | ||||||
|     return requestOptions; |     return requestOptions; | ||||||
| } | } | ||||||
|  | function createRestClient() { | ||||||
|  |     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||||
|  |     const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); | ||||||
|  |     return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ | ||||||
|  |         bearerCredentialHandler | ||||||
|  |     ]); | ||||||
|  | } | ||||||
| function getCacheEntry(keys) { | function getCacheEntry(keys) { | ||||||
|  |     var _a; | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const cacheUrl = getCacheUrl(); |         const restClient = createRestClient(); | ||||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |         const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); |  | ||||||
|         const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; |  | ||||||
|         const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ |  | ||||||
|             bearerCredentialHandler |  | ||||||
|         ]); |  | ||||||
|         const response = yield restClient.get(resource, getRequestOptions()); |         const response = yield restClient.get(resource, getRequestOptions()); | ||||||
|         if (response.statusCode === 204) { |         if (response.statusCode === 204) { | ||||||
|             return null; |             return null; | ||||||
|         } |         } | ||||||
|         if (response.statusCode !== 200) { |         if (!isSuccessStatusCode(response.statusCode)) { | ||||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); |             throw new Error(`Cache service responded with ${response.statusCode}`); | ||||||
|         } |         } | ||||||
|         const cacheResult = response.result; |         const cacheResult = response.result; | ||||||
|         core.debug(`Cache Result:`); |         const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation; | ||||||
|         core.debug(JSON.stringify(cacheResult)); |         if (!cacheDownloadUrl) { | ||||||
|         if (!cacheResult || !cacheResult.archiveLocation) { |  | ||||||
|             throw new Error("Cache not found."); |             throw new Error("Cache not found."); | ||||||
|         } |         } | ||||||
|  |         core.setSecret(cacheDownloadUrl); | ||||||
|  |         core.debug(`Cache Result:`); | ||||||
|  |         core.debug(JSON.stringify(cacheResult)); | ||||||
|         return cacheResult; |         return cacheResult; | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| @@ -1551,34 +1568,124 @@ function pipeResponseToStream(response, stream) { | |||||||
|         }); |         }); | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| function downloadCache(cacheEntry, archivePath) { | function downloadCache(archiveLocation, archivePath) { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const stream = fs.createWriteStream(archivePath); |         const stream = fs.createWriteStream(archivePath); | ||||||
|         const httpClient = new HttpClient_1.HttpClient("actions/cache"); |         const httpClient = new HttpClient_1.HttpClient("actions/cache"); | ||||||
|         // eslint-disable-next-line @typescript-eslint/no-non-null-assertion |         const downloadResponse = yield httpClient.get(archiveLocation); | ||||||
|         const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); |  | ||||||
|         yield pipeResponseToStream(downloadResponse, stream); |         yield pipeResponseToStream(downloadResponse, stream); | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| exports.downloadCache = downloadCache; | exports.downloadCache = downloadCache; | ||||||
| function saveCache(key, archivePath) { | // Reserve Cache | ||||||
|  | function reserveCache(key) { | ||||||
|  |     var _a, _b, _c; | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         const stream = fs.createReadStream(archivePath); |         const restClient = createRestClient(); | ||||||
|         const cacheUrl = getCacheUrl(); |         const reserveCacheRequest = { | ||||||
|         const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |             key | ||||||
|         const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); |         }; | ||||||
|         const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; |         const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); | ||||||
|         const postUrl = cacheUrl + resource; |         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1); | ||||||
|         const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ |     }); | ||||||
|             bearerCredentialHandler | } | ||||||
|         ]); | exports.reserveCache = reserveCache; | ||||||
|  | function getContentRange(start, end) { | ||||||
|  |     // Format: `bytes start-end/filesize | ||||||
|  |     // start and end are inclusive | ||||||
|  |     // filesize can be * | ||||||
|  |     // For a 200 byte chunk starting at byte 0: | ||||||
|  |     // Content-Range: bytes 0-199/* | ||||||
|  |     return `bytes ${start}-${end}/*`; | ||||||
|  | } | ||||||
|  | function uploadChunk(restClient, resourceUrl, data, start, end) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         core.debug(`Uploading chunk of size ${end - | ||||||
|  |             start + | ||||||
|  |             1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); | ||||||
|         const requestOptions = getRequestOptions(); |         const requestOptions = getRequestOptions(); | ||||||
|         requestOptions.additionalHeaders = { |         requestOptions.additionalHeaders = { | ||||||
|             "Content-Type": "application/octet-stream" |             "Content-Type": "application/octet-stream", | ||||||
|  |             "Content-Range": getContentRange(start, end) | ||||||
|         }; |         }; | ||||||
|         const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); |         const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { | ||||||
|         if (response.statusCode !== 200) { |             return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); | ||||||
|             throw new Error(`Cache service responded with ${response.statusCode}`); |         }); | ||||||
|  |         const response = yield uploadChunkRequest(); | ||||||
|  |         if (isSuccessStatusCode(response.statusCode)) { | ||||||
|  |             return; | ||||||
|  |         } | ||||||
|  |         if (isRetryableStatusCode(response.statusCode)) { | ||||||
|  |             core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`); | ||||||
|  |             const retryResponse = yield uploadChunkRequest(); | ||||||
|  |             if (isSuccessStatusCode(retryResponse.statusCode)) { | ||||||
|  |                 return; | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function parseEnvNumber(key) { | ||||||
|  |     const value = Number(process.env[key]); | ||||||
|  |     if (Number.isNaN(value) || value < 0) { | ||||||
|  |         return undefined; | ||||||
|  |     } | ||||||
|  |     return value; | ||||||
|  | } | ||||||
|  | function uploadFile(restClient, cacheId, archivePath) { | ||||||
|  |     var _a, _b; | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Upload Chunks | ||||||
|  |         const fileSize = fs.statSync(archivePath).size; | ||||||
|  |         const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||||
|  |         const fd = fs.openSync(archivePath, "r"); | ||||||
|  |         const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY"), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel | ||||||
|  |         const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE"), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks | ||||||
|  |         core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||||
|  |         const parallelUploads = [...new Array(concurrency).keys()]; | ||||||
|  |         core.debug("Awaiting all uploads"); | ||||||
|  |         let offset = 0; | ||||||
|  |         try { | ||||||
|  |             yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { | ||||||
|  |                 while (offset < fileSize) { | ||||||
|  |                     const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE); | ||||||
|  |                     const start = offset; | ||||||
|  |                     const end = offset + chunkSize - 1; | ||||||
|  |                     offset += MAX_CHUNK_SIZE; | ||||||
|  |                     const chunk = fs.createReadStream(archivePath, { | ||||||
|  |                         fd, | ||||||
|  |                         start, | ||||||
|  |                         end, | ||||||
|  |                         autoClose: false | ||||||
|  |                     }); | ||||||
|  |                     yield uploadChunk(restClient, resourceUrl, chunk, start, end); | ||||||
|  |                 } | ||||||
|  |             }))); | ||||||
|  |         } | ||||||
|  |         finally { | ||||||
|  |             fs.closeSync(fd); | ||||||
|  |         } | ||||||
|  |         return; | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function commitCache(restClient, cacheId, filesize) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         const requestOptions = getRequestOptions(); | ||||||
|  |         const commitCacheRequest = { size: filesize }; | ||||||
|  |         return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function saveCache(cacheId, archivePath) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         const restClient = createRestClient(); | ||||||
|  |         core.debug("Upload cache"); | ||||||
|  |         yield uploadFile(restClient, cacheId, archivePath); | ||||||
|  |         // Commit Cache | ||||||
|  |         core.debug("Commiting cache"); | ||||||
|  |         const cacheSize = utils.getArchiveFileSize(archivePath); | ||||||
|  |         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); | ||||||
|  |         if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||||
|  |             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); | ||||||
|         } |         } | ||||||
|         core.info("Cache saved successfully"); |         core.info("Cache saved successfully"); | ||||||
|     }); |     }); | ||||||
| @@ -2208,6 +2315,11 @@ function getCacheState() { | |||||||
|     return undefined; |     return undefined; | ||||||
| } | } | ||||||
| exports.getCacheState = getCacheState; | exports.getCacheState = getCacheState; | ||||||
|  | function logWarning(message) { | ||||||
|  |     const warningPrefix = "[warning]"; | ||||||
|  |     core.info(`${warningPrefix}${message}`); | ||||||
|  | } | ||||||
|  | exports.logWarning = logWarning; | ||||||
| function resolvePath(filePath) { | function resolvePath(filePath) { | ||||||
|     if (filePath[0] === "~") { |     if (filePath[0] === "~") { | ||||||
|         const home = os.homedir(); |         const home = os.homedir(); | ||||||
| @@ -2873,58 +2985,55 @@ var __importStar = (this && this.__importStar) || function (mod) { | |||||||
| }; | }; | ||||||
| Object.defineProperty(exports, "__esModule", { value: true }); | Object.defineProperty(exports, "__esModule", { value: true }); | ||||||
| const core = __importStar(__webpack_require__(470)); | const core = __importStar(__webpack_require__(470)); | ||||||
| const exec_1 = __webpack_require__(986); |  | ||||||
| const io = __importStar(__webpack_require__(1)); |  | ||||||
| const path = __importStar(__webpack_require__(622)); | const path = __importStar(__webpack_require__(622)); | ||||||
| const cacheHttpClient = __importStar(__webpack_require__(154)); | const cacheHttpClient = __importStar(__webpack_require__(154)); | ||||||
| const constants_1 = __webpack_require__(694); | const constants_1 = __webpack_require__(694); | ||||||
|  | const tar_1 = __webpack_require__(943); | ||||||
| const utils = __importStar(__webpack_require__(443)); | const utils = __importStar(__webpack_require__(443)); | ||||||
| function run() { | function run() { | ||||||
|     return __awaiter(this, void 0, void 0, function* () { |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|         try { |         try { | ||||||
|  |             if (!utils.isValidEvent()) { | ||||||
|  |                 utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils | ||||||
|  |                     .getSupportedEvents() | ||||||
|  |                     .join(", ")} events are supported at this time.`); | ||||||
|  |                 return; | ||||||
|  |             } | ||||||
|             const state = utils.getCacheState(); |             const state = utils.getCacheState(); | ||||||
|             // Inputs are re-evaluted before the post action, so we want the original key used for restore |             // Inputs are re-evaluted before the post action, so we want the original key used for restore | ||||||
|             const primaryKey = core.getState(constants_1.State.CacheKey); |             const primaryKey = core.getState(constants_1.State.CacheKey); | ||||||
|             if (!primaryKey) { |             if (!primaryKey) { | ||||||
|                 core.warning(`Error retrieving key from state.`); |                 utils.logWarning(`Error retrieving key from state.`); | ||||||
|                 return; |                 return; | ||||||
|             } |             } | ||||||
|             if (utils.isExactKeyMatch(primaryKey, state)) { |             if (utils.isExactKeyMatch(primaryKey, state)) { | ||||||
|                 core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); |                 core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); | ||||||
|                 return; |                 return; | ||||||
|             } |             } | ||||||
|  |             core.debug("Reserving Cache"); | ||||||
|  |             const cacheId = yield cacheHttpClient.reserveCache(primaryKey); | ||||||
|  |             if (cacheId == -1) { | ||||||
|  |                 core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`); | ||||||
|  |                 return; | ||||||
|  |             } | ||||||
|  |             core.debug(`Cache ID: ${cacheId}`); | ||||||
|             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); |             const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); | ||||||
|             core.debug(`Cache Path: ${cachePath}`); |             core.debug(`Cache Path: ${cachePath}`); | ||||||
|             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); |             const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); | ||||||
|             core.debug(`Archive Path: ${archivePath}`); |             core.debug(`Archive Path: ${archivePath}`); | ||||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html |             yield tar_1.createTar(archivePath, cachePath); | ||||||
|             // tar [-options] <name of the tar archive> [files or directories which to add into archive] |             const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit | ||||||
|             const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|             const args = IS_WINDOWS |  | ||||||
|                 ? [ |  | ||||||
|                     "-cz", |  | ||||||
|                     "--force-local", |  | ||||||
|                     "-f", |  | ||||||
|                     archivePath.replace(/\\/g, "/"), |  | ||||||
|                     "-C", |  | ||||||
|                     cachePath.replace(/\\/g, "/"), |  | ||||||
|                     "." |  | ||||||
|                 ] |  | ||||||
|                 : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|             const tarPath = yield io.which("tar", true); |  | ||||||
|             core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|             yield exec_1.exec(`"${tarPath}"`, args); |  | ||||||
|             const fileSizeLimit = 400 * 1024 * 1024; // 400MB |  | ||||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); |             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|             core.debug(`File Size: ${archiveFileSize}`); |             core.debug(`File Size: ${archiveFileSize}`); | ||||||
|             if (archiveFileSize > fileSizeLimit) { |             if (archiveFileSize > fileSizeLimit) { | ||||||
|                 core.warning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); |                 utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`); | ||||||
|                 return; |                 return; | ||||||
|             } |             } | ||||||
|             yield cacheHttpClient.saveCache(primaryKey, archivePath); |             core.debug(`Saving Cache (ID: ${cacheId})`); | ||||||
|  |             yield cacheHttpClient.saveCache(cacheId, archivePath); | ||||||
|         } |         } | ||||||
|         catch (error) { |         catch (error) { | ||||||
|             core.warning(error.message); |             utils.logWarning(error.message); | ||||||
|         } |         } | ||||||
|     }); |     }); | ||||||
| } | } | ||||||
| @@ -5121,6 +5230,79 @@ var personalaccesstoken_1 = __webpack_require__(327); | |||||||
| exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; | ||||||
|  |  | ||||||
|  |  | ||||||
|  | /***/ }), | ||||||
|  |  | ||||||
|  | /***/ 943: | ||||||
|  | /***/ (function(__unusedmodule, exports, __webpack_require__) { | ||||||
|  |  | ||||||
|  | "use strict"; | ||||||
|  |  | ||||||
|  | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||||
|  |     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||||
|  |     return new (P || (P = Promise))(function (resolve, reject) { | ||||||
|  |         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||||||
|  |         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||||||
|  |         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||||||
|  |         step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||||||
|  |     }); | ||||||
|  | }; | ||||||
|  | var __importStar = (this && this.__importStar) || function (mod) { | ||||||
|  |     if (mod && mod.__esModule) return mod; | ||||||
|  |     var result = {}; | ||||||
|  |     if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; | ||||||
|  |     result["default"] = mod; | ||||||
|  |     return result; | ||||||
|  | }; | ||||||
|  | Object.defineProperty(exports, "__esModule", { value: true }); | ||||||
|  | const exec_1 = __webpack_require__(986); | ||||||
|  | const io = __importStar(__webpack_require__(1)); | ||||||
|  | const fs_1 = __webpack_require__(747); | ||||||
|  | function getTarPath() { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Explicitly use BSD Tar on Windows | ||||||
|  |         const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |         if (IS_WINDOWS) { | ||||||
|  |             const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||||
|  |             if (fs_1.existsSync(systemTar)) { | ||||||
|  |                 return systemTar; | ||||||
|  |             } | ||||||
|  |         } | ||||||
|  |         return yield io.which("tar", true); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function execTar(args) { | ||||||
|  |     var _a, _b; | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         try { | ||||||
|  |             yield exec_1.exec(`"${yield getTarPath()}"`, args); | ||||||
|  |         } | ||||||
|  |         catch (error) { | ||||||
|  |             const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |             if (IS_WINDOWS) { | ||||||
|  |                 throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); | ||||||
|  |             } | ||||||
|  |             throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); | ||||||
|  |         } | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | function extractTar(archivePath, targetDirectory) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         // Create directory to extract tar into | ||||||
|  |         yield io.mkdirP(targetDirectory); | ||||||
|  |         const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||||
|  |         yield execTar(args); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | exports.extractTar = extractTar; | ||||||
|  | function createTar(archivePath, sourceDirectory) { | ||||||
|  |     return __awaiter(this, void 0, void 0, function* () { | ||||||
|  |         const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||||
|  |         yield execTar(args); | ||||||
|  |     }); | ||||||
|  | } | ||||||
|  | exports.createTar = createTar; | ||||||
|  |  | ||||||
|  |  | ||||||
| /***/ }), | /***/ }), | ||||||
|  |  | ||||||
| /***/ 986: | /***/ 986: | ||||||
|   | |||||||
							
								
								
									
										58
									
								
								examples.md
									
									
									
									
									
								
							
							
						
						
									
										58
									
								
								examples.md
									
									
									
									
									
								
							| @@ -1,27 +1,20 @@ | |||||||
| # Examples | # Examples | ||||||
|  |  | ||||||
| - [Examples](#examples) | - [C# - NuGet](#c---nuget) | ||||||
|   - [C# - Nuget](#c---nuget) | - [Elixir - Mix](#elixir---mix) | ||||||
|   - [Elixir - Mix](#elixir---mix) | - [Go - Modules](#go---modules) | ||||||
|   - [Go - Modules](#go---modules) | - [Java - Gradle](#java---gradle) | ||||||
|   - [Java - Gradle](#java---gradle) | - [Java - Maven](#java---maven) | ||||||
|   - [Java - Maven](#java---maven) | - [Node - npm](#node---npm) | ||||||
|   - [Node - npm](#node---npm) | - [Node - Yarn](#node---yarn) | ||||||
|     - [macOS and Ubuntu](#macos-and-ubuntu) | - [PHP - Composer](#php---composer) | ||||||
|     - [Windows](#windows) | - [Python - pip](#python---pip) | ||||||
|     - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config) | - [Ruby - Gem](#ruby---gem) | ||||||
|   - [Node - Yarn](#node---yarn) | - [Rust - Cargo](#rust---cargo) | ||||||
|   - [PHP - Composer](#php---composer) | - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) | ||||||
|   - [Python - pip](#python---pip) | - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) | ||||||
|     - [Simple example](#simple-example) |  | ||||||
|     - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow) |  | ||||||
|     - [Using a script to get cache location](#using-a-script-to-get-cache-location) |  | ||||||
|   - [Ruby - Gem](#ruby---gem) |  | ||||||
|   - [Rust - Cargo](#rust---cargo) |  | ||||||
|   - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) |  | ||||||
|   - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) |  | ||||||
|  |  | ||||||
| ## C# - Nuget | ## C# - NuGet | ||||||
| Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): | Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): | ||||||
|  |  | ||||||
| ```yaml | ```yaml | ||||||
| @@ -33,6 +26,21 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa | |||||||
|       ${{ runner.os }}-nuget- |       ${{ runner.os }}-nuget- | ||||||
| ``` | ``` | ||||||
|  |  | ||||||
|  | Depending on the environment, huge packages might be pre-installed in the global cache folder. | ||||||
|  | If you do not want to include them, consider to move the cache folder like below. | ||||||
|  | >Note: This workflow does not work for projects that require files to be placed in user profile package folder | ||||||
|  | ```yaml | ||||||
|  | env: | ||||||
|  |   NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages | ||||||
|  | steps: | ||||||
|  |   - uses: actions/cache@v1 | ||||||
|  |     with: | ||||||
|  |       path: ${{ github.workspace }}/.nuget/packages | ||||||
|  |       key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} | ||||||
|  |       restore-keys: | | ||||||
|  |         ${{ runner.os }}-nuget- | ||||||
|  | ``` | ||||||
|  |  | ||||||
| ## Elixir - Mix | ## Elixir - Mix | ||||||
| ```yaml | ```yaml | ||||||
| - uses: actions/cache@v1 | - uses: actions/cache@v1 | ||||||
| @@ -226,6 +234,14 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu. | |||||||
|     restore-keys: | |     restore-keys: | | ||||||
|       ${{ runner.os }}-gem- |       ${{ runner.os }}-gem- | ||||||
| ``` | ``` | ||||||
|  | When dependencies are installed later in the workflow, we must specify the same path for the bundler. | ||||||
|  |  | ||||||
|  | ```yaml | ||||||
|  | - name: Bundle install | ||||||
|  |   run: | | ||||||
|  |     bundle config path vendor/bundle | ||||||
|  |     bundle install --jobs 4 --retry 3 | ||||||
|  | ``` | ||||||
|  |  | ||||||
| ## Rust - Cargo | ## Rust - Cargo | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										26
									
								
								package-lock.json
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										26
									
								
								package-lock.json
									
									
									
										generated
									
									
									
								
							| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "name": "cache", |   "name": "cache", | ||||||
|   "version": "1.0.2", |   "version": "1.1.0", | ||||||
|   "lockfileVersion": 1, |   "lockfileVersion": 1, | ||||||
|   "requires": true, |   "requires": true, | ||||||
|   "dependencies": { |   "dependencies": { | ||||||
| @@ -2854,9 +2854,9 @@ | |||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "handlebars": { |     "handlebars": { | ||||||
|       "version": "4.5.1", |       "version": "4.5.3", | ||||||
|       "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz", |       "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.3.tgz", | ||||||
|       "integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==", |       "integrity": "sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==", | ||||||
|       "dev": true, |       "dev": true, | ||||||
|       "requires": { |       "requires": { | ||||||
|         "neo-async": "^2.6.0", |         "neo-async": "^2.6.0", | ||||||
| @@ -4859,9 +4859,9 @@ | |||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "prettier": { |     "prettier": { | ||||||
|       "version": "1.18.2", |       "version": "1.19.1", | ||||||
|       "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.18.2.tgz", |       "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", | ||||||
|       "integrity": "sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw==", |       "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", | ||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "prettier-linter-helpers": { |     "prettier-linter-helpers": { | ||||||
| @@ -5983,15 +5983,15 @@ | |||||||
|       } |       } | ||||||
|     }, |     }, | ||||||
|     "typescript": { |     "typescript": { | ||||||
|       "version": "3.6.4", |       "version": "3.7.3", | ||||||
|       "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.4.tgz", |       "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", | ||||||
|       "integrity": "sha512-unoCll1+l+YK4i4F8f22TaNVPRHcD9PA3yCuZ8g5e0qGqlVlJ/8FSateOLLSagn+Yg5+ZwuPkL8LFUc0Jcvksg==", |       "integrity": "sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw==", | ||||||
|       "dev": true |       "dev": true | ||||||
|     }, |     }, | ||||||
|     "uglify-js": { |     "uglify-js": { | ||||||
|       "version": "3.6.7", |       "version": "3.7.3", | ||||||
|       "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz", |       "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.7.3.tgz", | ||||||
|       "integrity": "sha512-4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A==", |       "integrity": "sha512-7tINm46/3puUA4hCkKYo4Xdts+JDaVC9ZPRcG8Xw9R4nhO/gZgUM3TENq8IF4Vatk8qCig4MzP/c8G4u2BkVQg==", | ||||||
|       "dev": true, |       "dev": true, | ||||||
|       "optional": true, |       "optional": true, | ||||||
|       "requires": { |       "requires": { | ||||||
|   | |||||||
| @@ -1,6 +1,6 @@ | |||||||
| { | { | ||||||
|   "name": "cache", |   "name": "cache", | ||||||
|   "version": "1.0.2", |   "version": "1.1.0", | ||||||
|   "private": true, |   "private": true, | ||||||
|   "description": "Cache dependencies and build outputs", |   "description": "Cache dependencies and build outputs", | ||||||
|   "main": "dist/restore/index.js", |   "main": "dist/restore/index.js", | ||||||
| @@ -46,8 +46,8 @@ | |||||||
|     "jest": "^24.8.0", |     "jest": "^24.8.0", | ||||||
|     "jest-circus": "^24.7.1", |     "jest-circus": "^24.7.1", | ||||||
|     "nock": "^11.7.0", |     "nock": "^11.7.0", | ||||||
|     "prettier": "1.18.2", |     "prettier": "^1.19.1", | ||||||
|     "ts-jest": "^24.0.2", |     "ts-jest": "^24.0.2", | ||||||
|     "typescript": "^3.6.4" |     "typescript": "^3.7.3" | ||||||
|   } |   } | ||||||
| } | } | ||||||
|   | |||||||
| @@ -1,26 +1,49 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import * as fs from "fs"; | import * as fs from "fs"; | ||||||
| import { BearerCredentialHandler } from "typed-rest-client/Handlers"; | import { BearerCredentialHandler } from "typed-rest-client/Handlers"; | ||||||
| import { HttpClient } from "typed-rest-client/HttpClient"; | import { HttpClient, HttpCodes } from "typed-rest-client/HttpClient"; | ||||||
| import { IHttpClientResponse } from "typed-rest-client/Interfaces"; | import { IHttpClientResponse } from "typed-rest-client/Interfaces"; | ||||||
| import { IRequestOptions, RestClient } from "typed-rest-client/RestClient"; | import { | ||||||
| import { ArtifactCacheEntry } from "./contracts"; |     IRequestOptions, | ||||||
|  |     RestClient, | ||||||
|  |     IRestResponse | ||||||
|  | } from "typed-rest-client/RestClient"; | ||||||
|  | import { | ||||||
|  |     ArtifactCacheEntry, | ||||||
|  |     CommitCacheRequest, | ||||||
|  |     ReserveCacheRequest, | ||||||
|  |     ReserveCacheResponse | ||||||
|  | } from "./contracts"; | ||||||
|  | import * as utils from "./utils/actionUtils"; | ||||||
|  |  | ||||||
| function getCacheUrl(): string { | function isSuccessStatusCode(statusCode: number): boolean { | ||||||
|  |     return statusCode >= 200 && statusCode < 300; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | function isRetryableStatusCode(statusCode: number): boolean { | ||||||
|  |     const retryableStatusCodes = [ | ||||||
|  |         HttpCodes.BadGateway, | ||||||
|  |         HttpCodes.ServiceUnavailable, | ||||||
|  |         HttpCodes.GatewayTimeout | ||||||
|  |     ]; | ||||||
|  |     return retryableStatusCodes.includes(statusCode); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | function getCacheApiUrl(): string { | ||||||
|     // Ideally we just use ACTIONS_CACHE_URL |     // Ideally we just use ACTIONS_CACHE_URL | ||||||
|     const cacheUrl: string = ( |     const baseUrl: string = ( | ||||||
|         process.env["ACTIONS_CACHE_URL"] || |         process.env["ACTIONS_CACHE_URL"] || | ||||||
|         process.env["ACTIONS_RUNTIME_URL"] || |         process.env["ACTIONS_RUNTIME_URL"] || | ||||||
|         "" |         "" | ||||||
|     ).replace("pipelines", "artifactcache"); |     ).replace("pipelines", "artifactcache"); | ||||||
|     if (!cacheUrl) { |     if (!baseUrl) { | ||||||
|         throw new Error( |         throw new Error( | ||||||
|             "Cache Service Url not found, unable to restore cache." |             "Cache Service Url not found, unable to restore cache." | ||||||
|         ); |         ); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     core.debug(`Cache Url: ${cacheUrl}`); |     core.debug(`Cache Url: ${baseUrl}`); | ||||||
|     return cacheUrl; |     return `${baseUrl}_apis/artifactcache/`; | ||||||
| } | } | ||||||
|  |  | ||||||
| function createAcceptHeader(type: string, apiVersion: string): string { | function createAcceptHeader(type: string, apiVersion: string): string { | ||||||
| @@ -29,26 +52,26 @@ function createAcceptHeader(type: string, apiVersion: string): string { | |||||||
|  |  | ||||||
| function getRequestOptions(): IRequestOptions { | function getRequestOptions(): IRequestOptions { | ||||||
|     const requestOptions: IRequestOptions = { |     const requestOptions: IRequestOptions = { | ||||||
|         acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") |         acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     return requestOptions; |     return requestOptions; | ||||||
| } | } | ||||||
|  |  | ||||||
| export async function getCacheEntry( | function createRestClient(): RestClient { | ||||||
|     keys: string[] |  | ||||||
| ): Promise<ArtifactCacheEntry | null> { |  | ||||||
|     const cacheUrl = getCacheUrl(); |  | ||||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; | ||||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); |     const bearerCredentialHandler = new BearerCredentialHandler(token); | ||||||
|  |  | ||||||
|     const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent( |     return new RestClient("actions/cache", getCacheApiUrl(), [ | ||||||
|         keys.join(",") |  | ||||||
|     )}`; |  | ||||||
|  |  | ||||||
|     const restClient = new RestClient("actions/cache", cacheUrl, [ |  | ||||||
|         bearerCredentialHandler |         bearerCredentialHandler | ||||||
|     ]); |     ]); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function getCacheEntry( | ||||||
|  |     keys: string[] | ||||||
|  | ): Promise<ArtifactCacheEntry | null> { | ||||||
|  |     const restClient = createRestClient(); | ||||||
|  |     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||||
|  |  | ||||||
|     const response = await restClient.get<ArtifactCacheEntry>( |     const response = await restClient.get<ArtifactCacheEntry>( | ||||||
|         resource, |         resource, | ||||||
| @@ -57,15 +80,17 @@ export async function getCacheEntry( | |||||||
|     if (response.statusCode === 204) { |     if (response.statusCode === 204) { | ||||||
|         return null; |         return null; | ||||||
|     } |     } | ||||||
|     if (response.statusCode !== 200) { |     if (!isSuccessStatusCode(response.statusCode)) { | ||||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); |         throw new Error(`Cache service responded with ${response.statusCode}`); | ||||||
|     } |     } | ||||||
|     const cacheResult = response.result; |     const cacheResult = response.result; | ||||||
|     core.debug(`Cache Result:`); |     const cacheDownloadUrl = cacheResult?.archiveLocation; | ||||||
|     core.debug(JSON.stringify(cacheResult)); |     if (!cacheDownloadUrl) { | ||||||
|     if (!cacheResult || !cacheResult.archiveLocation) { |  | ||||||
|         throw new Error("Cache not found."); |         throw new Error("Cache not found."); | ||||||
|     } |     } | ||||||
|  |     core.setSecret(cacheDownloadUrl); | ||||||
|  |     core.debug(`Cache Result:`); | ||||||
|  |     core.debug(JSON.stringify(cacheResult)); | ||||||
|  |  | ||||||
|     return cacheResult; |     return cacheResult; | ||||||
| } | } | ||||||
| @@ -82,46 +107,186 @@ async function pipeResponseToStream( | |||||||
| } | } | ||||||
|  |  | ||||||
| export async function downloadCache( | export async function downloadCache( | ||||||
|     cacheEntry: ArtifactCacheEntry, |     archiveLocation: string, | ||||||
|     archivePath: string |     archivePath: string | ||||||
| ): Promise<void> { | ): Promise<void> { | ||||||
|     const stream = fs.createWriteStream(archivePath); |     const stream = fs.createWriteStream(archivePath); | ||||||
|     const httpClient = new HttpClient("actions/cache"); |     const httpClient = new HttpClient("actions/cache"); | ||||||
|     // eslint-disable-next-line @typescript-eslint/no-non-null-assertion |     const downloadResponse = await httpClient.get(archiveLocation); | ||||||
|     const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); |  | ||||||
|     await pipeResponseToStream(downloadResponse, stream); |     await pipeResponseToStream(downloadResponse, stream); | ||||||
| } | } | ||||||
|  |  | ||||||
| export async function saveCache( | // Reserve Cache | ||||||
|     key: string, | export async function reserveCache(key: string): Promise<number> { | ||||||
|     archivePath: string |     const restClient = createRestClient(); | ||||||
|  |  | ||||||
|  |     const reserveCacheRequest: ReserveCacheRequest = { | ||||||
|  |         key | ||||||
|  |     }; | ||||||
|  |     const response = await restClient.create<ReserveCacheResponse>( | ||||||
|  |         "caches", | ||||||
|  |         reserveCacheRequest, | ||||||
|  |         getRequestOptions() | ||||||
|  |     ); | ||||||
|  |  | ||||||
|  |     return response?.result?.cacheId ?? -1; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | function getContentRange(start: number, end: number): string { | ||||||
|  |     // Format: `bytes start-end/filesize | ||||||
|  |     // start and end are inclusive | ||||||
|  |     // filesize can be * | ||||||
|  |     // For a 200 byte chunk starting at byte 0: | ||||||
|  |     // Content-Range: bytes 0-199/* | ||||||
|  |     return `bytes ${start}-${end}/*`; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function uploadChunk( | ||||||
|  |     restClient: RestClient, | ||||||
|  |     resourceUrl: string, | ||||||
|  |     data: NodeJS.ReadableStream, | ||||||
|  |     start: number, | ||||||
|  |     end: number | ||||||
| ): Promise<void> { | ): Promise<void> { | ||||||
|     const stream = fs.createReadStream(archivePath); |     core.debug( | ||||||
|  |         `Uploading chunk of size ${end - | ||||||
|     const cacheUrl = getCacheUrl(); |             start + | ||||||
|     const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; |             1} bytes at offset ${start} with content range: ${getContentRange( | ||||||
|     const bearerCredentialHandler = new BearerCredentialHandler(token); |             start, | ||||||
|  |             end | ||||||
|     const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; |         )}` | ||||||
|     const postUrl = cacheUrl + resource; |     ); | ||||||
|  |  | ||||||
|     const restClient = new RestClient("actions/cache", undefined, [ |  | ||||||
|         bearerCredentialHandler |  | ||||||
|     ]); |  | ||||||
|  |  | ||||||
|     const requestOptions = getRequestOptions(); |     const requestOptions = getRequestOptions(); | ||||||
|     requestOptions.additionalHeaders = { |     requestOptions.additionalHeaders = { | ||||||
|         "Content-Type": "application/octet-stream" |         "Content-Type": "application/octet-stream", | ||||||
|  |         "Content-Range": getContentRange(start, end) | ||||||
|     }; |     }; | ||||||
|  |  | ||||||
|     const response = await restClient.uploadStream<void>( |     const uploadChunkRequest = async (): Promise<IRestResponse<void>> => { | ||||||
|         "POST", |         return await restClient.uploadStream<void>( | ||||||
|         postUrl, |             "PATCH", | ||||||
|         stream, |             resourceUrl, | ||||||
|  |             data, | ||||||
|  |             requestOptions | ||||||
|  |         ); | ||||||
|  |     }; | ||||||
|  |  | ||||||
|  |     const response = await uploadChunkRequest(); | ||||||
|  |     if (isSuccessStatusCode(response.statusCode)) { | ||||||
|  |         return; | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     if (isRetryableStatusCode(response.statusCode)) { | ||||||
|  |         core.debug( | ||||||
|  |             `Received ${response.statusCode}, retrying chunk at offset ${start}.` | ||||||
|  |         ); | ||||||
|  |         const retryResponse = await uploadChunkRequest(); | ||||||
|  |         if (isSuccessStatusCode(retryResponse.statusCode)) { | ||||||
|  |             return; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |  | ||||||
|  |     throw new Error( | ||||||
|  |         `Cache service responded with ${response.statusCode} during chunk upload.` | ||||||
|  |     ); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | function parseEnvNumber(key: string): number | undefined { | ||||||
|  |     const value = Number(process.env[key]); | ||||||
|  |     if (Number.isNaN(value) || value < 0) { | ||||||
|  |         return undefined; | ||||||
|  |     } | ||||||
|  |     return value; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function uploadFile( | ||||||
|  |     restClient: RestClient, | ||||||
|  |     cacheId: number, | ||||||
|  |     archivePath: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     // Upload Chunks | ||||||
|  |     const fileSize = fs.statSync(archivePath).size; | ||||||
|  |     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); | ||||||
|  |     const fd = fs.openSync(archivePath, "r"); | ||||||
|  |  | ||||||
|  |     const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel | ||||||
|  |     const MAX_CHUNK_SIZE = | ||||||
|  |         parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks | ||||||
|  |     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); | ||||||
|  |  | ||||||
|  |     const parallelUploads = [...new Array(concurrency).keys()]; | ||||||
|  |     core.debug("Awaiting all uploads"); | ||||||
|  |     let offset = 0; | ||||||
|  |  | ||||||
|  |     try { | ||||||
|  |         await Promise.all( | ||||||
|  |             parallelUploads.map(async () => { | ||||||
|  |                 while (offset < fileSize) { | ||||||
|  |                     const chunkSize = Math.min( | ||||||
|  |                         fileSize - offset, | ||||||
|  |                         MAX_CHUNK_SIZE | ||||||
|  |                     ); | ||||||
|  |                     const start = offset; | ||||||
|  |                     const end = offset + chunkSize - 1; | ||||||
|  |                     offset += MAX_CHUNK_SIZE; | ||||||
|  |                     const chunk = fs.createReadStream(archivePath, { | ||||||
|  |                         fd, | ||||||
|  |                         start, | ||||||
|  |                         end, | ||||||
|  |                         autoClose: false | ||||||
|  |                     }); | ||||||
|  |  | ||||||
|  |                     await uploadChunk( | ||||||
|  |                         restClient, | ||||||
|  |                         resourceUrl, | ||||||
|  |                         chunk, | ||||||
|  |                         start, | ||||||
|  |                         end | ||||||
|  |                     ); | ||||||
|  |                 } | ||||||
|  |             }) | ||||||
|  |         ); | ||||||
|  |     } finally { | ||||||
|  |         fs.closeSync(fd); | ||||||
|  |     } | ||||||
|  |     return; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function commitCache( | ||||||
|  |     restClient: RestClient, | ||||||
|  |     cacheId: number, | ||||||
|  |     filesize: number | ||||||
|  | ): Promise<IRestResponse<void>> { | ||||||
|  |     const requestOptions = getRequestOptions(); | ||||||
|  |     const commitCacheRequest: CommitCacheRequest = { size: filesize }; | ||||||
|  |     return await restClient.create( | ||||||
|  |         `caches/${cacheId.toString()}`, | ||||||
|  |         commitCacheRequest, | ||||||
|         requestOptions |         requestOptions | ||||||
|     ); |     ); | ||||||
|     if (response.statusCode !== 200) { | } | ||||||
|         throw new Error(`Cache service responded with ${response.statusCode}`); |  | ||||||
|  | export async function saveCache( | ||||||
|  |     cacheId: number, | ||||||
|  |     archivePath: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     const restClient = createRestClient(); | ||||||
|  |  | ||||||
|  |     core.debug("Upload cache"); | ||||||
|  |     await uploadFile(restClient, cacheId, archivePath); | ||||||
|  |  | ||||||
|  |     // Commit Cache | ||||||
|  |     core.debug("Commiting cache"); | ||||||
|  |     const cacheSize = utils.getArchiveFileSize(archivePath); | ||||||
|  |     const commitCacheResponse = await commitCache( | ||||||
|  |         restClient, | ||||||
|  |         cacheId, | ||||||
|  |         cacheSize | ||||||
|  |     ); | ||||||
|  |     if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { | ||||||
|  |         throw new Error( | ||||||
|  |             `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` | ||||||
|  |         ); | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     core.info("Cache saved successfully"); |     core.info("Cache saved successfully"); | ||||||
|   | |||||||
							
								
								
									
										13
									
								
								src/contracts.d.ts
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										13
									
								
								src/contracts.d.ts
									
									
									
									
										vendored
									
									
								
							| @@ -4,3 +4,16 @@ export interface ArtifactCacheEntry { | |||||||
|     creationTime?: string; |     creationTime?: string; | ||||||
|     archiveLocation?: string; |     archiveLocation?: string; | ||||||
| } | } | ||||||
|  |  | ||||||
|  | export interface CommitCacheRequest { | ||||||
|  |     size: number; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export interface ReserveCacheRequest { | ||||||
|  |     key: string; | ||||||
|  |     version?: string; | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export interface ReserveCacheResponse { | ||||||
|  |     cacheId: number; | ||||||
|  | } | ||||||
|   | |||||||
| @@ -1,22 +1,22 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import { exec } from "@actions/exec"; |  | ||||||
| import * as io from "@actions/io"; |  | ||||||
| import * as path from "path"; | import * as path from "path"; | ||||||
| import * as cacheHttpClient from "./cacheHttpClient"; | import * as cacheHttpClient from "./cacheHttpClient"; | ||||||
| import { Events, Inputs, State } from "./constants"; | import { Events, Inputs, State } from "./constants"; | ||||||
|  | import { extractTar } from "./tar"; | ||||||
| import * as utils from "./utils/actionUtils"; | import * as utils from "./utils/actionUtils"; | ||||||
|  |  | ||||||
| async function run(): Promise<void> { | async function run(): Promise<void> { | ||||||
|     try { |     try { | ||||||
|         // Validate inputs, this can cause task failure |         // Validate inputs, this can cause task failure | ||||||
|         if (!utils.isValidEvent()) { |         if (!utils.isValidEvent()) { | ||||||
|             core.setFailed( |             utils.logWarning( | ||||||
|                 `Event Validation Error: The event type ${ |                 `Event Validation Error: The event type ${ | ||||||
|                     process.env[Events.Key] |                     process.env[Events.Key] | ||||||
|                 } is not supported. Only ${utils |                 } is not supported. Only ${utils | ||||||
|                     .getSupportedEvents() |                     .getSupportedEvents() | ||||||
|                     .join(", ")} events are supported at this time.` |                     .join(", ")} events are supported at this time.` | ||||||
|             ); |             ); | ||||||
|  |             return; | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         const cachePath = utils.resolvePath( |         const cachePath = utils.resolvePath( | ||||||
| @@ -60,7 +60,7 @@ async function run(): Promise<void> { | |||||||
|  |  | ||||||
|         try { |         try { | ||||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); |             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); | ||||||
|             if (!cacheEntry) { |             if (!cacheEntry?.archiveLocation) { | ||||||
|                 core.info( |                 core.info( | ||||||
|                     `Cache not found for input keys: ${keys.join(", ")}.` |                     `Cache not found for input keys: ${keys.join(", ")}.` | ||||||
|                 ); |                 ); | ||||||
| @@ -77,7 +77,10 @@ async function run(): Promise<void> { | |||||||
|             utils.setCacheState(cacheEntry); |             utils.setCacheState(cacheEntry); | ||||||
|  |  | ||||||
|             // Download the cache from the cache entry |             // Download the cache from the cache entry | ||||||
|             await cacheHttpClient.downloadCache(cacheEntry, archivePath); |             await cacheHttpClient.downloadCache( | ||||||
|  |                 cacheEntry.archiveLocation, | ||||||
|  |                 archivePath | ||||||
|  |             ); | ||||||
|  |  | ||||||
|             const archiveFileSize = utils.getArchiveFileSize(archivePath); |             const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|             core.info( |             core.info( | ||||||
| @@ -86,27 +89,7 @@ async function run(): Promise<void> { | |||||||
|                 )} MB (${archiveFileSize} B)` |                 )} MB (${archiveFileSize} B)` | ||||||
|             ); |             ); | ||||||
|  |  | ||||||
|             // Create directory to extract tar into |             await extractTar(archivePath, cachePath); | ||||||
|             await io.mkdirP(cachePath); |  | ||||||
|  |  | ||||||
|             // http://man7.org/linux/man-pages/man1/tar.1.html |  | ||||||
|             // tar [-options] <name of the tar archive> [files or directories which to add into archive] |  | ||||||
|             const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|             const args = IS_WINDOWS |  | ||||||
|                 ? [ |  | ||||||
|                       "-xz", |  | ||||||
|                       "--force-local", |  | ||||||
|                       "-f", |  | ||||||
|                       archivePath.replace(/\\/g, "/"), |  | ||||||
|                       "-C", |  | ||||||
|                       cachePath.replace(/\\/g, "/") |  | ||||||
|                   ] |  | ||||||
|                 : ["-xz", "-f", archivePath, "-C", cachePath]; |  | ||||||
|  |  | ||||||
|             const tarPath = await io.which("tar", true); |  | ||||||
|             core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|  |  | ||||||
|             await exec(`"${tarPath}"`, args); |  | ||||||
|  |  | ||||||
|             const isExactKeyMatch = utils.isExactKeyMatch( |             const isExactKeyMatch = utils.isExactKeyMatch( | ||||||
|                 primaryKey, |                 primaryKey, | ||||||
| @@ -118,7 +101,7 @@ async function run(): Promise<void> { | |||||||
|                 `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}` |                 `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}` | ||||||
|             ); |             ); | ||||||
|         } catch (error) { |         } catch (error) { | ||||||
|             core.warning(error.message); |             utils.logWarning(error.message); | ||||||
|             utils.setCacheHitOutput(false); |             utils.setCacheHitOutput(false); | ||||||
|         } |         } | ||||||
|     } catch (error) { |     } catch (error) { | ||||||
|   | |||||||
							
								
								
									
										57
									
								
								src/save.ts
									
									
									
									
									
								
							
							
						
						
									
										57
									
								
								src/save.ts
									
									
									
									
									
								
							| @@ -1,19 +1,29 @@ | |||||||
| import * as core from "@actions/core"; | import * as core from "@actions/core"; | ||||||
| import { exec } from "@actions/exec"; |  | ||||||
| import * as io from "@actions/io"; |  | ||||||
| import * as path from "path"; | import * as path from "path"; | ||||||
| import * as cacheHttpClient from "./cacheHttpClient"; | import * as cacheHttpClient from "./cacheHttpClient"; | ||||||
| import { Inputs, State } from "./constants"; | import { Events, Inputs, State } from "./constants"; | ||||||
|  | import { createTar } from "./tar"; | ||||||
| import * as utils from "./utils/actionUtils"; | import * as utils from "./utils/actionUtils"; | ||||||
|  |  | ||||||
| async function run(): Promise<void> { | async function run(): Promise<void> { | ||||||
|     try { |     try { | ||||||
|  |         if (!utils.isValidEvent()) { | ||||||
|  |             utils.logWarning( | ||||||
|  |                 `Event Validation Error: The event type ${ | ||||||
|  |                     process.env[Events.Key] | ||||||
|  |                 } is not supported. Only ${utils | ||||||
|  |                     .getSupportedEvents() | ||||||
|  |                     .join(", ")} events are supported at this time.` | ||||||
|  |             ); | ||||||
|  |             return; | ||||||
|  |         } | ||||||
|  |  | ||||||
|         const state = utils.getCacheState(); |         const state = utils.getCacheState(); | ||||||
|  |  | ||||||
|         // Inputs are re-evaluated before the post action, so we want the original key used for restore |         // Inputs are re-evaluated before the post action, so we want the original key used for restore | ||||||
|         const primaryKey = core.getState(State.CacheKey); |         const primaryKey = core.getState(State.CacheKey); | ||||||
|         if (!primaryKey) { |         if (!primaryKey) { | ||||||
|             core.warning(`Error retrieving key from state.`); |             utils.logWarning(`Error retrieving key from state.`); | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|  |  | ||||||
| @@ -24,6 +34,15 @@ async function run(): Promise<void> { | |||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|  |  | ||||||
|  |         core.debug("Reserving Cache"); | ||||||
|  |         const cacheId = await cacheHttpClient.reserveCache(primaryKey); | ||||||
|  |         if (cacheId == -1) { | ||||||
|  |             core.info( | ||||||
|  |                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||||
|  |             ); | ||||||
|  |             return; | ||||||
|  |         } | ||||||
|  |         core.debug(`Cache ID: ${cacheId}`); | ||||||
|         const cachePath = utils.resolvePath( |         const cachePath = utils.resolvePath( | ||||||
|             core.getInput(Inputs.Path, { required: true }) |             core.getInput(Inputs.Path, { required: true }) | ||||||
|         ); |         ); | ||||||
| @@ -35,40 +54,24 @@ async function run(): Promise<void> { | |||||||
|         ); |         ); | ||||||
|         core.debug(`Archive Path: ${archivePath}`); |         core.debug(`Archive Path: ${archivePath}`); | ||||||
|  |  | ||||||
|         // http://man7.org/linux/man-pages/man1/tar.1.html |         await createTar(archivePath, cachePath); | ||||||
|         // tar [-options] <name of the tar archive> [files or directories which to add into archive] |  | ||||||
|         const IS_WINDOWS = process.platform === "win32"; |  | ||||||
|         const args = IS_WINDOWS |  | ||||||
|             ? [ |  | ||||||
|                   "-cz", |  | ||||||
|                   "--force-local", |  | ||||||
|                   "-f", |  | ||||||
|                   archivePath.replace(/\\/g, "/"), |  | ||||||
|                   "-C", |  | ||||||
|                   cachePath.replace(/\\/g, "/"), |  | ||||||
|                   "." |  | ||||||
|               ] |  | ||||||
|             : ["-cz", "-f", archivePath, "-C", cachePath, "."]; |  | ||||||
|  |  | ||||||
|         const tarPath = await io.which("tar", true); |         const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit | ||||||
|         core.debug(`Tar Path: ${tarPath}`); |  | ||||||
|         await exec(`"${tarPath}"`, args); |  | ||||||
|  |  | ||||||
|         const fileSizeLimit = 400 * 1024 * 1024; // 400MB |  | ||||||
|         const archiveFileSize = utils.getArchiveFileSize(archivePath); |         const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||||
|         core.debug(`File Size: ${archiveFileSize}`); |         core.debug(`File Size: ${archiveFileSize}`); | ||||||
|         if (archiveFileSize > fileSizeLimit) { |         if (archiveFileSize > fileSizeLimit) { | ||||||
|             core.warning( |             utils.logWarning( | ||||||
|                 `Cache size of ~${Math.round( |                 `Cache size of ~${Math.round( | ||||||
|                     archiveFileSize / (1024 * 1024) |                     archiveFileSize / (1024 * 1024) | ||||||
|                 )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.` |                 )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.` | ||||||
|             ); |             ); | ||||||
|             return; |             return; | ||||||
|         } |         } | ||||||
|  |  | ||||||
|         await cacheHttpClient.saveCache(primaryKey, archivePath); |         core.debug(`Saving Cache (ID: ${cacheId})`); | ||||||
|  |         await cacheHttpClient.saveCache(cacheId, archivePath); | ||||||
|     } catch (error) { |     } catch (error) { | ||||||
|         core.warning(error.message); |         utils.logWarning(error.message); | ||||||
|     } |     } | ||||||
| } | } | ||||||
|  |  | ||||||
|   | |||||||
							
								
								
									
										47
									
								
								src/tar.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										47
									
								
								src/tar.ts
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,47 @@ | |||||||
|  | import { exec } from "@actions/exec"; | ||||||
|  | import * as io from "@actions/io"; | ||||||
|  | import { existsSync } from "fs"; | ||||||
|  |  | ||||||
|  | async function getTarPath(): Promise<string> { | ||||||
|  |     // Explicitly use BSD Tar on Windows | ||||||
|  |     const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |     if (IS_WINDOWS) { | ||||||
|  |         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||||
|  |         if (existsSync(systemTar)) { | ||||||
|  |             return systemTar; | ||||||
|  |         } | ||||||
|  |     } | ||||||
|  |     return await io.which("tar", true); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | async function execTar(args: string[]): Promise<void> { | ||||||
|  |     try { | ||||||
|  |         await exec(`"${await getTarPath()}"`, args); | ||||||
|  |     } catch (error) { | ||||||
|  |         const IS_WINDOWS = process.platform === "win32"; | ||||||
|  |         if (IS_WINDOWS) { | ||||||
|  |             throw new Error( | ||||||
|  |                 `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.` | ||||||
|  |             ); | ||||||
|  |         } | ||||||
|  |         throw new Error(`Tar failed with error: ${error?.message}`); | ||||||
|  |     } | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function extractTar( | ||||||
|  |     archivePath: string, | ||||||
|  |     targetDirectory: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     // Create directory to extract tar into | ||||||
|  |     await io.mkdirP(targetDirectory); | ||||||
|  |     const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; | ||||||
|  |     await execTar(args); | ||||||
|  | } | ||||||
|  |  | ||||||
|  | export async function createTar( | ||||||
|  |     archivePath: string, | ||||||
|  |     sourceDirectory: string | ||||||
|  | ): Promise<void> { | ||||||
|  |     const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; | ||||||
|  |     await execTar(args); | ||||||
|  | } | ||||||
| @@ -77,6 +77,11 @@ export function getCacheState(): ArtifactCacheEntry | undefined { | |||||||
|     return undefined; |     return undefined; | ||||||
| } | } | ||||||
|  |  | ||||||
|  | export function logWarning(message: string): void { | ||||||
|  |     const warningPrefix = "[warning]"; | ||||||
|  |     core.info(`${warningPrefix}${message}`); | ||||||
|  | } | ||||||
|  |  | ||||||
| export function resolvePath(filePath: string): string { | export function resolvePath(filePath: string): string { | ||||||
|     if (filePath[0] === "~") { |     if (filePath[0] === "~") { | ||||||
|         const home = os.homedir(); |         const home = os.homedir(); | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user