Mirror of https://gitea.com/actions/cache.git

Compare commits: add-retrie...releases/v (29 commits)
| SHA1 |
|---|
| f5ce41475b |
| 68fa0a8d81 |
| 56ec64e417 |
| efbc4e162b |
| d9747005de |
| 3f662ca624 |
| 0232e3178d |
| ee7a57c615 |
| da9f90cb83 |
| ec7f7ebd08 |
| 2a973a0f4e |
| cbbb8b4d4f |
| 5a0add1806 |
| 9fe7ad8b07 |
| 7c7d003bbb |
| 96e5a46c57 |
| 84e606dfac |
| 70655ec832 |
| fe1055e9d1 |
| a505c2e7a6 |
| 10a14413e7 |
| cf4f44db70 |
| 4c4974aff1 |
| cffae9552b |
| 44543250bd |
| 6491e51b66 |
| 86dff562ab |
| 0f810ad45a |
| 9d8c7b4041 |
```diff
@@ -12,12 +12,5 @@
     "plugin:prettier/recommended",
     "prettier/@typescript-eslint"
   ],
-  "plugins": ["@typescript-eslint", "simple-import-sort", "jest"],
-  "rules": {
-    "import/first": "error",
-    "import/newline-after-import": "error",
-    "import/no-duplicates": "error",
-    "simple-import-sort/sort": "error",
-    "sort-imports": "off"
-  }
+  "plugins": ["@typescript-eslint", "jest"]
 }
```
.github/workflows/codeql.yml (vendored): 35 lines changed
```diff
@@ -1,35 +0,0 @@
-name: "Code Scanning - Action"
-
-on:
-  push:
-  schedule:
-    - cron: '0 0 * * 0'
-
-jobs:
-  CodeQL-Build:
-
-    strategy:
-      fail-fast: false
-
-
-    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
-    runs-on: ubuntu-latest
-
-    steps:
-    - name: Checkout repository
-      uses: actions/checkout@v2
-
-    # Initializes the CodeQL tools for scanning.
-    - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
-      # Override language selection by uncommenting this and choosing your languages
-      # with:
-      #   languages: go, javascript, csharp, python, cpp, java
-
-    # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java).
-    # If this step fails, then you should remove it and run the build manually (see below).
-    - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
-
-    - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1
```
.github/workflows/workflow.yml (vendored): 32 lines changed
```diff
@@ -4,11 +4,13 @@ on:
   pull_request:
     branches:
       - master
+      - releases/**
     paths-ignore:
       - '**.md'
   push:
     branches:
       - master
+      - releases/**
     paths-ignore:
       - '**.md'
 
@@ -56,19 +58,14 @@ jobs:
     steps:
     - name: Checkout
      uses: actions/checkout@v2
-    - name: Generate files in working directory
+    - name: Generate files
       shell: bash
-      run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache
-    - name: Generate files outside working directory
-      shell: bash
-      run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
+      run: __tests__/create-cache-files.sh ${{ runner.os }}
     - name: Save cache
       uses: ./
       with:
         key: test-${{ runner.os }}-${{ github.run_id }}
-        path: |
-          test-cache
-          ~/test-cache
+        path: test-cache
   test-restore:
     needs: test-save
     strategy:
@@ -83,15 +80,10 @@ jobs:
       uses: ./
       with:
         key: test-${{ runner.os }}-${{ github.run_id }}
-        path: |
-          test-cache
-          ~/test-cache
-    - name: Verify cache files in working directory
+        path: test-cache
+    - name: Verify cache
       shell: bash
-      run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
-    - name: Verify cache files outside working directory
-      shell: bash
-      run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
 
   # End to end with proxy
   test-proxy-save:
@@ -101,7 +93,7 @@ jobs:
       options: --dns 127.0.0.1
     services:
       squid-proxy:
-        image: datadog/squid:latest
+        image: ubuntu/squid:latest
         ports:
           - 3128:3128
     env:
@@ -110,7 +102,7 @@ jobs:
     - name: Checkout
       uses: actions/checkout@v2
     - name: Generate files
-      run: __tests__/create-cache-files.sh proxy test-cache
+      run: __tests__/create-cache-files.sh proxy
     - name: Save cache
       uses: ./
       with:
@@ -124,7 +116,7 @@ jobs:
       options: --dns 127.0.0.1
     services:
       squid-proxy:
-        image: datadog/squid:latest
+        image: ubuntu/squid:latest
         ports:
           - 3128:3128
     env:
@@ -138,4 +130,4 @@ jobs:
         key: test-proxy-${{ github.run_id }}
         path: test-cache
     - name: Verify cache
-      run: __tests__/verify-cache-files.sh proxy test-cache
+      run: __tests__/verify-cache-files.sh proxy
```
.gitignore (vendored): 3 lines changed
```diff
@@ -1,5 +1,8 @@
 __tests__/runner/*
 
+# comment out in distribution branches
+dist/
+
 node_modules/
 lib/
 
```
````diff
@@ -37,7 +37,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v1
 
     - name: Cache Primes
       id: cache-primes
@@ -67,9 +67,7 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
 - [Java - Gradle](./examples.md#java---gradle)
 - [Java - Maven](./examples.md#java---maven)
 - [Node - npm](./examples.md#node---npm)
-- [Node - Lerna](./examples.md#node---lerna)
 - [Node - Yarn](./examples.md#node---yarn)
-- [OCaml/Reason - esy](./examples.md##ocamlreason---esy)
 - [PHP - Composer](./examples.md#php---composer)
 - [Python - pip](./examples.md#python---pip)
 - [R - renv](./examples.md#r---renv)
@@ -91,7 +89,7 @@ Using the `cache-hit` output, subsequent steps (such as install or build) can be
 Example:
 ```yaml
 steps:
-  - uses: actions/checkout@v2
+  - uses: actions/checkout@v1
 
   - uses: actions/cache@v1
     id: cache
````
```diff
@@ -1,6 +1,4 @@
 import * as core from "@actions/core";
-import * as io from "@actions/io";
-import { promises as fs } from "fs";
 import * as os from "os";
 import * as path from "path";
 
@@ -8,24 +6,13 @@ import { Events, Outputs, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";
 
-import uuid = require("uuid");
-
 jest.mock("@actions/core");
 jest.mock("os");
 
-function getTempDir(): string {
-    return path.join(__dirname, "_temp", "actionUtils");
-}
-
 afterEach(() => {
     delete process.env[Events.Key];
 });
 
-afterAll(async () => {
-    delete process.env["GITHUB_WORKSPACE"];
-    await io.rmRF(getTempDir());
-});
-
 test("getArchiveFileSize returns file size", () => {
     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
 
@@ -194,43 +181,17 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });
 
-test("resolvePaths with no ~ in path", async () => {
-    const filePath = ".cache";
-
-    // Create the following layout:
-    //   cwd
-    //   cwd/.cache
-    //   cwd/.cache/file.txt
-
-    const root = path.join(getTempDir(), "no-tilde");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    const cache = path.join(root, ".cache");
-    await fs.mkdir(cache, { recursive: true });
-    await fs.writeFile(path.join(cache, "file.txt"), "cached");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths([filePath]);
-
-        const expectedPath = [filePath];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
+test("resolvePath with no ~ in path", () => {
+    const filePath = ".cache/yarn";
+
+    const resolvedPath = actionUtils.resolvePath(filePath);
+
+    const expectedPath = path.resolve(filePath);
+    expect(resolvedPath).toBe(expectedPath);
+});
 
-test("resolvePaths with ~ in path", async () => {
-    const cacheDir = uuid();
-    const filePath = `~/${cacheDir}`;
-    // Create the following layout:
-    //   ~/uuid
-    //   ~/uuid/file.txt
+test("resolvePath with ~ in path", () => {
+    const filePath = "~/.cache/yarn";
 
     const homedir = jest.requireActual("os").homedir();
     const homedirMock = jest.spyOn(os, "homedir");
@@ -238,93 +199,24 @@ test("resolvePaths with ~ in path", async () => {
         return homedir;
     });
 
-    const target = path.join(homedir, cacheDir);
-    await fs.mkdir(target, { recursive: true });
-    await fs.writeFile(path.join(target, "file.txt"), "cached");
-
-    const root = getTempDir();
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    try {
-        const resolvedPath = await actionUtils.resolvePaths([filePath]);
-
-        const expectedPath = [path.relative(root, target)];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        await io.rmRF(target);
-    }
+    const resolvedPath = actionUtils.resolvePath(filePath);
+
+    const expectedPath = path.join(homedir, ".cache/yarn");
+    expect(resolvedPath).toBe(expectedPath);
 });
 
-test("resolvePaths with home not found", async () => {
+test("resolvePath with home not found", () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });
 
-    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
-        "Unable to determine HOME directory"
+    expect(() => actionUtils.resolvePath(filePath)).toThrow(
+        "Unable to resolve `~` to HOME"
     );
 });
 
-test("resolvePaths inclusion pattern returns found", async () => {
-    const pattern = "*.ts";
-    // Create the following layout:
-    //   inclusion-patterns
-    //   inclusion-patterns/miss.txt
-    //   inclusion-patterns/test.ts
-
-    const root = path.join(getTempDir(), "inclusion-patterns");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    await fs.writeFile(path.join(root, "miss.txt"), "no match");
-    await fs.writeFile(path.join(root, "test.ts"), "match");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths([pattern]);
-
-        const expectedPath = ["test.ts"];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
-test("resolvePaths exclusion pattern returns not found", async () => {
-    const patterns = ["*.ts", "!test.ts"];
-    // Create the following layout:
-    //   exclusion-patterns
-    //   exclusion-patterns/miss.txt
-    //   exclusion-patterns/test.ts
-
-    const root = path.join(getTempDir(), "exclusion-patterns");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    await fs.writeFile(path.join(root, "miss.txt"), "no match");
-    await fs.writeFile(path.join(root, "test.ts"), "no match");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths(patterns);
-
-        const expectedPath = [];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
 test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
@@ -342,16 +234,3 @@ test("isValidEvent returns true for pull request event", () => {
 
     expect(isValidEvent).toBe(true);
 });
-
-test("unlinkFile unlinks file", async () => {
-    const testDirectory = await fs.mkdtemp("unlinkFileTest");
-    const testFile = path.join(testDirectory, "test.txt");
-    await fs.writeFile(testFile, "hello world");
-
-    await actionUtils.unlinkFile(testFile);
-
-    // This should throw as testFile should not exist
-    await expect(fs.stat(testFile)).rejects.toThrow();
-
-    await fs.rmdir(testDirectory);
-});
```
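The rewritten tests pin down the expected behavior of the `resolvePath` helper: a plain relative path resolves against the current working directory, a leading `~` expands to the home directory, and an empty home directory raises "Unable to resolve `~` to HOME". The sketch below is inferred from those assertions only; the actual implementation in `src/utils/actionUtils.ts` is not shown in this diff and may differ.

```typescript
import * as os from "os";
import * as path from "path";

// Sketch of resolvePath inferred from the test assertions above; the real
// implementation in src/utils/actionUtils.ts may differ.
export function resolvePath(filePath: string): string {
    if (filePath[0] === "~") {
        const home = os.homedir();
        if (!home) {
            // Matches the error message asserted in the tests.
            throw new Error("Unable to resolve `~` to HOME");
        }
        // "~/.cache/yarn" becomes "<home>/.cache/yarn".
        return path.join(home, filePath.slice(1));
    }
    // ".cache/yarn" resolves against the current working directory.
    return path.resolve(filePath);
}
```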
```diff
@@ -1,43 +1,10 @@
-import { getCacheVersion, retry } from "../src/cacheHttpClient";
-import { CompressionMethod, Inputs } from "../src/constants";
+import { retry } from "../src/cacheHttpClient";
 import * as testUtils from "../src/utils/testUtils";
 
 afterEach(() => {
     testUtils.clearInputs();
 });
 
-test("getCacheVersion with path input and compression method undefined returns version", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-
-    const result = getCacheVersion();
-
-    expect(result).toEqual(
-        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
-    );
-});
-
-test("getCacheVersion with zstd compression returns version", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-    const result = getCacheVersion(CompressionMethod.Zstd);
-
-    expect(result).toEqual(
-        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
-    );
-});
-
-test("getCacheVersion with gzip compression does not change vesion", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-    const result = getCacheVersion(CompressionMethod.Gzip);
-
-    expect(result).toEqual(
-        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
-    );
-});
-
-test("getCacheVersion with no input throws", async () => {
-    expect(() => getCacheVersion()).toThrow();
-});
-
 interface TestResponse {
     statusCode: number;
     result: string | null;
```
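The tests kept here exercise `retry` against responses shaped like the `TestResponse` interface. This diff does not show the retry policy itself, so the following is only a plausible sketch, assuming 5xx responses are retried up to a fixed attempt budget; the real `src/cacheHttpClient.ts` may use different status codes and limits.

```typescript
// Hypothetical retry helper shaped around the TestResponse interface kept by
// the tests. The policy (retry on 5xx, two attempts) is an assumption.
interface TestResponse {
    statusCode: number;
    result: string | null;
}

async function retry(
    name: string,
    method: () => Promise<TestResponse>,
    maxAttempts = 2
): Promise<TestResponse> {
    for (let attempt = 1; ; attempt++) {
        const response = await method();
        // Successes and client errors are returned as-is; server errors are
        // retried until the attempt budget is spent.
        if (response.statusCode < 500 || attempt >= maxAttempts) {
            return response;
        }
        console.log(`${name} returned ${response.statusCode}, retrying`);
    }
}
```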
```diff
@@ -7,11 +7,5 @@ if [ -z "$prefix" ]; then
   exit 1
 fi
 
-path="$2"
-if [ -z "$path" ]; then
-  echo "Must supply path argument"
-  exit 1
-fi
-
-mkdir -p $path
-echo "$prefix $GITHUB_RUN_ID" > $path/test-file.txt
+mkdir test-cache
+echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt
```
```diff
@@ -1,13 +1,7 @@
 import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Inputs
-} from "../src/constants";
+import { Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
 import * as tar from "../src/tar";
@@ -19,6 +13,10 @@ jest.mock("../src/tar");
 jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
+    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
+        return path.resolve(filePath);
+    });
+
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -35,11 +33,6 @@ beforeAll(() => {
         const actualUtils = jest.requireActual("../src/utils/actionUtils");
         return actualUtils.getSupportedEvents();
     });
-
-    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getCacheFileName(cm);
-    });
 });
 
 beforeEach(() => {
@@ -66,8 +59,7 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
-    expect(failedMock).not.toHaveBeenCalledWith(
+    expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
 });
@@ -144,7 +136,7 @@ test("restore with no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);
 
     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}`
+        `Cache not found for input keys: ${key}.`
     );
 });
 
@@ -203,12 +195,13 @@ test("restore with restore keys and no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);
 
     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}, ${restoreKey}`
+        `Cache not found for input keys: ${key}, ${restoreKey}.`
     );
 });
 
-test("restore with gzip compressed cache found", async () => {
+test("restore with cache found", async () => {
     const key = "node-test";
+    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -237,7 +230,7 @@ test("restore with gzip compressed cache found", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, CacheFilename.Gzip);
+    const archivePath = path.join(tempPath, "cache.tgz");
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -247,20 +240,12 @@ test("restore with gzip compressed cache found", async () => {
         .mockReturnValue(fileSize);
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
-    const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
 
-    const compression = CompressionMethod.Gzip;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key], {
-        compressionMethod: compression
-    });
+    expect(getCacheMock).toHaveBeenCalledWith([key]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -270,21 +255,18 @@ test("restore with gzip compressed cache found", async () => {
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
-    expect(unlinkFileMock).toHaveBeenCalledTimes(1);
-    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
-
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
-test("restore with a pull request event and zstd compressed cache found", async () => {
+test("restore with a pull request event and cache found", async () => {
     const key = "node-test";
+    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -315,7 +297,7 @@ test("restore with a pull request event and zstd compressed cache found", async
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, CacheFilename.Zstd);
+    const archivePath = path.join(tempPath, "cache.tgz");
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -326,17 +308,11 @@ test("restore with a pull request event and zstd compressed cache found", async
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key], {
-        compressionMethod: compression
-    });
+    expect(getCacheMock).toHaveBeenCalledWith([key]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -347,19 +323,19 @@ test("restore with a pull request event and zstd compressed cache found", async
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("restore with cache found for restore key", async () => {
     const key = "node-test";
     const restoreKey = "node-";
+    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key,
@@ -389,7 +365,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, CacheFilename.Zstd);
+    const archivePath = path.join(tempPath, "cache.tgz");
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -400,17 +376,11 @@ test("restore with cache found for restore key", async () => {
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
-        compressionMethod: compression
-    });
+    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -421,7 +391,7 @@ test("restore with cache found for restore key", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
@@ -430,5 +400,4 @@ test("restore with cache found for restore key", async () => {
         `Cache restored from key: ${restoreKey}`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
```
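Read together, the updated assertions trace the simplified restore flow: resolve the single `path` input, look up a cache entry for the primary key plus restore keys, download the archive into a temp directory as `cache.tgz`, extract it into the resolved path, and set the cache-hit output. A condensed sketch follows; every helper shape here is inferred from the mock handles in the tests, not from `src/restore.ts`.

```typescript
import * as path from "path";

// Restore sequence implied by the assertions above. The dependency shapes are
// assumptions taken from the mock handles (getCacheMock, downloadCacheMock, ...).
interface RestoreDeps {
    getCacheEntry(keys: string[]): Promise<unknown>;
    createTempDirectory(): Promise<string>;
    downloadCache(entry: unknown, archivePath: string): Promise<void>;
    extractTar(archivePath: string, targetDirectory: string): Promise<void>;
    setCacheHitOutput(isExactKeyMatch: boolean): void;
}

async function restoreSketch(
    deps: RestoreDeps,
    keys: string[],
    inputPath: string
): Promise<void> {
    const cachePath = path.resolve(inputPath); // e.g. "node_modules"
    const entry = await deps.getCacheEntry(keys); // [key, ...restoreKeys]
    const tempDir = await deps.createTempDirectory();
    const archivePath = path.join(tempDir, "cache.tgz");
    await deps.downloadCache(entry, archivePath);
    await deps.extractTar(archivePath, cachePath);
    // true for an exact primary-key hit, false when only a restore key matched.
    deps.setCacheHitOutput(true);
}
```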
```diff
@@ -1,13 +1,7 @@
 import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Inputs
-} from "../src/constants";
+import { Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
 import * as tar from "../src/tar";
@@ -46,20 +40,13 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });
 
-    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
-        async filePaths => {
-            return filePaths.map(x => path.resolve(x));
-        }
-    );
+    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
+        return path.resolve(filePath);
+    });
 
     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
     });
-
-    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getCacheFileName(cm);
-    });
 });
 
 beforeEach(() => {
@@ -202,7 +189,7 @@ test("save with large cache outputs warning", async () => {
         });
 
     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
+    const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
     const createTarMock = jest.spyOn(tar, "createTar");
@@ -211,27 +198,20 @@ test("save with large cache outputs warning", async () => {
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });
-    const compression = CompressionMethod.Gzip;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
-    const archiveFolder = "/foo/bar";
+    const archivePath = path.join("/foo/bar", "cache.tgz");
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
         "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with reserve cache failure outputs warning", async () => {
@@ -267,18 +247,13 @@ test("save with reserve cache failure outputs warning", async () => {
         });
 
     const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
     expect(infoMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -288,7 +263,6 @@ test("save with reserve cache failure outputs warning", async () => {
     expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with server error outputs warning", async () => {
@@ -314,7 +288,7 @@ test("save with server error outputs warning", async () => {
         });
 
     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
+    const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -331,36 +305,24 @@ test("save with server error outputs warning", async () => {
         .mockImplementationOnce(() => {
             throw new Error("HTTP Error Occurred");
         });
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
-    const archiveFolder = "/foo/bar";
-    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
+    const archivePath = path.join("/foo/bar", "cache.tgz");
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
 
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with valid inputs uploads a cache", async () => {
@@ -385,7 +347,7 @@ test("save with valid inputs uploads a cache", async () => {
         });
 
     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
+    const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -398,31 +360,19 @@ test("save with valid inputs uploads a cache", async () => {
     const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
-    const archiveFolder = "/foo/bar";
-    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
+    const archivePath = path.join("/foo/bar", "cache.tgz");
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
```
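The save-side assertions mirror the restore flow above: reserve the primary key, tar the resolved path into `/foo/bar/cache.tgz`, and upload it, while a failed reservation, an oversized archive, or a server error produces a warning rather than a failed step. A sketch under the same caveats (helper shapes inferred from the mocks, not from `src/save.ts`):

```typescript
import * as path from "path";

// Save sequence implied by the assertions above; the negative-cacheId sentinel
// for a failed reservation and the exact warning text are assumptions.
interface SaveDeps {
    reserveCache(key: string): Promise<number>;
    createTar(archivePath: string, sourcePath: string): Promise<void>;
    getArchiveFileSize(archivePath: string): number;
    saveCache(cacheId: number, archivePath: string): Promise<void>;
    logWarning(message: string): void;
}

const FIVE_GB = 5 * 1024 * 1024 * 1024;

async function saveSketch(
    deps: SaveDeps,
    primaryKey: string,
    inputPath: string,
    tempDir: string // the tests mock createTempDirectory to "/foo/bar"
): Promise<void> {
    const cacheId = await deps.reserveCache(primaryKey);
    if (cacheId < 0) {
        // Another job may be creating this cache; skip without failing.
        return;
    }
    const cachePath = path.resolve(inputPath);
    const archivePath = path.join(tempDir, "cache.tgz");
    await deps.createTar(archivePath, cachePath);
    if (deps.getArchiveFileSize(archivePath) > FIVE_GB) {
        deps.logWarning("Cache size is over the 5GB limit, not saving cache.");
        return;
    }
    await deps.saveCache(cacheId, archivePath);
}
```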
```diff
@@ -1,204 +1,86 @@
 import * as exec from "@actions/exec";
 import * as io from "@actions/io";
-import * as path from "path";
 
-import { CacheFilename, CompressionMethod } from "../src/constants";
 import * as tar from "../src/tar";
-import * as utils from "../src/utils/actionUtils";
 
 import fs = require("fs");
 
 jest.mock("@actions/exec");
 jest.mock("@actions/io");
 
-const IS_WINDOWS = process.platform === "win32";
-
-function getTempDir(): string {
-    return path.join(__dirname, "_temp", "tar");
-}
-
-beforeAll(async () => {
+beforeAll(() => {
     jest.spyOn(io, "which").mockImplementation(tool => {
         return Promise.resolve(tool);
     });
-
-    process.env["GITHUB_WORKSPACE"] = process.cwd();
-    await jest.requireActual("@actions/io").rmRF(getTempDir());
 });
 
-afterAll(async () => {
-    delete process.env["GITHUB_WORKSPACE"];
-    await jest.requireActual("@actions/io").rmRF(getTempDir());
-});
-
-test("zstd extract tar", async () => {
+test("extract BSD tar", async () => {
     const mkdirMock = jest.spyOn(io, "mkdirP");
     const execMock = jest.spyOn(exec, "exec");
 
+    const IS_WINDOWS = process.platform === "win32";
     const archivePath = IS_WINDOWS
         ? `${process.env["windir"]}\\fakepath\\cache.tar`
         : "cache.tar";
-    const workspace = process.env["GITHUB_WORKSPACE"];
-
-    await tar.extractTar(archivePath, CompressionMethod.Zstd);
+    const targetDirectory = "~/.npm/cache";
+    await tar.extractTar(archivePath, targetDirectory);
 
-    expect(mkdirMock).toHaveBeenCalledWith(workspace);
+    expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
+
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
-        [
-            "--use-compress-program",
-            "zstd -d --long=30",
-            "-xf",
+    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
+        "-xz",
+        "-f",
         IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
-            "-P",
         "-C",
-            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
-        ],
-        { cwd: undefined }
-    );
+        IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory
+    ]);
 });
 
-test("gzip extract tar", async () => {
-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
-    const archivePath = IS_WINDOWS
-        ? `${process.env["windir"]}\\fakepath\\cache.tar`
-        : "cache.tar";
-    const workspace = process.env["GITHUB_WORKSPACE"];
-
-    await tar.extractTar(archivePath, CompressionMethod.Gzip);
-
-    expect(mkdirMock).toHaveBeenCalledWith(workspace);
-    const tarPath = IS_WINDOWS
-        ? `${process.env["windir"]}\\System32\\tar.exe`
-        : "tar";
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
-        [
-            "-z",
-            "-xf",
-            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
-            "-P",
-            "-C",
-            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
-        ],
-        { cwd: undefined }
-    );
-});
-
-test("gzip extract GNU tar on windows", async () => {
+test("extract GNU tar", async () => {
+    const IS_WINDOWS = process.platform === "win32";
     if (IS_WINDOWS) {
         jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
+        jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true));
 
-        const isGnuMock = jest
-            .spyOn(utils, "useGnuTar")
-            .mockReturnValue(Promise.resolve(true));
         const execMock = jest.spyOn(exec, "exec");
         const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
-        const workspace = process.env["GITHUB_WORKSPACE"];
+        const targetDirectory = "~/.npm/cache";
 
-        await tar.extractTar(archivePath, CompressionMethod.Gzip);
+        await tar.extractTar(archivePath, targetDirectory);
 
-        expect(isGnuMock).toHaveBeenCalledTimes(1);
         expect(execMock).toHaveBeenCalledTimes(1);
-        expect(execMock).toHaveBeenCalledWith(
-            `"tar"`,
-            [
-                "-z",
-                "-xf",
+        expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [
+            "-xz",
+            "-f",
             archivePath.replace(/\\/g, "/"),
-                "-P",
             "-C",
-                workspace?.replace(/\\/g, "/"),
+            targetDirectory?.replace(/\\/g, "/"),
             "--force-local"
-            ],
-            { cwd: undefined }
-        );
+        ]);
     }
 });
 
-test("zstd create tar", async () => {
+test("create BSD tar", async () => {
     const execMock = jest.spyOn(exec, "exec");
 
-    const archiveFolder = getTempDir();
-    const workspace = process.env["GITHUB_WORKSPACE"];
-    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
-
-    await fs.promises.mkdir(archiveFolder, { recursive: true });
-
-    await tar.createTar(
-        archiveFolder,
-        sourceDirectories,
-        CompressionMethod.Zstd
-    );
+    const archivePath = "cache.tar";
+    const sourceDirectory = "~/.npm/cache";
+    await tar.createTar(archivePath, sourceDirectory);
 
+    const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
```
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |     expect(execMock).toHaveBeenCalledTimes(1); | ||||||
|     expect(execMock).toHaveBeenCalledWith( |     expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ | ||||||
|         `"${tarPath}"`, |         "-cz", | ||||||
|         [ |         "-f", | ||||||
|             "--use-compress-program", |         IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, | ||||||
|             "zstd -T0 --long=30", |  | ||||||
|             "-cf", |  | ||||||
|             IS_WINDOWS |  | ||||||
|                 ? CacheFilename.Zstd.replace(/\\/g, "/") |  | ||||||
|                 : CacheFilename.Zstd, |  | ||||||
|             "-P", |  | ||||||
|         "-C", |         "-C", | ||||||
|             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace, |         IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory, | ||||||
|             "--files-from", |         "." | ||||||
|             "manifest.txt" |     ]); | ||||||
|         ], |  | ||||||
|         { |  | ||||||
|             cwd: archiveFolder |  | ||||||
|         } |  | ||||||
|     ); |  | ||||||
| }); |  | ||||||
|  |  | ||||||
| test("gzip create tar", async () => { |  | ||||||
|     const execMock = jest.spyOn(exec, "exec"); |  | ||||||
|  |  | ||||||
|     const archiveFolder = getTempDir(); |  | ||||||
|     const workspace = process.env["GITHUB_WORKSPACE"]; |  | ||||||
|     const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`]; |  | ||||||
|  |  | ||||||
|     await fs.promises.mkdir(archiveFolder, { recursive: true }); |  | ||||||
|  |  | ||||||
|     await tar.createTar( |  | ||||||
|         archiveFolder, |  | ||||||
|         sourceDirectories, |  | ||||||
|         CompressionMethod.Gzip |  | ||||||
|     ); |  | ||||||
|  |  | ||||||
|     const tarPath = IS_WINDOWS |  | ||||||
|         ? `${process.env["windir"]}\\System32\\tar.exe` |  | ||||||
|         : "tar"; |  | ||||||
|  |  | ||||||
|     expect(execMock).toHaveBeenCalledTimes(1); |  | ||||||
|     expect(execMock).toHaveBeenCalledWith( |  | ||||||
|         `"${tarPath}"`, |  | ||||||
|         [ |  | ||||||
|             "-z", |  | ||||||
|             "-cf", |  | ||||||
|             IS_WINDOWS |  | ||||||
|                 ? CacheFilename.Gzip.replace(/\\/g, "/") |  | ||||||
|                 : CacheFilename.Gzip, |  | ||||||
|             "-P", |  | ||||||
|             "-C", |  | ||||||
|             IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace, |  | ||||||
|             "--files-from", |  | ||||||
|             "manifest.txt" |  | ||||||
|         ], |  | ||||||
|         { |  | ||||||
|             cwd: archiveFolder |  | ||||||
|         } |  | ||||||
|     ); |  | ||||||
| }); | }); | ||||||
|   | |||||||
@@ -7,12 +7,6 @@ if [ -z "$prefix" ]; then
   exit 1
 fi
 
-path="$2"
-if [ -z "$path" ]; then
-  echo "Must specify path argument"
-  exit 1
-fi
-
 # Sanity check GITHUB_RUN_ID defined
 if [ -z "$GITHUB_RUN_ID" ]; then
   echo "GITHUB_RUN_ID not defined"
@@ -20,7 +14,7 @@ if [ -z "$GITHUB_RUN_ID" ]; then
 fi
 
 # Verify file exists
-file="$path/test-file.txt"
+file="test-cache/test-file.txt"
 echo "Checking for $file"
 if [ ! -e $file ]; then
   echo "File does not exist"
dist/restore/index.js (vendored, 4548 lines changed; diff suppressed because it is too large)

dist/save/index.js (vendored, 4542 lines changed; diff suppressed because it is too large)

examples.md (124 lines changed)
@@ -1,34 +1,22 @@
 # Examples
 
-- [Examples](#examples)
-  - [C# - NuGet](#c---nuget)
-  - [Elixir - Mix](#elixir---mix)
-  - [Go - Modules](#go---modules)
-  - [Haskell - Cabal](#haskell---cabal)
-  - [Java - Gradle](#java---gradle)
-  - [Java - Maven](#java---maven)
-  - [Node - npm](#node---npm)
-    - [macOS and Ubuntu](#macos-and-ubuntu)
-    - [Windows](#windows)
-    - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config)
-  - [Node - Lerna](#node---lerna)
-  - [Node - Yarn](#node---yarn)
-  - [OCaml/Reason - esy](#ocamlreason---esy)
-  - [PHP - Composer](#php---composer)
-  - [Python - pip](#python---pip)
-    - [Simple example](#simple-example)
-    - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow)
-    - [Using pip to get cache location](#using-pip-to-get-cache-location)
-    - [Using a script to get cache location](#using-a-script-to-get-cache-location)
-  - [R - renv](#r---renv)
-    - [Simple example](#simple-example-1)
-    - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow-1)
-  - [Ruby - Bundler](#ruby---bundler)
-  - [Rust - Cargo](#rust---cargo)
-  - [Scala - SBT](#scala---sbt)
-  - [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
-  - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
-  - [Swift - Swift Package Manager](#swift---swift-package-manager)
+- [C# - NuGet](#c---nuget)
+- [Elixir - Mix](#elixir---mix)
+- [Go - Modules](#go---modules)
+- [Haskell - Cabal](#haskell---cabal)
+- [Java - Gradle](#java---gradle)
+- [Java - Maven](#java---maven)
+- [Node - npm](#node---npm)
+- [Node - Yarn](#node---yarn)
+- [PHP - Composer](#php---composer)
+- [Python - pip](#python---pip)
+- [R - renv](#r---renv)
+- [Ruby - Bundler](#ruby---bundler)
+- [Rust - Cargo](#rust---cargo)
+- [Scala - SBT](#scala---sbt)
+- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
+- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
+- [Swift - Swift Package Manager](#swift---swift-package-manager)
 
 ## C# - NuGet
 Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
@@ -106,7 +94,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
 - uses: actions/cache@v1
   with:
     path: ~/.gradle/caches
-    key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
+    key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
     restore-keys: |
       ${{ runner.os }}-gradle-
 ```
@@ -142,14 +130,10 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
 ### Windows
 
 ```yaml
-- name: Get npm cache directory
-  id: npm-cache
-  run: |
-    echo "::set-output name=dir::$(npm config get cache)"
 - uses: actions/cache@v1
   with:
-    path: ${{ steps.npm-cache.outputs.dir }}
-    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
+    path: ~\AppData\Roaming\npm-cache
+    key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }}
     restore-keys: |
       ${{ runner.os }}-node-
 ```
@@ -169,64 +153,22 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
       ${{ runner.os }}-node-
 ```
 
-## Node - Lerna
-
->Note this example uses the new multi-paths feature and is only available at `master`
-```yaml
-- name: restore lerna
-  uses: actions/cache@master
-  with:
-    path: |
-      node_modules
-      */*/node_modules
-    key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
-```
-
 ## Node - Yarn
 The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info.
 
 ```yaml
-- name: Get yarn cache directory path
-  id: yarn-cache-dir-path
+- name: Get yarn cache
+  id: yarn-cache
   run: echo "::set-output name=dir::$(yarn cache dir)"
 
 - uses: actions/cache@v1
-  id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
   with:
-    path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
+    path: ${{ steps.yarn-cache.outputs.dir }}
     key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
     restore-keys: |
       ${{ runner.os }}-yarn-
 ```
 
-## OCaml/Reason - esy
-Esy allows you to export built dependencies and import pre-built dependencies.
-```yaml
-    - name: Restore Cache
-      id: restore-cache
-      uses: actions/cache@v1
-      with:
-        path: _export
-        key:  ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
-        restore-keys: |
-          ${{ runner.os }}-esy-
-    - name: Esy install
-      run: 'esy install'
-    - name: Import Cache
-      run: |
-        esy import-dependencies _export
-        rm -rf _export
-
-    ...(Build job)...
-
-    # Re-export dependencies if anything has changed or if it is the first time
-    - name: Setting dependency cache
-      run: |
-        esy export-dependencies
-      if: steps.restore-cache.outputs.cache-hit != 'true'
-```
-
-
 ## PHP - Composer
 
 ```yaml
@@ -291,29 +233,11 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
       ${{ runner.os }}-pip-
 ```
 
-### Using pip to get cache location
-
-> Note: This requires pip 20.1+
-```yaml
-- name: Get pip cache dir
-  id: pip-cache
-  run: |
-    echo "::set-output name=dir::$(pip cache dir)"
-
-- name: pip cache
-  uses: actions/cache@v1
-  with:
-    path: ${{ steps.pip-cache.outputs.dir }}
-    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-    restore-keys: |
-      ${{ runner.os }}-pip-
-```
-
 ### Using a script to get cache location
 
 > Note: This uses an internal pip API and may not always work
 ```yaml
-- name: Get pip cache dir
+- name: Get pip cache
   id: pip-cache
   run: |
     python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
package-lock.json (generated, 3960 lines changed; diff suppressed because it is too large)

package.json (13 lines changed)
							| @@ -1,15 +1,16 @@ | |||||||
| { | { | ||||||
|   "name": "cache", |   "name": "cache", | ||||||
|   "version": "1.1.2", |   "version": "1.2.0", | ||||||
|   "private": true, |   "private": true, | ||||||
|   "description": "Cache dependencies and build outputs", |   "description": "Cache dependencies and build outputs", | ||||||
|   "main": "dist/restore/index.js", |   "main": "dist/restore/index.js", | ||||||
|   "scripts": { |   "scripts": { | ||||||
|     "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts", |     "build": "tsc", | ||||||
|     "test": "tsc --noEmit && jest --coverage", |     "test": "tsc --noEmit && jest --coverage", | ||||||
|     "lint": "eslint **/*.ts --cache", |     "lint": "eslint **/*.ts --cache", | ||||||
|     "format": "prettier --write **/*.ts", |     "format": "prettier --write **/*.ts", | ||||||
|     "format-check": "prettier --check **/*.ts" |     "format-check": "prettier --check **/*.ts", | ||||||
|  |     "release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/" | ||||||
|   }, |   }, | ||||||
|   "repository": { |   "repository": { | ||||||
|     "type": "git", |     "type": "git", | ||||||
| @@ -23,10 +24,9 @@ | |||||||
|   "author": "GitHub", |   "author": "GitHub", | ||||||
|   "license": "MIT", |   "license": "MIT", | ||||||
|   "dependencies": { |   "dependencies": { | ||||||
|     "@actions/core": "^1.2.0", |     "@actions/core": "^1.10.0", | ||||||
|     "@actions/exec": "^1.0.1", |     "@actions/exec": "^1.0.1", | ||||||
|     "@actions/glob": "^0.1.0", |     "@actions/http-client": "^1.0.6", | ||||||
|     "@actions/http-client": "^1.0.8", |  | ||||||
|     "@actions/io": "^1.0.1", |     "@actions/io": "^1.0.1", | ||||||
|     "uuid": "^3.3.3" |     "uuid": "^3.3.3" | ||||||
|   }, |   }, | ||||||
| @@ -43,7 +43,6 @@ | |||||||
|     "eslint-plugin-import": "^2.18.2", |     "eslint-plugin-import": "^2.18.2", | ||||||
|     "eslint-plugin-jest": "^23.0.3", |     "eslint-plugin-jest": "^23.0.3", | ||||||
|     "eslint-plugin-prettier": "^3.1.1", |     "eslint-plugin-prettier": "^3.1.1", | ||||||
|     "eslint-plugin-simple-import-sort": "^5.0.2", |  | ||||||
|     "jest": "^24.8.0", |     "jest": "^24.8.0", | ||||||
|     "jest-circus": "^24.7.1", |     "jest-circus": "^24.7.1", | ||||||
|     "nock": "^11.7.0", |     "nock": "^11.7.0", | ||||||
|   | |||||||
src/cacheHttpClient.ts

@@ -6,23 +6,19 @@ import {
     IRequestOptions,
     ITypedResponse
 } from "@actions/http-client/interfaces";
-import * as crypto from "crypto";
 import * as fs from "fs";
 import * as stream from "stream";
 import * as util from "util";
 
-import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
+import { SocketTimeout } from "./constants";
 import {
     ArtifactCacheEntry,
-    CacheOptions,
     CommitCacheRequest,
     ReserveCacheRequest,
     ReserveCacheResponse
 } from "./contracts";
 import * as utils from "./utils/actionUtils";
 
-const versionSalt = "1.0";
-
 function isSuccessStatusCode(statusCode?: number): boolean {
     if (!statusCode) {
         return false;
@@ -92,20 +88,6 @@ function createHttpClient(): HttpClient {
     );
 }
 
-export function getCacheVersion(compressionMethod?: CompressionMethod): string {
-    const components = [core.getInput(Inputs.Path, { required: true })].concat(
-        compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
-    );
-
-    // Add salt to cache version to support breaking changes in cache entry
-    components.push(versionSalt);
-
-    return crypto
-        .createHash("sha256")
-        .update(components.join("|"))
-        .digest("hex");
-}
-
 export async function retry<T>(
     name: string,
     method: () => Promise<T>,
@@ -176,22 +158,20 @@ export async function retryHttpClientResponse<T>(
 }
 
 export async function getCacheEntry(
-    keys: string[],
-    options?: CacheOptions
+    keys: string[]
 ): Promise<ArtifactCacheEntry | null> {
     const httpClient = createHttpClient();
-    const version = getCacheVersion(options?.compressionMethod);
-    const resource = `cache?keys=${encodeURIComponent(
-        keys.join(",")
-    )}&version=${version}`;
+    const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
 
     const response = await retryTypedResponse("getCacheEntry", () =>
         httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
     );
 
     if (response.statusCode === 204) {
         return null;
     }
+    if (!isSuccessStatusCode(response.statusCode)) {
+        throw new Error(`Cache service responded with ${response.statusCode}`);
+    }
 
     const cacheResult = response.result;
     const cacheDownloadUrl = cacheResult?.archiveLocation;
@@ -253,16 +233,11 @@ export async function downloadCache(
 }
 
 // Reserve Cache
-export async function reserveCache(
-    key: string,
-    options?: CacheOptions
-): Promise<number> {
+export async function reserveCache(key: string): Promise<number> {
     const httpClient = createHttpClient();
-    const version = getCacheVersion(options?.compressionMethod);
-
     const reserveCacheRequest: ReserveCacheRequest = {
-        key,
-        version
+        key
     };
     const response = await retryTypedResponse("reserveCache", () =>
         httpClient.postJson<ReserveCacheResponse>(
@@ -270,7 +245,6 @@ export async function reserveCache(
             reserveCacheRequest
         )
     );
-
     return response?.result?.cacheId ?? -1;
 }
 
@@ -303,18 +277,15 @@ async function uploadChunk(
         "Content-Range": getContentRange(start, end)
     };
 
-    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
-        return await httpClient.sendStream(
+    await retryHttpClientResponse(
+        `uploadChunk (start: ${start}, end: ${end})`,
+        () =>
+            httpClient.sendStream(
                 "PATCH",
                 resourceUrl,
                 openStream(),
                 additionalHeaders
-        );
-    };
-
-    await retryHttpClientResponse(
-        `uploadChunk (start: ${start}, end: ${end})`,
-        uploadChunkRequest
+            )
     );
 }
 
@@ -361,11 +332,17 @@ async function uploadFile(
                         httpClient,
                         resourceUrl,
                         () =>
-                            fs.createReadStream(archivePath, {
+                            fs
+                                .createReadStream(archivePath, {
                                     fd,
                                     start,
                                     end,
                                     autoClose: false
+                                })
+                                .on("error", error => {
+                                    throw new Error(
+                                        `Cache upload failed because file read failed with ${error.message}`
+                                    );
                                 }),
                         start,
                         end
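To make the simplified client surface concrete, here is a minimal, hypothetical caller in the same style as src/restore.ts below; the key strings are invented for illustration and are not part of the diff:

```ts
import * as cacheHttpClient from "./cacheHttpClient";

// Hypothetical keys; restore.ts derives these from the action's inputs.
const keys = ["Linux-npm-4f2a", "Linux-npm-"];

async function findCache(): Promise<string | null> {
    // getCacheEntry now takes only the key list; the compression-based
    // cache version parameter was removed on this branch.
    const entry = await cacheHttpClient.getCacheEntry(keys);

    // A 204 from the service comes back as null (cache miss), while other
    // non-success status codes now throw.
    return entry?.archiveLocation ?? null;
}
```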
src/constants.ts

@@ -19,16 +19,6 @@ export enum Events {
     PullRequest = "pull_request"
 }
 
-export enum CacheFilename {
-    Gzip = "cache.tgz",
-    Zstd = "cache.tzst"
-}
-
-export enum CompressionMethod {
-    Gzip = "gzip",
-    Zstd = "zstd"
-}
-
 // Socket timeout in milliseconds during download.  If no traffic is received
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
src/contracts.d.ts (vendored, 6 lines changed)
@@ -1,5 +1,3 @@
-import { CompressionMethod } from "./constants";
-
 export interface ArtifactCacheEntry {
     cacheKey?: string;
     scope?: string;
@@ -19,7 +17,3 @@ export interface ReserveCacheRequest {
 export interface ReserveCacheResponse {
     cacheId: number;
 }
-
-export interface CacheOptions {
-    compressionMethod?: CompressionMethod;
-}
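For illustration, with `CacheOptions` removed, the request body these contracts describe reduces to the bare key; a sketch with an invented key value:

```ts
import { ReserveCacheRequest } from "./contracts";

// reserveCache in cacheHttpClient.ts now builds exactly this shape;
// the cache version field is gone along with CacheOptions.
const request: ReserveCacheRequest = { key: "Linux-npm-4f2a" };
```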
src/restore.ts

@@ -1,6 +1,5 @@
 import * as core from "@actions/core";
 import * as path from "path";
-
 import * as cacheHttpClient from "./cacheHttpClient";
 import { Events, Inputs, State } from "./constants";
 import { extractTar } from "./tar";
@@ -20,6 +19,11 @@ async function run(): Promise<void> {
             return;
         }
 
+        const cachePath = utils.resolvePath(
+            core.getInput(Inputs.Path, { required: true })
+        );
+        core.debug(`Cache Path: ${cachePath}`);
+
         const primaryKey = core.getInput(Inputs.Key, { required: true });
         core.saveState(State.CacheKey, primaryKey);
 
@@ -54,27 +58,24 @@ async function run(): Promise<void> {
             }
         }
 
-        const compressionMethod = await utils.getCompressionMethod();
-
         try {
-            const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
-                compressionMethod: compressionMethod
-            });
+            const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
             if (!cacheEntry?.archiveLocation) {
-                core.info(`Cache not found for input keys: ${keys.join(", ")}`);
+                core.info(
+                    `Cache not found for input keys: ${keys.join(", ")}.`
+                );
                 return;
             }
 
             const archivePath = path.join(
                 await utils.createTempDirectory(),
-                utils.getCacheFileName(compressionMethod)
+                "cache.tgz"
             );
             core.debug(`Archive Path: ${archivePath}`);
 
             // Store the cache result
             utils.setCacheState(cacheEntry);
 
-            try {
             // Download the cache from the cache entry
             await cacheHttpClient.downloadCache(
                 cacheEntry.archiveLocation,
@@ -88,15 +89,7 @@ async function run(): Promise<void> {
                 )} MB (${archiveFileSize} B)`
             );
 
-                await extractTar(archivePath, compressionMethod);
-            } finally {
-                // Try to delete the archive to save space
-                try {
-                    await utils.unlinkFile(archivePath);
-                } catch (error) {
-                    core.debug(`Failed to delete archive: ${error}`);
-                }
-            }
+            await extractTar(archivePath, cachePath);
 
             const isExactKeyMatch = utils.isExactKeyMatch(
                 primaryKey,
src/save.ts (26 lines changed)
@@ -1,6 +1,5 @@
 import * as core from "@actions/core";
 import * as path from "path";
-
 import * as cacheHttpClient from "./cacheHttpClient";
 import { Events, Inputs, State } from "./constants";
 import { createTar } from "./tar";
@@ -35,12 +34,8 @@ async function run(): Promise<void> {
             return;
         }
 
-        const compressionMethod = await utils.getCompressionMethod();
-
         core.debug("Reserving Cache");
-        const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
-            compressionMethod: compressionMethod
-        });
+        const cacheId = await cacheHttpClient.reserveCache(primaryKey);
         if (cacheId == -1) {
             core.info(
                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -48,25 +43,18 @@ async function run(): Promise<void> {
             return;
         }
         core.debug(`Cache ID: ${cacheId}`);
-        const cachePaths = await utils.resolvePaths(
-            core
-                .getInput(Inputs.Path, { required: true })
-                .split("\n")
-                .filter(x => x !== "")
+        const cachePath = utils.resolvePath(
+            core.getInput(Inputs.Path, { required: true })
         );
-
-        core.debug("Cache Paths:");
-        core.debug(`${JSON.stringify(cachePaths)}`);
+        core.debug(`Cache Path: ${cachePath}`);
 
-        const archiveFolder = await utils.createTempDirectory();
         const archivePath = path.join(
-            archiveFolder,
-            utils.getCacheFileName(compressionMethod)
+            await utils.createTempDirectory(),
+            "cache.tgz"
         );
-
         core.debug(`Archive Path: ${archivePath}`);
 
-        await createTar(archiveFolder, cachePaths, compressionMethod);
+        await createTar(archivePath, cachePath);
 
         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
         const archiveFileSize = utils.getArchiveFileSize(archivePath);
src/tar.ts (81 lines changed)
@@ -1,10 +1,26 @@
+import * as core from "@actions/core";
 import { exec } from "@actions/exec";
 import * as io from "@actions/io";
-import { existsSync, writeFileSync } from "fs";
+import { existsSync } from "fs";
 import * as path from "path";
+import * as tar from "./tar";
 
-import { CompressionMethod } from "./constants";
-import * as utils from "./utils/actionUtils";
+export async function isGnuTar(): Promise<boolean> {
+    core.debug("Checking tar --version");
+    let versionOutput = "";
+    await exec("tar --version", [], {
+        ignoreReturnCode: true,
+        silent: true,
+        listeners: {
+            stdout: (data: Buffer): string =>
+                (versionOutput += data.toString()),
+            stderr: (data: Buffer): string => (versionOutput += data.toString())
+        }
+    });
+
+    core.debug(versionOutput.trim());
+    return versionOutput.toUpperCase().includes("GNU TAR");
+}
 
 async function getTarPath(args: string[]): Promise<string> {
     // Explicitly use BSD Tar on Windows
@@ -13,75 +29,48 @@ async function getTarPath(args: string[]): Promise<string> {
         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
         if (existsSync(systemTar)) {
             return systemTar;
-        } else if (await utils.useGnuTar()) {
+        } else if (await tar.isGnuTar()) {
             args.push("--force-local");
         }
     }
     return await io.which("tar", true);
 }
 
-async function execTar(args: string[], cwd?: string): Promise<void> {
+async function execTar(args: string[]): Promise<void> {
     try {
-        await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
+        await exec(`"${await getTarPath(args)}"`, args);
     } catch (error) {
         throw new Error(`Tar failed with error: ${error?.message}`);
     }
 }
 
-function getWorkingDirectory(): string {
-    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
-}
-
 export async function extractTar(
     archivePath: string,
-    compressionMethod: CompressionMethod
+    targetDirectory: string
 ): Promise<void> {
     // Create directory to extract tar into
-    const workingDirectory = getWorkingDirectory();
-    await io.mkdirP(workingDirectory);
-    // --d: Decompress.
-    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-    // Using 30 here because we also support 32-bit self-hosted runners.
+    await io.mkdirP(targetDirectory);
     const args = [
-        ...(compressionMethod == CompressionMethod.Zstd
-            ? ["--use-compress-program", "zstd -d --long=30"]
-            : ["-z"]),
-        "-xf",
+        "-xz",
+        "-f",
         archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
-        "-P",
         "-C",
-        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+        targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
     ];
     await execTar(args);
 }
 
 export async function createTar(
-    archiveFolder: string,
-    sourceDirectories: string[],
-    compressionMethod: CompressionMethod
+    archivePath: string,
+    sourceDirectory: string
 ): Promise<void> {
-    // Write source directories to manifest.txt to avoid command length limits
-    const manifestFilename = "manifest.txt";
-    const cacheFileName = utils.getCacheFileName(compressionMethod);
-    writeFileSync(
-        path.join(archiveFolder, manifestFilename),
-        sourceDirectories.join("\n")
-    );
-    // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-    // Using 30 here because we also support 32-bit self-hosted runners.
-    const workingDirectory = getWorkingDirectory();
     const args = [
-        ...(compressionMethod == CompressionMethod.Zstd
-            ? ["--use-compress-program", "zstd -T0 --long=30"]
-            : ["-z"]),
-        "-cf",
-        cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
-        "-P",
+        "-cz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
         "-C",
-        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
-        "--files-from",
-        manifestFilename
+        sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "."
    ];
-    await execTar(args, archiveFolder);
+    await execTar(args);
 }
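As a usage sketch (not part of the diff): the new signatures take an explicit archive path and directory, with `~` expansion handled by the callers via `resolvePath` (see src/utils/actionUtils.ts below). The paths here are illustrative:

```ts
import { createTar, extractTar } from "./tar";

async function roundTrip(): Promise<void> {
    // Pack the contents of the source directory ("." relative to -C)
    // into a gzipped tarball at the given archive path.
    await createTar("cache.tgz", "/home/runner/.npm/cache");

    // Recreate the target directory (mkdirP) and unpack into it.
    await extractTar("cache.tgz", "/home/runner/.npm/cache");
}
```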
src/utils/actionUtils.ts

@@ -1,20 +1,11 @@
 import * as core from "@actions/core";
-import * as exec from "@actions/exec";
-import * as glob from "@actions/glob";
 import * as io from "@actions/io";
 import * as fs from "fs";
 import * as os from "os";
 import * as path from "path";
-import * as util from "util";
 import * as uuidV4 from "uuid/v4";
 
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Outputs,
-    State
-} from "../constants";
+import { Events, Outputs, State } from "../constants";
 import { ArtifactCacheEntry } from "../contracts";
 
 // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@@ -37,7 +28,6 @@ export async function createTempDirectory(): Promise<string> {
         }
         tempDirectory = path.join(baseLocation, "actions", "temp");
     }
-
     const dest = path.join(tempDirectory, uuidV4.default());
     await io.mkdirP(dest);
     return dest;
@@ -92,21 +82,16 @@ export function logWarning(message: string): void {
     core.info(`${warningPrefix}${message}`);
 }
 
-export async function resolvePaths(patterns: string[]): Promise<string[]> {
-    const paths: string[] = [];
-    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
-    const globber = await glob.create(patterns.join("\n"), {
-        implicitDescendants: false
-    });
-
-    for await (const file of globber.globGenerator()) {
-        const relativeFile = path.relative(workspace, file);
-        core.debug(`Matched: ${relativeFile}`);
-        // Paths are made relative so the tar entries are all relative to the root of the workspace.
-        paths.push(`${relativeFile}`);
+export function resolvePath(filePath: string): string {
+    if (filePath[0] === "~") {
+        const home = os.homedir();
+        if (!home) {
+            throw new Error("Unable to resolve `~` to HOME");
+        }
+        return path.join(home, filePath.slice(1));
     }
 
-    return paths;
+    return path.resolve(filePath);
 }
 
 export function getSupportedEvents(): string[] {
@@ -120,53 +105,3 @@ export function isValidEvent(): boolean {
     const githubEvent = process.env[Events.Key] || "";
     return getSupportedEvents().includes(githubEvent);
 }
-
-export function unlinkFile(path: fs.PathLike): Promise<void> {
-    return util.promisify(fs.unlink)(path);
-}
-
-async function getVersion(app: string): Promise<string> {
-    core.debug(`Checking ${app} --version`);
-    let versionOutput = "";
-    try {
-        await exec.exec(`${app} --version`, [], {
-            ignoreReturnCode: true,
-            silent: true,
-            listeners: {
-                stdout: (data: Buffer): string =>
-                    (versionOutput += data.toString()),
-                stderr: (data: Buffer): string =>
-                    (versionOutput += data.toString())
-            }
-        });
-    } catch (err) {
-        core.debug(err.message);
-    }
-
-    versionOutput = versionOutput.trim();
-    core.debug(versionOutput);
-    return versionOutput;
-}
-
-export async function getCompressionMethod(): Promise<CompressionMethod> {
-    // Disabling zstd on Windows due to https://github.com/actions/cache/issues/301
-    if (os.platform() === "win32") {
-        return CompressionMethod.Gzip;
-    }
-
-    const versionOutput = await getVersion("zstd");
-    return versionOutput.toLowerCase().includes("zstd command line interface")
-        ? CompressionMethod.Zstd
-        : CompressionMethod.Gzip;
-}
-
-export function getCacheFileName(compressionMethod: CompressionMethod): string {
-    return compressionMethod == CompressionMethod.Zstd
-        ? CacheFilename.Zstd
-        : CacheFilename.Gzip;
-}
-
-export async function useGnuTar(): Promise<boolean> {
-    const versionOutput = await getVersion("tar");
-    return versionOutput.toLowerCase().includes("gnu tar");
-}
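A brief sketch of the two input shapes the new `resolvePath` handles; the expected values assume a Linux runner whose home directory is /home/runner (illustrative, not asserted anywhere in this diff):

```ts
import { resolvePath } from "./utils/actionUtils";

// A leading "~" is replaced with os.homedir():
resolvePath("~/.npm/cache");
// -> "/home/runner/.npm/cache"

// Anything else resolves against the current working directory:
resolvePath("node_modules");
// -> e.g. "/home/runner/work/repo/node_modules"
```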