4 changes: 2 additions & 2 deletions packages/artifact/package-lock.json

Some generated files are not rendered by default.

14 changes: 10 additions & 4 deletions packages/cache/README.md
@@ -24,9 +24,9 @@ Read more about the change & access the migration guide: [reference to the annou

This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache).

#### Save Cache
### Save Cache

Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved succesfully and throws an error if cache upload fails.
Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved successfully and throws an error if cache upload fails.

```js
const cache = require('@actions/cache');
@@ -38,7 +38,13 @@ const key = 'npm-foobar-d5ea0750'
const cacheId = await cache.saveCache(paths, key)
```

#### Restore Cache
You can control archive compression when saving. Provide `compressionLevel` in `UploadOptions` (0 = no compression/plain tar, 9 = maximum, default = 6) or set the `CACHE_COMPRESSION_LEVEL` environment variable:

```js
const cacheId = await cache.saveCache(paths, key, {compressionLevel: 3})
```
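As the paragraph above notes, the same level can also come from the environment instead of the options object. A minimal sketch, assuming the variable is read when upload options are resolved (which is what the tests in this PR exercise):

```js
// Hypothetical equivalent of the options-object call above
process.env.CACHE_COMPRESSION_LEVEL = '3'
const cacheId = await cache.saveCache(paths, key)
```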

### Restore Cache

Restores a cache based on `key` and `restoreKeys` to the `paths` provided. Function returns the cache key for cache hit and returns undefined if cache not found.

@@ -56,7 +62,7 @@ const restoreKeys = [
const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
```

##### Cache segment restore timeout
### Cache segment restore timeout

A cache gets downloaded in multiple segments of fixed sizes (now `128MB` to fail-fast, previously `1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner were used). Sometimes, a segment download gets stuck which causes the workflow job to be stuck forever and fail. Version `v3.0.4` of cache package introduces a segment download timeout. The segment download timeout will allow the segment download to get aborted and hence allow the job to proceed with a cache miss.
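The tests in this PR also clear a `SEGMENT_DOWNLOAD_TIMEOUT_MINS` environment variable, which appears to control this timeout in minutes. A hedged sketch, assuming the variable is honored by `restoreCache`:

```js
// Hypothetical: shorten the per-segment download timeout to 5 minutes
process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS = '5'
const cacheKey = await cache.restoreCache(paths, key, restoreKeys)
```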

36 changes: 32 additions & 4 deletions packages/cache/__tests__/options.test.ts
@@ -11,6 +11,14 @@ const downloadConcurrency = 8
const timeoutInMs = 30000
const segmentTimeoutInMs = 600000
const lookupOnly = false
const compressionLevel = 6

afterEach(() => {
delete process.env.CACHE_UPLOAD_CONCURRENCY
delete process.env.CACHE_UPLOAD_CHUNK_SIZE
delete process.env.CACHE_COMPRESSION_LEVEL
delete process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS
})

test('getDownloadOptions sets defaults', async () => {
const actualOptions = getDownloadOptions()
@@ -44,7 +52,8 @@ test('getUploadOptions sets defaults', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024,
useAzureSdk: false
useAzureSdk: false,
compressionLevel
}
const actualOptions = getUploadOptions()

@@ -55,7 +64,8 @@ test('getUploadOptions overrides all settings', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 2,
uploadChunkSize: 16 * 1024 * 1024,
useAzureSdk: true
useAzureSdk: true,
compressionLevel: 3
}

const actualOptions = getUploadOptions(expectedOptions)
@@ -67,11 +77,13 @@ test('env variables override all getUploadOptions settings', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 16,
uploadChunkSize: 64 * 1024 * 1024,
useAzureSdk: true
useAzureSdk: true,
compressionLevel: 8
}

process.env.CACHE_UPLOAD_CONCURRENCY = '16'
process.env.CACHE_UPLOAD_CHUNK_SIZE = '64'
process.env.CACHE_COMPRESSION_LEVEL = '8'

const actualOptions = getUploadOptions(expectedOptions)
expect(actualOptions).toEqual(expectedOptions)
@@ -81,16 +93,32 @@ test('env variables override all getUploadOptions settings but do not exceed cap
const expectedOptions: UploadOptions = {
uploadConcurrency: 32,
uploadChunkSize: 128 * 1024 * 1024,
useAzureSdk: true
useAzureSdk: true,
compressionLevel: 9
}

process.env.CACHE_UPLOAD_CONCURRENCY = '64'
process.env.CACHE_UPLOAD_CHUNK_SIZE = '256'
process.env.CACHE_COMPRESSION_LEVEL = '12'

const actualOptions = getUploadOptions(expectedOptions)
expect(actualOptions).toEqual(expectedOptions)
})

test('compression level clamps and floors values', async () => {
const expectedOptions: UploadOptions = {
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024,
useAzureSdk: false,
compressionLevel: 0
}

process.env.CACHE_COMPRESSION_LEVEL = '-1.7'

const actualOptions = getUploadOptions()
expect(actualOptions).toEqual(expectedOptions)
})
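The `'-1.7'` → `0` and `'12'` → `9` expectations pin down the sanitization: the env value is floored to an integer, then clamped to the supported 0–9 range. A plausible sketch of that logic (hypothetical; the real code lives in the package's options module and may differ — `result` stands in for the options object being built):

```ts
// Sketch of the env handling asserted above: floor, then clamp to 0-9.
const raw = process.env.CACHE_COMPRESSION_LEVEL
if (raw) {
  const level = Math.floor(Number(raw))
  if (!Number.isNaN(level)) {
    result.compressionLevel = Math.min(9, Math.max(0, level))
  }
}
```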

test('getDownloadOptions overrides download timeout minutes', async () => {
const expectedOptions: DownloadOptions = {
useAzureSdk: false,
22 changes: 16 additions & 6 deletions packages/cache/__tests__/saveCache.test.ts
@@ -75,7 +75,8 @@ test('save with large cache outputs should fail', async () => {
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
@@ -127,7 +128,8 @@ test('save with large cache outputs should fail in GHES with error message', asy
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
@@ -175,7 +177,8 @@ test('save with large cache outputs should fail in GHES without error message',
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
@@ -277,7 +280,8 @@ test('save with server error should fail', async () => {
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
@@ -324,14 +328,20 @@ test('save with valid inputs uploads a cache', async () => {
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)
expect(saveCacheMock).toHaveBeenCalledTimes(1)
expect(saveCacheMock).toHaveBeenCalledWith(
cacheId,
archiveFile,
'',
undefined
expect.objectContaining({
useAzureSdk: false,
uploadConcurrency: 4,
uploadChunkSize: 32 * 1024 * 1024,
compressionLevel: 6
})
)
expect(getCompressionMock).toHaveBeenCalledTimes(1)
})
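Taken together, these assertions imply `createTar` now receives the resolved compression level as a fourth argument, defaulting to 6. A sketch of the assumed signature, inferred from the call sites rather than confirmed against the source:

```ts
// Assumed shape of the updated tar helper; names inferred from the tests.
export async function createTar(
  archiveFolder: string,
  sourceDirectories: string[],
  compressionMethod: CompressionMethod,
  compressionLevel = 6 // matches the default asserted above
): Promise<void> {
  // ...assemble the tar command, passing the level to zstd/gzip...
}
```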
36 changes: 24 additions & 12 deletions packages/cache/__tests__/saveCacheV2.test.ts
@@ -143,7 +143,8 @@ test('save cache fails if a signedUploadURL was not passed', async () => {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
uploadConcurrency: 8,
compressionLevel: 6
}

const createCacheEntryMock = jest
@@ -178,7 +179,8 @@ test('save cache fails if a signedUploadURL was not passed', async () => {
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)

expect(saveCacheMock).toHaveBeenCalledWith(
@@ -201,7 +203,8 @@ test('finalize save cache failure', async () => {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
uploadConcurrency: 8,
compressionLevel: 6
}

const createCacheEntryMock = jest
@@ -241,7 +244,8 @@ test('finalize save cache failure', async () => {
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)

expect(saveCacheMock).toHaveBeenCalledWith(
@@ -275,7 +279,8 @@ test('save with valid inputs uploads a cache', async () => {
archiveSizeBytes: archiveFileSize, // These should always match
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
uploadConcurrency: 8,
compressionLevel: 6
}

jest
@@ -316,7 +321,8 @@ test('save with valid inputs uploads a cache', async () => {
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)

expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
@@ -341,7 +347,8 @@ test('save with extremely large cache should succeed in v2 (no size limit)', asy
archiveSizeBytes: archiveFileSize,
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
uploadConcurrency: 8,
compressionLevel: 6
}

jest
@@ -382,7 +389,8 @@ test('save with extremely large cache should succeed in v2 (no size limit)', asy
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)

expect(finalizeCacheEntryMock).toHaveBeenCalledWith({
@@ -446,7 +454,8 @@ test('save with finalize cache entry failure and specific error message', async
archiveSizeBytes: archiveFileSize,
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
uploadConcurrency: 8,
compressionLevel: 6
}

const createCacheEntryMock = jest
@@ -488,7 +497,8 @@ test('save with finalize cache entry failure and specific error message', async
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)

expect(saveCacheMock).toHaveBeenCalledWith(
@@ -521,7 +531,8 @@ test('save with multiple large caches should succeed in v2 (testing 50GB)', asyn
archiveSizeBytes: archiveFileSize,
useAzureSdk: true,
uploadChunkSize: 64 * 1024 * 1024,
uploadConcurrency: 8
uploadConcurrency: 8,
compressionLevel: 6
}

jest
@@ -562,7 +573,8 @@ test('save with multiple large caches should succeed in v2 (testing 50GB)', asyn
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
compression,
6
)

expect(finalizeCacheEntryMock).toHaveBeenCalledWith({