diff --git a/packages/cache/__tests__/restoreCacheV2.test.ts b/packages/cache/__tests__/restoreCacheV2.test.ts
new file mode 100644
index 0000000000..cc4f9e3c74
--- /dev/null
+++ b/packages/cache/__tests__/restoreCacheV2.test.ts
@@ -0,0 +1,442 @@
+import * as core from '@actions/core'
+import * as path from 'path'
+import * as tar from '../src/internal/tar'
+import * as config from '../src/internal/config'
+import * as cacheUtils from '../src/internal/cacheUtils'
+import * as downloadCacheModule from '../src/internal/blob/download-cache'
+import {restoreCache} from '../src/cache'
+import {CacheFilename, CompressionMethod} from '../src/internal/constants'
+import {CacheServiceClientJSON} from '../src/generated/results/api/v1/cache.twirp'
+import {BlobDownloadResponseParsed} from '@azure/storage-blob'
+
+jest.mock('../src/internal/cacheHttpClient')
+jest.mock('../src/internal/cacheUtils')
+jest.mock('../src/internal/config')
+jest.mock('../src/internal/tar')
+
+let logDebugMock: jest.SpyInstance
+let logInfoMock: jest.SpyInstance
+
+beforeAll(() => {
+  jest.spyOn(console, 'log').mockImplementation(() => {})
+  jest.spyOn(core, 'debug').mockImplementation(() => {})
+  jest.spyOn(core, 'info').mockImplementation(() => {})
+  jest.spyOn(core, 'warning').mockImplementation(() => {})
+  jest.spyOn(core, 'error').mockImplementation(() => {})
+
+  jest.spyOn(cacheUtils, 'getCacheFileName').mockImplementation(cm => {
+    const actualUtils = jest.requireActual('../src/internal/cacheUtils')
+    return actualUtils.getCacheFileName(cm)
+  })
+
+  // Ensure that we're using v2 for these tests
+  jest.spyOn(config, 'getCacheServiceVersion').mockReturnValue('v2')
+
+  logDebugMock = jest.spyOn(core, 'debug')
+  logInfoMock = jest.spyOn(core, 'info')
+})
+
+afterEach(() => {
+  expect(logDebugMock).toHaveBeenCalledWith('Cache service version: v2')
+})
+
+test('restore with no path should fail', async () => {
+  const paths: string[] = []
+  const key = 'node-test'
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Path Validation Error: At least one directory or file path is required`
+  )
+})
+
+test('restore with too many keys should fail', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const restoreKeys = [...Array(20).keys()].map(x => x.toString())
+  await expect(restoreCache(paths, key, restoreKeys)).rejects.toThrowError(
+    `Key Validation Error: Keys are limited to a maximum of 10.`
+  )
+})
+
+test('restore with large key should fail', async () => {
+  const paths = ['node_modules']
+  const key = 'foo'.repeat(512) // Over the 512 character limit
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Key Validation Error: ${key} cannot be larger than 512 characters.`
+  )
+})
+
+test('restore with invalid key should fail', async () => {
+  const paths = ['node_modules']
+  const key = 'comma,comma'
+  await expect(restoreCache(paths, key)).rejects.toThrowError(
+    `Key Validation Error: ${key} cannot contain commas.`
+  )
+})
+
+test('restore with no cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockReturnValue(
+      Promise.resolve({
+        ok: false,
+        signedDownloadUrl: '',
+        matchedKey: ''
+      })
+    )
+
+  const cacheKey = await restoreCache(paths, key)
+
+  expect(cacheKey).toBe(undefined)
+})
+
+test('restore with server error should fail', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const logWarningMock = jest.spyOn(core, 'warning')
+
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockImplementation(() => {
+      throw new Error('HTTP Error Occurred')
+    })
+
+  const cacheKey = await restoreCache(paths, key)
+  expect(cacheKey).toBe(undefined)
+  expect(logWarningMock).toHaveBeenCalledTimes(1)
+  expect(logWarningMock).toHaveBeenCalledWith(
+    'Failed to restore: HTTP Error Occurred'
+  )
+})
+
+test('restore with restore keys and no cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const restoreKeys = ['node-']
+  const logWarningMock = jest.spyOn(core, 'warning')
+
+  jest
+    .spyOn(CacheServiceClientJSON.prototype, 'GetCacheEntryDownloadURL')
+    .mockReturnValue(
+      Promise.resolve({
+        ok: false,
+        signedDownloadUrl: '',
+        matchedKey: ''
+      })
+    )
+
+  const cacheKey = await restoreCache(paths, key, restoreKeys)
+
+  expect(cacheKey).toBe(undefined)
+  expect(logWarningMock).toHaveBeenCalledWith(
+    `Cache not found for keys: ${[key, ...restoreKeys].join(', ')}`
+  )
+})
+
+test('restore with gzip compressed cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const compressionMethod = CompressionMethod.Gzip
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
+
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({
+      ok: true,
+      signedDownloadUrl,
+      matchedKey: key
+    })
+  )
+
+  const tempPath = '/foo/bar'
+
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  createTempDirectoryMock.mockImplementation(async () => {
+    return Promise.resolve(tempPath)
+  })
+
+  const archivePath = path.join(tempPath, CacheFilename.Gzip)
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+  downloadCacheFileMock.mockReturnValue(
+    Promise.resolve({
+      _response: {
+        status: 200
+      }
+    } as BlobDownloadResponseParsed)
+  )
+
+  const fileSize = 142
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValue(fileSize)
+
+  const extractTarMock = jest.spyOn(tar, 'extractTar')
+  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+
+  const cacheKey = await restoreCache(paths, key)
+
+  expect(cacheKey).toBe(key)
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys: [],
+    version: cacheVersion
+  })
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+    signedDownloadUrl,
+    archivePath
+  )
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
+
+  expect(extractTarMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+
+  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+})
+
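+// The zstd and restore-key tests below follow the same arrange/act/assert
+// shape as the gzip test above: stub the version and compression helpers,
+// return a signed URL from GetCacheEntryDownloadURL, then assert the
+// download, size logging, extraction, and unlink calls.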
+test('restore with zstd compressed cache found', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const compressionMethod = CompressionMethod.Zstd
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    '8e2e96a184cb0cd6b48285b176c06a418f3d7fce14c29d9886fd1bb4f05c513d'
+
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
+
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({
+      ok: true,
+      signedDownloadUrl,
+      matchedKey: key
+    })
+  )
+
+  const tempPath = '/foo/bar'
+
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  createTempDirectoryMock.mockImplementation(async () => {
+    return Promise.resolve(tempPath)
+  })
+
+  const archivePath = path.join(tempPath, CacheFilename.Zstd)
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+  downloadCacheFileMock.mockReturnValue(
+    Promise.resolve({
+      _response: {
+        status: 200
+      }
+    } as BlobDownloadResponseParsed)
+  )
+
+  const fileSize = 62915000
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValue(fileSize)
+
+  const extractTarMock = jest.spyOn(tar, 'extractTar')
+  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+
+  const cacheKey = await restoreCache(paths, key)
+
+  expect(cacheKey).toBe(key)
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys: [],
+    version: cacheVersion
+  })
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+    signedDownloadUrl,
+    archivePath
+  )
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`)
+
+  expect(extractTarMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+
+  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+})
+
+test('restore with cache found for restore key', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const restoreKeys = ['node-']
+  const compressionMethod = CompressionMethod.Gzip
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    'b8b58e9bd7b1e8f83d9f05c7e06ea865ba44a0330e07a14db74ac74386677bed'
+
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
+
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({
+      ok: true,
+      signedDownloadUrl,
+      matchedKey: restoreKeys[0]
+    })
+  )
+
+  const tempPath = '/foo/bar'
+
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  createTempDirectoryMock.mockImplementation(async () => {
+    return Promise.resolve(tempPath)
+  })
+
+  const archivePath = path.join(tempPath, CacheFilename.Gzip)
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+  downloadCacheFileMock.mockReturnValue(
+    Promise.resolve({
+      _response: {
+        status: 200
+      }
+    } as BlobDownloadResponseParsed)
+  )
+
+  const fileSize = 142
+  const getArchiveFileSizeInBytesMock = jest
+    .spyOn(cacheUtils, 'getArchiveFileSizeInBytes')
+    .mockReturnValue(fileSize)
+
+  const extractTarMock = jest.spyOn(tar, 'extractTar')
+  const unlinkFileMock = jest.spyOn(cacheUtils, 'unlinkFile')
+
+  const cacheKey = await restoreCache(paths, key, restoreKeys)
+
+  expect(cacheKey).toBe(restoreKeys[0])
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys,
+    version: cacheVersion
+  })
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(1)
+  expect(downloadCacheFileMock).toHaveBeenCalledWith(
+    signedDownloadUrl,
+    archivePath
+  )
+  expect(getArchiveFileSizeInBytesMock).toHaveBeenCalledWith(archivePath)
+  expect(logInfoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`)
+
+  expect(extractTarMock).toHaveBeenCalledTimes(1)
+  expect(extractTarMock).toHaveBeenCalledWith(archivePath, compressionMethod)
+
+  expect(unlinkFileMock).toHaveBeenCalledTimes(1)
+  expect(unlinkFileMock).toHaveBeenCalledWith(archivePath)
+
+  expect(compressionMethodMock).toHaveBeenCalledTimes(1)
+})
+
+test('restore with dry run', async () => {
+  const paths = ['node_modules']
+  const key = 'node-test'
+  const options = {lookupOnly: true}
+  const compressionMethod = CompressionMethod.Gzip
+  const signedDownloadUrl = 'https://blob-storage.local?signed=true'
+  const cacheVersion =
+    'd90f107aaeb22920dba0c637a23c37b5bc497b4dfa3b07fe3f79bf88a273c11b'
+
+  const getCacheVersionMock = jest.spyOn(cacheUtils, 'getCacheVersion')
+  getCacheVersionMock.mockReturnValue(cacheVersion)
+
+  const compressionMethodMock = jest.spyOn(cacheUtils, 'getCompressionMethod')
+  compressionMethodMock.mockReturnValue(Promise.resolve(compressionMethod))
+
+  const getCacheDownloadURLMock = jest.spyOn(
+    CacheServiceClientJSON.prototype,
+    'GetCacheEntryDownloadURL'
+  )
+  getCacheDownloadURLMock.mockReturnValue(
+    Promise.resolve({
+      ok: true,
+      signedDownloadUrl,
+      matchedKey: key
+    })
+  )
+
+  const createTempDirectoryMock = jest.spyOn(cacheUtils, 'createTempDirectory')
+  const downloadCacheFileMock = jest.spyOn(
+    downloadCacheModule,
+    'downloadCacheFile'
+  )
+
+  const cacheKey = await restoreCache(paths, key, undefined, options)
+
+  expect(cacheKey).toBe(key)
+  expect(getCacheVersionMock).toHaveBeenCalledWith(
+    paths,
+    compressionMethod,
+    false
+  )
+  expect(getCacheDownloadURLMock).toHaveBeenCalledWith({
+    key,
+    restoreKeys: [],
+    version: cacheVersion
+  })
+  expect(logInfoMock).toHaveBeenCalledWith('Lookup only - skipping download')
+
+  // creating a tempDir and downloading the cache are skipped
+  expect(createTempDirectoryMock).toHaveBeenCalledTimes(0)
+  expect(downloadCacheFileMock).toHaveBeenCalledTimes(0)
+})
diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts
index 1450c8ace9..8b7a8d023e 100644
--- a/packages/cache/src/cache.ts
+++ b/packages/cache/src/cache.ts
@@ -79,9 +79,11 @@ export async function restoreCache(
   options?: DownloadOptions,
   enableCrossOsArchive = false
 ): Promise<string | undefined> {
+  const cacheServiceVersion: string = getCacheServiceVersion()
+  core.debug(`Cache service version: ${cacheServiceVersion}`)
+
   checkPaths(paths)
 
-  const cacheServiceVersion: string = getCacheServiceVersion()
   switch (cacheServiceVersion) {
     case 'v2':
       return await restoreCacheV2(
@@ -259,7 +261,7 @@ async function restoreCacheV2(
 
     if (options?.lookupOnly) {
       core.info('Lookup only - skipping download')
-      return request.key
+      return response.matchedKey
     }
 
     archivePath = path.join(
@@ -269,7 +271,11 @@ async function restoreCacheV2(
     core.debug(`Archive path: ${archivePath}`)
     core.debug(`Starting download of archive to: ${archivePath}`)
 
-    await downloadCacheFile(response.signedDownloadUrl, archivePath)
+    const downloadResponse = await downloadCacheFile(
+      response.signedDownloadUrl,
+      archivePath
+    )
+    core.debug(`Download response status: ${downloadResponse._response.status}`)
 
     const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath)
     core.info(
@@ -285,9 +291,15 @@ async function restoreCacheV2(
     await extractTar(archivePath, compressionMethod)
     core.info('Cache restored successfully')
 
-    return request.key
+    return response.matchedKey
   } catch (error) {
-    throw new Error(`Failed to restore: ${error.message}`)
+    const typedError = error as Error
+    if (typedError.name === ValidationError.name) {
+      throw error
+    } else {
+      // Suppress all non-validation cache related errors because caching should be optional
+      core.warning(`Failed to restore: ${(error as Error).message}`)
+    }
   } finally {
     try {
       if (archivePath) {
@@ -297,6 +309,8 @@ async function restoreCacheV2(
       core.debug(`Failed to delete archive: ${error}`)
     }
   }
+
+  return undefined
 }
 
 /**
@@ -525,7 +539,13 @@ async function saveCacheV2(
     cacheId = parseInt(finalizeResponse.entryId)
   } catch (error) {
     const typedError = error as Error
-    core.warning(`Failed to save: ${typedError.message}`)
+    if (typedError.name === ValidationError.name) {
+      throw error
+    } else if (typedError.name === ReserveCacheError.name) {
+      core.info(`Failed to save: ${typedError.message}`)
+    } else {
+      core.warning(`Failed to save: ${typedError.message}`)
+    }
   } finally {
     // Try to delete the archive to save space
     try {
diff --git a/packages/cache/src/generated/results/api/v1/cache.ts b/packages/cache/src/generated/results/api/v1/cache.ts
index 0736c7adae..387bbd1509 100644
--- a/packages/cache/src/generated/results/api/v1/cache.ts
+++ b/packages/cache/src/generated/results/api/v1/cache.ts
@@ -12,7 +12,7 @@ import type { PartialMessage } from "@protobuf-ts/runtime";
 import { reflectionMergePartial } from "@protobuf-ts/runtime";
 import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
 import { MessageType } from "@protobuf-ts/runtime";
-import { Timestamp } from "../../../google/protobuf/timestamp";
+import { CacheEntry } from "../../entities/v1/cacheentry";
 import { CacheMetadata } from "../../entities/v1/cachemetadata";
 /**
  * @generated from protobuf message github.actions.results.api.v1.CreateCacheEntryRequest
@@ -139,6 +139,12 @@ export interface GetCacheEntryDownloadURLResponse {
      * @generated from protobuf field: string signed_download_url = 2;
      */
     signedDownloadUrl: string;
+    /**
+     * Key or restore key that matches the lookup
+     *
+     * @generated from protobuf field: string matched_key = 3;
+     */
+    matchedKey: string;
 }
 /**
  * @generated from protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest
@@ -200,62 +206,11 @@ export interface ListCacheEntriesRequest {
  */
 export interface ListCacheEntriesResponse {
     /**
-     * @generated from protobuf field: repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries = 1;
-     */
-    entries: ListCacheEntriesResponse_CacheEntry[];
-}
-/**
- * @generated from protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-export interface ListCacheEntriesResponse_CacheEntry {
-    /**
-     * An explicit key for a cache entry
-     *
-     * @generated from protobuf field: string key = 1;
-     */
-    key: string;
-    /**
-     * SHA256 hex digest of the cache archive
-     *
-     * @generated from protobuf field: string hash = 2;
-     */
-    hash: string;
-    /**
-     * Cache entry size in bytes
-     *
-     * @generated from protobuf field: int64 size_bytes = 3;
-     */
-    sizeBytes: string;
-    /**
-     * Access scope
+     * Cache entries in the defined scope
      *
-     * @generated from protobuf field: string scope = 4;
+     * @generated from protobuf field: repeated github.actions.results.entities.v1.CacheEntry entries = 1;
      */
-    scope: string;
-    /**
-     * Version SHA256 hex digest
-     *
-     * @generated from protobuf field: string version = 5;
-     */
-    version: string;
-    /**
-     * When the cache entry was created
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
-     */
-    createdAt?: Timestamp;
-    /**
-     * When the cache entry was last accessed
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
-     */
-    lastAccessedAt?: Timestamp;
-    /**
-     * When the cache entry is set to expire
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
-     */
-    expiresAt?: Timestamp;
+    entries: CacheEntry[];
 }
 /**
  * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryRequest
@@ -296,61 +251,12 @@ export interface LookupCacheEntryResponse {
      * @generated from protobuf field: bool exists = 1;
      */
     exists: boolean;
-}
-/**
- * Matched cache entry metadata
- *
- * @generated from protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-export interface LookupCacheEntryResponse_CacheEntry {
-    /**
-     * An explicit key for a cache entry
-     *
-     * @generated from protobuf field: string key = 1;
-     */
-    key: string;
-    /**
-     * SHA256 hex digest of the cache archive
-     *
-     * @generated from protobuf field: string hash = 2;
-     */
-    hash: string;
     /**
-     * Cache entry size in bytes
+     * Matched cache entry metadata
      *
-     * @generated from protobuf field: int64 size_bytes = 3;
+     * @generated from protobuf field: github.actions.results.entities.v1.CacheEntry entry = 2;
      */
-    sizeBytes: string;
-    /**
-     * Access scope
-     *
-     * @generated from protobuf field: string scope = 4;
-     */
-    scope: string;
-    /**
-     * Version SHA256 hex digest
-     *
-     * @generated from protobuf field: string version = 5;
-     */
-    version: string;
-    /**
-     * When the cache entry was created
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
-     */
-    createdAt?: Timestamp;
-    /**
-     * When the cache entry was last accessed
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
-     */
-    lastAccessedAt?: Timestamp;
-    /**
-     * When the cache entry is set to expire
-     *
-     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
-     */
-    expiresAt?: Timestamp;
+    entry?: CacheEntry;
 }
 // @generated message type with reflection information, may provide speed optimized methods
 class CreateCacheEntryRequest$Type extends MessageType<CreateCacheEntryRequest> {
@@ -662,11 +568,12 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
     constructor() {
         super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [
            { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
-            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
+            { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
         ]);
     }
     create(value?: PartialMessage<GetCacheEntryDownloadURLResponse>): GetCacheEntryDownloadURLResponse {
-        const message = { ok: false, signedDownloadUrl: "" };
+        const message = { ok: false, signedDownloadUrl: "", matchedKey: "" };
         globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
         if (value !== undefined)
             reflectionMergePartial<GetCacheEntryDownloadURLResponse>(this, message, value);
@@ -683,6 +590,9 @@ class GetCacheEntryDownloadURLResponse$Type extends MessageType<GetCacheEntryDownloadURLResponse> {
                 case /* string signed_download_url */ 2:
                     message.signedDownloadUrl = reader.string();
                     break;
+                case /* string matched_key */ 3:
+                    message.matchedKey = reader.string();
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -…,… +…,… @@ export const GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type();
 class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
     constructor() {
         super("github.actions.results.api.v1.ListCacheEntriesResponse", [
-            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ListCacheEntriesResponse_CacheEntry }
+            { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => CacheEntry }
         ]);
     }
     create(value?: PartialMessage<ListCacheEntriesResponse>): ListCacheEntriesResponse {
@@ -899,8 +812,8 @@ class ListCacheEntriesResponse$Type extends MessageType<ListCacheEntriesResponse> {
             let [fieldNo, wireType] = reader.tag();
             switch (fieldNo) {
-                case /* repeated github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry entries */ 1:
-                    message.entries.push(ListCacheEntriesResponse_CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
+                case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1:
+                    message.entries.push(CacheEntry.internalBinaryRead(reader, reader.uint32(), options));
                     break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -…,… +…,… @@ export const ListCacheEntriesResponse = new ListCacheEntriesResponse$Type();
-// @generated message type with reflection information, may provide speed optimized methods
-class ListCacheEntriesResponse_CacheEntry$Type extends MessageType<ListCacheEntriesResponse_CacheEntry> {
-    constructor() {
-        super("github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
-        ]);
-    }
-    create(value?: PartialMessage<ListCacheEntriesResponse_CacheEntry>): ListCacheEntriesResponse_CacheEntry {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            reflectionMergePartial<ListCacheEntriesResponse_CacheEntry>(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ListCacheEntriesResponse_CacheEntry): ListCacheEntriesResponse_CacheEntry {
-        let message = target ?? this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message: ListCacheEntriesResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse.CacheEntry
- */
-export const ListCacheEntriesResponse_CacheEntry = new ListCacheEntriesResponse_CacheEntry$Type();
-// @generated message type with reflection information, may provide speed optimized methods
 class LookupCacheEntryRequest$Type extends MessageType<LookupCacheEntryRequest> {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryRequest", [
@@ -1095,7 +912,8 @@ export const LookupCacheEntryRequest = new LookupCacheEntryRequest$Type();
 class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
     constructor() {
         super("github.actions.results.api.v1.LookupCacheEntryResponse", [
-            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }
+            { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
+            { no: 2, name: "entry", kind: "message", T: () => CacheEntry }
         ]);
     }
     create(value?: PartialMessage<LookupCacheEntryResponse>): LookupCacheEntryResponse {
@@ -1113,6 +931,9 @@ class LookupCacheEntryResponse$Type extends MessageType<LookupCacheEntryResponse> {
                 case /* bool exists */ 1:
                     message.exists = reader.bool();
                     break;
+                case /* github.actions.results.entities.v1.CacheEntry entry */ 2:
+                    message.entry = CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry);
+                    break;
                 default:
                     let u = options.readUnknownField;
                     if (u === "throw")
@@ -…,… +…,… @@ export const LookupCacheEntryResponse = new LookupCacheEntryResponse$Type();
-// @generated message type with reflection information, may provide speed optimized methods
-class LookupCacheEntryResponse_CacheEntry$Type extends MessageType<LookupCacheEntryResponse_CacheEntry> {
-    constructor() {
-        super("github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry", [
-            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
-            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
-            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
-            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
-            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
-        ]);
-    }
-    create(value?: PartialMessage<LookupCacheEntryResponse_CacheEntry>): LookupCacheEntryResponse_CacheEntry {
-        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
-        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
-        if (value !== undefined)
-            reflectionMergePartial<LookupCacheEntryResponse_CacheEntry>(this, message, value);
-        return message;
-    }
-    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LookupCacheEntryResponse_CacheEntry): LookupCacheEntryResponse_CacheEntry {
-        let message = target ?? this.create(), end = reader.pos + length;
-        while (reader.pos < end) {
-            let [fieldNo, wireType] = reader.tag();
-            switch (fieldNo) {
-                case /* string key */ 1:
-                    message.key = reader.string();
-                    break;
-                case /* string hash */ 2:
-                    message.hash = reader.string();
-                    break;
-                case /* int64 size_bytes */ 3:
-                    message.sizeBytes = reader.int64().toString();
-                    break;
-                case /* string scope */ 4:
-                    message.scope = reader.string();
-                    break;
-                case /* string version */ 5:
-                    message.version = reader.string();
-                    break;
-                case /* google.protobuf.Timestamp created_at */ 6:
-                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
-                    break;
-                case /* google.protobuf.Timestamp last_accessed_at */ 7:
-                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
-                    break;
-                case /* google.protobuf.Timestamp expires_at */ 8:
-                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
-                    break;
-                default:
-                    let u = options.readUnknownField;
-                    if (u === "throw")
-                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
-                    let d = reader.skip(wireType);
-                    if (u !== false)
-                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
-            }
-        }
-        return message;
-    }
-    internalBinaryWrite(message: LookupCacheEntryResponse_CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
-        /* string key = 1; */
-        if (message.key !== "")
-            writer.tag(1, WireType.LengthDelimited).string(message.key);
-        /* string hash = 2; */
-        if (message.hash !== "")
-            writer.tag(2, WireType.LengthDelimited).string(message.hash);
-        /* int64 size_bytes = 3; */
-        if (message.sizeBytes !== "0")
-            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
-        /* string scope = 4; */
-        if (message.scope !== "")
-            writer.tag(4, WireType.LengthDelimited).string(message.scope);
-        /* string version = 5; */
-        if (message.version !== "")
-            writer.tag(5, WireType.LengthDelimited).string(message.version);
-        /* google.protobuf.Timestamp created_at = 6; */
-        if (message.createdAt)
-            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp last_accessed_at = 7; */
-        if (message.lastAccessedAt)
-            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
-        /* google.protobuf.Timestamp expires_at = 8; */
-        if (message.expiresAt)
-            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
-        let u = options.writeUnknownFields;
-        if (u !== false)
-            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
-        return writer;
-    }
-}
-/**
- * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse.CacheEntry
- */
-export const LookupCacheEntryResponse_CacheEntry = new LookupCacheEntryResponse_CacheEntry$Type();
 /**
  * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService
  */
diff --git a/packages/cache/src/generated/results/entities/v1/cacheentry.ts b/packages/cache/src/generated/results/entities/v1/cacheentry.ts
new file mode 100644
index 0000000000..b55b4afaae
--- /dev/null
+++ b/packages/cache/src/generated/results/entities/v1/cacheentry.ts
@@ -0,0 +1,163 @@
+// @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies
+// @generated from protobuf file "results/entities/v1/cacheentry.proto" (package "github.actions.results.entities.v1", syntax proto3)
+// tslint:disable
+import type { BinaryWriteOptions } from "@protobuf-ts/runtime";
+import type { IBinaryWriter } from "@protobuf-ts/runtime";
+import { WireType } from "@protobuf-ts/runtime";
+import type { BinaryReadOptions } from "@protobuf-ts/runtime";
+import type { IBinaryReader } from "@protobuf-ts/runtime";
+import { UnknownFieldHandler } from "@protobuf-ts/runtime";
+import type { PartialMessage } from "@protobuf-ts/runtime";
+import { reflectionMergePartial } from "@protobuf-ts/runtime";
+import { MESSAGE_TYPE } from "@protobuf-ts/runtime";
+import { MessageType } from "@protobuf-ts/runtime";
+import { Timestamp } from "../../../google/protobuf/timestamp";
+/**
+ * @generated from protobuf message github.actions.results.entities.v1.CacheEntry
+ */
+export interface CacheEntry {
+    /**
+     * An explicit key for a cache entry
+     *
+     * @generated from protobuf field: string key = 1;
+     */
+    key: string;
+    /**
+     * SHA256 hex digest of the cache archive
+     *
+     * @generated from protobuf field: string hash = 2;
+     */
+    hash: string;
+    /**
+     * Cache entry size in bytes
+     *
+     * @generated from protobuf field: int64 size_bytes = 3;
+     */
+    sizeBytes: string;
+    /**
+     * Access scope
+     *
+     * @generated from protobuf field: string scope = 4;
+     */
+    scope: string;
+    /**
+     * Version SHA256 hex digest
+     *
+     * @generated from protobuf field: string version = 5;
+     */
+    version: string;
+    /**
+     * When the cache entry was created
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp created_at = 6;
+     */
+    createdAt?: Timestamp;
+    /**
+     * When the cache entry was last accessed
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp last_accessed_at = 7;
+     */
+    lastAccessedAt?: Timestamp;
+    /**
+     * When the cache entry is set to expire
+     *
+     * @generated from protobuf field: google.protobuf.Timestamp expires_at = 8;
+     */
+    expiresAt?: Timestamp;
+}
+// @generated message type with reflection information, may provide speed optimized methods
+class CacheEntry$Type extends MessageType<CacheEntry> {
+    constructor() {
+        super("github.actions.results.entities.v1.CacheEntry", [
+            { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
+            { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
+            { no: 6, name: "created_at", kind: "message", T: () => Timestamp },
+            { no: 7, name: "last_accessed_at", kind: "message", T: () => Timestamp },
+            { no: 8, name: "expires_at", kind: "message", T: () => Timestamp }
+        ]);
+    }
+    create(value?: PartialMessage<CacheEntry>): CacheEntry {
+        const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" };
+        globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this });
+        if (value !== undefined)
+            reflectionMergePartial<CacheEntry>(this, message, value);
+        return message;
+    }
+    internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CacheEntry): CacheEntry {
+        let message = target ?? this.create(), end = reader.pos + length;
+        while (reader.pos < end) {
+            let [fieldNo, wireType] = reader.tag();
+            switch (fieldNo) {
+                case /* string key */ 1:
+                    message.key = reader.string();
+                    break;
+                case /* string hash */ 2:
+                    message.hash = reader.string();
+                    break;
+                case /* int64 size_bytes */ 3:
+                    message.sizeBytes = reader.int64().toString();
+                    break;
+                case /* string scope */ 4:
+                    message.scope = reader.string();
+                    break;
+                case /* string version */ 5:
+                    message.version = reader.string();
+                    break;
+                case /* google.protobuf.Timestamp created_at */ 6:
+                    message.createdAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt);
+                    break;
+                case /* google.protobuf.Timestamp last_accessed_at */ 7:
+                    message.lastAccessedAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt);
+                    break;
+                case /* google.protobuf.Timestamp expires_at */ 8:
+                    message.expiresAt = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt);
+                    break;
+                default:
+                    let u = options.readUnknownField;
+                    if (u === "throw")
+                        throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`);
+                    let d = reader.skip(wireType);
+                    if (u !== false)
+                        (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d);
+            }
+        }
+        return message;
+    }
+    internalBinaryWrite(message: CacheEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter {
+        /* string key = 1; */
+        if (message.key !== "")
+            writer.tag(1, WireType.LengthDelimited).string(message.key);
+        /* string hash = 2; */
+        if (message.hash !== "")
+            writer.tag(2, WireType.LengthDelimited).string(message.hash);
+        /* int64 size_bytes = 3; */
+        if (message.sizeBytes !== "0")
+            writer.tag(3, WireType.Varint).int64(message.sizeBytes);
+        /* string scope = 4; */
+        if (message.scope !== "")
+            writer.tag(4, WireType.LengthDelimited).string(message.scope);
+        /* string version = 5; */
+        if (message.version !== "")
+            writer.tag(5, WireType.LengthDelimited).string(message.version);
+        /* google.protobuf.Timestamp created_at = 6; */
+        if (message.createdAt)
+            Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp last_accessed_at = 7; */
+        if (message.lastAccessedAt)
+            Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, WireType.LengthDelimited).fork(), options).join();
+        /* google.protobuf.Timestamp expires_at = 8; */
+        if (message.expiresAt)
+            Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, WireType.LengthDelimited).fork(), options).join();
+        let u = options.writeUnknownFields;
+        if (u !== false)
+            (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
+        return writer;
+    }
+}
+/**
+ * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry
+ */
+export const CacheEntry = new CacheEntry$Type();
diff --git a/packages/cache/src/internal/blob/download-cache.ts b/packages/cache/src/internal/blob/download-cache.ts
index 807c73a436..e974cb2f1d 100644
--- a/packages/cache/src/internal/blob/download-cache.ts
+++ b/packages/cache/src/internal/blob/download-cache.ts
@@ -3,13 +3,14 @@ import * as core from '@actions/core'
 import {
   BlobClient,
   BlockBlobClient,
-  BlobDownloadOptions
+  BlobDownloadOptions,
+  BlobDownloadResponseParsed
 } from '@azure/storage-blob'
 
 export async function downloadCacheFile(
   signedUploadURL: string,
   archivePath: string
-): Promise<{}> {
+): Promise<BlobDownloadResponseParsed> {
   const downloadOptions: BlobDownloadOptions = {
     maxRetryRequests: 5
   }
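
Taken together, the caller-visible contract after this change is that `restoreCache` resolves to the matched key (which may be a restore key rather than the primary key) and to `undefined` on a miss or a suppressed non-validation error. A minimal consumer-side sketch of that contract (the workflow values below are hypothetical, not part of this diff):

```ts
import {restoreCache} from '@actions/cache'

async function run(): Promise<void> {
  const paths = ['node_modules']
  const key = 'node-test'
  const restoreKeys = ['node-']

  // lookupOnly skips creating a temp dir, downloading, and extracting.
  const hit = await restoreCache(paths, key, restoreKeys, {lookupOnly: true})
  if (hit === undefined) {
    // v2 now warns and returns undefined instead of throwing, so a miss
    // (or a non-validation server error) lets the build proceed.
    return
  }

  // hit is response.matchedKey: the primary key on an exact match,
  // otherwise whichever restore key the service matched.
  console.log(hit === key ? `exact hit: ${hit}` : `restored from: ${hit}`)
}

run()
```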