Mirror of https://github.com/actions/cache.git (synced 2025-07-07 20:42:53 +00:00)
Merge master into releases/v1
parent: 86dff562ab
commit: 6491e51b66
18 changed files with 2421 additions and 171 deletions
@@ -1,13 +1,40 @@
import * as core from "@actions/core";
import * as fs from "fs";

import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import { RestClient, IRequestOptions } from "typed-rest-client/RestClient";
import { IRequestOptions, RestClient } from "typed-rest-client/RestClient";
import { ArtifactCacheEntry } from "./contracts";

function getCacheUrl(): string {
    // Ideally we just use ACTIONS_CACHE_URL
    const cacheUrl: string = (
        process.env["ACTIONS_CACHE_URL"] ||
        process.env["ACTIONS_RUNTIME_URL"] ||
        ""
    ).replace("pipelines", "artifactcache");
    if (!cacheUrl) {
        throw new Error(
            "Cache Service Url not found, unable to restore cache."
        );
    }

    core.debug(`Cache Url: ${cacheUrl}`);
    return cacheUrl;
}

function createAcceptHeader(type: string, apiVersion: string): string {
    return `${type};api-version=${apiVersion}`;
}

function getRequestOptions(): IRequestOptions {
    const requestOptions: IRequestOptions = {
        acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
    };

    return requestOptions;
}

export async function getCacheEntry(
    keys: string[]
): Promise<ArtifactCacheEntry | null> {
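
The hunk above adds the URL and request-option helpers but elides the body of getCacheEntry. A minimal sketch of how such helpers are typically combined into a typed-rest-client RestClient; the actual wiring inside getCacheEntry is not shown in this diff, so treat this as an assumption:

// Sketch only, not the file's actual getCacheEntry body.
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const restClient = new RestClient(
    "actions/cache",                      // user agent
    getCacheUrl(),                        // base URL resolved from the env vars above
    [new BearerCredentialHandler(token)], // same bearer-token handler used by saveCache below
    getRequestOptions()                   // Accept header pinned to the preview API version
);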
@@ -43,16 +70,6 @@ export async function getCacheEntry(
    return cacheResult;
}

export async function downloadCache(
    cacheEntry: ArtifactCacheEntry,
    archivePath: string
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
    const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
    await pipeResponseToStream(downloadResponse, stream);
}

async function pipeResponseToStream(
    response: IHttpClientResponse,
    stream: NodeJS.WritableStream
@@ -64,7 +81,23 @@ async function pipeResponseToStream(
    });
}

export async function saveCache(stream: NodeJS.ReadableStream, key: string) {
export async function downloadCache(
    cacheEntry: ArtifactCacheEntry,
    archivePath: string
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
    const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
    await pipeResponseToStream(downloadResponse, stream);
}

export async function saveCache(
    key: string,
    archivePath: string
): Promise<void> {
    const stream = fs.createReadStream(archivePath);

    const cacheUrl = getCacheUrl();
    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
    const bearerCredentialHandler = new BearerCredentialHandler(token);
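
The body of pipeResponseToStream is elided by the hunk above. One plausible implementation, assuming the typed-rest-client response exposes the raw IncomingMessage as response.message (a sketch, not necessarily the file's exact code):

async function pipeResponseToStream(
    response: IHttpClientResponse,
    stream: NodeJS.WritableStream
): Promise<void> {
    return new Promise<void>(resolve => {
        // Stream the HTTP body straight into the archive file, resolving once it closes.
        response.message.pipe(stream).on("close", () => {
            resolve();
        });
    });
}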
@@ -93,32 +126,3 @@ export async function saveCache(stream: NodeJS.ReadableStream, key: string) {

    core.info("Cache saved successfully");
}

function getRequestOptions(): IRequestOptions {
    const requestOptions: IRequestOptions = {
        acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
    };

    return requestOptions;
}

function createAcceptHeader(type: string, apiVersion: string): string {
    return `${type};api-version=${apiVersion}`;
}

function getCacheUrl(): string {
    // Ideally we just use ACTIONS_CACHE_URL
    let cacheUrl: string = (
        process.env["ACTIONS_CACHE_URL"] ||
        process.env["ACTIONS_RUNTIME_URL"] ||
        ""
    ).replace("pipelines", "artifactcache");
    if (!cacheUrl) {
        throw new Error(
            "Cache Service Url not found, unable to restore cache."
        );
    }

    core.debug(`Cache Url: ${cacheUrl}`);
    return cacheUrl;
}
@@ -1,14 +1,20 @@
export namespace Inputs {
    export const Key = "key";
    export const Path = "path";
    export const RestoreKeys = "restore-keys";
export enum Inputs {
    Key = "key",
    Path = "path",
    RestoreKeys = "restore-keys"
}

export namespace Outputs {
    export const CacheHit = "cache-hit";
export enum Outputs {
    CacheHit = "cache-hit"
}

export namespace State {
    export const CacheKey = "CACHE_KEY";
    export const CacheResult = "CACHE_RESULT";
export enum State {
    CacheKey = "CACHE_KEY",
    CacheResult = "CACHE_RESULT"
}

export enum Events {
    Key = "GITHUB_EVENT_NAME",
    Push = "push",
    PullRequest = "pull_request"
}
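
Because the enum members keep the same string values as the old namespace constants, call sites elsewhere in this diff stay unchanged. For example, using only names defined above:

import * as core from "@actions/core";
import { Inputs, Outputs, State } from "./constants";

const key = core.getInput(Inputs.Key, { required: true }); // reads the "key" input
core.saveState(State.CacheKey, key);                       // saves under "CACHE_KEY"
core.setOutput(Outputs.CacheHit, "false");                 // sets the "cache-hit" output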
@@ -1,18 +1,25 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";

import * as fs from "fs";
import * as path from "path";

import * as cacheHttpClient from "./cacheHttpClient";
import { Inputs, State } from "./constants";
import { Events, Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";

async function run() {
async function run(): Promise<void> {
    try {
        // Validate inputs, this can cause task failure
        let cachePath = utils.resolvePath(
        if (!utils.isValidEvent()) {
            core.setFailed(
                `Event Validation Error: The event type ${
                    process.env[Events.Key]
                } is not supported. Only ${utils
                    .getSupportedEvents()
                    .join(", ")} events are supported at this time.`
            );
        }

        const cachePath = utils.resolvePath(
            core.getInput(Inputs.Path, { required: true })
        );
        core.debug(`Cache Path: ${cachePath}`);
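
As a concrete illustration of the guard above, a workflow triggered by an unsupported event (the "schedule" value is hypothetical) would fail like this:

// process.env["GITHUB_EVENT_NAME"] === "schedule"   (Events.Key)
// utils.isValidEvent()        -> false
// utils.getSupportedEvents()  -> ["push", "pull_request"]
// core.setFailed("Event Validation Error: The event type schedule is not supported. " +
//     "Only push, pull_request events are supported at this time.")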
@@ -60,7 +67,7 @@ async function run() {
            return;
        }

        let archivePath = path.join(
        const archivePath = path.join(
            await utils.createTempDirectory(),
            "cache.tgz"
        );
@@ -72,26 +79,33 @@ async function run() {
        // Download the cache from the cache entry
        await cacheHttpClient.downloadCache(cacheEntry, archivePath);

        io.mkdirP(cachePath);
        const archiveFileSize = utils.getArchiveFileSize(archivePath);
        core.info(
            `Cache Size: ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B)`
        );

        // Create directory to extract tar into
        await io.mkdirP(cachePath);

        // http://man7.org/linux/man-pages/man1/tar.1.html
        // tar [-options] <name of the tar archive> [files or directories which to add into archive]
        const args = ["-xz"];

        const IS_WINDOWS = process.platform === "win32";
        if (IS_WINDOWS) {
            args.push("--force-local");
            archivePath = archivePath.replace(/\\/g, "/");
            cachePath = cachePath.replace(/\\/g, "/");
        }
        args.push(...["-f", archivePath, "-C", cachePath]);
        const args = IS_WINDOWS
            ? [
                  "-xz",
                  "--force-local",
                  "-f",
                  archivePath.replace(/\\/g, "/"),
                  "-C",
                  cachePath.replace(/\\/g, "/")
              ]
            : ["-xz", "-f", archivePath, "-C", cachePath];

        const tarPath = await io.which("tar", true);
        core.debug(`Tar Path: ${tarPath}`);

        const archiveFileSize = fs.statSync(archivePath).size;
        core.debug(`File Size: ${archiveFileSize}`);

        await exec(`"${tarPath}"`, args);

        const isExactKeyMatch = utils.isExactKeyMatch(
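
With hypothetical paths, the arguments assembled above translate into tar invocations roughly like these (the paths are illustrative and not taken from this diff):

// Linux/macOS: "/usr/bin/tar" -xz -f /tmp/<uuid>/cache.tgz -C /home/runner/work/repo/node_modules
// Windows:     "tar.exe" -xz --force-local -f D:/a/_temp/<uuid>/cache.tgz -C D:/a/repo/node_modules
//              (backslashes in archivePath and cachePath are rewritten to forward slashes first)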
src/save.ts (37 lines changed)
@@ -1,15 +1,12 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";

import * as io from "@actions/io";
import * as fs from "fs";
import * as path from "path";

import * as cacheHttpClient from "./cacheHttpClient";
import { Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";

async function run() {
async function run(): Promise<void> {
    try {
        const state = utils.getCacheState();
@@ -27,12 +24,12 @@ async function run() {
            return;
        }

        let cachePath = utils.resolvePath(
        const cachePath = utils.resolvePath(
            core.getInput(Inputs.Path, { required: true })
        );
        core.debug(`Cache Path: ${cachePath}`);

        let archivePath = path.join(
        const archivePath = path.join(
            await utils.createTempDirectory(),
            "cache.tgz"
        );
@@ -40,32 +37,36 @@ async function run() {

        // http://man7.org/linux/man-pages/man1/tar.1.html
        // tar [-options] <name of the tar archive> [files or directories which to add into archive]
        const args = ["-cz"];
        const IS_WINDOWS = process.platform === "win32";
        if (IS_WINDOWS) {
            args.push("--force-local");
            archivePath = archivePath.replace(/\\/g, "/");
            cachePath = cachePath.replace(/\\/g, "/");
        }

        args.push(...["-f", archivePath, "-C", cachePath, "."]);
        const args = IS_WINDOWS
            ? [
                  "-cz",
                  "--force-local",
                  "-f",
                  archivePath.replace(/\\/g, "/"),
                  "-C",
                  cachePath.replace(/\\/g, "/"),
                  "."
              ]
            : ["-cz", "-f", archivePath, "-C", cachePath, "."];

        const tarPath = await io.which("tar", true);
        core.debug(`Tar Path: ${tarPath}`);
        await exec(`"${tarPath}"`, args);

        const fileSizeLimit = 400 * 1024 * 1024; // 400MB
        const archiveFileSize = fs.statSync(archivePath).size;
        const archiveFileSize = utils.getArchiveFileSize(archivePath);
        core.debug(`File Size: ${archiveFileSize}`);
        if (archiveFileSize > fileSizeLimit) {
            core.warning(
                `Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`
                `Cache size of ~${Math.round(
                    archiveFileSize / (1024 * 1024)
                )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`
            );
            return;
        }

        const stream = fs.createReadStream(archivePath);
        await cacheHttpClient.saveCache(stream, primaryKey);
        await cacheHttpClient.saveCache(primaryKey, archivePath);
    } catch (error) {
        core.warning(error.message);
    }
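
As a worked example of the size check above: the limit is 400 * 1024 * 1024 = 419430400 bytes, so a hypothetical 500 MB archive is rejected as follows:

// archiveFileSize = 524288000               // 500 * 1024 * 1024 bytes (illustrative)
// Math.round(524288000 / (1024 * 1024))     // === 500
// => warning: "Cache size of ~500 MB (524288000 B) is over the 400MB limit, not saving cache."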
@@ -1,10 +1,11 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as uuidV4 from "uuid/v4";

import { Outputs, State } from "../constants";
import { Events, Outputs, State } from "../constants";
import { ArtifactCacheEntry } from "../contracts";

// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@@ -32,6 +33,10 @@ export async function createTempDirectory(): Promise<string> {
    return dest;
}

export function getArchiveFileSize(path: string): number {
    return fs.statSync(path).size;
}

export function isExactKeyMatch(
    key: string,
    cacheResult?: ArtifactCacheEntry
@@ -45,10 +50,18 @@ export function isExactKeyMatch(
    );
}

export function setCacheState(state: ArtifactCacheEntry): void {
    core.saveState(State.CacheResult, JSON.stringify(state));
}

export function setCacheHitOutput(isCacheHit: boolean): void {
    core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}

export function setOutputAndState(
    key: string,
    cacheResult?: ArtifactCacheEntry
) {
): void {
    setCacheHitOutput(isExactKeyMatch(key, cacheResult));
    // Store the cache result if it exists
    cacheResult && setCacheState(cacheResult);
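
Assuming isExactKeyMatch compares the requested key against the entry's cacheKey (its full body is elided by the hunk header above), setOutputAndState behaves roughly as follows, with made-up keys for illustration:

// setOutputAndState("npm-abc123", { cacheKey: "npm-abc123" })
//   -> cache-hit output "true", CACHE_RESULT state saved
// setOutputAndState("npm-abc123", { cacheKey: "npm-" })      // hit via a restore-key
//   -> cache-hit output "false", CACHE_RESULT state saved
// setOutputAndState("npm-abc123", undefined)                 // nothing restored
//   -> cache-hit output "false", no state saved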
@@ -57,15 +70,11 @@ export function setOutputAndState(
export function getCacheState(): ArtifactCacheEntry | undefined {
    const stateData = core.getState(State.CacheResult);
    core.debug(`State: ${stateData}`);
    return (stateData && JSON.parse(stateData)) as ArtifactCacheEntry;
}
    if (stateData) {
        return JSON.parse(stateData) as ArtifactCacheEntry;
    }

export function setCacheState(state: ArtifactCacheEntry) {
    core.saveState(State.CacheResult, JSON.stringify(state));
}

export function setCacheHitOutput(isCacheHit: boolean) {
    core.setOutput(Outputs.CacheHit, isCacheHit.toString());
    return undefined;
}

export function resolvePath(filePath: string): string {
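
The CACHE_RESULT state written by setCacheState during restore is what getCacheState reads back during save. An illustration with made-up entry fields:

// restore: setCacheState({ cacheKey: "npm-abc123", archiveLocation: "https://example/archive" })
//   -> core.saveState("CACHE_RESULT", '{"cacheKey":"npm-abc123","archiveLocation":"https://example/archive"}')
// save:    getCacheState() parses that JSON back into an ArtifactCacheEntry,
//          or returns undefined when restore never stored a result.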
@@ -79,3 +88,15 @@ export function resolvePath(filePath: string): string {

    return path.resolve(filePath);
}

export function getSupportedEvents(): string[] {
    return [Events.Push, Events.PullRequest];
}

// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
    const githubEvent = process.env[Events.Key] || "";
    return getSupportedEvents().includes(githubEvent);
}
@@ -1,7 +1,29 @@
import { Inputs } from "../constants";

// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
function getInputName(name: string): string {
    return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`;
}

export function setInput(name: string, value: string) {
export function setInput(name: string, value: string): void {
    process.env[getInputName(name)] = value;
}

interface CacheInput {
    path: string;
    key: string;
    restoreKeys?: string[];
}

export function setInputs(input: CacheInput): void {
    setInput(Inputs.Path, input.path);
    setInput(Inputs.Key, input.key);
    input.restoreKeys &&
        setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
}

export function clearInputs(): void {
    delete process.env[getInputName(Inputs.Path)];
    delete process.env[getInputName(Inputs.Key)];
    delete process.env[getInputName(Inputs.RestoreKeys)];
}
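
A test file could drive the action through these helpers, for example (a sketch assuming a Jest-style setup; the import path, input values, and test hooks are assumptions, not part of this diff):

import * as testUtils from "../src/utils/testUtils";

beforeAll(() => {
    // Simulates the action's with: path/key/restore-keys by exporting INPUT_* environment variables.
    testUtils.setInputs({
        path: "node_modules",
        key: "npm-abc123",
        restoreKeys: ["npm-"]
    });
});

afterAll(() => {
    // Removes the INPUT_* variables so later tests start from a clean environment.
    testUtils.clearInputs();
});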