mirror of
https://github.com/actions/setup-go.git
synced 2026-04-05 12:19:24 +00:00
perf: improve setup-go cache for build-cache + use improved version of hash dir based on file's meta
This commit is contained in:
parent
cdcb360436
commit
ed099b7083
7 changed files with 9350 additions and 8919 deletions
|
|
@ -17,6 +17,9 @@ inputs:
|
|||
default: true
|
||||
cache-dependency-path:
|
||||
description: 'Used to specify the path to a dependency file - go.sum'
|
||||
cache-key-prefix:
|
||||
description: 'Used to specify a prefix to the cache key - ex: special-cache'
|
||||
default: ''
|
||||
architecture:
|
||||
description: 'Target architecture for Go to use. Examples: x86, x64. Will use system architecture by default.'
|
||||
outputs:
|
||||
|
|
|
|||
8252
dist/cache-save/index.js
vendored
8252
dist/cache-save/index.js
vendored
File diff suppressed because it is too large
Load diff
9942
dist/setup/index.js
vendored
9942
dist/setup/index.js
vendored
File diff suppressed because it is too large
Load diff
|
|
@ -7,6 +7,7 @@ import fs from 'fs';
|
|||
import {State, Outputs} from './constants';
|
||||
import {PackageManagerInfo} from './package-managers';
|
||||
import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
|
||||
import {computeMetaHash} from './hashdir';
|
||||
|
||||
export const restoreCache = async (
|
||||
versionSpec: string,
|
||||
|
|
@ -29,14 +30,28 @@ export const restoreCache = async (
|
|||
);
|
||||
}
|
||||
|
||||
let prefixKey = core.getInput('cache-key-prefix');
|
||||
if (prefixKey) {
|
||||
prefixKey += '-';
|
||||
}
|
||||
|
||||
const linuxVersion =
|
||||
process.env.RUNNER_OS === 'Linux' ? `${process.env.ImageOS}-` : '';
|
||||
const primaryKey = `setup-go-${platform}-${linuxVersion}go-${versionSpec}-${fileHash}`;
|
||||
const baseKey = `setup-go-${platform}-${linuxVersion}go-${versionSpec}`;
|
||||
const prefixBaseKey = `${prefixKey}${baseKey}`;
|
||||
core.saveState(State.CachePrefixBaseKey, prefixBaseKey);
|
||||
|
||||
const primaryKey = `${prefixBaseKey}-${fileHash}`;
|
||||
core.debug(`primary key is ${primaryKey}`);
|
||||
|
||||
core.saveState(State.CachePrimaryKey, primaryKey);
|
||||
|
||||
const cacheKey = await cache.restoreCache(cachePaths, primaryKey);
|
||||
const start = Date.now();
|
||||
const cacheKey = await cache.restoreCache(cachePaths, primaryKey, [
|
||||
prefixBaseKey,
|
||||
baseKey
|
||||
]);
|
||||
core.info(`Time taken to restore cache: ${Date.now() - start}ms`);
|
||||
core.setOutput(Outputs.CacheHit, Boolean(cacheKey));
|
||||
|
||||
if (!cacheKey) {
|
||||
|
|
@ -47,6 +62,12 @@ export const restoreCache = async (
|
|||
|
||||
core.saveState(State.CacheMatchedKey, cacheKey);
|
||||
core.info(`Cache restored from key: ${cacheKey}`);
|
||||
|
||||
if (cachePaths.length > 1) {
|
||||
const buildHash = computeMetaHash([cachePaths[1]]);
|
||||
core.debug(`build hash is ${buildHash}`);
|
||||
core.saveState(State.CacheBuildHash, buildHash);
|
||||
}
|
||||
};
|
||||
|
||||
const findDependencyFile = (packageManager: PackageManagerInfo) => {
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@ import * as cache from '@actions/cache';
|
|||
import fs from 'fs';
|
||||
import {State} from './constants';
|
||||
import {getCacheDirectoryPath, getPackageManagerInfo} from './cache-utils';
|
||||
import {computeMetaHash} from './hashdir';
|
||||
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||
|
|
@ -42,6 +43,7 @@ const cachePackages = async () => {
|
|||
|
||||
const state = core.getState(State.CacheMatchedKey);
|
||||
const primaryKey = core.getState(State.CachePrimaryKey);
|
||||
const prevBuildHash = core.getState(State.CacheBuildHash);
|
||||
|
||||
const packageManagerInfo = await getPackageManagerInfo(packageManager);
|
||||
|
||||
|
|
@ -71,18 +73,23 @@ const cachePackages = async () => {
|
|||
return;
|
||||
}
|
||||
|
||||
if (primaryKey === state) {
|
||||
const buildHash = computeMetaHash([cachePaths[1]]);
|
||||
|
||||
if (primaryKey === state && buildHash === prevBuildHash) {
|
||||
core.info(
|
||||
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
|
||||
`Cache hit occurred on the primary key ${primaryKey} and build hash ${buildHash}, not saving cache.`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const cacheId = await cache.saveCache(cachePaths, primaryKey);
|
||||
const updatedKey = primaryKey + (buildHash ? `-${buildHash}` : '');
|
||||
const start = Date.now();
|
||||
const cacheId = await cache.saveCache(cachePaths, updatedKey);
|
||||
core.info(`Time taken to save cache: ${Date.now() - start}ms`);
|
||||
if (cacheId === -1) {
|
||||
return;
|
||||
}
|
||||
core.info(`Cache saved with the key: ${primaryKey}`);
|
||||
core.info(`Cache saved with the key: ${updatedKey}`);
|
||||
};
|
||||
|
||||
function logWarning(message: string): void {
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
// Keys for @actions/core saveState/getState, used to hand data from the
// main (restore) step to the post (save) step of the action.
export enum State {
  // Cache key prefix + platform/Go-version base key, without the dependency-file hash.
  CachePrefixBaseKey = 'CACHE_PREFIX_BASE_KEY',
  // Full primary cache key (base key plus dependency-file hash).
  CachePrimaryKey = 'CACHE_KEY',
  // Metadata hash (computeMetaHash) of the build-cache path, saved on restore
  // and compared on save to skip re-uploading an unchanged cache.
  CacheBuildHash = 'CACHE_HASH',
  // Key the restored cache actually matched; may be a restore-key fallback
  // rather than the primary key.
  CacheMatchedKey = 'CACHE_RESULT'
}
|
||||
|
||||
|
|
|
|||
30
src/hashdir.ts
Normal file
30
src/hashdir.ts
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
import {Hash, createHash} from 'node:crypto';
|
||||
import {readdirSync, statSync} from 'node:fs';
|
||||
import {join} from 'node:path';
|
||||
|
||||
/**
|
||||
* Creates hash of given files/folders. Used to conditionally deploy custom
|
||||
* resources depending if source files have changed
|
||||
*/
|
||||
export function computeMetaHash(paths: string[], inputHash?: Hash) {
|
||||
const hash = inputHash ? inputHash : createHash('sha1');
|
||||
for (const path of paths) {
|
||||
const statInfo = statSync(path);
|
||||
if (statInfo.isDirectory()) {
|
||||
const directoryEntries = readdirSync(path, {withFileTypes: true});
|
||||
const fullPaths = directoryEntries.map(e => join(path, e.name));
|
||||
// recursively walk sub-folders
|
||||
computeMetaHash(fullPaths, hash);
|
||||
} else {
|
||||
const statInfo = statSync(path);
|
||||
// compute hash string name:size:mtime
|
||||
const fileInfo = `${path}:${statInfo.size}:${statInfo.mtimeMs}`;
|
||||
hash.update(fileInfo);
|
||||
}
|
||||
}
|
||||
// if not being called recursively, get the digest and return it as the hash result
|
||||
if (!inputHash) {
|
||||
return hash.digest('hex');
|
||||
}
|
||||
return;
|
||||
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue