Compare commits


4 Commits

Daz DeBoer  e88ed3e650  2022-06-06 12:34:02 -06:00
    Update README for v2.2.0

Daz DeBoer  de51428ba5  2022-06-06 11:53:30 -06:00
    Build outputs

Daz DeBoer  8096e65e0a  2022-06-06 11:52:46 -06:00
    Better error reporting when file deletion fails
    - Include file name in all logging
    - Log initial attempts at debug to avoid noise
    - Include output of 'jps -lm' when final attempt fails

Daz DeBoer  9cd70b5460  2022-06-06 08:48:03 -06:00
    Report cache entry path for duplicate entry save
7 changed files with 81 additions and 29 deletions

README.md

@@ -218,15 +218,18 @@ For example, this means that all jobs executing a particular version of the Gradle
 ### Using the caches read-only
-In some circumstances, it makes sense for a Gradle invocation to read any existing cache entries but not to write changes back.
-For example, you may want to write cache entries for builds on your `main` branch, but not for any PR build invocations.
-You can enable read-only caching for any of the caches as follows:
+By default, the `gradle-build-action` will only write to the cache from Jobs on the default (`main`/`master`) branch.
+Jobs on other branches will read entries from the cache but will not write updated entries.
+See [Optimizing cache effectiveness](#optimizing-cache-effectiveness) for a more detailed explanation.
+In some circumstances it makes sense to change this default, and to configure a workflow Job to read existing cache entries but not to write changes back.
+You can configure read-only caching for the `gradle-build-action` as follows:
 ```yaml
-# Only write to the cache for builds on the 'main' branch.
+# Only write to the cache for builds on the 'main' and 'release' branches. (Default is 'main' only.)
 # Builds on other branches will only read existing entries from the cache.
-cache-read-only: ${{ github.ref != 'refs/heads/main' }}
+cache-read-only: ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/release' }}
 ```
 ### Gradle User Home cache tuning
@@ -272,13 +275,19 @@ Eviction of shared cache entries can reduce cache effectiveness, slowing down yo
 There are a number of actions you can take if your cache use is less effective due to entry eviction.
-#### Only write to the cache from the default branch
+#### Select branches that should write to the cache
 GitHub cache entries are not shared between builds on different branches. This means that identical cache entries will be stored separately for different branches.
-The exception to the is cache entries for the default (`master`/`main`) branch can be read by actions invoked for other branches.
-An easy way to reduce cache usage when you run builds on many different branches is to only permit your default branch to write to the cache,
-with all other branch builds using `cache-read-only`. See [Using the caches read-only](#using-the-caches-read-only) for more details.
+An exception to this is that cache entries for the default (`master`/`main`) branch can be read by actions invoked for other branches.
+By default, the `gradle-build-action` will only _write_ to the cache for builds run on the default branch.
+Jobs run on other branches will only read from the cache. In most cases, this is the desired behaviour,
+because Jobs run against other branches will benefit from the cache Gradle User Home from `main`,
+without writing private cache entries that could lead to evicting shared entries.
+If you have other long-lived development branches that would benefit from writing to the cache,
+you can configure these by overriding the `cache-read-only` action parameter.
+See [Using the caches read-only](#using-the-caches-read-only) for more details.
 Similarly, you could use `cache-read-only` for certain jobs in the workflow, and instead have these jobs reuse the cache content from upstream jobs.
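As a usage reference, a minimal workflow step applying the branch-based override described above might look like the following sketch. The action reference `gradle/gradle-build-action@v2` and the `develop` branch name are illustrative assumptions, not part of this change:

```yaml
# Sketch: let 'main' and a long-lived 'develop' branch write to the cache;
# all other branches (for example PR builds) only read existing entries.
- uses: gradle/gradle-build-action@v2   # assumed action reference, for illustration
  with:
    cache-read-only: ${{ github.ref != 'refs/heads/main' && github.ref != 'refs/heads/develop' }}
```

Because `cache-read-only` accepts an expression, the same step definition can behave differently per branch without duplicating jobs.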

dist/main/index.js vendored

@@ -65528,6 +65528,7 @@ exports.tryDelete = exports.handleCacheFailure = exports.cacheDebug = exports.sa
 const core = __importStar(__nccwpck_require__(2186));
 const cache = __importStar(__nccwpck_require__(7799));
 const github = __importStar(__nccwpck_require__(5438));
+const exec = __importStar(__nccwpck_require__(1514));
 const crypto = __importStar(__nccwpck_require__(6113));
 const path = __importStar(__nccwpck_require__(1017));
 const fs = __importStar(__nccwpck_require__(7147));
@@ -65639,7 +65640,7 @@ function saveCache(cachePath, cacheKey, listener) {
             if (error instanceof cache.ReserveCacheError) {
                 listener.markAlreadyExists(cacheKey);
             }
-            handleCacheFailure(error, `Failed to save cache entry ${cacheKey}`);
+            handleCacheFailure(error, `Failed to save cache entry with path '${cachePath}' and key: ${cacheKey}`);
         }
     });
 }
@@ -65670,8 +65671,9 @@ function handleCacheFailure(error, message) {
 exports.handleCacheFailure = handleCacheFailure;
 function tryDelete(file) {
     return __awaiter(this, void 0, void 0, function* () {
+        const maxAttempts = 5;
         const stat = fs.lstatSync(file);
-        for (let count = 0; count < 3; count++) {
+        for (let attempt = 1; attempt <= maxAttempts; attempt++) {
             try {
                 if (stat.isDirectory()) {
                     fs.rmdirSync(file, { recursive: true });
@@ -65682,11 +65684,14 @@ function tryDelete(file) {
                 return;
             }
             catch (error) {
-                if (count === 2) {
+                if (attempt === maxAttempts) {
+                    core.warning(`Failed to delete ${file}, which will impact caching.
+It is likely locked by another process. Output of 'jps -ml':
+${yield getJavaProcesses()}`);
                     throw error;
                 }
                 else {
-                    core.warning(String(error));
+                    cacheDebug(`Attempt to delete ${file} failed. Will try again.`);
                     yield delay(1000);
                 }
             }
@@ -65699,6 +65704,12 @@ function delay(ms) {
         return new Promise(resolve => setTimeout(resolve, ms));
     });
 }
+function getJavaProcesses() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const jpsOutput = yield exec.getExecOutput('jps', ['-lm']);
+        return jpsOutput.stdout;
+    });
+}
 /***/ }),
@@ -66381,6 +66392,10 @@ const job_summary_1 = __nccwpck_require__(7345);
 const GRADLE_SETUP_VAR = 'GRADLE_BUILD_ACTION_SETUP_COMPLETED';
 const GRADLE_USER_HOME = 'GRADLE_USER_HOME';
 const CACHE_LISTENER = 'CACHE_LISTENER';
+const JOB_SUMMARY_ENABLED_PARAMETER = 'generate-job-summary';
+function shouldGenerateJobSummary() {
+    return core.getBooleanInput(JOB_SUMMARY_ENABLED_PARAMETER);
+}
 function setup(buildRootDirectory) {
     return __awaiter(this, void 0, void 0, function* () {
         const gradleUserHome = yield determineGradleUserHome(buildRootDirectory);
@@ -66399,7 +66414,6 @@ function setup(buildRootDirectory) {
 exports.setup = setup;
 function complete() {
     return __awaiter(this, void 0, void 0, function* () {
-        core.info('Inside setupGradle.complete()');
         if (!core.getState(GRADLE_SETUP_VAR)) {
             core.info('Gradle setup post-action only performed for first gradle-build-action step in workflow.');
             return;
@@ -66411,7 +66425,9 @@ function complete() {
         const cacheListener = cache_reporting_1.CacheListener.rehydrate(core.getState(CACHE_LISTENER));
         const gradleUserHome = core.getState(GRADLE_USER_HOME);
         yield caches.save(gradleUserHome, cacheListener);
-        (0, job_summary_1.writeJobSummary)(buildResults, cacheListener);
+        if (shouldGenerateJobSummary()) {
+            (0, job_summary_1.writeJobSummary)(buildResults, cacheListener);
+        }
     });
 }
 exports.complete = complete;

File diff suppressed because one or more lines are too long

dist/post/index.js vendored

@@ -64579,6 +64579,7 @@ exports.tryDelete = exports.handleCacheFailure = exports.cacheDebug = exports.sa
 const core = __importStar(__nccwpck_require__(2186));
 const cache = __importStar(__nccwpck_require__(7799));
 const github = __importStar(__nccwpck_require__(5438));
+const exec = __importStar(__nccwpck_require__(1514));
 const crypto = __importStar(__nccwpck_require__(6113));
 const path = __importStar(__nccwpck_require__(1017));
 const fs = __importStar(__nccwpck_require__(7147));
@@ -64690,7 +64691,7 @@ function saveCache(cachePath, cacheKey, listener) {
             if (error instanceof cache.ReserveCacheError) {
                 listener.markAlreadyExists(cacheKey);
             }
-            handleCacheFailure(error, `Failed to save cache entry ${cacheKey}`);
+            handleCacheFailure(error, `Failed to save cache entry with path '${cachePath}' and key: ${cacheKey}`);
         }
     });
 }
@@ -64721,8 +64722,9 @@ function handleCacheFailure(error, message) {
 exports.handleCacheFailure = handleCacheFailure;
 function tryDelete(file) {
     return __awaiter(this, void 0, void 0, function* () {
+        const maxAttempts = 5;
         const stat = fs.lstatSync(file);
-        for (let count = 0; count < 3; count++) {
+        for (let attempt = 1; attempt <= maxAttempts; attempt++) {
             try {
                 if (stat.isDirectory()) {
                     fs.rmdirSync(file, { recursive: true });
@@ -64733,11 +64735,14 @@ function tryDelete(file) {
                 return;
             }
             catch (error) {
-                if (count === 2) {
+                if (attempt === maxAttempts) {
+                    core.warning(`Failed to delete ${file}, which will impact caching.
+It is likely locked by another process. Output of 'jps -ml':
+${yield getJavaProcesses()}`);
                     throw error;
                 }
                 else {
-                    core.warning(String(error));
+                    cacheDebug(`Attempt to delete ${file} failed. Will try again.`);
                     yield delay(1000);
                 }
             }
@@ -64750,6 +64755,12 @@ function delay(ms) {
         return new Promise(resolve => setTimeout(resolve, ms));
     });
 }
+function getJavaProcesses() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const jpsOutput = yield exec.getExecOutput('jps', ['-lm']);
+        return jpsOutput.stdout;
+    });
+}
 /***/ }),
@@ -65065,6 +65076,10 @@ const job_summary_1 = __nccwpck_require__(7345);
 const GRADLE_SETUP_VAR = 'GRADLE_BUILD_ACTION_SETUP_COMPLETED';
 const GRADLE_USER_HOME = 'GRADLE_USER_HOME';
 const CACHE_LISTENER = 'CACHE_LISTENER';
+const JOB_SUMMARY_ENABLED_PARAMETER = 'generate-job-summary';
+function shouldGenerateJobSummary() {
+    return core.getBooleanInput(JOB_SUMMARY_ENABLED_PARAMETER);
+}
 function setup(buildRootDirectory) {
     return __awaiter(this, void 0, void 0, function* () {
         const gradleUserHome = yield determineGradleUserHome(buildRootDirectory);
@@ -65083,7 +65098,6 @@ function setup(buildRootDirectory) {
 exports.setup = setup;
 function complete() {
     return __awaiter(this, void 0, void 0, function* () {
-        core.info('Inside setupGradle.complete()');
        if (!core.getState(GRADLE_SETUP_VAR)) {
             core.info('Gradle setup post-action only performed for first gradle-build-action step in workflow.');
             return;
@@ -65095,7 +65109,9 @@ function complete() {
         const cacheListener = cache_reporting_1.CacheListener.rehydrate(core.getState(CACHE_LISTENER));
         const gradleUserHome = core.getState(GRADLE_USER_HOME);
         yield caches.save(gradleUserHome, cacheListener);
-        (0, job_summary_1.writeJobSummary)(buildResults, cacheListener);
+        if (shouldGenerateJobSummary()) {
+            (0, job_summary_1.writeJobSummary)(buildResults, cacheListener);
+        }
     });
 }
 exports.complete = complete;

File diff suppressed because one or more lines are too long


@@ -1,6 +1,8 @@
 import * as core from '@actions/core'
 import * as cache from '@actions/cache'
 import * as github from '@actions/github'
+import * as exec from '@actions/exec'
 import * as crypto from 'crypto'
 import * as path from 'path'
 import * as fs from 'fs'
@@ -162,7 +164,7 @@ export async function saveCache(cachePath: string[], cacheKey: string, listener:
         if (error instanceof cache.ReserveCacheError) {
             listener.markAlreadyExists(cacheKey)
         }
-        handleCacheFailure(error, `Failed to save cache entry ${cacheKey}`)
+        handleCacheFailure(error, `Failed to save cache entry with path '${cachePath}' and key: ${cacheKey}`)
     }
 }
@@ -195,8 +197,9 @@ export function handleCacheFailure(error: unknown, message: string): void {
  * Attempt to delete a file or directory, waiting to allow locks to be released
  */
 export async function tryDelete(file: string): Promise<void> {
+    const maxAttempts = 5
     const stat = fs.lstatSync(file)
-    for (let count = 0; count < 3; count++) {
+    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
         try {
             if (stat.isDirectory()) {
                 fs.rmdirSync(file, {recursive: true})
@@ -205,10 +208,13 @@ export async function tryDelete(file: string): Promise<void> {
             }
             return
         } catch (error) {
-            if (count === 2) {
+            if (attempt === maxAttempts) {
+                core.warning(`Failed to delete ${file}, which will impact caching.
+It is likely locked by another process. Output of 'jps -ml':
+${await getJavaProcesses()}`)
                 throw error
             } else {
-                core.warning(String(error))
+                cacheDebug(`Attempt to delete ${file} failed. Will try again.`)
                 await delay(1000)
             }
         }
@@ -218,3 +224,8 @@ export async function tryDelete(file: string): Promise<void> {
 async function delay(ms: number): Promise<void> {
     return new Promise(resolve => setTimeout(resolve, ms))
 }
+
+async function getJavaProcesses(): Promise<string> {
+    const jpsOutput = await exec.getExecOutput('jps', ['-lm'])
+    return jpsOutput.stdout
+}


@@ -13,7 +13,7 @@ const GRADLE_USER_HOME = 'GRADLE_USER_HOME'
 const CACHE_LISTENER = 'CACHE_LISTENER'
 const JOB_SUMMARY_ENABLED_PARAMETER = 'generate-job-summary'
-function generateJobSummary(): boolean {
+function shouldGenerateJobSummary(): boolean {
     return core.getBooleanInput(JOB_SUMMARY_ENABLED_PARAMETER)
 }
@@ -56,7 +56,7 @@ export async function complete(): Promise<void> {
     const gradleUserHome = core.getState(GRADLE_USER_HOME)
     await caches.save(gradleUserHome, cacheListener)
-    if (generateJobSummary()) {
+    if (shouldGenerateJobSummary()) {
         writeJobSummary(buildResults, cacheListener)
     }
 }
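The `generate-job-summary` input read by `shouldGenerateJobSummary()` lets a workflow opt out of the job summary written by the post action. A minimal sketch of disabling it follows; the `gradle/gradle-build-action@v2` reference and the surrounding step layout are illustrative assumptions rather than part of this change:

```yaml
# Sketch: skip writing the build-results job summary for this step.
- uses: gradle/gradle-build-action@v2   # assumed action reference, for illustration
  with:
    generate-job-summary: false   # read via core.getBooleanInput('generate-job-summary')
```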