Mirror of https://github.com/gradle/actions.git (synced 2025-11-26 17:09:10 +08:00)
Combine all sources into a sub-directory
sources/src/build-results.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import * as fs from 'fs'
import * as path from 'path'

export interface BuildResult {
    get rootProjectName(): string
    get rootProjectDir(): string
    get requestedTasks(): string
    get gradleVersion(): string
    get gradleHomeDir(): string
    get buildFailed(): boolean
    get buildScanUri(): string
    get buildScanFailed(): boolean
}

export function loadBuildResults(): BuildResult[] {
    const buildResultsDir = path.resolve(process.env['RUNNER_TEMP']!, '.build-results')
    if (!fs.existsSync(buildResultsDir)) {
        return []
    }

    return fs.readdirSync(buildResultsDir).map(file => {
        // Every file in the .build-results dir should be a BuildResults JSON
        const filePath = path.join(buildResultsDir, file)
        const content = fs.readFileSync(filePath, 'utf8')
        return JSON.parse(content) as BuildResult
    })
}
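Note (illustrative, not part of the committed sources): each file under RUNNER_TEMP/.build-results is a plain JSON object whose fields mirror the BuildResult interface, so loadBuildResults() can parse it directly. A hypothetical payload, with made-up values, might look like:

    // Hypothetical contents of $RUNNER_TEMP/.build-results/<step-id>.json
    const exampleBuildResultJson = `{
        "rootProjectName": "my-app",
        "rootProjectDir": "/home/runner/work/my-app/my-app",
        "requestedTasks": "build",
        "gradleVersion": "8.6",
        "gradleHomeDir": "/opt/gradle/8.6",
        "buildFailed": false,
        "buildScanUri": "https://gradle.com/s/abc123",
        "buildScanFailed": false
    }`
    // loadBuildResults() reads each such file and returns JSON.parse(content) as BuildResult.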
sources/src/build-scan.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import * as core from '@actions/core'
import {
    getBuildScanPublishEnabled,
    getBuildScanTermsOfServiceUrl,
    getBuildScanTermsOfServiceAgree
} from './input-params'

export function setup(): void {
    if (getBuildScanPublishEnabled() && verifyTermsOfServiceAgreement()) {
        maybeExportVariable('DEVELOCITY_INJECTION_ENABLED', 'true')
        maybeExportVariable('DEVELOCITY_PLUGIN_VERSION', '3.16.1')
        maybeExportVariable('DEVELOCITY_CCUD_PLUGIN_VERSION', '1.12.1')
        maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_URL', getBuildScanTermsOfServiceUrl())
        maybeExportVariable('BUILD_SCAN_TERMS_OF_SERVICE_AGREE', getBuildScanTermsOfServiceAgree())
    }
}

function verifyTermsOfServiceAgreement(): boolean {
    if (
        getBuildScanTermsOfServiceUrl() !== 'https://gradle.com/terms-of-service' ||
        getBuildScanTermsOfServiceAgree() !== 'yes'
    ) {
        core.warning(`Terms of service must be agreed in order to publish build scans.`)
        return false
    }
    return true
}

function maybeExportVariable(variableName: string, value: unknown): void {
    if (!process.env[variableName]) {
        core.exportVariable(variableName, value)
    }
}
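Note (illustrative, not part of the committed sources): because setup() goes through maybeExportVariable, any of these variables already present in the environment take precedence over the defaults the action would export. A minimal sketch, assuming a workflow that pre-sets one of them:

    // Pre-set by the workflow before the action runs (hypothetical value)
    process.env['DEVELOCITY_PLUGIN_VERSION'] = '3.16'
    // setup() would then export DEVELOCITY_INJECTION_ENABLED=true (previously unset),
    // but maybeExportVariable leaves DEVELOCITY_PLUGIN_VERSION at the workflow-provided '3.16'.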
sources/src/cache-base.ts (new file, 274 lines)
@@ -0,0 +1,274 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as glob from '@actions/glob'

import path from 'path'
import fs from 'fs'
import * as params from './input-params'
import {CacheListener} from './cache-reporting'
import {saveCache, restoreCache, cacheDebug, isCacheDebuggingEnabled, tryDelete, generateCacheKey} from './cache-utils'
import {GradleHomeEntryExtractor, ConfigurationCacheEntryExtractor} from './cache-extract-entries'

const RESTORED_CACHE_KEY_KEY = 'restored-cache-key'

export const META_FILE_DIR = '.gradle-build-action'

export class GradleStateCache {
    private cacheName: string
    private cacheDescription: string

    protected readonly userHome: string
    protected readonly gradleUserHome: string

    constructor(userHome: string, gradleUserHome: string) {
        this.userHome = userHome
        this.gradleUserHome = gradleUserHome
        this.cacheName = 'gradle'
        this.cacheDescription = 'Gradle User Home'
    }

    init(): void {
        this.initializeGradleUserHome()

        // Export the GRADLE_ENCRYPTION_KEY variable if provided
        const encryptionKey = params.getCacheEncryptionKey()
        if (encryptionKey) {
            core.exportVariable('GRADLE_ENCRYPTION_KEY', encryptionKey)
        }
    }

    cacheOutputExists(): boolean {
        const cachesDir = path.resolve(this.gradleUserHome, 'caches')
        if (fs.existsSync(cachesDir)) {
            cacheDebug(`Cache output exists at ${cachesDir}`)
            return true
        }
        return false
    }

    /**
     * Restores the cache entry, finding the closest match to the currently running job.
     */
    async restore(listener: CacheListener): Promise<void> {
        const entryListener = listener.entry(this.cacheDescription)

        const cacheKey = generateCacheKey(this.cacheName)

        cacheDebug(
            `Requesting ${this.cacheDescription} with
    key:${cacheKey.key}
    restoreKeys:[${cacheKey.restoreKeys}]`
        )

        const cacheResult = await restoreCache(this.getCachePath(), cacheKey.key, cacheKey.restoreKeys, entryListener)
        if (!cacheResult) {
            core.info(`${this.cacheDescription} cache not found. Will initialize empty.`)
            return
        }

        core.saveState(RESTORED_CACHE_KEY_KEY, cacheResult.key)

        core.info(`Restored ${this.cacheDescription} from cache key: ${cacheResult.key}`)

        try {
            await this.afterRestore(listener)
        } catch (error) {
            core.warning(`Restore ${this.cacheDescription} failed in 'afterRestore': ${error}`)
        }
    }

    /**
     * Restore any extracted cache entries after the main Gradle User Home entry is restored.
     */
    async afterRestore(listener: CacheListener): Promise<void> {
        await this.debugReportGradleUserHomeSize('as restored from cache')
        await new GradleHomeEntryExtractor(this.gradleUserHome).restore(listener)
        await new ConfigurationCacheEntryExtractor(this.gradleUserHome).restore(listener)
        await this.debugReportGradleUserHomeSize('after restoring common artifacts')
    }

    /**
     * Saves the cache entry based on the current cache key unless the cache was restored with the exact key,
     * in which case we cannot overwrite it.
     *
     * If the cache entry was restored with a partial match on a restore key, then
     * it is saved with the exact key.
     */
    async save(listener: CacheListener): Promise<void> {
        const cacheKey = generateCacheKey(this.cacheName).key
        const restoredCacheKey = core.getState(RESTORED_CACHE_KEY_KEY)
        const gradleHomeEntryListener = listener.entry(this.cacheDescription)

        if (restoredCacheKey && cacheKey === restoredCacheKey) {
            core.info(`Cache hit occurred on the cache key ${cacheKey}, not saving cache.`)

            for (const entryListener of listener.cacheEntries) {
                if (entryListener === gradleHomeEntryListener) {
                    entryListener.markNotSaved('cache key not changed')
                } else {
                    entryListener.markNotSaved(`referencing '${this.cacheDescription}' cache entry not saved`)
                }
            }
            return
        }

        try {
            await this.beforeSave(listener)
        } catch (error) {
            core.warning(`Save ${this.cacheDescription} failed in 'beforeSave': ${error}`)
            return
        }

        core.info(`Caching ${this.cacheDescription} with cache key: ${cacheKey}`)
        const cachePath = this.getCachePath()
        await saveCache(cachePath, cacheKey, gradleHomeEntryListener)

        return
    }

    /**
     * Extract and save any defined extracted cache entries prior to the main Gradle User Home entry being saved.
     */
    async beforeSave(listener: CacheListener): Promise<void> {
        await this.debugReportGradleUserHomeSize('before saving common artifacts')
        await this.deleteExcludedPaths()
        await Promise.all([
            new GradleHomeEntryExtractor(this.gradleUserHome).extract(listener),
            new ConfigurationCacheEntryExtractor(this.gradleUserHome).extract(listener)
        ])
        await this.debugReportGradleUserHomeSize(
            "after extracting common artifacts (only 'caches' and 'notifications' will be stored)"
        )
    }

    /**
     * Delete any file paths that are excluded by the `gradle-home-cache-excludes` parameter.
     */
    private async deleteExcludedPaths(): Promise<void> {
        const rawPaths: string[] = params.getCacheExcludes()
        rawPaths.push('caches/*/cc-keystore')
        const resolvedPaths = rawPaths.map(x => path.resolve(this.gradleUserHome, x))

        for (const p of resolvedPaths) {
            cacheDebug(`Removing excluded path: ${p}`)
            const globber = await glob.create(p, {
                implicitDescendants: false
            })

            for (const toDelete of await globber.glob()) {
                cacheDebug(`Removing excluded file: ${toDelete}`)
                await tryDelete(toDelete)
            }
        }
    }

    /**
     * Determines the paths within Gradle User Home to cache.
     * By default, this is the 'caches' and 'notifications' directories,
     * but this can be overridden by the `gradle-home-cache-includes` parameter.
     */
    protected getCachePath(): string[] {
        const rawPaths: string[] = params.getCacheIncludes()
        rawPaths.push(META_FILE_DIR)
        const resolvedPaths = rawPaths.map(x => this.resolveCachePath(x))
        cacheDebug(`Using cache paths: ${resolvedPaths}`)
        return resolvedPaths
    }

    private resolveCachePath(rawPath: string): string {
        if (rawPath.startsWith('!')) {
            const resolved = this.resolveCachePath(rawPath.substring(1))
            return `!${resolved}`
        }
        return path.resolve(this.gradleUserHome, rawPath)
    }

    private initializeGradleUserHome(): void {
        // Create a directory for storing action metadata
        const actionCacheDir = path.resolve(this.gradleUserHome, '.gradle-build-action')
        fs.mkdirSync(actionCacheDir, {recursive: true})

        this.copyInitScripts()

        // Copy the default toolchain definitions to `~/.m2/toolchains.xml`
        this.registerToolchains()
    }

    private copyInitScripts(): void {
        // Copy init scripts from src/resources to Gradle UserHome
        const initScriptsDir = path.resolve(this.gradleUserHome, 'init.d')
        fs.mkdirSync(initScriptsDir, {recursive: true})
        const initScriptFilenames = [
            'gradle-build-action.build-result-capture.init.gradle',
            'gradle-build-action.build-result-capture-service.plugin.groovy',
            'gradle-build-action.github-dependency-graph.init.gradle',
            'gradle-build-action.github-dependency-graph-gradle-plugin-apply.groovy',
            'gradle-build-action.inject-develocity.init.gradle'
        ]
        for (const initScriptFilename of initScriptFilenames) {
            const initScriptContent = this.readResourceFileAsString('init-scripts', initScriptFilename)
            const initScriptPath = path.resolve(initScriptsDir, initScriptFilename)
            fs.writeFileSync(initScriptPath, initScriptContent)
        }
    }

    private registerToolchains(): void {
        const preInstalledToolchains = this.readResourceFileAsString('toolchains.xml')
        const m2dir = path.resolve(this.userHome, '.m2')
        const toolchainXmlTarget = path.resolve(m2dir, 'toolchains.xml')
        if (!fs.existsSync(toolchainXmlTarget)) {
            // Write a new toolchains.xml file if it doesn't exist
            fs.mkdirSync(m2dir, {recursive: true})
            fs.writeFileSync(toolchainXmlTarget, preInstalledToolchains)

            core.info(`Wrote default JDK locations to ${toolchainXmlTarget}`)
        } else {
            // Merge into an existing toolchains.xml file
            const existingToolchainContent = fs.readFileSync(toolchainXmlTarget, 'utf8')
            const appendedContent = preInstalledToolchains.split('<toolchains>').pop()!
            const mergedContent = existingToolchainContent.replace('</toolchains>', appendedContent)

            fs.writeFileSync(toolchainXmlTarget, mergedContent)
            core.info(`Merged default JDK locations into ${toolchainXmlTarget}`)
        }
    }

    private readResourceFileAsString(...paths: string[]): string {
        // Resolving relative to __dirname will allow node to find the resource at runtime
        const absolutePath = path.resolve(__dirname, '..', '..', 'sources', 'src', 'resources', ...paths)
        return fs.readFileSync(absolutePath, 'utf8')
    }

    /**
     * When cache debugging is enabled, this method will give a detailed report
     * of the Gradle User Home contents.
     */
    private async debugReportGradleUserHomeSize(label: string): Promise<void> {
        if (!isCacheDebuggingEnabled()) {
            return
        }
        if (!fs.existsSync(this.gradleUserHome)) {
            return
        }
        const result = await exec.getExecOutput('du', ['-h', '-c', '-t', '5M'], {
            cwd: this.gradleUserHome,
            silent: true,
            ignoreReturnCode: true
        })

        core.info(`Gradle User Home (directories >5M): ${label}`)

        core.info(
            result.stdout
                .trimEnd()
                .replace(/\t/g, ' ')
                .split('\n')
                .map(it => {
                    return ` ${it}`
                })
                .join('\n')
        )

        core.info('-----------------------')
    }
}
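Note (illustrative, not part of the committed sources): resolveCachePath keeps a leading '!' intact so that exclusion patterns survive being resolved against Gradle User Home. Assuming gradleUserHome is '/home/runner/.gradle' and the hypothetical include values below, getCachePath() would produce:

    // 'caches'                        -> '/home/runner/.gradle/caches'
    // 'notifications'                 -> '/home/runner/.gradle/notifications'
    // '!caches/*/plugin-resolution/'  -> '!/home/runner/.gradle/caches/*/plugin-resolution/'
    // META_FILE_DIR ('.gradle-build-action') is always appended and cached as well.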
sources/src/cache-cleaner.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as glob from '@actions/glob'
import fs from 'fs'
import path from 'path'

export class CacheCleaner {
    private readonly gradleUserHome: string
    private readonly tmpDir: string

    constructor(gradleUserHome: string, tmpDir: string) {
        this.gradleUserHome = gradleUserHome
        this.tmpDir = tmpDir
    }

    async prepare(): Promise<void> {
        // Reset the file-access journal so that files appear not to have been used recently
        fs.rmSync(path.resolve(this.gradleUserHome, 'caches/journal-1'), {recursive: true, force: true})
        fs.mkdirSync(path.resolve(this.gradleUserHome, 'caches/journal-1'), {recursive: true})
        fs.writeFileSync(
            path.resolve(this.gradleUserHome, 'caches/journal-1/file-access.properties'),
            'inceptionTimestamp=0'
        )

        // Set the modification time of all files to the past: this timestamp is used when there is no matching entry in the journal
        await this.ageAllFiles()

        // Touch all 'gc' files so that cache cleanup won't run immediately.
        await this.touchAllFiles('gc.properties')
    }

    async forceCleanup(): Promise<void> {
        // Age all 'gc' files so that cache cleanup will run immediately.
        await this.ageAllFiles('gc.properties')

        // Run a dummy Gradle build to trigger cache cleanup
        const cleanupProjectDir = path.resolve(this.tmpDir, 'dummy-cleanup-project')
        fs.mkdirSync(cleanupProjectDir, {recursive: true})
        fs.writeFileSync(
            path.resolve(cleanupProjectDir, 'settings.gradle'),
            'rootProject.name = "dummy-cleanup-project"'
        )
        fs.writeFileSync(path.resolve(cleanupProjectDir, 'build.gradle'), 'task("noop") {}')

        const gradleCommand = `gradle -g ${this.gradleUserHome} --no-daemon --build-cache --no-scan --quiet -DGITHUB_DEPENDENCY_GRAPH_ENABLED=false noop`
        await exec.exec(gradleCommand, [], {
            cwd: cleanupProjectDir
        })
    }

    private async ageAllFiles(fileName = '*'): Promise<void> {
        core.debug(`Aging all files in Gradle User Home with name ${fileName}`)
        await this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date(0))
    }

    private async touchAllFiles(fileName = '*'): Promise<void> {
        core.debug(`Touching all files in Gradle User Home with name ${fileName}`)
        await this.setUtimes(`${this.gradleUserHome}/**/${fileName}`, new Date())
    }

    private async setUtimes(pattern: string, timestamp: Date): Promise<void> {
        const globber = await glob.create(pattern, {
            implicitDescendants: false
        })
        for await (const file of globber.globGenerator()) {
            fs.utimesSync(file, timestamp, timestamp)
        }
    }
}
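Note (illustrative, not part of the committed sources): the intent of prepare() is that Gradle's own LRU cleanup later sees everything as stale except the markers that would trigger cleanup right away. A sketch of the resulting timestamps for two hypothetical files:

    // After prepare(), assuming files under caches/:
    // caches/modules-2/files-2.1/.../some-dependency.jar  -> mtime 1970-01-01 (aged, cleanup candidate)
    // caches/modules-2/gc.properties                       -> mtime now        (touched, cleanup deferred)
    // forceCleanup() later backdates the gc.properties markers and runs the no-op build,
    // so Gradle performs the cleanup during that build.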
sources/src/cache-extract-entries.ts (new file, 467 lines)
@@ -0,0 +1,467 @@
import path from 'path'
import fs from 'fs'
import * as core from '@actions/core'
import * as glob from '@actions/glob'
import * as semver from 'semver'

import * as params from './input-params'

import {META_FILE_DIR} from './cache-base'
import {CacheEntryListener, CacheListener} from './cache-reporting'
import {cacheDebug, getCacheKeyPrefix, hashFileNames, restoreCache, saveCache, tryDelete} from './cache-utils'
import {BuildResult, loadBuildResults} from './build-results'

const SKIP_RESTORE_VAR = 'GRADLE_BUILD_ACTION_SKIP_RESTORE'

/**
 * Represents the result of attempting to load or store an extracted cache entry.
 * An undefined cacheKey indicates that the operation did not succeed.
 * The collected results are then used to populate the `cache-metadata.json` file for later use.
 */
class ExtractedCacheEntry {
    artifactType: string
    pattern: string
    cacheKey: string | undefined

    constructor(artifactType: string, pattern: string, cacheKey: string | undefined) {
        this.artifactType = artifactType
        this.pattern = pattern
        this.cacheKey = cacheKey
    }
}

/**
 * Representation of all of the extracted cache entries for this Gradle User Home.
 * This object is persisted to JSON file in the Gradle User Home directory for storing,
 * and subsequently used to restore the Gradle User Home.
 */
class ExtractedCacheEntryMetadata {
    entries: ExtractedCacheEntry[] = []
}

/**
 * The specification for a type of extracted cache entry.
 */
class ExtractedCacheEntryDefinition {
    artifactType: string
    pattern: string
    bundle: boolean
    uniqueFileNames = true
    notCacheableReason: string | undefined

    constructor(artifactType: string, pattern: string, bundle: boolean) {
        this.artifactType = artifactType
        this.pattern = pattern
        this.bundle = bundle
    }

    /**
     * Indicate that the file names matching the cache entry pattern are NOT sufficient to uniquely identify the contents.
     * If the file names are sufficient, then we use a hash of the file names to identify the entry.
     * With non-unique-file-names, we hash the file contents to identify the cache entry.
     */
    withNonUniqueFileNames(): ExtractedCacheEntryDefinition {
        this.uniqueFileNames = false
        return this
    }

    /**
     * Specify that the cache entry should not be saved for some reason, even though the contents exist.
     * This is used to prevent configuration-cache entries being cached when they were generated by Gradle < 8.6.
     */
    notCacheableBecause(reason: string): ExtractedCacheEntryDefinition {
        this.notCacheableReason = reason
        return this
    }
}

/**
 * Caches and restores the entire Gradle User Home directory, extracting entries containing common artifacts
 * for more efficient storage.
 */
abstract class AbstractEntryExtractor {
    protected readonly gradleUserHome: string
    private extractorName: string

    constructor(gradleUserHome: string, extractorName: string) {
        this.gradleUserHome = gradleUserHome
        this.extractorName = extractorName
    }

    /**
     * Restores any artifacts that were cached separately, based on the information in the `cache-metadata.json` file.
     * Each extracted cache entry is restored in parallel, except when debugging is enabled.
     */
    async restore(listener: CacheListener): Promise<void> {
        const previouslyExtractedCacheEntries = this.loadExtractedCacheEntries()

        const processes: Promise<ExtractedCacheEntry>[] = []

        for (const cacheEntry of previouslyExtractedCacheEntries) {
            const artifactType = cacheEntry.artifactType
            const entryListener = listener.entry(cacheEntry.pattern)

            // Handle case where the extracted-cache-entry definitions have been changed
            const skipRestore = process.env[SKIP_RESTORE_VAR] || ''
            if (skipRestore.includes(artifactType)) {
                core.info(`Not restoring extracted cache entry for ${artifactType}`)
                entryListener.markRequested('SKIP_RESTORE')
            } else {
                processes.push(
                    this.awaitForDebugging(
                        this.restoreExtractedCacheEntry(
                            artifactType,
                            cacheEntry.cacheKey!,
                            cacheEntry.pattern,
                            entryListener
                        )
                    )
                )
            }
        }

        this.saveMetadataForCacheResults(await Promise.all(processes))
    }

    private async restoreExtractedCacheEntry(
        artifactType: string,
        cacheKey: string,
        pattern: string,
        listener: CacheEntryListener
    ): Promise<ExtractedCacheEntry> {
        const restoredEntry = await restoreCache([pattern], cacheKey, [], listener)
        if (restoredEntry) {
            core.info(`Restored ${artifactType} with key ${cacheKey} to ${pattern}`)
            return new ExtractedCacheEntry(artifactType, pattern, cacheKey)
        } else {
            core.info(`Did not restore ${artifactType} with key ${cacheKey} to ${pattern}`)
            return new ExtractedCacheEntry(artifactType, pattern, undefined)
        }
    }

    /**
     * Saves any artifacts that are configured to be cached separately, based on the extracted cache entry definitions.
     * Each entry is extracted and saved in parallel, except when debugging is enabled.
     */
    async extract(listener: CacheListener): Promise<void> {
        // Load the cache entry definitions (from config) and the previously restored entries (from persisted metadata file)
        const cacheEntryDefinitions = this.getExtractedCacheEntryDefinitions()
        cacheDebug(
            `Extracting cache entries for ${this.extractorName}: ${JSON.stringify(cacheEntryDefinitions, null, 2)}`
        )

        const previouslyRestoredEntries = this.loadExtractedCacheEntries()
        const cacheActions: Promise<ExtractedCacheEntry>[] = []

        // For each cache entry definition, determine if it has already been restored, and if not, extract it
        for (const cacheEntryDefinition of cacheEntryDefinitions) {
            const artifactType = cacheEntryDefinition.artifactType
            const pattern = cacheEntryDefinition.pattern

            if (cacheEntryDefinition.notCacheableReason) {
                listener.entry(pattern).markNotSaved(cacheEntryDefinition.notCacheableReason)
                continue
            }

            // Find all matching files for this cache entry definition
            const globber = await glob.create(pattern, {
                implicitDescendants: false
            })
            const matchingFiles = await globber.glob()

            if (matchingFiles.length === 0) {
                cacheDebug(`No files found to cache for ${artifactType}`)
                continue
            }

            if (cacheEntryDefinition.bundle) {
                // For an extracted "bundle", use the defined pattern and cache all matching files in a single entry.
                cacheActions.push(
                    this.awaitForDebugging(
                        this.saveExtractedCacheEntry(
                            matchingFiles,
                            artifactType,
                            pattern,
                            cacheEntryDefinition.uniqueFileNames,
                            previouslyRestoredEntries,
                            listener.entry(pattern)
                        )
                    )
                )
            } else {
                // Otherwise cache each matching file in a separate entry, using the complete file path as the cache pattern.
                for (const cacheFile of matchingFiles) {
                    cacheActions.push(
                        this.awaitForDebugging(
                            this.saveExtractedCacheEntry(
                                [cacheFile],
                                artifactType,
                                cacheFile,
                                cacheEntryDefinition.uniqueFileNames,
                                previouslyRestoredEntries,
                                listener.entry(cacheFile)
                            )
                        )
                    )
                }
            }
        }

        this.saveMetadataForCacheResults(await Promise.all(cacheActions))
    }

    private async saveExtractedCacheEntry(
        matchingFiles: string[],
        artifactType: string,
        pattern: string,
        uniqueFileNames: boolean,
        previouslyRestoredEntries: ExtractedCacheEntry[],
        entryListener: CacheEntryListener
    ): Promise<ExtractedCacheEntry> {
        const cacheKey = uniqueFileNames
            ? this.createCacheKeyFromFileNames(artifactType, matchingFiles)
            : await this.createCacheKeyFromFileContents(artifactType, pattern)
        const previouslyRestoredKey = previouslyRestoredEntries.find(
            x => x.artifactType === artifactType && x.pattern === pattern
        )?.cacheKey

        if (previouslyRestoredKey === cacheKey) {
            cacheDebug(`No change to previously restored ${artifactType}. Not saving.`)
            entryListener.markNotSaved('contents unchanged')
        } else {
            core.info(`Caching ${artifactType} with path '${pattern}' and cache key: ${cacheKey}`)
            await saveCache([pattern], cacheKey, entryListener)
        }

        for (const file of matchingFiles) {
            tryDelete(file)
        }

        return new ExtractedCacheEntry(artifactType, pattern, cacheKey)
    }

    protected createCacheKeyFromFileNames(artifactType: string, files: string[]): string {
        const cacheKeyPrefix = getCacheKeyPrefix()
        const relativeFiles = files.map(x => path.relative(this.gradleUserHome, x))
        const key = hashFileNames(relativeFiles)

        cacheDebug(`Generating cache key for ${artifactType} from file names: ${relativeFiles}`)

        return `${cacheKeyPrefix}${artifactType}-${key}`
    }

    protected async createCacheKeyFromFileContents(artifactType: string, pattern: string): Promise<string> {
        const cacheKeyPrefix = getCacheKeyPrefix()
        const key = await glob.hashFiles(pattern)

        cacheDebug(`Generating cache key for ${artifactType} from files matching: ${pattern}`)

        return `${cacheKeyPrefix}${artifactType}-${key}`
    }

    // Run actions sequentially if debugging is enabled
    private async awaitForDebugging(p: Promise<ExtractedCacheEntry>): Promise<ExtractedCacheEntry> {
        if (params.isCacheDebuggingEnabled()) {
            await p
        }
        return p
    }

    /**
     * Load information about the extracted cache entries previously restored/saved. This is loaded from the 'cache-metadata.json' file.
     */
    protected loadExtractedCacheEntries(): ExtractedCacheEntry[] {
        const cacheMetadataFile = this.getCacheMetadataFile()
        if (!fs.existsSync(cacheMetadataFile)) {
            return []
        }

        const filedata = fs.readFileSync(cacheMetadataFile, 'utf-8')
        cacheDebug(`Loaded cache metadata for ${this.extractorName}: ${filedata}`)
        const extractedCacheEntryMetadata = JSON.parse(filedata) as ExtractedCacheEntryMetadata
        return extractedCacheEntryMetadata.entries
    }

    /**
     * Saves information about the extracted cache entries into the 'cache-metadata.json' file.
     */
    protected saveMetadataForCacheResults(results: ExtractedCacheEntry[]): void {
        const extractedCacheEntryMetadata = new ExtractedCacheEntryMetadata()
        extractedCacheEntryMetadata.entries = results.filter(x => x.cacheKey !== undefined)

        const filedata = JSON.stringify(extractedCacheEntryMetadata)
        cacheDebug(`Saving cache metadata for ${this.extractorName}: ${filedata}`)

        fs.writeFileSync(this.getCacheMetadataFile(), filedata, 'utf-8')
    }

    private getCacheMetadataFile(): string {
        const actionMetadataDirectory = path.resolve(this.gradleUserHome, META_FILE_DIR)
        fs.mkdirSync(actionMetadataDirectory, {recursive: true})

        return path.resolve(actionMetadataDirectory, `${this.extractorName}-entry-metadata.json`)
    }

    protected abstract getExtractedCacheEntryDefinitions(): ExtractedCacheEntryDefinition[]
}

export class GradleHomeEntryExtractor extends AbstractEntryExtractor {
    constructor(gradleUserHome: string) {
        super(gradleUserHome, 'gradle-home')
    }

    async extract(listener: CacheListener): Promise<void> {
        await this.deleteWrapperZips()
        return super.extract(listener)
    }

    /**
     * Delete any downloaded wrapper zip files that are not needed after extraction.
     * These files are cleaned up by Gradle >= 7.5, but for older versions we remove them manually.
     */
    private async deleteWrapperZips(): Promise<void> {
        const wrapperZips = path.resolve(this.gradleUserHome, 'wrapper/dists/*/*/*.zip')
        const globber = await glob.create(wrapperZips, {
            implicitDescendants: false
        })

        for (const wrapperZip of await globber.glob()) {
            cacheDebug(`Deleting wrapper zip: ${wrapperZip}`)
            await tryDelete(wrapperZip)
        }
    }

    /**
     * Return the extracted cache entry definitions, which determine which artifacts will be cached
     * separately from the rest of the Gradle User Home cache entry.
     */
    protected getExtractedCacheEntryDefinitions(): ExtractedCacheEntryDefinition[] {
        const entryDefinition = (
            artifactType: string,
            patterns: string[],
            bundle: boolean
        ): ExtractedCacheEntryDefinition => {
            const resolvedPatterns = patterns
                .map(x => {
                    const isDir = x.endsWith('/')
                    const resolved = path.resolve(this.gradleUserHome, x)
                    return isDir ? `${resolved}/` : resolved // Restore trailing '/' removed by path.resolve()
                })
                .join('\n')
            return new ExtractedCacheEntryDefinition(artifactType, resolvedPatterns, bundle)
        }

        return [
            entryDefinition('generated-gradle-jars', ['caches/*/generated-gradle-jars/*.jar'], false),
            entryDefinition('wrapper-zips', ['wrapper/dists/*/*/'], false), // Each wrapper directory cached separately
            entryDefinition('java-toolchains', ['jdks/*/'], false), // Each extracted JDK cached separately
            entryDefinition('dependencies', ['caches/modules-*/files-*/*/*/*/*'], true),
            entryDefinition('instrumented-jars', ['caches/jars-*/*'], true),
            entryDefinition('kotlin-dsl', ['caches/*/kotlin-dsl/*/*'], true)
        ]
    }
}

export class ConfigurationCacheEntryExtractor extends AbstractEntryExtractor {
    constructor(gradleUserHome: string) {
        super(gradleUserHome, 'configuration-cache')
    }

    /**
     * Handle the case where Gradle User Home has not been fully restored, so that the configuration-cache
     * entry is not reusable.
     */
    async restore(listener: CacheListener): Promise<void> {
        if (!listener.fullyRestored) {
            this.markNotRestored(listener, 'Gradle User Home was not fully restored')
            return
        }

        if (!params.getCacheEncryptionKey()) {
            this.markNotRestored(listener, 'Encryption Key was not provided')
            return
        }

        return await super.restore(listener)
    }

    private markNotRestored(listener: CacheListener, reason: string): void {
        const cacheEntries = this.loadExtractedCacheEntries()
        if (cacheEntries.length > 0) {
            core.info(`Not restoring configuration-cache state, as ${reason}`)
            for (const cacheEntry of cacheEntries) {
                listener.entry(cacheEntry.pattern).markNotRestored(reason)
            }

            // Update the results file based on no entries restored
            this.saveMetadataForCacheResults([])
        }
    }

    async extract(listener: CacheListener): Promise<void> {
        if (!params.getCacheEncryptionKey()) {
            const cacheEntryDefinitions = this.getExtractedCacheEntryDefinitions()
            if (cacheEntryDefinitions.length > 0) {
                core.info('Not saving configuration-cache state, as no encryption key was provided')
                for (const cacheEntry of cacheEntryDefinitions) {
                    listener.entry(cacheEntry.pattern).markNotSaved('No encryption key provided')
                }
            }
            return
        }

        await super.extract(listener)
    }

    /**
     * Extract cache entries for the configuration cache in each project.
     */
    protected getExtractedCacheEntryDefinitions(): ExtractedCacheEntryDefinition[] {
        // Group BuildResult by existing configCacheDir
        const groupedResults = this.getConfigCacheDirectoriesWithAssociatedBuildResults()

        return Object.entries(groupedResults).map(([configCachePath, pathResults]) => {
            // Create an entry definition for each unique configuration cache directory
            const definition = new ExtractedCacheEntryDefinition(
                'configuration-cache',
                configCachePath,
                true
            ).withNonUniqueFileNames()

            // If any associated build result used Gradle < 8.6, then mark it as not cacheable
            if (
                pathResults.find(result => {
                    const gradleVersion = semver.coerce(result.gradleVersion)
                    return gradleVersion && semver.lt(gradleVersion, '8.6.0')
                })
            ) {
                core.info(
                    `Not saving config-cache data for ${configCachePath}. Configuration cache data is only saved for Gradle 8.6+`
                )
                definition.notCacheableBecause('Configuration cache data only saved for Gradle 8.6+')
            }
            return definition
        })
    }

    private getConfigCacheDirectoriesWithAssociatedBuildResults(): Record<string, BuildResult[]> {
        return loadBuildResults().reduce(
            (acc, buildResult) => {
                // For each build result, find the config-cache dir
                const configCachePath = path.resolve(buildResult.rootProjectDir, '.gradle/configuration-cache')
                // Ignore case where config-cache dir doesn't exist
                if (!fs.existsSync(configCachePath)) {
                    return acc
                }

                // Group by unique config cache directories and collect associated build results
                if (!acc[configCachePath]) {
                    acc[configCachePath] = []
                }
                acc[configCachePath].push(buildResult)
                return acc
            },
            {} as Record<string, BuildResult[]>
        )
    }
}
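Note (illustrative, not part of the committed sources): the metadata persisted by saveMetadataForCacheResults() is simply ExtractedCacheEntryMetadata rendered as JSON in Gradle User Home under META_FILE_DIR. A hypothetical gradle-home-entry-metadata.json, with made-up values, might look like:

    // Hypothetical contents of <gradle-user-home>/.gradle-build-action/gradle-home-entry-metadata.json
    const exampleMetadata = {
        entries: [
            {
                artifactType: 'dependencies',
                pattern: '/home/runner/.gradle/caches/modules-*/files-*/*/*/*/*',
                cacheKey: 'dependencies-4f6e2a0c9b1d8e7f6a5b4c3d2e1f0a9b' // `${artifactType}-${hash}`
            }
        ]
    }
    // loadExtractedCacheEntries() parses this file and returns the 'entries' array on the next run.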
sources/src/cache-reporting.ts (new file, 220 lines)
@@ -0,0 +1,220 @@
import * as cache from '@actions/cache'

/**
 * Collects information on what entries were saved and restored during the action.
 * This information is used to generate a summary of the cache usage.
 */
export class CacheListener {
    cacheEntries: CacheEntryListener[] = []
    cacheReadOnly = false
    cacheWriteOnly = false
    cacheDisabled = false
    cacheDisabledReason = 'disabled'

    get fullyRestored(): boolean {
        return this.cacheEntries.every(x => !x.wasRequestedButNotRestored())
    }

    get cacheStatus(): string {
        if (!cache.isFeatureAvailable()) return 'not available'
        if (this.cacheDisabled) return this.cacheDisabledReason
        if (this.cacheWriteOnly) return 'write-only'
        if (this.cacheReadOnly) return 'read-only'
        return 'enabled'
    }

    entry(name: string): CacheEntryListener {
        for (const entry of this.cacheEntries) {
            if (entry.entryName === name) {
                return entry
            }
        }

        const newEntry = new CacheEntryListener(name)
        this.cacheEntries.push(newEntry)
        return newEntry
    }

    stringify(): string {
        return JSON.stringify(this)
    }

    static rehydrate(stringRep: string): CacheListener {
        if (stringRep === '') {
            return new CacheListener()
        }
        const rehydrated: CacheListener = Object.assign(new CacheListener(), JSON.parse(stringRep))
        const entries = rehydrated.cacheEntries
        for (let index = 0; index < entries.length; index++) {
            const rawEntry = entries[index]
            entries[index] = Object.assign(new CacheEntryListener(rawEntry.entryName), rawEntry)
        }
        return rehydrated
    }
}

/**
 * Collects information on the state of a single cache entry.
 */
export class CacheEntryListener {
    entryName: string
    requestedKey: string | undefined
    requestedRestoreKeys: string[] | undefined
    restoredKey: string | undefined
    restoredSize: number | undefined
    notRestored: string | undefined

    savedKey: string | undefined
    savedSize: number | undefined
    notSaved: string | undefined

    constructor(entryName: string) {
        this.entryName = entryName
    }

    wasRequestedButNotRestored(): boolean {
        return this.requestedKey !== undefined && this.restoredKey === undefined
    }

    markRequested(key: string, restoreKeys: string[] = []): CacheEntryListener {
        this.requestedKey = key
        this.requestedRestoreKeys = restoreKeys
        return this
    }

    markRestored(key: string, size: number | undefined): CacheEntryListener {
        this.restoredKey = key
        this.restoredSize = size
        return this
    }

    markNotRestored(message: string): CacheEntryListener {
        this.notRestored = message
        return this
    }

    markSaved(key: string, size: number | undefined): CacheEntryListener {
        this.savedKey = key
        this.savedSize = size
        return this
    }

    markAlreadyExists(key: string): CacheEntryListener {
        this.savedKey = key
        this.savedSize = 0
        return this
    }

    markNotSaved(message: string): CacheEntryListener {
        this.notSaved = message
        return this
    }
}

export function generateCachingReport(listener: CacheListener): string {
    const entries = listener.cacheEntries

    return `
<details>
<summary><h4>Caching for gradle-build-action was ${listener.cacheStatus} - expand for details</h4></summary>
${renderEntryTable(entries)}

<h5>Cache Entry Details</h5>
<pre>
${renderEntryDetails(listener)}
</pre>
</details>
`
}

function renderEntryTable(entries: CacheEntryListener[]): string {
    return `
<table>
    <tr><td></td><th>Count</th><th>Total Size (Mb)</th></tr>
    <tr><td>Entries Restored</td>
        <td>${getCount(entries, e => e.restoredSize)}</td>
        <td>${getSize(entries, e => e.restoredSize)}</td>
    </tr>
    <tr><td>Entries Saved</td>
        <td>${getCount(entries, e => e.savedSize)}</td>
        <td>${getSize(entries, e => e.savedSize)}</td>
    </tr>
</table>
`
}

function renderEntryDetails(listener: CacheListener): string {
    return listener.cacheEntries
        .map(
            entry => `Entry: ${entry.entryName}
    Requested Key : ${entry.requestedKey ?? ''}
    Restored Key : ${entry.restoredKey ?? ''}
    Size: ${formatSize(entry.restoredSize)}
    ${getRestoredMessage(entry, listener.cacheWriteOnly)}
    Saved Key : ${entry.savedKey ?? ''}
    Size: ${formatSize(entry.savedSize)}
    ${getSavedMessage(entry, listener.cacheReadOnly)}
`
        )
        .join('---\n')
}

function getRestoredMessage(entry: CacheEntryListener, cacheWriteOnly: boolean): string {
    if (entry.notRestored) {
        return `(Entry not restored: ${entry.notRestored})`
    }
    if (cacheWriteOnly) {
        return '(Entry not restored: cache is write-only)'
    }
    if (entry.requestedKey === undefined) {
        return '(Entry not restored: not requested)'
    }
    if (entry.restoredKey === undefined) {
        return '(Entry not restored: no match found)'
    }
    if (entry.restoredKey === entry.requestedKey) {
        return '(Entry restored: exact match found)'
    }
    return '(Entry restored: partial match found)'
}

function getSavedMessage(entry: CacheEntryListener, cacheReadOnly: boolean): string {
    if (entry.notSaved) {
        return `(Entry not saved: ${entry.notSaved})`
    }
    if (entry.savedKey === undefined) {
        if (cacheReadOnly) {
            return '(Entry not saved: cache is read-only)'
        }
        if (entry.notRestored) {
            return '(Entry not saved: not restored)'
        }
        return '(Entry not saved: reason unknown)'
    }
    if (entry.savedSize === 0) {
        return '(Entry not saved: entry with key already exists)'
    }
    return '(Entry saved)'
}

function getCount(
    cacheEntries: CacheEntryListener[],
    predicate: (value: CacheEntryListener) => number | undefined
): number {
    return cacheEntries.filter(e => predicate(e)).length
}

function getSize(
    cacheEntries: CacheEntryListener[],
    predicate: (value: CacheEntryListener) => number | undefined
): number {
    const bytes = cacheEntries.map(e => predicate(e) ?? 0).reduce((p, v) => p + v, 0)
    return Math.round(bytes / (1024 * 1024))
}

function formatSize(bytes: number | undefined): string {
    if (bytes === undefined || bytes === 0) {
        return ''
    }
    return `${Math.round(bytes / (1024 * 1024))} MB (${bytes} B)`
}
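Note (illustrative, not part of the committed sources): stringify()/rehydrate() exist so a CacheListener can cross the main-step/post-step boundary as a plain string, with class prototypes re-attached on the way back. A minimal round-trip sketch, assuming it runs alongside these sources:

    import {CacheListener} from './cache-reporting'

    const listener = new CacheListener()
    listener.entry('Gradle User Home').markRequested('v9-gradle|Linux|build[abc]-sha', ['v9-gradle|Linux|build[abc]'])
    const saved = listener.stringify() // typically persisted between steps, e.g. via core.saveState
    const revived = CacheListener.rehydrate(saved) // re-wraps each raw entry in CacheEntryListener
    console.log(revived.fullyRestored) // false: the entry was requested but never marked restored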
sources/src/cache-utils.ts (new file, 250 lines)
@@ -0,0 +1,250 @@
import * as core from '@actions/core'
import * as cache from '@actions/cache'
import * as github from '@actions/github'
import * as exec from '@actions/exec'

import * as crypto from 'crypto'
import * as path from 'path'
import * as fs from 'fs'

import * as params from './input-params'

import {CacheEntryListener} from './cache-reporting'

const CACHE_PROTOCOL_VERSION = 'v9-'

const CACHE_KEY_PREFIX_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_PREFIX'
const CACHE_KEY_OS_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_ENVIRONMENT'
const CACHE_KEY_JOB_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB'
const CACHE_KEY_JOB_INSTANCE_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_INSTANCE'
const CACHE_KEY_JOB_EXECUTION_VAR = 'GRADLE_BUILD_ACTION_CACHE_KEY_JOB_EXECUTION'

const SEGMENT_DOWNLOAD_TIMEOUT_VAR = 'SEGMENT_DOWNLOAD_TIMEOUT_MINS'
const SEGMENT_DOWNLOAD_TIMEOUT_DEFAULT = 10 * 60 * 1000 // 10 minutes

export function isCacheDisabled(): boolean {
    if (!cache.isFeatureAvailable()) {
        return true
    }
    return params.isCacheDisabled()
}

export function isCacheReadOnly(): boolean {
    return !isCacheWriteOnly() && params.isCacheReadOnly()
}

export function isCacheWriteOnly(): boolean {
    return params.isCacheWriteOnly()
}

export function isCacheOverwriteExisting(): boolean {
    return params.isCacheOverwriteExisting()
}

export function isCacheDebuggingEnabled(): boolean {
    return params.isCacheDebuggingEnabled()
}

export function isCacheCleanupEnabled(): boolean {
    return params.isCacheCleanupEnabled()
}

/**
 * Represents a key used to restore a cache entry.
 * The Github Actions cache will first try for an exact match on the key.
 * If that fails, it will try for a prefix match on any of the restoreKeys.
 */
export class CacheKey {
    key: string
    restoreKeys: string[]

    constructor(key: string, restoreKeys: string[]) {
        this.key = key
        this.restoreKeys = restoreKeys
    }
}

/**
 * Generates a cache key specific to the current job execution.
 * The key is constructed from the following inputs (with some user overrides):
 * - The cache protocol version
 * - The name of the cache
 * - The runner operating system
 * - The name of the workflow and Job being executed
 * - The matrix values for the Job being executed (job context)
 * - The SHA of the commit being executed
 *
 * Caches are restored by trying to match these key prefixes in order:
 * - The full key with SHA
 * - A previous key for this Job + matrix
 * - Any previous key for this Job (any matrix)
 * - Any previous key for this cache on the current OS
 */
export function generateCacheKey(cacheName: string): CacheKey {
    const cacheKeyBase = `${getCacheKeyPrefix()}${CACHE_PROTOCOL_VERSION}${cacheName}`

    // At the most general level, share caches for all executions on the same OS
    const cacheKeyForEnvironment = `${cacheKeyBase}|${getCacheKeyEnvironment()}`

    // Then prefer caches that run job with the same ID
    const cacheKeyForJob = `${cacheKeyForEnvironment}|${getCacheKeyJob()}`

    // Prefer (even more) jobs that run this job in the same workflow with the same context (matrix)
    const cacheKeyForJobContext = `${cacheKeyForJob}[${getCacheKeyJobInstance()}]`

    // Exact match on Git SHA
    const cacheKey = `${cacheKeyForJobContext}-${getCacheKeyJobExecution()}`

    if (params.isCacheStrictMatch()) {
        return new CacheKey(cacheKey, [cacheKeyForJobContext])
    }

    return new CacheKey(cacheKey, [cacheKeyForJobContext, cacheKeyForJob, cacheKeyForEnvironment])
}

export function getCacheKeyPrefix(): string {
    // Prefix can be used to force change all cache keys (defaults to cache protocol version)
    return process.env[CACHE_KEY_PREFIX_VAR] || ''
}

function getCacheKeyEnvironment(): string {
    const runnerOs = process.env['RUNNER_OS'] || ''
    return process.env[CACHE_KEY_OS_VAR] || runnerOs
}

function getCacheKeyJob(): string {
    return process.env[CACHE_KEY_JOB_VAR] || github.context.job
}

function getCacheKeyJobInstance(): string {
    const override = process.env[CACHE_KEY_JOB_INSTANCE_VAR]
    if (override) {
        return override
    }

    // By default, we hash the workflow name and the full `matrix` data for the run, to uniquely identify this job invocation
    // The only way we can obtain the `matrix` data is via the `workflow-job-context` parameter in action.yml.
    const workflowName = github.context.workflow
    const workflowJobContext = params.getJobMatrix()
    return hashStrings([workflowName, workflowJobContext])
}

function getCacheKeyJobExecution(): string {
    // Used to associate a cache key with a particular execution (default is bound to the git commit sha)
    return process.env[CACHE_KEY_JOB_EXECUTION_VAR] || github.context.sha
}

export function hashFileNames(fileNames: string[]): string {
    return hashStrings(fileNames.map(x => x.replace(new RegExp(`\\${path.sep}`, 'g'), '/')))
}

export function hashStrings(values: string[]): string {
    const hash = crypto.createHash('md5')
    for (const value of values) {
        hash.update(value)
    }
    return hash.digest('hex')
}

export async function restoreCache(
    cachePath: string[],
    cacheKey: string,
    cacheRestoreKeys: string[],
    listener: CacheEntryListener
): Promise<cache.CacheEntry | undefined> {
    listener.markRequested(cacheKey, cacheRestoreKeys)
    try {
        // Only override the read timeout if the SEGMENT_DOWNLOAD_TIMEOUT_MINS env var has NOT been set
        const cacheRestoreOptions = process.env[SEGMENT_DOWNLOAD_TIMEOUT_VAR]
            ? {}
            : {segmentTimeoutInMs: SEGMENT_DOWNLOAD_TIMEOUT_DEFAULT}
        const restoredEntry = await cache.restoreCache(cachePath, cacheKey, cacheRestoreKeys, cacheRestoreOptions)
        if (restoredEntry !== undefined) {
            listener.markRestored(restoredEntry.key, restoredEntry.size)
        }
        return restoredEntry
    } catch (error) {
        listener.markNotRestored((error as Error).message)
        handleCacheFailure(error, `Failed to restore ${cacheKey}`)
        return undefined
    }
}

export async function saveCache(cachePath: string[], cacheKey: string, listener: CacheEntryListener): Promise<void> {
    try {
        const savedEntry = await cache.saveCache(cachePath, cacheKey)
        listener.markSaved(savedEntry.key, savedEntry.size)
    } catch (error) {
        if (error instanceof cache.ReserveCacheError) {
            listener.markAlreadyExists(cacheKey)
        } else {
            listener.markNotSaved((error as Error).message)
        }
        handleCacheFailure(error, `Failed to save cache entry with path '${cachePath}' and key: ${cacheKey}`)
    }
}

export function cacheDebug(message: string): void {
    if (isCacheDebuggingEnabled()) {
        core.info(message)
    } else {
        core.debug(message)
    }
}

export function handleCacheFailure(error: unknown, message: string): void {
    if (error instanceof cache.ValidationError) {
        // Fail on cache validation errors
        throw error
    }
    if (error instanceof cache.ReserveCacheError) {
        // Reserve cache errors are expected if the artifact has been previously cached
        core.info(`${message}: ${error}`)
    } else {
        // Warn on all other errors
        core.warning(`${message}: ${error}`)
        if (error instanceof Error && error.stack) {
            cacheDebug(error.stack)
        }
    }
}

/**
 * Attempt to delete a file or directory, waiting to allow locks to be released
 */
export async function tryDelete(file: string): Promise<void> {
    const maxAttempts = 5
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        if (!fs.existsSync(file)) {
            return
        }
        try {
            const stat = fs.lstatSync(file)
            if (stat.isDirectory()) {
                fs.rmSync(file, {recursive: true})
            } else {
                fs.unlinkSync(file)
            }
            return
        } catch (error) {
            if (attempt === maxAttempts) {
                core.warning(`Failed to delete ${file}, which will impact caching.
It is likely locked by another process. Output of 'jps -ml':
${await getJavaProcesses()}`)
                throw error
            } else {
                cacheDebug(`Attempt to delete ${file} failed. Will try again.`)
                await delay(1000)
            }
        }
    }
}

async function delay(ms: number): Promise<void> {
    return new Promise(resolve => setTimeout(resolve, ms))
}

async function getJavaProcesses(): Promise<string> {
    const jpsOutput = await exec.getExecOutput('jps', ['-lm'])
    return jpsOutput.stdout
}
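Note (illustrative, not part of the committed sources): with none of the override environment variables set, generateCacheKey('gradle') composes keys roughly as below for a Linux runner running a job with id 'build' at commit abc123 (the md5 of workflow name plus matrix is made up here):

    // cacheKeyForEnvironment  : 'v9-gradle|Linux'
    // cacheKeyForJob          : 'v9-gradle|Linux|build'
    // cacheKeyForJobContext   : 'v9-gradle|Linux|build[9f2c...md5 of workflow+matrix]'
    // cacheKey (exact)        : 'v9-gradle|Linux|build[9f2c...]-abc123'
    // restoreKeys (non-strict): [cacheKeyForJobContext, cacheKeyForJob, cacheKeyForEnvironment]
    // with cache-strict-match enabled, only cacheKeyForJobContext is used as a restore key.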
sources/src/caches.ts (new file, 104 lines)
@@ -0,0 +1,104 @@
import * as core from '@actions/core'
import {
    isCacheCleanupEnabled,
    isCacheDisabled,
    isCacheReadOnly,
    isCacheWriteOnly,
    isCacheOverwriteExisting
} from './cache-utils'
import {CacheListener} from './cache-reporting'
import {DaemonController} from './daemon-controller'
import {GradleStateCache} from './cache-base'
import {CacheCleaner} from './cache-cleaner'

const CACHE_RESTORED_VAR = 'GRADLE_BUILD_ACTION_CACHE_RESTORED'

export async function restore(userHome: string, gradleUserHome: string, cacheListener: CacheListener): Promise<void> {
    // Bypass restore cache on all but first action step in workflow.
    if (process.env[CACHE_RESTORED_VAR]) {
        core.info('Cache only restored on first action step.')
        return
    }
    core.exportVariable(CACHE_RESTORED_VAR, true)

    const gradleStateCache = new GradleStateCache(userHome, gradleUserHome)

    if (isCacheDisabled()) {
        core.info('Cache is disabled: will not restore state from previous builds.')
        // Initialize the Gradle User Home even when caching is disabled.
        gradleStateCache.init()
        cacheListener.cacheDisabled = true
        return
    }

    if (gradleStateCache.cacheOutputExists()) {
        if (!isCacheOverwriteExisting()) {
            core.info('Gradle User Home already exists: will not restore from cache.')
            // Initialize pre-existing Gradle User Home.
            gradleStateCache.init()
            cacheListener.cacheDisabled = true
            cacheListener.cacheDisabledReason = 'disabled due to pre-existing Gradle User Home'
            return
        }
        core.info('Gradle User Home already exists: will overwrite with cached contents.')
    }

    gradleStateCache.init()
    // Mark the state as restored so that post-action will perform save.
    core.saveState(CACHE_RESTORED_VAR, true)

    if (isCacheWriteOnly()) {
        core.info('Cache is write-only: will not restore from cache.')
        cacheListener.cacheWriteOnly = true
        return
    }

    await core.group('Restore Gradle state from cache', async () => {
        await gradleStateCache.restore(cacheListener)
    })

    if (isCacheCleanupEnabled() && !isCacheReadOnly()) {
        core.info('Preparing cache for cleanup.')
        const cacheCleaner = new CacheCleaner(gradleUserHome, process.env['RUNNER_TEMP']!)
        await cacheCleaner.prepare()
    }
}

export async function save(
    userHome: string,
    gradleUserHome: string,
    cacheListener: CacheListener,
    daemonController: DaemonController
): Promise<void> {
    if (isCacheDisabled()) {
        core.info('Cache is disabled: will not save state for later builds.')
        return
    }

    if (!core.getState(CACHE_RESTORED_VAR)) {
        core.info('Cache will not be saved: not restored in main action step.')
        return
    }

    if (isCacheReadOnly()) {
        core.info('Cache is read-only: will not save state for use in subsequent builds.')
        cacheListener.cacheReadOnly = true
        return
    }

    await daemonController.stopAllDaemons()

    if (isCacheCleanupEnabled()) {
        core.info('Forcing cache cleanup.')
        const cacheCleaner = new CacheCleaner(gradleUserHome, process.env['RUNNER_TEMP']!)
        try {
            await cacheCleaner.forceCleanup()
        } catch (e) {
            core.warning(`Cache cleanup failed. Will continue. ${String(e)}`)
        }
    }

    await core.group('Caching Gradle state', async () => {
        return new GradleStateCache(userHome, gradleUserHome).save(cacheListener)
    })
}
sources/src/daemon-controller.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as fs from 'fs'
import * as path from 'path'
import {BuildResult} from './build-results'

export class DaemonController {
    private readonly gradleHomes

    constructor(buildResults: BuildResult[]) {
        const allHomes = buildResults.map(buildResult => buildResult.gradleHomeDir)
        this.gradleHomes = Array.from(new Set(allHomes))
    }

    async stopAllDaemons(): Promise<void> {
        core.info('Stopping all Gradle daemons before saving Gradle User Home state')

        const executions: Promise<number>[] = []
        const args = ['--stop']

        for (const gradleHome of this.gradleHomes) {
            const executable = path.resolve(gradleHome, 'bin', 'gradle')
            if (!fs.existsSync(executable)) {
                core.warning(`Gradle executable not found at ${executable}. Could not stop Gradle daemons.`)
                continue
            }
            core.info(`Stopping Gradle daemons for ${gradleHome}`)
            executions.push(
                exec.exec(executable, args, {
                    ignoreReturnCode: true
                })
            )
        }
        await Promise.all(executions)
    }
}
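A small illustrative sketch of the de-duplication above: two build results that used the same provisioned Gradle installation lead to a single `--stop` invocation. The Gradle home path is hypothetical.

import {BuildResult} from './build-results'
import {DaemonController} from './daemon-controller'

async function stopDaemonsSketch(): Promise<void> {
    // Hypothetical results from two builds that ran with the same Gradle installation.
    const results = [
        {gradleHomeDir: '/opt/gradle-8.5'} as BuildResult,
        {gradleHomeDir: '/opt/gradle-8.5'} as BuildResult
    ]
    // The constructor collapses duplicate Gradle homes, so `gradle --stop` runs once here.
    await new DaemonController(results).stopAllDaemons()
}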
247
sources/src/dependency-graph.ts
Normal file
@@ -0,0 +1,247 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as github from '@actions/github'
|
||||
import * as glob from '@actions/glob'
|
||||
import {DefaultArtifactClient} from '@actions/artifact'
|
||||
import {GitHub} from '@actions/github/lib/utils'
|
||||
import {RequestError} from '@octokit/request-error'
|
||||
import type {PullRequestEvent} from '@octokit/webhooks-types'
|
||||
|
||||
import * as path from 'path'
|
||||
import fs from 'fs'
|
||||
|
||||
import * as layout from './repository-layout'
|
||||
import {PostActionJobFailure} from './errors'
|
||||
import {
|
||||
DependencyGraphOption,
|
||||
getDependencyGraphContinueOnFailure,
|
||||
getJobMatrix,
|
||||
getArtifactRetentionDays
|
||||
} from './input-params'
|
||||
|
||||
const DEPENDENCY_GRAPH_PREFIX = 'dependency-graph_'
|
||||
|
||||
export async function setup(option: DependencyGraphOption): Promise<void> {
|
||||
if (option === DependencyGraphOption.Disabled) {
|
||||
return
|
||||
}
|
||||
// Download and submit early, for compatibility with dependency review.
|
||||
if (option === DependencyGraphOption.DownloadAndSubmit) {
|
||||
await downloadAndSubmitDependencyGraphs()
|
||||
return
|
||||
}
|
||||
|
||||
core.info('Enabling dependency graph generation')
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_ENABLED', 'true')
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE', getDependencyGraphContinueOnFailure())
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR', getJobCorrelator())
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_JOB_ID', github.context.runId)
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_REF', github.context.ref)
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_SHA', getShaFromContext())
|
||||
maybeExportVariable('GITHUB_DEPENDENCY_GRAPH_WORKSPACE', layout.workspaceDirectory())
|
||||
maybeExportVariable(
|
||||
'DEPENDENCY_GRAPH_REPORT_DIR',
|
||||
path.resolve(layout.workspaceDirectory(), 'dependency-graph-reports')
|
||||
)
|
||||
|
||||
// To clear the dependency graph, we generate an empty graph by excluding all projects and configurations
|
||||
if (option === DependencyGraphOption.Clear) {
|
||||
core.exportVariable('DEPENDENCY_GRAPH_INCLUDE_PROJECTS', '')
|
||||
core.exportVariable('DEPENDENCY_GRAPH_INCLUDE_CONFIGURATIONS', '')
|
||||
}
|
||||
}
|
||||
|
||||
function maybeExportVariable(variableName: string, value: unknown): void {
|
||||
if (!process.env[variableName]) {
|
||||
core.exportVariable(variableName, value)
|
||||
}
|
||||
}
|
||||
|
||||
export async function complete(option: DependencyGraphOption): Promise<void> {
|
||||
try {
|
||||
switch (option) {
|
||||
case DependencyGraphOption.Disabled:
|
||||
case DependencyGraphOption.Generate: // Performed via init-script: nothing to do here
|
||||
case DependencyGraphOption.DownloadAndSubmit: // Performed in setup
|
||||
return
|
||||
case DependencyGraphOption.GenerateAndSubmit:
|
||||
case DependencyGraphOption.Clear: // Submit the empty dependency graph
|
||||
await submitDependencyGraphs(await findGeneratedDependencyGraphFiles())
|
||||
return
|
||||
case DependencyGraphOption.GenerateAndUpload:
|
||||
await uploadDependencyGraphs(await findGeneratedDependencyGraphFiles())
|
||||
}
|
||||
} catch (e) {
|
||||
warnOrFail(option, e)
|
||||
}
|
||||
}
|
||||
|
||||
async function findGeneratedDependencyGraphFiles(): Promise<string[]> {
|
||||
const workspaceDirectory = layout.workspaceDirectory()
|
||||
return await findDependencyGraphFiles(workspaceDirectory)
|
||||
}
|
||||
|
||||
async function uploadDependencyGraphs(dependencyGraphFiles: string[]): Promise<void> {
|
||||
const workspaceDirectory = layout.workspaceDirectory()
|
||||
|
||||
const artifactClient = new DefaultArtifactClient()
|
||||
for (const dependencyGraphFile of dependencyGraphFiles) {
|
||||
const relativePath = getRelativePathFromWorkspace(dependencyGraphFile)
|
||||
core.info(`Uploading dependency graph file: ${relativePath}`)
|
||||
const artifactName = `${DEPENDENCY_GRAPH_PREFIX}${path.basename(dependencyGraphFile)}`
|
||||
await artifactClient.uploadArtifact(artifactName, [dependencyGraphFile], workspaceDirectory, {
|
||||
retentionDays: getArtifactRetentionDays()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async function downloadAndSubmitDependencyGraphs(): Promise<void> {
|
||||
try {
|
||||
await submitDependencyGraphs(await downloadDependencyGraphs())
|
||||
} catch (e) {
|
||||
warnOrFail(DependencyGraphOption.DownloadAndSubmit, e)
|
||||
}
|
||||
}
|
||||
|
||||
async function submitDependencyGraphs(dependencyGraphFiles: string[]): Promise<void> {
|
||||
for (const jsonFile of dependencyGraphFiles) {
|
||||
try {
|
||||
await submitDependencyGraphFile(jsonFile)
|
||||
} catch (error) {
|
||||
if (error instanceof RequestError) {
|
||||
throw new Error(translateErrorMessage(jsonFile, error))
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function translateErrorMessage(jsonFile: string, error: RequestError): string {
|
||||
const relativeJsonFile = getRelativePathFromWorkspace(jsonFile)
|
||||
const mainWarning = `Dependency submission failed for ${relativeJsonFile}.\n${String(error)}`
|
||||
if (error.message === 'Resource not accessible by integration') {
|
||||
return `${mainWarning}
|
||||
Please ensure that the 'contents: write' permission is available for the workflow job.
|
||||
Note that this permission is never available for a 'pull_request' trigger from a repository fork.
|
||||
`
|
||||
}
|
||||
return mainWarning
|
||||
}
|
||||
|
||||
async function submitDependencyGraphFile(jsonFile: string): Promise<void> {
|
||||
const octokit = getOctokit()
|
||||
const jsonContent = fs.readFileSync(jsonFile, 'utf8')
|
||||
|
||||
const jsonObject = JSON.parse(jsonContent)
|
||||
jsonObject.owner = github.context.repo.owner
|
||||
jsonObject.repo = github.context.repo.repo
|
||||
const response = await octokit.request('POST /repos/{owner}/{repo}/dependency-graph/snapshots', jsonObject)
|
||||
|
||||
const relativeJsonFile = getRelativePathFromWorkspace(jsonFile)
|
||||
core.notice(`Submitted ${relativeJsonFile}: ${response.data.message}`)
|
||||
}
|
||||
|
||||
async function downloadDependencyGraphs(): Promise<string[]> {
|
||||
const workspaceDirectory = layout.workspaceDirectory()
|
||||
|
||||
const findBy = github.context.payload.workflow_run
|
||||
? {
|
||||
token: getGithubToken(),
|
||||
workflowRunId: github.context.payload.workflow_run.id,
|
||||
repositoryName: github.context.repo.repo,
|
||||
repositoryOwner: github.context.repo.owner
|
||||
}
|
||||
: undefined
|
||||
|
||||
const artifactClient = new DefaultArtifactClient()
|
||||
const downloadPath = path.resolve(workspaceDirectory, 'dependency-graph')
|
||||
|
||||
const dependencyGraphArtifacts = (
|
||||
await artifactClient.listArtifacts({
|
||||
latest: true,
|
||||
findBy
|
||||
})
|
||||
).artifacts.filter(candidate => candidate.name.startsWith(DEPENDENCY_GRAPH_PREFIX))
|
||||
|
||||
for (const artifact of dependencyGraphArtifacts) {
|
||||
const downloadedArtifact = await artifactClient.downloadArtifact(artifact.id, {
|
||||
path: downloadPath,
|
||||
findBy
|
||||
})
|
||||
core.info(`Downloading dependency-graph artifact ${artifact.name} to ${downloadedArtifact.downloadPath}`)
|
||||
}
|
||||
|
||||
return findDependencyGraphFiles(downloadPath)
|
||||
}
|
||||
|
||||
async function findDependencyGraphFiles(dir: string): Promise<string[]> {
|
||||
const globber = await glob.create(`${dir}/dependency-graph-reports/*.json`)
|
||||
const graphFiles = await globber.glob()
|
||||
return graphFiles
|
||||
}
|
||||
|
||||
function warnOrFail(option: DependencyGraphOption, error: unknown): void {
|
||||
if (!getDependencyGraphContinueOnFailure()) {
|
||||
throw new PostActionJobFailure(error)
|
||||
}
|
||||
|
||||
core.warning(`Failed to ${option} dependency graph. Will continue.\n${String(error)}`)
|
||||
}
|
||||
|
||||
function getOctokit(): InstanceType<typeof GitHub> {
|
||||
return github.getOctokit(getGithubToken())
|
||||
}
|
||||
|
||||
function getGithubToken(): string {
|
||||
return core.getInput('github-token', {required: true})
|
||||
}
|
||||
|
||||
function getRelativePathFromWorkspace(file: string): string {
|
||||
const workspaceDirectory = layout.workspaceDirectory()
|
||||
return path.relative(workspaceDirectory, file)
|
||||
}
|
||||
|
||||
function getShaFromContext(): string {
|
||||
const context = github.context
|
||||
const pullRequestEvents = [
|
||||
'pull_request',
|
||||
'pull_request_comment',
|
||||
'pull_request_review',
|
||||
'pull_request_review_comment'
|
||||
// Note that pull_request_target is omitted here.
|
||||
// That event runs in the context of the base commit of the PR,
|
||||
// so the snapshot should not be associated with the head commit.
|
||||
]
|
||||
if (pullRequestEvents.includes(context.eventName)) {
|
||||
const pr = (context.payload as PullRequestEvent).pull_request
|
||||
return pr.head.sha
|
||||
} else {
|
||||
return context.sha
|
||||
}
|
||||
}
|
||||
|
||||
function getJobCorrelator(): string {
|
||||
return constructJobCorrelator(github.context.workflow, github.context.job, getJobMatrix())
|
||||
}
|
||||
|
||||
export function constructJobCorrelator(workflow: string, jobId: string, matrixJson: string): string {
|
||||
const matrixString = describeMatrix(matrixJson)
|
||||
const label = matrixString ? `${workflow}-${jobId}-${matrixString}` : `${workflow}-${jobId}`
|
||||
return sanitize(label)
|
||||
}
|
||||
|
||||
function describeMatrix(matrixJson: string): string {
|
||||
core.debug(`Got matrix json: ${matrixJson}`)
|
||||
const matrix = JSON.parse(matrixJson)
|
||||
if (matrix) {
|
||||
return Object.values(matrix).join('-')
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
function sanitize(value: string): string {
|
||||
return value
|
||||
.replace(/[^a-zA-Z0-9_-\s]/g, '')
|
||||
.replace(/\s+/g, '_')
|
||||
.toLowerCase()
|
||||
}
|
||||
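A worked example of the correlator construction above, with illustrative workflow, job and matrix values:

import {constructJobCorrelator} from './dependency-graph'

// workflow 'CI Build', job 'test', matrix {"os":"ubuntu-latest","java":"17"}:
constructJobCorrelator('CI Build', 'test', '{"os":"ubuntu-latest","java":"17"}')
// describeMatrix -> 'ubuntu-latest-17'
// raw label      -> 'CI Build-test-ubuntu-latest-17'
// sanitize       -> 'ci_build-test-ubuntu-latest-17'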
11
sources/src/errors.ts
Normal file
@@ -0,0 +1,11 @@
export class PostActionJobFailure extends Error {
    constructor(error: unknown) {
        if (error instanceof Error) {
            super(error.message)
            this.name = error.name
            this.stack = error.stack
        } else {
            super(String(error))
        }
    }
}
17
sources/src/execution.ts
Normal file
@@ -0,0 +1,17 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as gradlew from './gradlew'

export async function executeGradleBuild(executable: string | undefined, root: string, args: string[]): Promise<void> {
    // Use the provided executable, or look for a Gradle wrapper script to run
    const toExecute = executable ?? gradlew.gradleWrapperScript(root)

    const status: number = await exec.exec(toExecute, args, {
        cwd: root,
        ignoreReturnCode: true
    })

    if (status !== 0) {
        core.setFailed(`Gradle build failed: see console output for details`)
    }
}
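A hedged usage sketch of the entry point above; the project path and arguments are illustrative:

import {executeGradleBuild} from './execution'

async function runBuildSketch(): Promise<void> {
    // No explicit executable: falls back to ./gradlew in the build root.
    // A non-zero exit code marks the step failed via core.setFailed rather than throwing.
    await executeGradleBuild(undefined, '/home/runner/work/my-repo/my-repo', ['build', '--info'])
}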
42
sources/src/gradlew.ts
Normal file
@@ -0,0 +1,42 @@
import * as path from 'path'
import fs from 'fs'

const IS_WINDOWS = process.platform === 'win32'

export function wrapperScriptFilename(): string {
    return IS_WINDOWS ? './gradlew.bat' : './gradlew'
}

export function installScriptFilename(): string {
    return IS_WINDOWS ? 'gradle.bat' : 'gradle'
}

export function gradleWrapperScript(buildRootDirectory: string): string {
    validateGradleWrapper(buildRootDirectory)
    return wrapperScriptFilename()
}

function validateGradleWrapper(buildRootDirectory: string): void {
    const wrapperScript = path.resolve(buildRootDirectory, wrapperScriptFilename())
    verifyExists(wrapperScript, 'Gradle Wrapper script')
    verifyIsExecutableScript(wrapperScript)

    const wrapperProperties = path.resolve(buildRootDirectory, 'gradle/wrapper/gradle-wrapper.properties')
    verifyExists(wrapperProperties, 'Gradle wrapper properties file')
}

function verifyExists(file: string, description: string): void {
    if (!fs.existsSync(file)) {
        throw new Error(
            `Cannot locate ${description} at '${file}'. Specify 'gradle-version' for projects without Gradle wrapper configured.`
        )
    }
}

function verifyIsExecutableScript(toExecute: string): void {
    try {
        fs.accessSync(toExecute, fs.constants.X_OK)
    } catch (err) {
        throw new Error(`Gradle script '${toExecute}' is not executable.`)
    }
}
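Illustrative use of the wrapper resolution above (the project path is hypothetical):

import {gradleWrapperScript} from './gradlew'

// Throws if ./gradlew or gradle/wrapper/gradle-wrapper.properties is missing, or if the script is not executable.
const script = gradleWrapperScript('/home/runner/work/my-repo/my-repo')
// -> './gradlew' on Linux/macOS runners, './gradlew.bat' on Windows runners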
171
sources/src/input-params.ts
Normal file
@@ -0,0 +1,171 @@
|
||||
import * as core from '@actions/core'
|
||||
import {parseArgsStringToArgv} from 'string-argv'
|
||||
|
||||
export function isCacheDisabled(): boolean {
|
||||
return getBooleanInput('cache-disabled')
|
||||
}
|
||||
|
||||
export function isCacheReadOnly(): boolean {
|
||||
return getBooleanInput('cache-read-only')
|
||||
}
|
||||
|
||||
export function isCacheWriteOnly(): boolean {
|
||||
return getBooleanInput('cache-write-only')
|
||||
}
|
||||
|
||||
export function isCacheOverwriteExisting(): boolean {
|
||||
return getBooleanInput('cache-overwrite-existing')
|
||||
}
|
||||
|
||||
export function isCacheStrictMatch(): boolean {
|
||||
return getBooleanInput('gradle-home-cache-strict-match')
|
||||
}
|
||||
|
||||
export function isCacheDebuggingEnabled(): boolean {
|
||||
return process.env['GRADLE_BUILD_ACTION_CACHE_DEBUG_ENABLED'] ? true : false
|
||||
}
|
||||
|
||||
export function isCacheCleanupEnabled(): boolean {
|
||||
return getBooleanInput('gradle-home-cache-cleanup')
|
||||
}
|
||||
|
||||
export function getCacheEncryptionKey(): string {
|
||||
return core.getInput('cache-encryption-key')
|
||||
}
|
||||
|
||||
export function getCacheIncludes(): string[] {
|
||||
return core.getMultilineInput('gradle-home-cache-includes')
|
||||
}
|
||||
|
||||
export function getCacheExcludes(): string[] {
|
||||
return core.getMultilineInput('gradle-home-cache-excludes')
|
||||
}
|
||||
|
||||
export function getBuildRootDirectory(): string {
|
||||
return core.getInput('build-root-directory')
|
||||
}
|
||||
|
||||
export function getGradleVersion(): string {
|
||||
return core.getInput('gradle-version')
|
||||
}
|
||||
|
||||
export function getArguments(): string[] {
|
||||
const input = core.getInput('arguments')
|
||||
return parseArgsStringToArgv(input)
|
||||
}
|
||||
|
||||
// Internal parameters
|
||||
export function getJobMatrix(): string {
|
||||
return core.getInput('workflow-job-context')
|
||||
}
|
||||
|
||||
export function getGithubToken(): string {
|
||||
return core.getInput('github-token', {required: true})
|
||||
}
|
||||
|
||||
export function isJobSummaryEnabled(): boolean {
|
||||
return getBooleanInput('generate-job-summary', true)
|
||||
}
|
||||
|
||||
export function getJobSummaryOption(): JobSummaryOption {
|
||||
return parseJobSummaryOption('add-job-summary')
|
||||
}
|
||||
|
||||
export function getPRCommentOption(): JobSummaryOption {
|
||||
return parseJobSummaryOption('add-job-summary-as-pr-comment')
|
||||
}
|
||||
|
||||
export function getBuildScanPublishEnabled(): boolean {
|
||||
return getBooleanInput('build-scan-publish')
|
||||
}
|
||||
|
||||
export function getBuildScanTermsOfServiceUrl(): string {
|
||||
return core.getInput('build-scan-terms-of-service-url')
|
||||
}
|
||||
|
||||
export function getBuildScanTermsOfServiceAgree(): string {
|
||||
return core.getInput('build-scan-terms-of-service-agree')
|
||||
}
|
||||
|
||||
function parseJobSummaryOption(paramName: string): JobSummaryOption {
|
||||
const val = core.getInput(paramName)
|
||||
switch (val.toLowerCase().trim()) {
|
||||
case 'never':
|
||||
return JobSummaryOption.Never
|
||||
case 'always':
|
||||
return JobSummaryOption.Always
|
||||
case 'on-failure':
|
||||
return JobSummaryOption.OnFailure
|
||||
}
|
||||
throw TypeError(`The value '${val}' is not valid for ${paramName}. Valid values are: [never, always, on-failure].`)
|
||||
}
|
||||
|
||||
export function getDependencyGraphOption(): DependencyGraphOption {
|
||||
const val = core.getInput('dependency-graph')
|
||||
switch (val.toLowerCase().trim()) {
|
||||
case 'disabled':
|
||||
return DependencyGraphOption.Disabled
|
||||
case 'generate':
|
||||
return DependencyGraphOption.Generate
|
||||
case 'generate-and-submit':
|
||||
return DependencyGraphOption.GenerateAndSubmit
|
||||
case 'generate-and-upload':
|
||||
return DependencyGraphOption.GenerateAndUpload
|
||||
case 'download-and-submit':
|
||||
return DependencyGraphOption.DownloadAndSubmit
|
||||
case 'clear':
|
||||
return DependencyGraphOption.Clear
|
||||
}
|
||||
throw TypeError(
|
||||
`The value '${val}' is not valid for 'dependency-graph'. Valid values are: [disabled, generate, generate-and-submit, generate-and-upload, download-and-submit, clear]. The default value is 'disabled'.`
|
||||
)
|
||||
}
|
||||
|
||||
export function getDependencyGraphContinueOnFailure(): boolean {
|
||||
return getBooleanInput('dependency-graph-continue-on-failure', true)
|
||||
}
|
||||
|
||||
export function getArtifactRetentionDays(): number {
|
||||
const val = core.getInput('artifact-retention-days')
|
||||
return parseNumericInput('artifact-retention-days', val, 0)
|
||||
// Zero indicates that the default repository settings should be used
|
||||
}
|
||||
|
||||
export function parseNumericInput(paramName: string, paramValue: string, paramDefault: number): number {
|
||||
if (paramValue.length === 0) {
|
||||
return paramDefault
|
||||
}
|
||||
const numericValue = parseInt(paramValue)
|
||||
if (isNaN(numericValue)) {
|
||||
throw TypeError(`The value '${paramValue}' is not a valid numeric value for '${paramName}'.`)
|
||||
}
|
||||
return numericValue
|
||||
}
|
||||
|
||||
function getBooleanInput(paramName: string, paramDefault = false): boolean {
|
||||
const paramValue = core.getInput(paramName)
|
||||
switch (paramValue.toLowerCase().trim()) {
|
||||
case '':
|
||||
return paramDefault
|
||||
case 'false':
|
||||
return false
|
||||
case 'true':
|
||||
return true
|
||||
}
|
||||
throw TypeError(`The value '${paramValue}' is not valid for '${paramName}'. Valid values are: [true, false]`)
|
||||
}
|
||||
|
||||
export enum DependencyGraphOption {
|
||||
Disabled = 'disabled',
|
||||
Generate = 'generate',
|
||||
GenerateAndSubmit = 'generate-and-submit',
|
||||
GenerateAndUpload = 'generate-and-upload',
|
||||
DownloadAndSubmit = 'download-and-submit',
|
||||
Clear = 'clear'
|
||||
}
|
||||
|
||||
export enum JobSummaryOption {
|
||||
Never = 'never',
|
||||
Always = 'always',
|
||||
OnFailure = 'on-failure'
|
||||
}
|
||||
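The numeric parsing helper above behaves as follows (illustrative calls):

import {parseNumericInput} from './input-params'

parseNumericInput('artifact-retention-days', '', 0)    // -> 0 (empty value falls back to the default)
parseNumericInput('artifact-retention-days', '30', 0)  // -> 30
parseNumericInput('artifact-retention-days', 'abc', 0) // throws TypeError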
157
sources/src/job-summary.ts
Normal file
@@ -0,0 +1,157 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as github from '@actions/github'
|
||||
import {SUMMARY_ENV_VAR} from '@actions/core/lib/summary'
|
||||
import {RequestError} from '@octokit/request-error'
|
||||
|
||||
import * as params from './input-params'
|
||||
import {BuildResult} from './build-results'
|
||||
import {CacheListener, generateCachingReport} from './cache-reporting'
|
||||
|
||||
export async function generateJobSummary(buildResults: BuildResult[], cacheListener: CacheListener): Promise<void> {
|
||||
const summaryTable = renderSummaryTable(buildResults)
|
||||
const cachingReport = generateCachingReport(cacheListener)
|
||||
|
||||
if (shouldGenerateJobSummary(buildResults)) {
|
||||
core.info('Generating Job Summary')
|
||||
|
||||
core.summary.addRaw(summaryTable)
|
||||
core.summary.addRaw(cachingReport)
|
||||
await core.summary.write()
|
||||
} else {
|
||||
core.info('============================')
|
||||
core.info(summaryTable)
|
||||
core.info('============================')
|
||||
core.info(cachingReport)
|
||||
core.info('============================')
|
||||
}
|
||||
|
||||
if (shouldAddPRComment(buildResults)) {
|
||||
await addPRComment(summaryTable)
|
||||
}
|
||||
}
|
||||
|
||||
async function addPRComment(jobSummary: string): Promise<void> {
|
||||
const context = github.context
|
||||
if (context.payload.pull_request == null) {
|
||||
core.info('No pull_request trigger: not adding PR comment')
|
||||
return
|
||||
}
|
||||
|
||||
const pull_request_number = context.payload.pull_request.number
|
||||
core.info(`Adding Job Summary as comment to PR #${pull_request_number}.`)
|
||||
|
||||
const prComment = `<h3>Job Summary for gradle-build-action</h3>
|
||||
<h5>${github.context.workflow} :: <em>${github.context.job}</em></h5>
|
||||
|
||||
${jobSummary}`
|
||||
|
||||
const github_token = params.getGithubToken()
|
||||
const octokit = github.getOctokit(github_token)
|
||||
try {
|
||||
await octokit.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number: pull_request_number,
|
||||
body: prComment
|
||||
})
|
||||
} catch (error) {
|
||||
if (error instanceof RequestError) {
|
||||
core.warning(buildWarningMessage(error))
|
||||
} else {
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function buildWarningMessage(error: RequestError): string {
|
||||
const mainWarning = `Failed to generate PR comment.\n${String(error)}`
|
||||
if (error.message === 'Resource not accessible by integration') {
|
||||
return `${mainWarning}
|
||||
Please ensure that the 'pull-requests: write' permission is available for the workflow job.
|
||||
Note that this permission is never available for a workflow triggered from a repository fork.
|
||||
`
|
||||
}
|
||||
return mainWarning
|
||||
}
|
||||
|
||||
function renderSummaryTable(results: BuildResult[]): string {
|
||||
if (results.length === 0) {
|
||||
return 'No Gradle build results detected.'
|
||||
}
|
||||
|
||||
return `
|
||||
<table>
|
||||
<tr>
|
||||
<th>Gradle Root Project</th>
|
||||
<th>Requested Tasks</th>
|
||||
<th>Gradle Version</th>
|
||||
<th>Build Outcome</th>
|
||||
<th>Build Scan®</th>
|
||||
</tr>${results.map(result => renderBuildResultRow(result)).join('')}
|
||||
</table>
|
||||
`
|
||||
}
|
||||
|
||||
function renderBuildResultRow(result: BuildResult): string {
|
||||
return `
|
||||
<tr>
|
||||
<td>${result.rootProjectName}</td>
|
||||
<td>${result.requestedTasks}</td>
|
||||
<td align='center'>${result.gradleVersion}</td>
|
||||
<td align='center'>${renderOutcome(result)}</td>
|
||||
<td>${renderBuildScan(result)}</td>
|
||||
</tr>`
|
||||
}
|
||||
|
||||
function renderOutcome(result: BuildResult): string {
|
||||
return result.buildFailed ? ':x:' : ':white_check_mark:'
|
||||
}
|
||||
|
||||
function renderBuildScan(result: BuildResult): string {
|
||||
if (result.buildScanFailed) {
|
||||
return renderBuildScanBadge(
|
||||
'PUBLISH_FAILED',
|
||||
'orange',
|
||||
'https://docs.gradle.com/enterprise/gradle-plugin/#troubleshooting'
|
||||
)
|
||||
}
|
||||
if (result.buildScanUri) {
|
||||
return renderBuildScanBadge('PUBLISHED', '06A0CE', result.buildScanUri)
|
||||
}
|
||||
return renderBuildScanBadge('NOT_PUBLISHED', 'lightgrey', 'https://scans.gradle.com')
|
||||
}
|
||||
|
||||
function renderBuildScanBadge(outcomeText: string, outcomeColor: string, targetUrl: string): string {
|
||||
const badgeUrl = `https://img.shields.io/badge/Build%20Scan%C2%AE-${outcomeText}-${outcomeColor}?logo=Gradle`
|
||||
const badgeHtml = `<img src="${badgeUrl}" alt="Build Scan ${outcomeText}" />`
|
||||
return `<a href="${targetUrl}" rel="nofollow">${badgeHtml}</a>`
|
||||
}
|
||||
|
||||
function shouldGenerateJobSummary(buildResults: BuildResult[]): boolean {
|
||||
// Check if Job Summary is supported on this platform
|
||||
if (!process.env[SUMMARY_ENV_VAR]) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Check if Job Summary is disabled using the deprecated input
|
||||
if (!params.isJobSummaryEnabled()) {
|
||||
return false
|
||||
}
|
||||
|
||||
return shouldAddJobSummary(params.getJobSummaryOption(), buildResults)
|
||||
}
|
||||
|
||||
function shouldAddPRComment(buildResults: BuildResult[]): boolean {
|
||||
return shouldAddJobSummary(params.getPRCommentOption(), buildResults)
|
||||
}
|
||||
|
||||
function shouldAddJobSummary(option: params.JobSummaryOption, buildResults: BuildResult[]): boolean {
|
||||
switch (option) {
|
||||
case params.JobSummaryOption.Always:
|
||||
return true
|
||||
case params.JobSummaryOption.Never:
|
||||
return false
|
||||
case params.JobSummaryOption.OnFailure:
|
||||
core.info(`Got these build results: ${JSON.stringify(buildResults)}`)
|
||||
return buildResults.some(result => result.buildFailed)
|
||||
}
|
||||
}
|
||||
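For illustration, the renderBuildScanBadge helper in this file produces markup like the following. The Build Scan URL is hypothetical, and the helper is private, shown here only to make the output concrete:

renderBuildScanBadge('PUBLISHED', '06A0CE', 'https://gradle.com/s/abc123')
// -> <a href="https://gradle.com/s/abc123" rel="nofollow">
//      <img src="https://img.shields.io/badge/Build%20Scan%C2%AE-PUBLISHED-06A0CE?logo=Gradle" alt="Build Scan PUBLISHED" />
//    </a>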
37
sources/src/main.ts
Normal file
@@ -0,0 +1,37 @@
import * as core from '@actions/core'

import * as setupGradle from './setup-gradle'
import * as execution from './execution'
import * as provisioner from './provision'
import * as layout from './repository-layout'
import * as params from './input-params'

/**
 * The main entry point for the action, called by GitHub Actions for the step.
 */
export async function run(): Promise<void> {
    try {
        // Configure Gradle environment (Gradle User Home)
        await setupGradle.setup()

        // Download and install Gradle if required
        const executable = await provisioner.provisionGradle()

        // Only execute if arguments have been provided
        const args: string[] = params.getArguments()
        if (args.length > 0) {
            const buildRootDirectory = layout.buildRootDirectory()
            await execution.executeGradleBuild(executable, buildRootDirectory, args)
        }
    } catch (error) {
        core.setFailed(String(error))
        if (error instanceof Error && error.stack) {
            core.info(error.stack)
        }
    }

    // Explicit process.exit() to prevent waiting for hanging promises.
    process.exit()
}

run()
35
sources/src/post.ts
Normal file
@@ -0,0 +1,35 @@
import * as core from '@actions/core'
import * as setupGradle from './setup-gradle'
import {PostActionJobFailure} from './errors'

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on('uncaughtException', e => handleFailure(e))

/**
 * The post-execution entry point for the action, called by GitHub Actions after completing all steps for the Job.
 */
export async function run(): Promise<void> {
    try {
        await setupGradle.complete()
    } catch (error) {
        if (error instanceof PostActionJobFailure) {
            core.setFailed(String(error))
        } else {
            handleFailure(error)
        }
    }

    // Explicit process.exit() to prevent waiting for promises left hanging by `@actions/cache` on save.
    process.exit()
}

function handleFailure(error: unknown): void {
    core.warning(`Unhandled error in Gradle post-action - job will continue: ${error}`)
    if (error instanceof Error && error.stack) {
        core.info(error.stack)
    }
}

run()
180
sources/src/provision.ts
Normal file
@@ -0,0 +1,180 @@
|
||||
import * as fs from 'fs'
|
||||
import * as os from 'os'
|
||||
import * as path from 'path'
|
||||
import * as httpm from '@actions/http-client'
|
||||
import * as core from '@actions/core'
|
||||
import * as cache from '@actions/cache'
|
||||
import * as toolCache from '@actions/tool-cache'
|
||||
|
||||
import * as gradlew from './gradlew'
|
||||
import * as params from './input-params'
|
||||
import {handleCacheFailure, isCacheDisabled, isCacheReadOnly} from './cache-utils'
|
||||
|
||||
const gradleVersionsBaseUrl = 'https://services.gradle.org/versions'
|
||||
|
||||
/**
|
||||
* Install any configured version of Gradle, adding the executable to the PATH.
|
||||
* @return Installed Gradle executable or undefined if no version configured.
|
||||
*/
|
||||
export async function provisionGradle(): Promise<string | undefined> {
|
||||
const gradleVersion = params.getGradleVersion()
|
||||
if (gradleVersion !== '' && gradleVersion !== 'wrapper') {
|
||||
return addToPath(await installGradle(gradleVersion))
|
||||
}
|
||||
|
||||
return undefined
|
||||
}
|
||||
|
||||
async function addToPath(executable: string): Promise<string> {
|
||||
core.addPath(path.dirname(executable))
|
||||
return executable
|
||||
}
|
||||
|
||||
async function installGradle(version: string): Promise<string> {
|
||||
const versionInfo = await resolveGradleVersion(version)
|
||||
core.setOutput('gradle-version', versionInfo.version)
|
||||
return installGradleVersion(versionInfo)
|
||||
}
|
||||
|
||||
async function resolveGradleVersion(version: string): Promise<GradleVersionInfo> {
|
||||
switch (version) {
|
||||
case 'current':
|
||||
return gradleCurrent()
|
||||
case 'rc':
|
||||
core.warning(`Specifying gradle-version 'rc' has been deprecated. Use 'release-candidate' instead.`)
|
||||
return gradleReleaseCandidate()
|
||||
case 'release-candidate':
|
||||
return gradleReleaseCandidate()
|
||||
case 'nightly':
|
||||
return gradleNightly()
|
||||
case 'release-nightly':
|
||||
return gradleReleaseNightly()
|
||||
default:
|
||||
return gradle(version)
|
||||
}
|
||||
}
|
||||
|
||||
async function gradleCurrent(): Promise<GradleVersionInfo> {
|
||||
return await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/current`)
|
||||
}
|
||||
|
||||
async function gradleReleaseCandidate(): Promise<GradleVersionInfo> {
|
||||
const versionInfo = await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/release-candidate`)
|
||||
if (versionInfo && versionInfo.version && versionInfo.downloadUrl) {
|
||||
return versionInfo
|
||||
}
|
||||
core.info('No current release-candidate found, will fallback to current')
|
||||
return gradleCurrent()
|
||||
}
|
||||
|
||||
async function gradleNightly(): Promise<GradleVersionInfo> {
|
||||
return await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/nightly`)
|
||||
}
|
||||
|
||||
async function gradleReleaseNightly(): Promise<GradleVersionInfo> {
|
||||
return await gradleVersionDeclaration(`${gradleVersionsBaseUrl}/release-nightly`)
|
||||
}
|
||||
|
||||
async function gradle(version: string): Promise<GradleVersionInfo> {
|
||||
const versionInfo = await findGradleVersionDeclaration(version)
|
||||
if (!versionInfo) {
|
||||
throw new Error(`Gradle version ${version} does not exist`)
|
||||
}
|
||||
return versionInfo
|
||||
}
|
||||
|
||||
async function gradleVersionDeclaration(url: string): Promise<GradleVersionInfo> {
|
||||
return await httpGetGradleVersion(url)
|
||||
}
|
||||
|
||||
async function findGradleVersionDeclaration(version: string): Promise<GradleVersionInfo | undefined> {
|
||||
const gradleVersions = await httpGetGradleVersions(`${gradleVersionsBaseUrl}/all`)
|
||||
return gradleVersions.find((entry: GradleVersionInfo) => {
|
||||
return entry.version === version
|
||||
})
|
||||
}
|
||||
|
||||
async function installGradleVersion(versionInfo: GradleVersionInfo): Promise<string> {
|
||||
return core.group(`Provision Gradle ${versionInfo.version}`, async () => {
|
||||
return locateGradleAndDownloadIfRequired(versionInfo)
|
||||
})
|
||||
}
|
||||
|
||||
async function locateGradleAndDownloadIfRequired(versionInfo: GradleVersionInfo): Promise<string> {
|
||||
const installsDir = path.join(os.homedir(), 'gradle-installations/installs')
|
||||
const installDir = path.join(installsDir, `gradle-${versionInfo.version}`)
|
||||
if (fs.existsSync(installDir)) {
|
||||
core.info(`Gradle installation already exists at ${installDir}`)
|
||||
return executableFrom(installDir)
|
||||
}
|
||||
|
||||
const downloadPath = await downloadAndCacheGradleDistribution(versionInfo)
|
||||
await toolCache.extractZip(downloadPath, installsDir)
|
||||
core.info(`Extracted Gradle ${versionInfo.version} to ${installDir}`)
|
||||
|
||||
const executable = executableFrom(installDir)
|
||||
fs.chmodSync(executable, '755')
|
||||
core.info(`Provisioned Gradle executable ${executable}`)
|
||||
|
||||
return executable
|
||||
}
|
||||
|
||||
async function downloadAndCacheGradleDistribution(versionInfo: GradleVersionInfo): Promise<string> {
|
||||
const downloadPath = path.join(os.homedir(), `gradle-installations/downloads/gradle-${versionInfo.version}-bin.zip`)
|
||||
|
||||
if (isCacheDisabled()) {
|
||||
await downloadGradleDistribution(versionInfo, downloadPath)
|
||||
return downloadPath
|
||||
}
|
||||
|
||||
const cacheKey = `gradle-${versionInfo.version}`
|
||||
try {
|
||||
const restoreKey = await cache.restoreCache([downloadPath], cacheKey)
|
||||
if (restoreKey) {
|
||||
core.info(`Restored Gradle distribution ${cacheKey} from cache to ${downloadPath}`)
|
||||
return downloadPath
|
||||
}
|
||||
} catch (error) {
|
||||
handleCacheFailure(error, `Restore Gradle distribution ${versionInfo.version} failed`)
|
||||
}
|
||||
|
||||
core.info(`Gradle distribution ${versionInfo.version} not found in cache. Will download.`)
|
||||
await downloadGradleDistribution(versionInfo, downloadPath)
|
||||
|
||||
if (!isCacheReadOnly()) {
|
||||
try {
|
||||
await cache.saveCache([downloadPath], cacheKey)
|
||||
} catch (error) {
|
||||
handleCacheFailure(error, `Save Gradle distribution ${versionInfo.version} failed`)
|
||||
}
|
||||
}
|
||||
return downloadPath
|
||||
}
|
||||
|
||||
async function downloadGradleDistribution(versionInfo: GradleVersionInfo, downloadPath: string): Promise<void> {
|
||||
await toolCache.downloadTool(versionInfo.downloadUrl, downloadPath)
|
||||
core.info(`Downloaded ${versionInfo.downloadUrl} to ${downloadPath} (size ${fs.statSync(downloadPath).size})`)
|
||||
}
|
||||
|
||||
function executableFrom(installDir: string): string {
|
||||
return path.join(installDir, 'bin', `${gradlew.installScriptFilename()}`)
|
||||
}
|
||||
|
||||
async function httpGetGradleVersion(url: string): Promise<GradleVersionInfo> {
|
||||
return JSON.parse(await httpGetString(url))
|
||||
}
|
||||
|
||||
async function httpGetGradleVersions(url: string): Promise<GradleVersionInfo[]> {
|
||||
return JSON.parse(await httpGetString(url))
|
||||
}
|
||||
|
||||
async function httpGetString(url: string): Promise<string> {
|
||||
const httpClient = new httpm.HttpClient('gradle/gradle-build-action')
|
||||
const response = await httpClient.get(url)
|
||||
return response.readBody()
|
||||
}
|
||||
|
||||
interface GradleVersionInfo {
|
||||
version: string
|
||||
downloadUrl: string
|
||||
}
|
||||
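The version endpoints above are parsed into the local GradleVersionInfo shape. A hedged example of the two fields this code relies on; the values are illustrative and any other fields in the service response are ignored:

// Shape consumed from e.g. https://services.gradle.org/versions/current (illustrative values):
const exampleVersionInfo = {
    version: '8.5',
    downloadUrl: 'https://services.gradle.org/distributions/gradle-8.5-bin.zip'
}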
16
sources/src/repository-layout.ts
Normal file
@@ -0,0 +1,16 @@
import * as params from './input-params'
import * as path from 'path'

export function workspaceDirectory(): string {
    return process.env[`GITHUB_WORKSPACE`] || ''
}

export function buildRootDirectory(): string {
    const baseDirectory = workspaceDirectory()
    const buildRootDirectoryInput = params.getBuildRootDirectory()
    const resolvedBuildRootDirectory =
        buildRootDirectoryInput === ''
            ? path.resolve(baseDirectory)
            : path.resolve(baseDirectory, buildRootDirectoryInput)
    return resolvedBuildRootDirectory
}
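A short illustration of the resolution above, assuming a hypothetical GITHUB_WORKSPACE of /home/runner/work/my-repo/my-repo:

import * as path from 'path'

const workspace = '/home/runner/work/my-repo/my-repo' // illustrative GITHUB_WORKSPACE
path.resolve(workspace)        // 'build-root-directory' unset -> '/home/runner/work/my-repo/my-repo'
path.resolve(workspace, 'app') // 'build-root-directory'='app' -> '/home/runner/work/my-repo/my-repo/app'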
@@ -0,0 +1,65 @@
|
||||
import org.gradle.tooling.events.*
|
||||
import org.gradle.tooling.events.task.*
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
// Can't use settingsEvaluated since this script is applied inside a settingsEvaluated handler
|
||||
// But projectsEvaluated is good enough, since the build service won't catch configuration failures anyway
|
||||
projectsEvaluated {
|
||||
def projectTracker = gradle.sharedServices.registerIfAbsent("gradle-build-action-buildResultsRecorder", BuildResultsRecorder, { spec ->
|
||||
spec.getParameters().getRootProjectName().set(gradle.rootProject.name)
|
||||
spec.getParameters().getRootProjectDir().set(gradle.rootProject.rootDir.absolutePath)
|
||||
spec.getParameters().getRequestedTasks().set(gradle.startParameter.taskNames.join(" "))
|
||||
spec.getParameters().getGradleHomeDir().set(gradle.gradleHomeDir.absolutePath)
|
||||
spec.getParameters().getInvocationId().set(gradle.ext.invocationId)
|
||||
})
|
||||
|
||||
gradle.services.get(BuildEventsListenerRegistry).onTaskCompletion(projectTracker)
|
||||
}
|
||||
|
||||
abstract class BuildResultsRecorder implements BuildService<BuildResultsRecorder.Params>, OperationCompletionListener, AutoCloseable {
|
||||
private boolean buildFailed = false
|
||||
interface Params extends BuildServiceParameters {
|
||||
Property<String> getRootProjectName()
|
||||
Property<String> getRootProjectDir()
|
||||
Property<String> getRequestedTasks()
|
||||
Property<String> getGradleHomeDir()
|
||||
Property<String> getInvocationId()
|
||||
}
|
||||
|
||||
public void onFinish(FinishEvent finishEvent) {
|
||||
if (finishEvent instanceof TaskFinishEvent && finishEvent.result instanceof TaskFailureResult) {
|
||||
buildFailed = true
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
def buildResults = [
|
||||
rootProjectName: getParameters().getRootProjectName().get(),
|
||||
rootProjectDir: getParameters().getRootProjectDir().get(),
|
||||
requestedTasks: getParameters().getRequestedTasks().get(),
|
||||
gradleVersion: GradleVersion.current().version,
|
||||
gradleHomeDir: getParameters().getGradleHomeDir().get(),
|
||||
buildFailed: buildFailed,
|
||||
buildScanUri: null,
|
||||
buildScanFailed: false
|
||||
]
|
||||
|
||||
def runnerTempDir = System.getenv("RUNNER_TEMP")
|
||||
def githubActionStep = System.getenv("GITHUB_ACTION")
|
||||
if (!runnerTempDir || !githubActionStep) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
def buildResultsDir = new File(runnerTempDir, ".build-results")
|
||||
buildResultsDir.mkdirs()
|
||||
def buildResultsFile = new File(buildResultsDir, githubActionStep + getParameters().getInvocationId().get() + ".json")
|
||||
if (!buildResultsFile.exists()) {
|
||||
buildResultsFile << groovy.json.JsonOutput.toJson(buildResults)
|
||||
}
|
||||
} catch (Exception e) {
|
||||
println "\ngradle-build-action failed to write build-results file. Will continue.\n> ${e.getLocalizedMessage()}"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,143 @@
|
||||
/*
|
||||
* Capture information for each executed Gradle build to display in the job summary.
|
||||
*/
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
// Only run against root build. Do not run against included builds.
|
||||
def isTopLevelBuild = gradle.getParent() == null
|
||||
if (isTopLevelBuild) {
|
||||
def version = GradleVersion.current().baseVersion
|
||||
|
||||
def atLeastGradle3 = version >= GradleVersion.version("3.0")
|
||||
def atLeastGradle6 = version >= GradleVersion.version("6.0")
|
||||
|
||||
def invocationId = "-${System.currentTimeMillis()}"
|
||||
|
||||
if (atLeastGradle6) {
|
||||
def useBuildService = version >= GradleVersion.version("6.6")
|
||||
settingsEvaluated { settings ->
|
||||
// By default, use standard mechanisms to capture build results
|
||||
if (useBuildService) {
|
||||
captureUsingBuildService(settings, invocationId)
|
||||
} else {
|
||||
captureUsingBuildFinished(gradle, invocationId)
|
||||
}
|
||||
|
||||
// The `buildScanPublished` hook allows the capture of the Build Scan URI.
|
||||
// Results captured this way will overwrite any results from the other mechanism.
|
||||
settings.pluginManager.withPlugin("com.gradle.enterprise") {
|
||||
captureUsingBuildScanPublished(settings.extensions["gradleEnterprise"].buildScan, settings.rootProject, invocationId)
|
||||
}
|
||||
}
|
||||
} else if (atLeastGradle3) {
|
||||
projectsEvaluated { gradle ->
|
||||
// By default, use 'buildFinished' to capture build results
|
||||
captureUsingBuildFinished(gradle, invocationId)
|
||||
|
||||
// The `buildScanPublished` hook allows the capture of the Build Scan URI.
|
||||
// Results captured this way will overwrite any results from 'buildFinished'.
|
||||
gradle.rootProject.pluginManager.withPlugin("com.gradle.build-scan") {
|
||||
captureUsingBuildScanPublished(gradle.rootProject.extensions["buildScan"], gradle.rootProject, invocationId)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def captureUsingBuildScanPublished(buildScanExtension, rootProject, invocationId) {
|
||||
buildScanExtension.with {
|
||||
def buildResults = new BuildResults(invocationId, gradle, rootProject)
|
||||
|
||||
buildFinished { result ->
|
||||
buildResults.setBuildResult(result)
|
||||
}
|
||||
|
||||
buildScanPublished { buildScan ->
|
||||
buildResults.setBuildScanUri(buildScan.buildScanUri.toASCIIString())
|
||||
buildResults.writeToResultsFile(true)
|
||||
|
||||
def githubOutput = System.getenv("GITHUB_OUTPUT")
|
||||
if (githubOutput) {
|
||||
new File(githubOutput) << "build-scan-url=${buildScan.buildScanUri}\n"
|
||||
} else {
|
||||
// Retained for compatibility with older GitHub Enterprise versions
|
||||
println("::set-output name=build-scan-url::${buildScan.buildScanUri}")
|
||||
}
|
||||
}
|
||||
|
||||
onError { error ->
|
||||
buildResults.setBuildScanFailed()
|
||||
buildResults.writeToResultsFile(true)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def captureUsingBuildFinished(gradle, invocationId) {
|
||||
gradle.buildFinished { result ->
|
||||
println "Got buildFinished: ${result}"
|
||||
def buildResults = new BuildResults(invocationId, gradle, gradle.rootProject)
|
||||
buildResults.setBuildResult(result)
|
||||
buildResults.writeToResultsFile(false)
|
||||
}
|
||||
}
|
||||
|
||||
def captureUsingBuildService(settings, invocationId) {
|
||||
gradle.ext.invocationId = invocationId
|
||||
apply from: 'gradle-build-action.build-result-capture-service.plugin.groovy'
|
||||
}
|
||||
|
||||
class BuildResults {
|
||||
def invocationId
|
||||
def buildResults
|
||||
|
||||
BuildResults(String invocationId, def gradle, def rootProject) {
|
||||
this.invocationId = invocationId
|
||||
buildResults = [
|
||||
rootProjectName: rootProject.name,
|
||||
rootProjectDir: rootProject.projectDir.absolutePath,
|
||||
requestedTasks: gradle.startParameter.taskNames.join(" "),
|
||||
gradleVersion: GradleVersion.current().version,
|
||||
gradleHomeDir: gradle.gradleHomeDir.absolutePath,
|
||||
buildFailed: false,
|
||||
buildScanUri: null,
|
||||
buildScanFailed: false
|
||||
]
|
||||
}
|
||||
|
||||
def setBuildResult(def result) {
|
||||
buildResults['buildFailed'] = result.failure != null
|
||||
}
|
||||
|
||||
def setBuildScanUri(def buildScanUrl) {
|
||||
buildResults['buildScanUri'] = buildScanUrl
|
||||
}
|
||||
|
||||
def setBuildScanFailed() {
|
||||
buildResults['buildScanFailed'] = true
|
||||
}
|
||||
|
||||
def writeToResultsFile(boolean overwrite) {
|
||||
def runnerTempDir = System.getenv("RUNNER_TEMP")
|
||||
def githubActionStep = System.getenv("GITHUB_ACTION")
|
||||
if (!runnerTempDir || !githubActionStep) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
def buildResultsDir = new File(runnerTempDir, ".build-results")
|
||||
buildResultsDir.mkdirs()
|
||||
def buildResultsFile = new File(buildResultsDir, githubActionStep + invocationId + ".json")
|
||||
|
||||
// Overwrite any contents written by buildFinished or build service, since this result is a superset.
|
||||
if (buildResultsFile.exists()) {
|
||||
if (overwrite) {
|
||||
buildResultsFile.text = groovy.json.JsonOutput.toJson(buildResults)
|
||||
}
|
||||
} else {
|
||||
buildResultsFile << groovy.json.JsonOutput.toJson(buildResults)
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
println "\ngradle-build-action failed to write build-results file. Will continue.\n> ${e.getLocalizedMessage()}"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
buildscript {
|
||||
def getInputParam = { String name ->
|
||||
def envVarName = name.toUpperCase().replace('.', '_').replace('-', '_')
|
||||
return System.getProperty(name) ?: System.getenv(envVarName)
|
||||
}
|
||||
def pluginRepositoryUrl = getInputParam('gradle.plugin-repository.url') ?: 'https://plugins.gradle.org/m2'
|
||||
|
||||
repositories {
|
||||
maven { url pluginRepositoryUrl }
|
||||
}
|
||||
dependencies {
|
||||
classpath "org.gradle:github-dependency-graph-gradle-plugin:1.1.1"
|
||||
}
|
||||
}
|
||||
apply plugin: org.gradle.github.GitHubDependencyGraphPlugin
|
||||
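The getInputParam closure above resolves each dotted parameter name from a system property first, then from an upper-cased environment variable. A small TypeScript sketch of that same name mapping (the parameter name is illustrative):

// 'gradle.plugin-repository.url' -> 'GRADLE_PLUGIN_REPOSITORY_URL'
function toEnvVarName(name: string): string {
    return name.toUpperCase().replace(/\./g, '_').replace(/-/g, '_')
}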
@@ -0,0 +1,65 @@
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
// Only run when dependency graph is explicitly enabled
|
||||
if (getVariable('GITHUB_DEPENDENCY_GRAPH_ENABLED') != "true") {
|
||||
return
|
||||
}
|
||||
|
||||
// Do not run for unsupported versions of Gradle
|
||||
def gradleVersion = GradleVersion.current().baseVersion
|
||||
if (gradleVersion < GradleVersion.version("5.2") ||
|
||||
(gradleVersion >= GradleVersion.version("7.0") && gradleVersion < GradleVersion.version("7.1"))) {
|
||||
if (getVariable('GITHUB_DEPENDENCY_GRAPH_CONTINUE_ON_FAILURE') != "true") {
|
||||
throw new GradleException("Dependency Graph is not supported for ${gradleVersion}. No dependency snapshot will be generated.")
|
||||
}
|
||||
println "::warning::Dependency Graph is not supported for ${gradleVersion}. No dependency snapshot will be generated."
|
||||
return
|
||||
}
|
||||
|
||||
// Attempt to find a unique job correlator to use based on the environment variable
|
||||
// This is only required for top-level builds
|
||||
def isTopLevelBuild = gradle.getParent() == null
|
||||
if (isTopLevelBuild) {
|
||||
def reportFile = getUniqueReportFile(getVariable('GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR'))
|
||||
|
||||
if (reportFile == null) {
|
||||
println "::warning::No dependency snapshot generated for step. Could not determine unique job correlator - specify GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR var for this step."
|
||||
return
|
||||
}
|
||||
|
||||
println "Generating dependency graph into '${reportFile}'"
|
||||
}
|
||||
|
||||
apply from: 'gradle-build-action.github-dependency-graph-gradle-plugin-apply.groovy'
|
||||
|
||||
/**
|
||||
* Using the supplied jobCorrelator value:
|
||||
* - Checks if report file already exists
|
||||
* - If so, tries to find a unique value that does not yet have a corresponding report file.
|
||||
* - When found, this value is set as a System property override.
|
||||
*/
|
||||
File getUniqueReportFile(String jobCorrelator) {
|
||||
def reportDir = getVariable('DEPENDENCY_GRAPH_REPORT_DIR')
|
||||
def reportFile = new File(reportDir, jobCorrelator + ".json")
|
||||
if (!reportFile.exists()) return reportFile
|
||||
|
||||
// Try at most 100 suffixes
|
||||
for (int i = 1; i < 100; i++) {
|
||||
def candidateCorrelator = jobCorrelator + "-" + i
|
||||
def candidateFile = new File(reportDir, candidateCorrelator + ".json")
|
||||
if (!candidateFile.exists()) {
|
||||
System.properties['GITHUB_DEPENDENCY_GRAPH_JOB_CORRELATOR'] = candidateCorrelator
|
||||
return candidateFile
|
||||
}
|
||||
}
|
||||
|
||||
// Could not determine unique job correlator
|
||||
return null
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the environment variable value, or equivalent system property (if set)
|
||||
*/
|
||||
String getVariable(String name) {
|
||||
return System.properties[name] ?: System.getenv(name)
|
||||
}
|
||||
@@ -0,0 +1,211 @@
|
||||
import org.gradle.util.GradleVersion
|
||||
|
||||
// note that there is no mechanism to share code between the initscript{} block and the main script, so some logic is duplicated
|
||||
|
||||
// conditionally apply the GE / Build Scan plugin to the classpath so it can be applied to the build further down in this script
|
||||
initscript {
|
||||
def isTopLevelBuild = !gradle.parent
|
||||
if (!isTopLevelBuild) {
|
||||
return
|
||||
}
|
||||
|
||||
def getInputParam = { String name ->
|
||||
def envVarName = name.toUpperCase().replace('.', '_').replace('-', '_')
|
||||
return System.getProperty(name) ?: System.getenv(envVarName)
|
||||
}
|
||||
|
||||
// finish early if injection is disabled
|
||||
def gradleInjectionEnabled = getInputParam("develocity.injection-enabled")
|
||||
if (gradleInjectionEnabled != "true") {
|
||||
return
|
||||
}
|
||||
|
||||
def pluginRepositoryUrl = getInputParam('gradle.plugin-repository.url')
|
||||
def gePluginVersion = getInputParam('develocity.plugin.version')
|
||||
def ccudPluginVersion = getInputParam('develocity.ccud-plugin.version')
|
||||
|
||||
def atLeastGradle5 = GradleVersion.current() >= GradleVersion.version('5.0')
|
||||
def atLeastGradle4 = GradleVersion.current() >= GradleVersion.version('4.0')
|
||||
|
||||
if (gePluginVersion || ccudPluginVersion && atLeastGradle4) {
|
||||
pluginRepositoryUrl = pluginRepositoryUrl ?: 'https://plugins.gradle.org/m2'
|
||||
logger.quiet("Develocity plugins resolution: $pluginRepositoryUrl")
|
||||
|
||||
repositories {
|
||||
maven { url pluginRepositoryUrl }
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
if (gePluginVersion) {
|
||||
classpath atLeastGradle5 ?
|
||||
"com.gradle:gradle-enterprise-gradle-plugin:$gePluginVersion" :
|
||||
"com.gradle:build-scan-plugin:1.16"
|
||||
}
|
||||
|
||||
if (ccudPluginVersion && atLeastGradle4) {
|
||||
classpath "com.gradle:common-custom-user-data-gradle-plugin:$ccudPluginVersion"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def BUILD_SCAN_PLUGIN_ID = 'com.gradle.build-scan'
|
||||
def BUILD_SCAN_PLUGIN_CLASS = 'com.gradle.scan.plugin.BuildScanPlugin'
|
||||
|
||||
def DEVELOCITY_PLUGIN_ID = 'com.gradle.enterprise'
|
||||
def DEVELOCITY_PLUGIN_CLASS = 'com.gradle.enterprise.gradleplugin.GradleEnterprisePlugin'
|
||||
def DEVELOCITY_EXTENSION_CLASS = 'com.gradle.enterprise.gradleplugin.GradleEnterpriseExtension'
|
||||
def CI_AUTO_INJECTION_CUSTOM_VALUE_NAME = 'CI auto injection'
|
||||
def CI_AUTO_INJECTION_CUSTOM_VALUE_VALUE = 'gradle-build-action'
|
||||
def CCUD_PLUGIN_ID = 'com.gradle.common-custom-user-data-gradle-plugin'
|
||||
def CCUD_PLUGIN_CLASS = 'com.gradle.CommonCustomUserDataGradlePlugin'
|
||||
|
||||
def isTopLevelBuild = !gradle.parent
|
||||
if (!isTopLevelBuild) {
|
||||
return
|
||||
}
|
||||
|
||||
def getInputParam = { String name ->
|
||||
def envVarName = name.toUpperCase().replace('.', '_').replace('-', '_')
|
||||
return System.getProperty(name) ?: System.getenv(envVarName)
|
||||
}
|
||||
|
||||
// finish early if injection is disabled
|
||||
def gradleInjectionEnabled = getInputParam("develocity.injection-enabled")
|
||||
if (gradleInjectionEnabled != "true") {
|
||||
return
|
||||
}
|
||||
|
||||
def geUrl = getInputParam('develocity.url')
|
||||
def geAllowUntrustedServer = Boolean.parseBoolean(getInputParam('develocity.allow-untrusted-server'))
|
||||
def geEnforceUrl = Boolean.parseBoolean(getInputParam('develocity.enforce-url'))
|
||||
def buildScanUploadInBackground = Boolean.parseBoolean(getInputParam('develocity.build-scan.upload-in-background'))
|
||||
def gePluginVersion = getInputParam('develocity.plugin.version')
|
||||
def ccudPluginVersion = getInputParam('develocity.ccud-plugin.version')
|
||||
def buildScanTermsOfServiceUrl = getInputParam('build-scan.terms-of-service.url')
|
||||
def buildScanTermsOfServiceAgree = getInputParam('build-scan.terms-of-service.agree')
|
||||
|
||||
def atLeastGradle4 = GradleVersion.current() >= GradleVersion.version('4.0')
|
||||
|
||||
// finish early if configuration parameters passed in via system properties are not valid/supported
|
||||
if (ccudPluginVersion && isNotAtLeast(ccudPluginVersion, '1.7')) {
|
||||
logger.warn("Common Custom User Data Gradle plugin must be at least 1.7. Configured version is $ccudPluginVersion.")
|
||||
return
|
||||
}
|
||||
|
||||
// register buildScanPublished listener and optionally apply the GE / Build Scan plugin
|
||||
if (GradleVersion.current() < GradleVersion.version('6.0')) {
|
||||
rootProject {
|
||||
buildscript.configurations.getByName("classpath").incoming.afterResolve { ResolvableDependencies incoming ->
|
||||
def resolutionResult = incoming.resolutionResult
|
||||
|
||||
if (gePluginVersion) {
|
||||
def scanPluginComponent = resolutionResult.allComponents.find {
|
||||
                    it.moduleVersion.with { group == "com.gradle" && (name == "build-scan-plugin" || name == "gradle-enterprise-gradle-plugin") }
                }
                if (!scanPluginComponent) {
                    logger.quiet("Applying $BUILD_SCAN_PLUGIN_CLASS via init script")
                    applyPluginExternally(pluginManager, BUILD_SCAN_PLUGIN_CLASS)
                    if (geUrl) {
                        logger.quiet("Connection to Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
                        buildScan.server = geUrl
                        buildScan.allowUntrustedServer = geAllowUntrustedServer
                    }
                    buildScan.publishAlways()
                    if (buildScan.metaClass.respondsTo(buildScan, 'setUploadInBackground', Boolean)) buildScan.uploadInBackground = buildScanUploadInBackground // uploadInBackground not available for build-scan-plugin 1.16
                    buildScan.value CI_AUTO_INJECTION_CUSTOM_VALUE_NAME, CI_AUTO_INJECTION_CUSTOM_VALUE_VALUE
                }

                if (geUrl && geEnforceUrl) {
                    pluginManager.withPlugin(BUILD_SCAN_PLUGIN_ID) {
                        afterEvaluate {
                            logger.quiet("Enforcing Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
                            buildScan.server = geUrl
                            buildScan.allowUntrustedServer = geAllowUntrustedServer
                        }
                    }
                }

                if (buildScanTermsOfServiceUrl && buildScanTermsOfServiceAgree) {
                    buildScan.termsOfServiceUrl = buildScanTermsOfServiceUrl
                    buildScan.termsOfServiceAgree = buildScanTermsOfServiceAgree
                }
            }

            if (ccudPluginVersion && atLeastGradle4) {
                def ccudPluginComponent = resolutionResult.allComponents.find {
                    it.moduleVersion.with { group == "com.gradle" && name == "common-custom-user-data-gradle-plugin" }
                }
                if (!ccudPluginComponent) {
                    logger.quiet("Applying $CCUD_PLUGIN_CLASS via init script")
                    pluginManager.apply(initscript.classLoader.loadClass(CCUD_PLUGIN_CLASS))
                }
            }
        }
    }
} else {
    gradle.settingsEvaluated { settings ->
        if (gePluginVersion) {
            if (!settings.pluginManager.hasPlugin(DEVELOCITY_PLUGIN_ID)) {
                logger.quiet("Applying $DEVELOCITY_PLUGIN_CLASS via init script")
                applyPluginExternally(settings.pluginManager, DEVELOCITY_PLUGIN_CLASS)
                eachDevelocityExtension(settings, DEVELOCITY_EXTENSION_CLASS) { ext ->
                    if (geUrl) {
                        logger.quiet("Connection to Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
                        ext.server = geUrl
                        ext.allowUntrustedServer = geAllowUntrustedServer
                    }
                    ext.buildScan.publishAlways()
                    ext.buildScan.uploadInBackground = buildScanUploadInBackground
                    ext.buildScan.value CI_AUTO_INJECTION_CUSTOM_VALUE_NAME, CI_AUTO_INJECTION_CUSTOM_VALUE_VALUE
                }
            }

            if (geUrl && geEnforceUrl) {
                eachDevelocityExtension(settings, DEVELOCITY_EXTENSION_CLASS) { ext ->
                    logger.quiet("Enforcing Develocity: $geUrl, allowUntrustedServer: $geAllowUntrustedServer")
                    ext.server = geUrl
                    ext.allowUntrustedServer = geAllowUntrustedServer
                }
            }

            if (buildScanTermsOfServiceUrl && buildScanTermsOfServiceAgree) {
                eachDevelocityExtension(settings, DEVELOCITY_EXTENSION_CLASS) { ext ->
                    ext.buildScan.termsOfServiceUrl = buildScanTermsOfServiceUrl
                    ext.buildScan.termsOfServiceAgree = buildScanTermsOfServiceAgree
                }
            }
        }

        if (ccudPluginVersion) {
            if (!settings.pluginManager.hasPlugin(CCUD_PLUGIN_ID)) {
                logger.quiet("Applying $CCUD_PLUGIN_CLASS via init script")
                settings.pluginManager.apply(initscript.classLoader.loadClass(CCUD_PLUGIN_CLASS))
            }
        }
    }
}

void applyPluginExternally(def pluginManager, String pluginClassName) {
    def externallyApplied = 'develocity.externally-applied'
    def oldValue = System.getProperty(externallyApplied)
    System.setProperty(externallyApplied, 'true')
    try {
        pluginManager.apply(initscript.classLoader.loadClass(pluginClassName))
    } finally {
        if (oldValue == null) {
            System.clearProperty(externallyApplied)
        } else {
            System.setProperty(externallyApplied, oldValue)
        }
    }
}

static def eachDevelocityExtension(def settings, def publicType, def action) {
    settings.extensions.extensionsSchema.elements.findAll { it.publicType.concreteClass.name == publicType }
        .collect { settings[it.name] }.each(action)
}

static boolean isNotAtLeast(String versionUnderTest, String referenceVersion) {
    GradleVersion.version(versionUnderTest) < GradleVersion.version(referenceVersion)
}
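An init script like the one above only takes effect once Gradle loads it, typically because it sits in the init.d directory of the Gradle User Home or is passed with --init-script. A minimal sketch of staging such a script from bundled resources, assuming a copyInitScript helper and resource layout chosen here for illustration rather than taken from this diff:

import * as fs from 'fs'
import * as path from 'path'

// Hypothetical helper: copy a bundled init script into GRADLE_USER_HOME/init.d,
// where Gradle applies it automatically to every build in that environment.
// The function name and resource path are illustrative only.
export function copyInitScript(gradleUserHome: string, scriptName: string): void {
    const initScriptsDir = path.resolve(gradleUserHome, 'init.d')
    fs.mkdirSync(initScriptsDir, {recursive: true})

    const source = path.resolve(__dirname, '..', 'resources', 'init-scripts', scriptName)
    const target = path.resolve(initScriptsDir, scriptName)
    fs.copyFileSync(source, target)
}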
44
sources/src/resources/toolchains.xml
Normal file
44
sources/src/resources/toolchains.xml
Normal file
@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<toolchains>
  <!-- JDK Toolchains installed by default on GitHub-hosted runners -->
  <toolchain>
    <type>jdk</type>
    <provides>
      <version>8</version>
      <vendor>Eclipse Temurin</vendor>
    </provides>
    <configuration>
      <jdkHome>${env.JAVA_HOME_8_X64}</jdkHome>
    </configuration>
  </toolchain>
  <toolchain>
    <type>jdk</type>
    <provides>
      <version>11</version>
      <vendor>Eclipse Temurin</vendor>
    </provides>
    <configuration>
      <jdkHome>${env.JAVA_HOME_11_X64}</jdkHome>
    </configuration>
  </toolchain>
  <toolchain>
    <type>jdk</type>
    <provides>
      <version>17</version>
      <vendor>Eclipse Temurin</vendor>
    </provides>
    <configuration>
      <jdkHome>${env.JAVA_HOME_17_X64}</jdkHome>
    </configuration>
  </toolchain>
  <toolchain>
    <type>jdk</type>
    <provides>
      <version>21</version>
      <vendor>Eclipse Temurin</vendor>
    </provides>
    <configuration>
      <jdkHome>${env.JAVA_HOME_21_X64}</jdkHome>
    </configuration>
  </toolchain>
</toolchains>
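The ${env.JAVA_HOME_*_X64} placeholders refer to the environment variables that GitHub-hosted runners export for each pre-installed JDK, so toolchain resolution can find those JDKs without hard-coded paths. One plausible way a setup step could expose this file is to copy it to ~/.m2/toolchains.xml, a location Gradle's toolchain auto-detection scans, while leaving any user-provided file alone; the helper name and target location below are assumptions, not taken from this diff:

import * as fs from 'fs'
import * as path from 'path'

// Hypothetical sketch: place the bundled toolchains.xml where toolchain
// auto-detection can see it, without overwriting an existing user file.
export function registerToolchains(userHome: string): void {
    const m2Dir = path.resolve(userHome, '.m2')
    const target = path.resolve(m2Dir, 'toolchains.xml')
    if (fs.existsSync(target)) {
        return // respect an existing user-provided toolchains definition
    }
    fs.mkdirSync(m2Dir, {recursive: true})
    const source = path.resolve(__dirname, '..', 'resources', 'toolchains.xml')
    fs.copyFileSync(source, target)
}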
97
sources/src/setup-gradle.ts
Normal file
97
sources/src/setup-gradle.ts
Normal file
@@ -0,0 +1,97 @@
import * as core from '@actions/core'
import * as exec from '@actions/exec'
import * as path from 'path'
import * as os from 'os'
import * as caches from './caches'
import * as layout from './repository-layout'
import * as params from './input-params'
import * as dependencyGraph from './dependency-graph'
import * as jobSummary from './job-summary'
import * as buildScan from './build-scan'

import {loadBuildResults} from './build-results'
import {CacheListener} from './cache-reporting'
import {DaemonController} from './daemon-controller'

const GRADLE_SETUP_VAR = 'GRADLE_BUILD_ACTION_SETUP_COMPLETED'
const USER_HOME = 'USER_HOME'
const GRADLE_USER_HOME = 'GRADLE_USER_HOME'
const CACHE_LISTENER = 'CACHE_LISTENER'

export async function setup(): Promise<void> {
    const userHome = await determineUserHome()
    const gradleUserHome = await determineGradleUserHome()

    // Bypass setup on all but first action step in workflow.
    if (process.env[GRADLE_SETUP_VAR]) {
        core.info('Gradle setup only performed on first gradle-build-action step in workflow.')
        return
    }
    // Record setup complete: visible to all subsequent actions and prevents duplicate setup
    core.exportVariable(GRADLE_SETUP_VAR, true)
    // Record setup complete: visible in post-action, to control action completion
    core.saveState(GRADLE_SETUP_VAR, true)

    // Save the User Home and Gradle User Home for use in the post-action step.
    core.saveState(USER_HOME, userHome)
    core.saveState(GRADLE_USER_HOME, gradleUserHome)

    const cacheListener = new CacheListener()
    await caches.restore(userHome, gradleUserHome, cacheListener)

    core.saveState(CACHE_LISTENER, cacheListener.stringify())

    await dependencyGraph.setup(params.getDependencyGraphOption())

    buildScan.setup()
}

export async function complete(): Promise<void> {
    if (!core.getState(GRADLE_SETUP_VAR)) {
        core.info('Gradle setup post-action only performed for first gradle-build-action step in workflow.')
        return
    }
    core.info('In post-action step')

    const buildResults = loadBuildResults()

    const userHome = core.getState(USER_HOME)
    const gradleUserHome = core.getState(GRADLE_USER_HOME)
    const cacheListener: CacheListener = CacheListener.rehydrate(core.getState(CACHE_LISTENER))
    const daemonController = new DaemonController(buildResults)

    await caches.save(userHome, gradleUserHome, cacheListener, daemonController)

    await jobSummary.generateJobSummary(buildResults, cacheListener)

    await dependencyGraph.complete(params.getDependencyGraphOption())

    core.info('Completed post-action step')
}

async function determineGradleUserHome(): Promise<string> {
    const customGradleUserHome = process.env['GRADLE_USER_HOME']
    if (customGradleUserHome) {
        const rootDir = layout.workspaceDirectory()
        return path.resolve(rootDir, customGradleUserHome)
    }

    return path.resolve(await determineUserHome(), '.gradle')
}

/**
 * Different values can be returned by os.homedir() in Javascript and System.getProperty('user.home') in Java.
 * In order to determine the correct Gradle User Home, we ask Java for the user home instead of using os.homedir().
 */
async function determineUserHome(): Promise<string> {
    const output = await exec.getExecOutput('java', ['-XshowSettings:properties', '-version'], {silent: true})
    const regex = /user\.home = (\S*)/i
    const found = output.stderr.match(regex)
    if (found == null || found.length <= 1) {
        core.info('Could not determine user.home from java -version output. Using os.homedir().')
        return os.homedir()
    }
    const userHome = found[1]
    core.debug(`Determined user.home from java -version output: '${userHome}'`)
    return userHome
}
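setup() runs in the action's main step, while complete() is intended for the corresponding post step; the two are paired through the state saved with core.saveState. A minimal sketch of how they could be wired to entry points, assuming main.ts and post.ts file names that are not shown in this diff:

// main.ts (illustrative): runs when the action step starts.
// A matching post.ts would be identical except for calling setupGradle.complete().
import * as core from '@actions/core'
import * as setupGradle from './setup-gradle'

export async function run(): Promise<void> {
    try {
        await setupGradle.setup()
    } catch (error) {
        // Surface the failure to the workflow run rather than throwing.
        core.setFailed(String(error))
    }
}

run()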