"Fossies" - the Fresh Open Source Software Archive

Member "elasticsearch-6.8.3/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy" (29 Aug 2019, 60128 Bytes) of package /linux/www/elasticsearch-6.8.3-src.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Java source code syntax highlighting (style: standard) with prefixed line numbers and a code folding option. (Note: the file is actually Groovy source.) Alternatively, you can view or download the uninterpreted source code file here. See also the latest Fossies "Diffs" side-by-side code changes report for "BuildPlugin.groovy": 6.8.2_vs_6.8.3.

    1 /*
    2  * Licensed to Elasticsearch under one or more contributor
    3  * license agreements. See the NOTICE file distributed with
    4  * this work for additional information regarding copyright
    5  * ownership. Elasticsearch licenses this file to you under
    6  * the Apache License, Version 2.0 (the "License"); you may
    7  * not use this file except in compliance with the License.
    8  * You may obtain a copy of the License at
    9  *
   10  *    http://www.apache.org/licenses/LICENSE-2.0
   11  *
   12  * Unless required by applicable law or agreed to in writing,
   13  * software distributed under the License is distributed on an
   14  * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
   15  * KIND, either express or implied.  See the License for the
   16  * specific language governing permissions and limitations
   17  * under the License.
   18  */
   19 package org.elasticsearch.gradle
   20 
   21 import com.carrotsearch.gradle.junit4.RandomizedTestingTask
   22 import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
   23 import org.apache.commons.io.IOUtils
   24 import org.apache.tools.ant.taskdefs.condition.Os
   25 import org.elasticsearch.gradle.precommit.PrecommitTasks
   26 import org.gradle.api.GradleException
   27 import org.gradle.api.InvalidUserDataException
   28 import org.gradle.api.JavaVersion
   29 import org.gradle.api.Plugin
   30 import org.gradle.api.Project
   31 import org.gradle.api.Task
   32 import org.gradle.api.XmlProvider
   33 import org.gradle.api.artifacts.Configuration
   34 import org.gradle.api.artifacts.Dependency
   35 import org.gradle.api.artifacts.ModuleDependency
   36 import org.gradle.api.artifacts.ModuleVersionIdentifier
   37 import org.gradle.api.artifacts.ProjectDependency
   38 import org.gradle.api.artifacts.ResolvedArtifact
   39 import org.gradle.api.artifacts.dsl.RepositoryHandler
   40 import org.gradle.api.artifacts.repositories.IvyArtifactRepository
   41 import org.gradle.api.artifacts.repositories.MavenArtifactRepository
   42 import org.gradle.api.execution.TaskExecutionGraph
   43 import org.gradle.api.plugins.JavaBasePlugin
   44 import org.gradle.api.plugins.JavaPlugin
   45 import org.gradle.api.publish.maven.MavenPublication
   46 import org.gradle.api.publish.maven.plugins.MavenPublishPlugin
   47 import org.gradle.api.publish.maven.tasks.GenerateMavenPom
   48 import org.gradle.api.tasks.SourceSet
   49 import org.gradle.api.tasks.bundling.Jar
   50 import org.gradle.api.tasks.compile.GroovyCompile
   51 import org.gradle.api.tasks.compile.JavaCompile
   52 import org.gradle.api.tasks.javadoc.Javadoc
   53 import org.gradle.internal.jvm.Jvm
   54 import org.gradle.process.ExecResult
   55 import org.gradle.process.ExecSpec
   56 import org.gradle.util.GradleVersion
   57 
   58 import java.nio.charset.StandardCharsets
   59 import java.nio.file.Files
   60 import java.nio.file.Path
   61 import java.nio.file.Paths
   62 import java.time.ZoneOffset
   63 import java.time.ZonedDateTime
   64 import java.util.function.Supplier
   65 import java.util.regex.Matcher
   66 
   67 /**
   68  * Encapsulates build configuration for elasticsearch projects.
   69  */
   70 class BuildPlugin implements Plugin<Project> {
   71 
    @Override
    void apply(Project project) {
        // elasticsearch.build configures the full compile/test/precommit pipeline and
        // cannot be combined with the standalone test plugins
        if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) {
              throw new InvalidUserDataException('elasticsearch.standalone-test, '
                + 'elasticsearch.standalone-rest-test, and elasticsearch.build '
                + 'are mutually exclusive')
        }
        // enforce the minimum Gradle version, which is shipped as a classpath resource
        String minimumGradleVersion = null
        InputStream is = getClass().getResourceAsStream("/minimumGradleVersion")
        try { minimumGradleVersion = IOUtils.toString(is, StandardCharsets.UTF_8.toString()) } finally { is.close() }
        if (GradleVersion.current() < GradleVersion.version(minimumGradleVersion.trim())) {
            throw new GradleException(
                    "Gradle ${minimumGradleVersion}+ is required to use elasticsearch.build plugin"
            )
        }
        project.pluginManager.apply('java')
        project.pluginManager.apply('carrotsearch.randomized-testing')
        configureConfigurations(project)
        configureJars(project) // jar config must be added before info broker
        // these plugins add lots of info to our jars
        project.pluginManager.apply('nebula.info-broker')
        project.pluginManager.apply('nebula.info-basic')
        project.pluginManager.apply('nebula.info-java')
        project.pluginManager.apply('nebula.info-scm')
        project.pluginManager.apply('nebula.info-jar')

        project.getTasks().create("buildResources", ExportElasticsearchBuildResourcesTask)

        // globalBuildInfo populates rootProject.ext values (java homes, versions, ...)
        // that later configure* steps read, so it must run before them
        globalBuildInfo(project)
        configureRepositories(project)
        project.ext.versions = VersionProperties.versions
        configureSourceSets(project)
        configureCompile(project)
        configureJavadoc(project)
        configureSourcesJar(project)
        configurePomGeneration(project)

        applyCommonTestConfig(project)
        configureTest(project)
        configurePrecommit(project)
        configureDependenciesInfo(project)
    }
  114 
  115 
  116 
    /** Performs checks on the build environment and prints information about the build environment. */
    static void globalBuildInfo(Project project) {
        // the expensive probing/validation below runs once per build; the result is
        // cached on the root project behind the 'buildChecksDone' flag
        if (project.rootProject.ext.has('buildChecksDone') == false) {
            // minimum Java versions are shipped as classpath resources next to this plugin
            JavaVersion minimumRuntimeVersion = JavaVersion.toVersion(
                    BuildPlugin.class.getClassLoader().getResourceAsStream("minimumRuntimeVersion").text.trim()
            )
            JavaVersion minimumCompilerVersion = JavaVersion.toVersion(
                    BuildPlugin.class.getClassLoader().getResourceAsStream("minimumCompilerVersion").text.trim()
            )
            String compilerJavaHome = findCompilerJavaHome()
            String runtimeJavaHome = findRuntimeJavaHome(compilerJavaHome)
            File gradleJavaHome = Jvm.current().javaHome

            // collect every optional JAVA<N>_HOME (8 .. minimum compiler major) that is set
            final Map<Integer, String> javaVersions = [:]
            for (int version = 8; version <= Integer.parseInt(minimumCompilerVersion.majorVersion); version++) {
                if(System.getenv(getJavaHomeEnvVarName(version.toString())) != null) {
                    javaVersions.put(version, findJavaHome(version.toString()));
                }
            }

            String javaVendor = System.getProperty('java.vendor')
            String gradleJavaVersion = System.getProperty('java.version')
            String gradleJavaVersionDetails = "${javaVendor} ${gradleJavaVersion}" +
                " [${System.getProperty('java.vm.name')} ${System.getProperty('java.vm.version')}]"

            // only probe the compiler JVM (forks jrunscript) when it differs from the
            // JVM running Gradle; otherwise reuse the current JVM's details
            String compilerJavaVersionDetails = gradleJavaVersionDetails
            JavaVersion compilerJavaVersionEnum = JavaVersion.current()
            if (new File(compilerJavaHome).canonicalPath != gradleJavaHome.canonicalPath) {
                compilerJavaVersionDetails = findJavaVersionDetails(project, compilerJavaHome)
                compilerJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, compilerJavaHome))
            }

            // same shortcut for the runtime JVM
            String runtimeJavaVersionDetails = gradleJavaVersionDetails
            JavaVersion runtimeJavaVersionEnum = JavaVersion.current()
            if (new File(runtimeJavaHome).canonicalPath != gradleJavaHome.canonicalPath) {
                runtimeJavaVersionDetails = findJavaVersionDetails(project, runtimeJavaHome)
                runtimeJavaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, runtimeJavaHome))
            }

            // detect a FIPS JVM by asking the runtime JVM whether its first security
            // provider's name contains "fips"
            String inFipsJvmScript = 'print(java.security.Security.getProviders()[0].name.toLowerCase().contains("fips"));'
            boolean inFipsJvm = Boolean.parseBoolean(runJavaAsScript(project, runtimeJavaHome, inFipsJvmScript))

            // Build debugging info
            println '======================================='
            println 'Elasticsearch Build Hamster says Hello!'
            println "  Gradle Version        : ${project.gradle.gradleVersion}"
            println "  OS Info               : ${System.getProperty('os.name')} ${System.getProperty('os.version')} (${System.getProperty('os.arch')})"
            if (gradleJavaVersionDetails != compilerJavaVersionDetails || gradleJavaVersionDetails != runtimeJavaVersionDetails) {
                println "  Compiler JDK Version  : ${compilerJavaVersionEnum} (${compilerJavaVersionDetails})"
                println "  Compiler java.home    : ${compilerJavaHome}"
                println "  Runtime JDK Version   : ${runtimeJavaVersionEnum} (${runtimeJavaVersionDetails})"
                println "  Runtime java.home     : ${runtimeJavaHome}"
                println "  Gradle JDK Version    : ${JavaVersion.toVersion(gradleJavaVersion)} (${gradleJavaVersionDetails})"
                println "  Gradle java.home      : ${gradleJavaHome}"
            } else {
                println "  JDK Version           : ${JavaVersion.toVersion(gradleJavaVersion)} (${gradleJavaVersionDetails})"
                println "  JAVA_HOME             : ${gradleJavaHome}"
            }
            println "  Random Testing Seed   : ${project.testSeed}"
            println '======================================='

            // enforce Java version
            if (compilerJavaVersionEnum < minimumCompilerVersion) {
                final String message =
                        "the compiler java.home must be set to a JDK installation directory for Java ${minimumCompilerVersion}" +
                                " but is [${compilerJavaHome}] corresponding to [${compilerJavaVersionEnum}]"
                throw new GradleException(message)
            }

            if (runtimeJavaVersionEnum < minimumRuntimeVersion) {
                final String message =
                        "the runtime java.home must be set to a JDK installation directory for Java ${minimumRuntimeVersion}" +
                                " but is [${runtimeJavaHome}] corresponding to [${runtimeJavaVersionEnum}]"
                throw new GradleException(message)
            }

            // verify each configured JAVA<N>_HOME really points at a JDK of version N
            for (final Map.Entry<Integer, String> javaVersionEntry : javaVersions.entrySet()) {
                final String javaHome = javaVersionEntry.getValue()
                if (javaHome == null) {
                    continue
                }
                JavaVersion javaVersionEnum = JavaVersion.toVersion(findJavaSpecificationVersion(project, javaHome))
                final JavaVersion expectedJavaVersionEnum
                final int version = javaVersionEntry.getKey()
                if (version < 9) {
                    // pre-9 JDKs report specification versions of the form "1.<n>"
                    expectedJavaVersionEnum = JavaVersion.toVersion("1." + version)
                } else {
                    expectedJavaVersionEnum = JavaVersion.toVersion(Integer.toString(version))
                }
                if (javaVersionEnum != expectedJavaVersionEnum) {
                    final String message =
                            "the environment variable JAVA" + version + "_HOME must be set to a JDK installation directory for Java" +
                                    " ${expectedJavaVersionEnum} but is [${javaHome}] corresponding to [${javaVersionEnum}]"
                    throw new GradleException(message)
                }
            }

            // publish the results on the root project so every project can reuse them
            project.rootProject.ext.compilerJavaHome = compilerJavaHome
            project.rootProject.ext.runtimeJavaHome = runtimeJavaHome
            project.rootProject.ext.compilerJavaVersion = compilerJavaVersionEnum
            project.rootProject.ext.runtimeJavaVersion = runtimeJavaVersionEnum
            project.rootProject.ext.javaVersions = javaVersions
            project.rootProject.ext.buildChecksDone = true
            project.rootProject.ext.minimumCompilerVersion = minimumCompilerVersion
            project.rootProject.ext.minimumRuntimeVersion = minimumRuntimeVersion
            project.rootProject.ext.inFipsJvm = inFipsJvm
            project.rootProject.ext.gradleJavaVersion = JavaVersion.toVersion(gradleJavaVersion)
            // lazy GString: findJavaHome("9") only runs (and only requires JAVA9_HOME)
            // when the value is actually stringified
            project.rootProject.ext.java9Home = "${-> findJavaHome("9")}"
            project.rootProject.ext.defaultParallel = findDefaultParallel(project.rootProject)
            project.rootProject.ext.gitRevision = gitRevision(project)
            project.rootProject.ext.buildDate = ZonedDateTime.now(ZoneOffset.UTC);
        }

        project.targetCompatibility = project.rootProject.ext.minimumRuntimeVersion
        project.sourceCompatibility = project.rootProject.ext.minimumRuntimeVersion

        // set java home for each project, so they dont have to find it in the root project
        project.ext.compilerJavaHome = project.rootProject.ext.compilerJavaHome
        project.ext.runtimeJavaHome = project.rootProject.ext.runtimeJavaHome
        project.ext.compilerJavaVersion = project.rootProject.ext.compilerJavaVersion
        project.ext.runtimeJavaVersion = project.rootProject.ext.runtimeJavaVersion
        project.ext.javaVersions = project.rootProject.ext.javaVersions
        project.ext.inFipsJvm = project.rootProject.ext.inFipsJvm
        project.ext.gitRevision = project.rootProject.ext.gitRevision
        project.ext.buildDate = project.rootProject.ext.buildDate
        project.ext.gradleJavaVersion = project.rootProject.ext.gradleJavaVersion
        project.ext.java9Home = project.rootProject.ext.java9Home
    }
  245 
    /**
     * Registers {@code task} as requiring a working Docker installation.
     *
     * On the first call this installs a task-graph listener; when the graph is ready
     * and contains any registered task, the listener verifies that a Docker binary
     * exists, that its version supports multi-stage builds (17.05+), and that a
     * privileged command ({@code docker images}) succeeds — failing the build
     * otherwise. When Docker is unavailable (and -Dbuild.docker is not forced true),
     * the task is simply disabled instead.
     */
    static void requireDocker(final Task task) {
        final Project rootProject = task.project.rootProject
        if (rootProject.hasProperty('requiresDocker') == false) {
            /*
             * This is our first time encountering a task that requires Docker. We will add an extension that will let us track the tasks
             * that register as requiring Docker. We will add a delayed execution that when the task graph is ready if any such tasks are
             * in the task graph, then we check two things:
             *  - the Docker binary is available
             *  - we can execute a Docker command that requires privileges
             *
             *  If either of these fail, we fail the build.
             */

            // check if the Docker binary exists and record its path
            final List<String> maybeDockerBinaries = ['/usr/bin/docker', '/usr/local/bin/docker']
            final String dockerBinary = maybeDockerBinaries.find { it -> new File(it).exists() }

            // -Dbuild.docker=true/false overrides auto-detection; unset means
            // "enabled iff a Docker binary was found"
            final boolean buildDocker
            final String buildDockerProperty = System.getProperty("build.docker")
            if (buildDockerProperty == null) {
                buildDocker = dockerBinary != null
            } else if (buildDockerProperty == "true") {
                buildDocker = true
            } else if (buildDockerProperty == "false") {
                buildDocker = false
            } else {
                throw new IllegalArgumentException(
                        "expected build.docker to be unset or one of \"true\" or \"false\" but was [" + buildDockerProperty + "]")
            }
            rootProject.rootProject.ext.buildDocker = buildDocker
            rootProject.rootProject.ext.requiresDocker = []
            rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph ->
                // only validate Docker when a registered task is actually scheduled
                final List<String> tasks =
                        ((List<Task>)rootProject.requiresDocker).findAll { taskGraph.hasTask(it) }.collect { "  ${it.path}".toString()}
                if (tasks.isEmpty() == false) {
                    /*
                     * There are tasks in the task graph that require Docker. Now we are failing because either the Docker binary does not
                     * exist or because execution of a privileged Docker command failed.
                     */
                    if (dockerBinary == null) {
                        final String message = String.format(
                                Locale.ROOT,
                                "Docker (checked [%s]) is required to run the following task%s: \n%s",
                                maybeDockerBinaries.join(","),
                                tasks.size() > 1 ? "s" : "",
                                tasks.join('\n'))
                        throwDockerRequiredException(message)
                    }

                    // we use a multi-stage Docker build, check the Docker version since 17.05
                    final ByteArrayOutputStream dockerVersionOutput = new ByteArrayOutputStream()
                    LoggedExec.exec(
                            rootProject,
                            { ExecSpec it ->
                                it.commandLine = [dockerBinary, '--version']
                                it.standardOutput = dockerVersionOutput
                            })
                    final String dockerVersion = dockerVersionOutput.toString().trim()
                    checkDockerVersionRecent(dockerVersion)

                    final ByteArrayOutputStream dockerImagesErrorOutput = new ByteArrayOutputStream()
                    // the Docker binary executes, check that we can execute a privileged command
                    final ExecResult dockerImagesResult = LoggedExec.exec(
                            rootProject,
                            { ExecSpec it ->
                                it.commandLine = [dockerBinary, "images"]
                                it.errorOutput = dockerImagesErrorOutput
                                // capture the exit code ourselves so we can build a
                                // detailed failure message below
                                it.ignoreExitValue = true
                            })

                    if (dockerImagesResult.exitValue != 0) {
                        final String message = String.format(
                                Locale.ROOT,
                                "a problem occurred running Docker from [%s] yet it is required to run the following task%s: \n%s\n" +
                                        "the problem is that Docker exited with exit code [%d] with standard error output [%s]",
                                dockerBinary,
                                tasks.size() > 1 ? "s" : "",
                                tasks.join('\n'),
                                dockerImagesResult.exitValue,
                                dockerImagesErrorOutput.toString().trim())
                        throwDockerRequiredException(message)
                    }

                }
            }
        }
        if (rootProject.buildDocker) {
            rootProject.requiresDocker.add(task)
        } else {
            task.enabled = false
        }
    }
  338 
  339     protected static void checkDockerVersionRecent(String dockerVersion) {
  340         final Matcher matcher = dockerVersion =~ /Docker version (\d+\.\d+)\.\d+(?:-ce)?, build [0-9a-f]{7,40}/
  341         assert matcher.matches(): dockerVersion
  342         final dockerMajorMinorVersion = matcher.group(1)
  343         final String[] majorMinor = dockerMajorMinorVersion.split("\\.")
  344         if (Integer.parseInt(majorMinor[0]) < 17
  345                 || (Integer.parseInt(majorMinor[0]) == 17 && Integer.parseInt(majorMinor[1]) < 5)) {
  346             final String message = String.format(
  347                     Locale.ROOT,
  348                     "building Docker images requires Docker version 17.05+ due to use of multi-stage builds yet was [%s]",
  349                     dockerVersion)
  350             throwDockerRequiredException(message)
  351         }
  352     }
  353 
  354     private static void throwDockerRequiredException(final String message) {
  355         throw new GradleException(
  356                 message + "\nyou can address this by attending to the reported issue, "
  357                         + "removing the offending tasks from being executed, "
  358                         + "or by passing -Dbuild.docker=false")
  359     }
  360 
  361     private static String findCompilerJavaHome() {
  362         String compilerJavaHome = System.getenv('JAVA_HOME')
  363         final String compilerJavaProperty = System.getProperty('compiler.java')
  364         if (compilerJavaProperty != null) {
  365             compilerJavaHome = findJavaHome(compilerJavaProperty)
  366         }
  367         if (compilerJavaHome == null) {
  368             if (System.getProperty("idea.executable") != null || System.getProperty("eclipse.launcher") != null) {
  369                 // IntelliJ does not set JAVA_HOME, so we use the JDK that Gradle was run with
  370                 return Jvm.current().javaHome
  371             } else {
  372                 throw new GradleException(
  373                         " " + System.getProperties().toString() + " " +
  374                         "JAVA_HOME must be set to build Elasticsearch. " +
  375                                 "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
  376                                 "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
  377                 )
  378             }
  379         }
  380         return compilerJavaHome
  381     }
  382 
  383     private static String findJavaHome(String version) {
  384         String versionedVarName = getJavaHomeEnvVarName(version)
  385         String versionedJavaHome = System.getenv(versionedVarName);
  386         if (versionedJavaHome == null) {
  387             throw new GradleException(
  388                     "$versionedVarName must be set to build Elasticsearch. " +
  389                             "Note that if the variable was just set you might have to run `./gradlew --stop` for " +
  390                             "it to be picked up. See https://github.com/elastic/elasticsearch/issues/31399 details."
  391             )
  392         }
  393         return versionedJavaHome
  394     }
  395 
  396     private static String getJavaHomeEnvVarName(String version) {
  397         return 'JAVA' + version + '_HOME'
  398     }
  399 
  400     /** Add a check before gradle execution phase which ensures java home for the given java version is set. */
  401     static void requireJavaHome(Task task, int version) {
  402         Project rootProject = task.project.rootProject // use root project for global accounting
  403         if (rootProject.hasProperty('requiredJavaVersions') == false) {
  404             rootProject.rootProject.ext.requiredJavaVersions = [:]
  405             rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph ->
  406                 List<String> messages = []
  407                 for (entry in rootProject.requiredJavaVersions) {
  408                     if (rootProject.javaVersions.get(entry.key) != null) {
  409                         continue
  410                     }
  411                     List<String> tasks = entry.value.findAll { taskGraph.hasTask(it) }.collect { "  ${it.path}" }
  412                     if (tasks.isEmpty() == false) {
  413                         messages.add("JAVA${entry.key}_HOME required to run tasks:\n${tasks.join('\n')}")
  414                     }
  415                 }
  416                 if (messages.isEmpty() == false) {
  417                     throw new GradleException(messages.join('\n'))
  418                 }
  419                 rootProject.rootProject.ext.requiredJavaVersions = null // reset to null to indicate the pre-execution checks have executed
  420             }
  421         } else if (rootProject.rootProject.requiredJavaVersions == null) {
  422             // check directly if the version is present since we are already executing
  423             if (rootProject.javaVersions.get(version) == null) {
  424                 throw new GradleException("JAVA${version}_HOME required to run task:\n${task}")
  425             }
  426         } else {
  427             rootProject.requiredJavaVersions.getOrDefault(version, []).add(task)
  428         }
  429     }
  430 
    /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */
    static String getJavaHome(final Task task, final int version) {
        // register the requirement so the build fails at task-graph time with a clear
        // message when the needed JAVA<version>_HOME is not configured
        requireJavaHome(task, version)
        return task.project.javaVersions.get(version)
    }
  436 
  437     private static String findRuntimeJavaHome(final String compilerJavaHome) {
  438         String runtimeJavaProperty = System.getProperty("runtime.java")
  439         if (runtimeJavaProperty != null) {
  440             return findJavaHome(runtimeJavaProperty)
  441         }
  442         return System.getenv('RUNTIME_JAVA_HOME') ?: compilerJavaHome
  443     }
  444 
  445     /** Finds printable java version of the given JAVA_HOME */
  446     private static String findJavaVersionDetails(Project project, String javaHome) {
  447         String versionInfoScript = 'print(' +
  448             'java.lang.System.getProperty("java.vendor") + " " + java.lang.System.getProperty("java.version") + ' +
  449             '" [" + java.lang.System.getProperty("java.vm.name") + " " + java.lang.System.getProperty("java.vm.version") + "]");'
  450         return runJavaAsScript(project, javaHome, versionInfoScript).trim()
  451     }
  452 
  453     /** Finds the parsable java specification version */
  454     private static String findJavaSpecificationVersion(Project project, String javaHome) {
  455         String versionScript = 'print(java.lang.System.getProperty("java.specification.version"));'
  456         return runJavaAsScript(project, javaHome, versionScript)
  457     }
  458 
  459     private static String findJavaVendor(Project project, String javaHome) {
  460         String vendorScript = 'print(java.lang.System.getProperty("java.vendor"));'
  461         return runJavaAsScript(project, javaHome, vendorScript)
  462     }
  463 
  464     /** Finds the parsable java specification version */
  465     private static String findJavaVersion(Project project, String javaHome) {
  466         String versionScript = 'print(java.lang.System.getProperty("java.version"));'
  467         return runJavaAsScript(project, javaHome, versionScript)
  468     }
  469 
    /** Runs the given script using jrunscript from the given JDK, and returns the trimmed standard output. */
    private static String runJavaAsScript(Project project, String javaHome, String script) {
        ByteArrayOutputStream stdout = new ByteArrayOutputStream()
        ByteArrayOutputStream stderr = new ByteArrayOutputStream()
        if (Os.isFamily(Os.FAMILY_WINDOWS)) {
            // gradle/groovy does not properly escape the double quote for windows
            script = script.replace('"', '\\"')
        }
        File jrunscriptPath = new File(javaHome, 'bin/jrunscript')
        ExecResult result = project.exec {
            executable = jrunscriptPath
            args '-e', script
            standardOutput = stdout
            errorOutput = stderr
            // capture the exit code ourselves so both streams can be logged on failure
            ignoreExitValue = true
        }
        if (result.exitValue != 0) {
            // surface the captured output to help diagnose a broken JDK before rethrowing
            project.logger.error("STDOUT:")
            stdout.toString('UTF-8').eachLine { line -> project.logger.error(line) }
            project.logger.error("STDERR:")
            stderr.toString('UTF-8').eachLine { line -> project.logger.error(line) }
            result.rethrowFailure()
        }
        return stdout.toString('UTF-8').trim()
    }
  495 
  496     /** Return the configuration name used for finding transitive deps of the given dependency. */
  497     private static String transitiveDepConfigName(String groupId, String artifactId, String version) {
  498         return "_transitive_${groupId}_${artifactId}_${version}"
  499     }
  500 
    /**
     * Makes dependencies non-transitive.
     *
     * Gradle allows setting all dependencies as non-transitive very easily.
     * Sadly this mechanism does not translate into maven pom generation. In order
     * to effectively make the pom act as if it has no transitive dependencies,
     * we must exclude each transitive dependency of each direct dependency.
     *
     * Determining the transitive deps of a dependency which has been resolved as
     * non-transitive is difficult because the process of resolving removes the
     * transitive deps. To sidestep this issue, we create a configuration per
     * direct dependency version. This specially named and unique configuration
     * will contain all of the transitive dependencies of this particular
     * dependency. We can then use this configuration during pom generation
     * to iterate the transitive dependencies and add excludes.
     */
    static void configureConfigurations(Project project) {
        // we want to test compileOnly deps!
        project.configurations.testCompile.extendsFrom(project.configurations.compileOnly)

        // we are not shipping these jars, we act like dumb consumers of these things
        if (project.path.startsWith(':test:fixtures') || project.path == ':build-tools') {
            return
        }
        // fail on any conflicting dependency versions
        project.configurations.all({ Configuration configuration ->
            if (configuration.name.startsWith('_transitive_')) {
                // don't force transitive configurations to not conflict with themselves, since
                // we just have them to find *what* transitive deps exist
                return
            }
            if (configuration.name.endsWith('Fixture')) {
                // just a self contained test-fixture configuration, likely transitive and hellacious
                return
            }
            configuration.resolutionStrategy {
                failOnVersionConflict()
            }
        })

        // force all dependencies added directly to compile/testCompile to be non-transitive, except for ES itself
        Closure disableTransitiveDeps = { Dependency dep ->
            if (dep instanceof ModuleDependency && !(dep instanceof ProjectDependency)
                    && dep.group.startsWith('org.elasticsearch') == false) {
                dep.transitive = false

                // also create a configuration just for this dependency version, so that later
                // we can determine which transitive dependencies it has
                String depConfig = transitiveDepConfigName(dep.group, dep.name, dep.version)
                if (project.configurations.findByName(depConfig) == null) {
                    project.configurations.create(depConfig)
                    project.dependencies.add(depConfig, "${dep.group}:${dep.name}:${dep.version}")
                }
            }
        }

        project.configurations.compile.dependencies.all(disableTransitiveDeps)
        project.configurations.testCompile.dependencies.all(disableTransitiveDeps)
        project.configurations.compileOnly.dependencies.all(disableTransitiveDeps)

        // the shadow plugin's 'bundle' configuration gets the same treatment when present
        project.plugins.withType(ShadowPlugin).whenPluginAdded {
            Configuration bundle = project.configurations.create('bundle')
            bundle.dependencies.all(disableTransitiveDeps)
        }
    }
  566 
    /** Adds repositories used by ES dependencies */
    static void configureRepositories(Project project) {
        // reject any repository (present or added later) that serves artifacts over an insecure protocol
        project.getRepositories().all { repository ->
            if (repository instanceof MavenArtifactRepository) {
                final MavenArtifactRepository maven = (MavenArtifactRepository) repository
                assertRepositoryURIIsSecure(maven.name, project.path, maven.getUrl())
                // a maven repo may serve artifacts from additional URLs; check each of those too
                repository.getArtifactUrls().each { uri -> assertRepositoryURIIsSecure(maven.name, project.path, uri) }
            } else if (repository instanceof IvyArtifactRepository) {
                final IvyArtifactRepository ivy = (IvyArtifactRepository) repository
                assertRepositoryURIIsSecure(ivy.name, project.path, ivy.getUrl())
            }
        }
        RepositoryHandler repos = project.repositories
        if (System.getProperty("repos.mavenLocal") != null) {
            // with -Drepos.mavenLocal=true we can force checking the local .m2 repo which is
            // useful for development ie. bwc tests where we install stuff in the local repository
            // such that we don't have to pass hardcoded files to gradle
            repos.mavenLocal()
        }
        repos.maven {
            name "elastic"
            url "https://artifacts.elastic.co/maven"
        }
        repos.jcenter()
        String luceneVersion = VersionProperties.lucene
        if (luceneVersion.contains('-snapshot')) {
            // extract the revision number from the version with a regex matcher
            String revision = (luceneVersion =~ /\w+-snapshot-([a-z0-9]+)/)[0][1]
            // lucene snapshot artifacts are published to a revision-specific S3 location
            repos.maven {
                name 'lucene-snapshots'
                url "https://s3.amazonaws.com/download.elasticsearch.org/lucenesnapshots/${revision}"
            }
        }
    }
  601 
  602     static void assertRepositoryURIIsSecure(final String repositoryName, final String projectPath, final URI uri) {
  603         if (uri != null && ["file", "https", "s3"].contains(uri.getScheme()) == false) {
  604             final String message = String.format(
  605                     Locale.ROOT,
  606                     "repository [%s] on project with path [%s] is not using a secure protocol for artifacts on [%s]",
  607                     repositoryName,
  608                     projectPath,
  609                     uri.toURL())
  610             throw new GradleException(message)
  611         }
  612     }
  613 
  614     /**
  615      * Returns a closure which can be used with a MavenPom for fixing problems with gradle generated poms.
  616      *
  617      * <ul>
  618      *     <li>Remove transitive dependencies. We currently exclude all artifacts explicitly instead of using wildcards
  619      *         as Ivy incorrectly translates POMs with * excludes to Ivy XML with * excludes which results in the main artifact
  620      *         being excluded as well (see https://issues.apache.org/jira/browse/IVY-1531). Note that Gradle 2.14+ automatically
  621      *         translates non-transitive dependencies to * excludes. We should revisit this when upgrading Gradle.</li>
  622      *     <li>Set compile time deps back to compile from runtime (known issue with maven-publish plugin)</li>
  623      * </ul>
  624      */
  625     private static Closure fixupDependencies(Project project) {
  626         return { XmlProvider xml ->
  627             // first find if we have dependencies at all, and grab the node
  628             NodeList depsNodes = xml.asNode().get('dependencies')
  629             if (depsNodes.isEmpty()) {
  630                 return
  631             }
  632 
  633             // check each dependency for any transitive deps
  634             for (Node depNode : depsNodes.get(0).children()) {
  635                 String groupId = depNode.get('groupId').get(0).text()
  636                 String artifactId = depNode.get('artifactId').get(0).text()
  637                 String version = depNode.get('version').get(0).text()
  638 
  639                 // fix deps incorrectly marked as runtime back to compile time deps
  640                 // see https://discuss.gradle.org/t/maven-publish-plugin-generated-pom-making-dependency-scope-runtime/7494/4
  641                 boolean isCompileDep = project.configurations.compile.allDependencies.find { dep ->
  642                     dep.name == depNode.artifactId.text()
  643                 }
  644                 if (depNode.scope.text() == 'runtime' && isCompileDep) {
  645                     depNode.scope*.value = 'compile'
  646                 }
  647 
  648                 // remove any exclusions added by gradle, they contain wildcards and systems like ivy have bugs with wildcards
  649                 // see https://github.com/elastic/elasticsearch/issues/24490
  650                 NodeList exclusionsNode = depNode.get('exclusions')
  651                 if (exclusionsNode.size() > 0) {
  652                     depNode.remove(exclusionsNode.get(0))
  653                 }
  654 
  655                 // collect the transitive deps now that we know what this dependency is
  656                 String depConfig = transitiveDepConfigName(groupId, artifactId, version)
  657                 Configuration configuration = project.configurations.findByName(depConfig)
  658                 if (configuration == null) {
  659                     continue // we did not make this dep non-transitive
  660                 }
  661                 Set<ResolvedArtifact> artifacts = configuration.resolvedConfiguration.resolvedArtifacts
  662                 if (artifacts.size() <= 1) {
  663                     // this dep has no transitive deps (or the only artifact is itself)
  664                     continue
  665                 }
  666 
  667                 // we now know we have something to exclude, so add exclusions for all artifacts except the main one
  668                 Node exclusions = depNode.appendNode('exclusions')
  669                 for (ResolvedArtifact artifact : artifacts) {
  670                     ModuleVersionIdentifier moduleVersionIdentifier = artifact.moduleVersion.id;
  671                     String depGroupId = moduleVersionIdentifier.group
  672                     String depArtifactId = moduleVersionIdentifier.name
  673                     // add exclusions for all artifacts except the main one
  674                     if (depGroupId != groupId || depArtifactId != artifactId) {
  675                         Node exclusion = exclusions.appendNode('exclusion')
  676                         exclusion.appendNode('groupId', depGroupId)
  677                         exclusion.appendNode('artifactId', depArtifactId)
  678                     }
  679                 }
  680             }
  681         }
  682     }
  683 
    /** Configures generation of maven poms. */
    public static void configurePomGeneration(Project project) {
        // Only works with  `enableFeaturePreview('STABLE_PUBLISHING')`
        // https://github.com/gradle/gradle/issues/5696#issuecomment-396965185
        project.tasks.withType(GenerateMavenPom.class) { GenerateMavenPom generatePOMTask ->
            // The GenerateMavenPom task is aggressive about setting the destination, instead of fighting it,
            // just make a copy.
            generatePOMTask.ext.pomFileName = null
            doLast {
                project.copy {
                    from generatePOMTask.destination
                    into "${project.buildDir}/distributions"
                    // default to <archivesBaseName>-<version>.pom unless a task explicitly set pomFileName
                    rename {
                        generatePOMTask.ext.pomFileName == null ? 
                            "${project.archivesBaseName}-${project.version}.pom" : 
                            generatePOMTask.ext.pomFileName 
                    }
                }
            }
            // build poms with assemble (if the assemble task exists)
            Task assemble = project.tasks.findByName('assemble')
            if (assemble && assemble.enabled) {
                assemble.dependsOn(generatePOMTask)
            }
        }
        project.plugins.withType(MavenPublishPlugin.class).whenPluginAdded {
            project.publishing {
                publications {
                    all { MavenPublication publication -> // we only deal with maven
                        // add exclusions to the pom directly, for each of the transitive deps of this project's deps
                        publication.pom.withXml(fixupDependencies(project))
                    }
                }
            }
            project.plugins.withType(ShadowPlugin).whenPluginAdded {
                project.publishing {
                    publications {
                        // publish the shadow jar in place of the default jar artifact
                        nebula(MavenPublication) {
                            artifacts = [ project.tasks.shadowJar ]
                        }
                    }
                }
            }
        }
    }
  729 
  730     /**
  731      * Add dependencies that we are going to bundle to the compile classpath.
  732      */
  733     static void configureSourceSets(Project project) {
  734         project.plugins.withType(ShadowPlugin).whenPluginAdded {
  735             ['main', 'test'].each {name ->
  736                 SourceSet sourceSet = project.sourceSets.findByName(name)
  737                 if (sourceSet != null) {
  738                     sourceSet.compileClasspath += project.configurations.bundle
  739                 }
  740             }
  741         }
  742     }
  743 
    /** Adds compiler settings to the project */
    static void configureCompile(Project project) {
        // 'full' disables the -profile javac flag below; compact profiles only exist on older JDKs
        if (project.compilerJavaVersion < JavaVersion.VERSION_1_10) {
            project.ext.compactProfile = 'compact3'
        } else {
            project.ext.compactProfile = 'full'
        }
        // defer until evaluation so targetCompatibility and compilerJavaHome are final
        project.afterEvaluate {
            project.tasks.withType(JavaCompile) {
                final JavaVersion targetCompatibilityVersion = JavaVersion.toVersion(it.targetCompatibility)
                final compilerJavaHomeFile = new File(project.compilerJavaHome)
                // we only fork if the Gradle JDK is not the same as the compiler JDK
                if (compilerJavaHomeFile.canonicalPath == Jvm.current().javaHome.canonicalPath) {
                    options.fork = false
                } else {
                    options.fork = true
                    options.forkOptions.javaHome = compilerJavaHomeFile
                }
                if (targetCompatibilityVersion == JavaVersion.VERSION_1_8) {
                    // compile with compact 3 profile by default
                    // NOTE: this is just a compile time check: does not replace testing with a compact3 JRE
                    if (project.compactProfile != 'full') {
                        options.compilerArgs << '-profile' << project.compactProfile
                    }
                }
                /*
                 * -path because gradle will send in paths that don't always exist.
                 * -missing because we have tons of missing @returns and @param.
                 * -serial because we don't use java serialization.
                 */
                // don't even think about passing args with -J-xxx, oracle will ask you to submit a bug report :)
                // fail on all javac warnings
                options.compilerArgs << '-Werror' << '-Xlint:all,-path,-serial,-options,-deprecation' << '-Xdoclint:all' << '-Xdoclint:-missing'

                // either disable annotation processor completely (default) or allow to enable them if an annotation processor is explicitly defined
                if (options.compilerArgs.contains("-processor") == false) {
                    options.compilerArgs << '-proc:none'
                }

                options.encoding = 'UTF-8'
                options.incremental = true

                // TODO: use native Gradle support for --release when available (cf. https://github.com/gradle/gradle/issues/2510)
                options.compilerArgs << '--release' << targetCompatibilityVersion.majorVersion
            }
            // also apply release flag to groovy, which is used in build-tools
            project.tasks.withType(GroovyCompile) {
                final compilerJavaHomeFile = new File(project.compilerJavaHome)
                // we only fork if the Gradle JDK is not the same as the compiler JDK
                if (compilerJavaHomeFile.canonicalPath == Jvm.current().javaHome.canonicalPath) {
                    options.fork = false
                } else {
                    options.fork = true
                    options.forkOptions.javaHome = compilerJavaHomeFile
                    // NOTE(review): --release is only applied on the forked path here, unlike the
                    // JavaCompile case above where it is always added — confirm this asymmetry is intended
                    options.compilerArgs << '--release' << JavaVersion.toVersion(it.targetCompatibility).majorVersion
                }
            }
        }
    }
  803 
  804     static void configureJavadoc(Project project) {
  805         // remove compiled classes from the Javadoc classpath: http://mail.openjdk.java.net/pipermail/javadoc-dev/2018-January/000400.html
  806         final List<File> classes = new ArrayList<>()
  807         project.tasks.withType(JavaCompile) { javaCompile ->
  808             classes.add(javaCompile.destinationDir)
  809         }
  810         project.tasks.withType(Javadoc) { javadoc ->
  811             javadoc.executable = new File(project.compilerJavaHome, 'bin/javadoc')
  812             javadoc.classpath = javadoc.getClasspath().filter { f ->
  813                 return classes.contains(f) == false
  814             }
  815             /*
  816              * Generate docs using html5 to suppress a warning from `javadoc`
  817              * that the default will change to html5 in the future.
  818              */
  819             javadoc.options.addBooleanOption('html5', true)
  820         }
  821         configureJavadocJar(project)
  822     }
  823 
  824     /** Adds a javadocJar task to generate a jar containing javadocs. */
  825     static void configureJavadocJar(Project project) {
  826         Jar javadocJarTask = project.task('javadocJar', type: Jar)
  827         javadocJarTask.classifier = 'javadoc'
  828         javadocJarTask.group = 'build'
  829         javadocJarTask.description = 'Assembles a jar containing javadocs.'
  830         javadocJarTask.from(project.tasks.getByName(JavaPlugin.JAVADOC_TASK_NAME))
  831         project.assemble.dependsOn(javadocJarTask)
  832     }
  833 
  834     static void configureSourcesJar(Project project) {
  835         Jar sourcesJarTask = project.task('sourcesJar', type: Jar)
  836         sourcesJarTask.classifier = 'sources'
  837         sourcesJarTask.group = 'build'
  838         sourcesJarTask.description = 'Assembles a jar containing source files.'
  839         sourcesJarTask.from(project.sourceSets.main.allSource)
  840         project.assemble.dependsOn(sourcesJarTask)
  841     }
  842 
    /** Adds additional manifest info to jars */
    static void configureJars(Project project) {
        // projects must set these before jars are built; enforced in the afterEvaluate block below
        project.ext.licenseFile = null
        project.ext.noticeFile = null
        project.tasks.withType(Jar) { Jar jarTask ->
            // we put all our distributable files under distributions
            jarTask.destinationDir = new File(project.buildDir, 'distributions')
            // fixup the jar manifest
            jarTask.doFirst {
                // this doFirst is added before the info plugin, therefore it will run
                // after the doFirst added by the info plugin, and we can override attributes
                jarTask.manifest.attributes(
                        // TODO: remove using the short hash
                        'Change': ((String)project.gitRevision).substring(0, 7),
                        'X-Compile-Elasticsearch-Version': VersionProperties.elasticsearch.replace("-SNAPSHOT", ""),
                        'X-Compile-Lucene-Version': VersionProperties.lucene,
                        'X-Compile-Elasticsearch-Snapshot': VersionProperties.isElasticsearchSnapshot(),
                        'Build-Date': project.buildDate,
                        'Build-Java-Version': project.compilerJavaVersion)
                // Force manifest entries that change by nature to a constant to be able to compare builds more effectively
                if (System.properties.getProperty("build.compare_friendly", "false") == "true") {
                    jarTask.manifest.getAttributes().clear()
                }
            }
            // add license/notice files
            project.afterEvaluate {
                if (project.licenseFile == null || project.noticeFile == null) {
                    throw new GradleException("Must specify license and notice file for project ${project.path}")
                }
                // copy the license/notice into META-INF under canonical names, whatever the source files are called
                jarTask.metaInf {
                    from(project.licenseFile.parent) {
                        include project.licenseFile.name
                        rename { 'LICENSE.txt' }
                    }
                    from(project.noticeFile.parent) {
                        include project.noticeFile.name
                        rename { 'NOTICE.txt' }
                    }
                }
            }
        }
        project.plugins.withType(ShadowPlugin).whenPluginAdded {
            /*
             * When we use the shadow plugin we entirely replace the
             * normal jar with the shadow jar so we no longer want to run
             * the jar task.
             */
            project.tasks.jar.enabled = false
            project.tasks.shadowJar {
                /*
                 * Replace the default "shadow" classifier with null
                 * which will leave the classifier off of the file name.
                 */
                classifier = null
                /*
                 * Not all cases need service files merged but it is
                 * better to be safe
                 */
                mergeServiceFiles()
                /*
                 * Bundle dependencies of the "bundled" configuration.
                 */
                configurations = [project.configurations.bundle]
            }
            // Make sure we assemble the shadow jar
            project.tasks.assemble.dependsOn project.tasks.shadowJar
            project.artifacts {
                apiElements project.tasks.shadowJar
            }
        }
    }
  914 
    /**
     * Applies the shared configuration for all randomized testing tasks: runtime JVM
     * selection, heap/system properties, log filtering, and shadow-jar aware classpaths.
     */
    static void applyCommonTestConfig(Project project) {
        project.tasks.withType(RandomizedTestingTask) {task ->
            // run tests with the runtime JDK, which may differ from the compiler/Gradle JDK
            jvm "${project.runtimeJavaHome}/bin/java"
            parallelism System.getProperty('tests.jvms', project.rootProject.ext.defaultParallel)
            ifNoTests 'fail'
            onNonEmptyWorkDirectory 'wipe'
            leaveTemporary true
            // wire the test source set's outputs into the task; matching{} handles late source set creation
            project.sourceSets.matching { it.name == "test" }.all { test ->
                task.testClassesDirs = test.output.classesDirs
                task.classpath = test.runtimeClasspath
            }
            group =  JavaBasePlugin.VERIFICATION_GROUP
            dependsOn 'testClasses'

            // Make sure all test tasks are configured properly
            if (name != "test") {
                project.tasks.matching { it.name == "test"}.all { testTask ->
                    task.shouldRunAfter testTask
                }
            }
            if (name == "unitTest") {
                include("**/*Tests.class")
            }

            // TODO: why are we not passing maxmemory to junit4?
            jvmArg '-Xmx' + System.getProperty('tests.heap.size', '512m')
            jvmArg '-Xms' + System.getProperty('tests.heap.size', '512m')
            // dump the heap to build/heapdump on OOME to ease debugging of memory failures
            jvmArg '-XX:+HeapDumpOnOutOfMemoryError'
            File heapdumpDir = new File(project.buildDir, 'heapdump')
            heapdumpDir.mkdirs()
            jvmArg '-XX:HeapDumpPath=' + heapdumpDir
            if (project.runtimeJavaVersion >= JavaVersion.VERSION_1_9) {
                // on JDK 9+ warn (rather than fail) on illegal reflective access
                jvmArg '--illegal-access=warn'
            }
            argLine System.getProperty('tests.jvm.argline')

            // we use './temp' since this is per JVM and tests are forbidden from writing to CWD
            systemProperty 'java.io.tmpdir', './temp'
            systemProperty 'java.awt.headless', 'true'
            systemProperty 'tests.gradle', 'true'
            systemProperty 'tests.artifact', project.name
            systemProperty 'tests.task', path
            systemProperty 'tests.security.manager', 'true'
            systemProperty 'jna.nosys', 'true'
            // TODO: remove this deprecation compatibility setting for 7.0
            systemProperty 'es.aggregations.enable_scripted_metric_agg_param', 'false'
            systemProperty 'compiler.java', project.ext.compilerJavaVersion.getMajorVersion()
            if (project.ext.inFipsJvm) {
                systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion() + "FIPS"
            } else {
                systemProperty 'runtime.java', project.ext.runtimeJavaVersion.getMajorVersion()
            }
            // TODO: remove setting logging level via system property
            systemProperty 'tests.logger.level', 'WARN'
            // forward all tests.* and es.* system properties from the Gradle JVM to the test JVMs
            for (Map.Entry<String, String> property : System.properties.entrySet()) {
                if (property.getKey().startsWith('tests.') ||
                        property.getKey().startsWith('es.')) {
                    if (property.getKey().equals('tests.seed')) {
                        /* The seed is already set on the project so we
                         * shouldn't attempt to override it. */
                        continue;
                    }
                    systemProperty property.getKey(), property.getValue()
                }
            }

            // TODO: remove this once ctx isn't added to update script params in 7.0
            systemProperty 'es.scripting.update.ctx_in_params', 'false'

            //TODO: remove this once the cname is prepended to the address by default in 7.0
            systemProperty 'es.http.cname_in_publish_address', 'true'

            // Set the system keystore/truststore password if we're running tests in a FIPS-140 JVM
            if (project.inFipsJvm) {
                systemProperty 'javax.net.ssl.trustStorePassword', 'password'
                systemProperty 'javax.net.ssl.keyStorePassword', 'password'
            }

            // honor -Dtests.asserts=false to run without assertions enabled
            boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))
            enableSystemAssertions assertionsEnabled
            enableAssertions assertionsEnabled

            testLogging {
                showNumFailuresAtEnd 25
                slowTests {
                    heartbeat 10
                    summarySize 5
                }
                stackTraceFilters {
                    // custom filters: we carefully only omit test infra noise here
                    contains '.SlaveMain.'
                    regex(/^(\s+at )(org\.junit\.)/)
                    // also includes anonymous classes inside these two:
                    regex(/^(\s+at )(com\.carrotsearch\.randomizedtesting\.RandomizedRunner)/)
                    regex(/^(\s+at )(com\.carrotsearch\.randomizedtesting\.ThreadLeakControl)/)
                    regex(/^(\s+at )(com\.carrotsearch\.randomizedtesting\.rules\.)/)
                    regex(/^(\s+at )(org\.apache\.lucene\.util\.TestRule)/)
                    regex(/^(\s+at )(org\.apache\.lucene\.util\.AbstractBeforeAfterRule)/)
                }
                if (System.getProperty('tests.class') != null && System.getProperty('tests.output') == null) {
                    // if you are debugging, you want to see the output!
                    outputMode 'always'
                } else {
                    outputMode System.getProperty('tests.output', 'onerror')
                }
            }

            balancers {
                executionTime cacheFilename: ".local-${project.version}-${name}-execution-times.log"
            }

            listeners {
                junitReport()
            }

            // skip inner/anonymous classes; test classes are expected to be top level
            exclude '**/*$*.class'

            project.plugins.withType(ShadowPlugin).whenPluginAdded {
                // Test against a shadow jar if we made one
                classpath -= project.tasks.compileJava.outputs.files
                classpath += project.tasks.shadowJar.outputs.files
                dependsOn project.tasks.shadowJar
            }
        }
    }
 1040 
    /**
     * Determines the default number of parallel test JVMs as the number of physical
     * (non-hyper-threaded) cores: parsed from /proc/cpuinfo on Linux, from sysctl on
     * macOS, and 'auto' everywhere else.
     */
    private static String findDefaultParallel(Project project) {
        if (project.file("/proc/cpuinfo").exists()) {
            // Count physical cores on any Linux distro ( don't count hyper-threading )
            Map<String, Integer> socketToCore = [:]
            String currentID = ""
            project.file("/proc/cpuinfo").readLines().forEach({ line ->
                if (line.contains(":")) {
                    List<String> parts = line.split(":", 2).collect({it.trim()})
                    String name = parts[0], value = parts[1]
                    // the ID of the CPU socket
                    if (name == "physical id") {
                        currentID = value
                    }
                    // Number  of cores not including hyper-threading
                    if (name == "cpu cores") {
                        // NOTE(review): assumes every 'cpu cores' line is preceded by a 'physical id'
                        // line — the assert below fires otherwise (e.g. on some VMs) — confirm
                        assert currentID.isEmpty() == false
                        socketToCore[currentID] = Integer.valueOf(value)
                        currentID = ""
                    }
                }
            })
            // sum cores across all sockets; duplicate 'physical id' entries collapse in the map
            return socketToCore.values().sum().toString();
        } else if ('Mac OS X'.equals(System.getProperty('os.name'))) {
            // Ask macOS to count physical CPUs for us
            ByteArrayOutputStream stdout = new ByteArrayOutputStream()
            project.exec {
                executable 'sysctl'
                args '-n', 'hw.physicalcpu'
                standardOutput = stdout
            }
            return stdout.toString('UTF-8').trim();
        }
        return 'auto';
    }
 1075 
    /**
     * Resolves the current git revision by reading the repository refs directly,
     * without forking a git process. Returns "unknown" when there is no .git entry
     * (e.g. when building from a source archive rather than a checkout).
     */
    private static String gitRevision(final Project project) {
        try {
            /*
             * We want to avoid forking another process to run git rev-parse HEAD. Instead, we will read the refs manually. The
             * documentation for this follows from https://git-scm.com/docs/gitrepository-layout and https://git-scm.com/docs/git-worktree.
             *
             * There are two cases to consider:
             *  - a plain repository with .git directory at the root of the working tree
             *  - a worktree with a plain text .git file at the root of the working tree
             *
             * In each case, our goal is to parse the HEAD file to get either a ref or a bare revision (in the case of being in detached
             * HEAD state).
             *
             * In the case of a plain repository, we can read the HEAD file directly, resolved directly from the .git directory.
             *
             * In the case of a worktree, we read the gitdir from the plain text .git file. This resolves to a directory from which we read
             * the HEAD file and resolve commondir to the plain git repository.
             */
            final Path dotGit = project.getRootProject().getRootDir().toPath().resolve(".git");
            String revision;
            if (Files.exists(dotGit) == false) {
                // no .git at the root: not a git checkout at all
                return "unknown";
            }
            final Path head;
            final Path gitDir;
            if (Files.isDirectory(dotGit)) {
                // this is a git repository, we can read HEAD directly
                head = dotGit.resolve("HEAD");
                gitDir = dotGit;
            } else {
                // this is a git worktree, follow the pointer to the repository
                final Path workTree = Paths.get(readFirstLine(dotGit).substring("gitdir:".length()).trim());
                head = workTree.resolve("HEAD");
                final Path commonDir = Paths.get(readFirstLine(workTree.resolve("commondir")));
                if (commonDir.isAbsolute()) {
                    gitDir = commonDir;
                } else {
                    // this is the common case
                    gitDir = workTree.resolve(commonDir);
                }
            }
            final String ref = readFirstLine(head);
            if (ref.startsWith("ref:")) {
                // HEAD points at a branch; dereference it to the actual commit hash
                revision = readFirstLine(gitDir.resolve(ref.substring("ref:".length()).trim()));
            } else {
                // we are in detached HEAD state
                revision = ref;
            }
            return revision;
        } catch (final IOException e) {
            // for now, do not be lenient until we have better understanding of real-world scenarios where this happens
            throw new GradleException("unable to read the git revision", e);
        }
    }
 1130 
 1131     private static String readFirstLine(final Path path) throws IOException {
 1132         return Files.lines(path, StandardCharsets.UTF_8)
 1133                 .findFirst()
 1134                 .orElseThrow(
 1135                         new Supplier<IOException>() {
 1136 
 1137                             @Override
 1138                             IOException get() {
 1139                                 return new IOException("file [" + path + "] is empty");
 1140                             }
 1141 
 1142                         });
 1143     }
 1144 
 1145     /** Configures the test task */
 1146     static Task configureTest(Project project) {
 1147         project.tasks.getByName('test') {
 1148             include '**/*Tests.class'
 1149         }
 1150     }
 1151 
 1152     private static configurePrecommit(Project project) {
 1153         Task precommit = PrecommitTasks.create(project, true)
 1154         project.check.dependsOn(precommit)
 1155         project.test.mustRunAfter(precommit)
 1156         // only require dependency licenses for non-elasticsearch deps
 1157         project.dependencyLicenses.dependencies = project.configurations.runtime.fileCollection {
 1158             it.group.startsWith('org.elasticsearch') == false
 1159         } - project.configurations.compileOnly
 1160         project.plugins.withType(ShadowPlugin).whenPluginAdded {
 1161             project.dependencyLicenses.dependencies += project.configurations.bundle.fileCollection {
 1162                 it.group.startsWith('org.elasticsearch') == false
 1163             }
 1164         }
 1165     }
 1166 
 1167     private static configureDependenciesInfo(Project project) {
 1168         Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class)
 1169         deps.runtimeConfiguration = project.configurations.runtime
 1170         project.plugins.withType(ShadowPlugin).whenPluginAdded {
 1171             deps.runtimeConfiguration = project.configurations.create('infoDeps')
 1172             deps.runtimeConfiguration.extendsFrom(project.configurations.runtime, project.configurations.bundle)
 1173         }
 1174         deps.compileOnlyConfiguration = project.configurations.compileOnly
 1175         project.afterEvaluate {
 1176             deps.mappings = project.dependencyLicenses.mappings
 1177         }
 1178     }
 1179 }