/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import com.avast.gradle.dockercompose.tasks.ComposePull
import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin
import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.internal.BuildPlugin
import org.elasticsearch.gradle.Version
import org.elasticsearch.gradle.VersionProperties
import org.elasticsearch.gradle.internal.info.BuildParams
import org.elasticsearch.gradle.plugin.PluginBuildPlugin
import org.gradle.plugins.ide.eclipse.model.AccessRule
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.gradle.util.DistributionLocator
import org.gradle.util.GradleVersion
import static org.elasticsearch.gradle.util.GradleUtils.maybeConfigure
import org.gradle.plugins.ide.eclipse.model.ProjectDependency
import org.elasticsearch.gradle.testclusters.TestClustersPlugin
import org.elasticsearch.gradle.internal.test.RestTestBasePlugin
import org.elasticsearch.gradle.internal.InternalPluginBuildPlugin
import org.elasticsearch.gradle.internal.InternalTestClustersPlugin
plugins {
id 'lifecycle-base'
id 'elasticsearch.docker-support'
id 'elasticsearch.global-build-info'
id 'elasticsearch.build-scan'
id 'elasticsearch.build-complete'
id 'elasticsearch.jdk-download'
id 'elasticsearch.internal-distribution-download'
id 'elasticsearch.runtime-jdk-provision'
id 'elasticsearch.ide'
id 'elasticsearch.forbidden-dependencies'
id 'elasticsearch.formatting'
id 'elasticsearch.local-distribution'
id 'elasticsearch.fips'
id 'elasticsearch.internal-testclusters'
id 'elasticsearch.run'
id "com.diffplug.spotless" version "5.12.0" apply false
}
// common maven publishing configuration
allprojects {
group = 'org.elasticsearch'
version = VersionProperties.elasticsearch
description = "Elasticsearch subproject ${project.path}"
}
String licenseCommit
if (VersionProperties.elasticsearch.toString().endsWith('-SNAPSHOT')) {
licenseCommit = BuildParams.gitRevision ?: "master" // leniency for non git builds
} else {
licenseCommit = "v${version}"
}
String elasticLicenseUrl = "https://raw.githubusercontent.com/elastic/elasticsearch/${licenseCommit}/licenses/ELASTIC-LICENSE-2.0.txt"
subprojects {
// We disable this plugin for now until we have shaken out the issues we see
// e.g. see https://github.com/elastic/elasticsearch/issues/72169
// apply plugin:'elasticsearch.internal-test-rerun'
// Default to the SSPL+Elastic dual license
project.ext.projectLicenses = [
'Server Side Public License, v 1': 'https://www.mongodb.com/licensing/server-side-public-license',
'Elastic License 2.0': elasticLicenseUrl
]
// But stick the Elastic license url in project.ext so we can get it if we need to switch to it
project.ext.elasticLicenseUrl = elasticLicenseUrl
// we only use maven publish to add tasks for pom generation
plugins.withType(MavenPublishPlugin).whenPluginAdded {
publishing {
publications {
// add license information to generated poms
all {
pom.withXml { XmlProvider xml ->
Node node = xml.asNode()
node.appendNode('inceptionYear', '2009')
Node licensesNode = node.appendNode('licenses')
project.ext.projectLicenses.forEach { licenseName, licenseUrl ->
Node license = licensesNode.appendNode('license')
license.appendNode('name', licenseName)
license.appendNode('url', licenseUrl)
license.appendNode('distribution', 'repo')
}
Node developer = node.appendNode('developers').appendNode('developer')
developer.appendNode('name', 'Elastic')
developer.appendNode('url', 'https://www.elastic.co')
}
}
}
repositories {
maven {
name = 'test'
url = "${rootProject.buildDir}/local-test-repo"
}
}
}
}
plugins.withType(BuildPlugin).whenPluginAdded {
project.licenseFile = project.rootProject.file('licenses/SSPL-1.0+ELASTIC-LICENSE-2.0.txt')
project.noticeFile = project.rootProject.file('NOTICE.txt')
}
plugins.withType(InternalPluginBuildPlugin).whenPluginAdded {
project.dependencies {
compileOnly project(":server")
testImplementation project(":test:framework")
}
}
// Ultimately the RestTestBasePlugin should apply the InternalTestClustersPlugin itself instead of TestClustersPlugin,
// but this requires major rework of the functional test infrastructure.
// TODO: This will be addressed once we have https://github.com/elastic/elasticsearch/issues/71593 resolved
project.plugins.withType(RestTestBasePlugin) {
project.plugins.apply(InternalTestClustersPlugin)
}
}
/**
* This is a convenience method for declaring test artifact dependencies provided by the internal
* test artifact plugin. It is essentially shorthand for the longer dependency notation with an explicit
* capability declaration, i.e.:
*
* testImplementation(project(xpackModule('repositories-metering-api'))) {
* capabilities {
* requireCapability("org.elasticsearch.gradle:repositories-metering-api-test-artifacts")
* }
* }
*
*/
ext.testArtifact = { p, String name = "test" ->
def projectDependency = p.dependencies.create(p)
projectDependency.capabilities {
requireCapabilities("org.elasticsearch.gradle:${projectDependency.name}-${name}-artifacts")
};
}
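// A usage sketch (module name borrowed from the example above, purely illustrative):
//
//   dependencies {
//     testImplementation(testArtifact(project(xpackModule('repositories-metering-api'))))
//   }
//
// which expands to the longer requireCapability notation shown in the comment block above.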
tasks.register("updateCIBwcVersions") {
doLast {
File yml = file(".ci/bwcVersions")
yml.text = ""
yml << "BWC_VERSION:\n"
BuildParams.bwcVersions.indexCompatible.each {
yml << " - \"$it\"\n"
}
}
}
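// The resulting .ci/bwcVersions file looks like this (version numbers illustrative):
//
//   BWC_VERSION:
//    - "7.17.0"
//    - "8.0.0"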
// build metadata from previous build, contains e.g. hashes for bwc builds
String buildMetadataValue = System.getenv('BUILD_METADATA')
if (buildMetadataValue == null) {
buildMetadataValue = ''
}
Map<String, String> buildMetadataMap = buildMetadataValue.tokenize(';').collectEntries {
def (String key, String value) = it.split('=')
return [key, value]
}
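// For illustration (key names are hypothetical): BUILD_METADATA="bwc_checkout=abc123;other_key=value"
// is parsed above into [bwc_checkout: 'abc123', other_key: 'value'].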
// injecting groovy property variables into all projects
allprojects {
project.ext {
// for ide hacks...
isEclipse = System.getProperty("eclipse.launcher") != null || // Detects gradle launched from Eclipse's IDE
System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff
gradle.startParameter.taskNames.contains('cleanEclipse')
buildMetadata = buildMetadataMap
}
}
tasks.register("verifyVersions") {
doLast {
if (gradle.startParameter.isOffline()) {
throw new GradleException("Must run in online mode to verify versions")
}
// Read the list of released versions from Maven Central.
// Fetch the metadata and parse the xml into Version instances, because that is more straightforward here
// than going through bwcVersions (VersionCollection).
new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
BuildParams.bwcVersions.compareToAuthoritative(
new XmlParser().parse(s)
.versioning.versions.version
.collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ }
.collect { Version.fromString(it) }
)
}
String ciYml = file(".ci/bwcVersions").text
BuildParams.bwcVersions.indexCompatible.each {
if (ciYml.contains("\"$it\"\n") == false) {
throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results");
}
}
}
}
/*
* When adding backcompat behavior that spans major versions, temporarily
* disabling the backcompat tests is necessary. This flag controls
* the enabled state of every bwc task. It should be set back to true
* after the backport of the backcompat code is complete.
*/
boolean bwc_tests_enabled = true
// place a PR link here when committing bwc changes:
String bwc_tests_disabled_issue = ""
/*
* FIPS 140-2 behavior was fixed in 7.11.0. Before that there was no way to run Elasticsearch in a
* JVM that was properly configured to be in FIPS mode with BCFIPS. For now we need to disable
* all bwc testing in FIPS mode.
*/
if (BuildParams.inFipsJvm) {
bwc_tests_enabled = false
bwc_tests_disabled_issue = "https://github.com/elastic/elasticsearch/issues/66772"
}
if (bwc_tests_enabled == false) {
if (bwc_tests_disabled_issue.isEmpty()) {
throw new GradleException("bwc_tests_disabled_issue must be set when bwc_tests_enabled == false")
}
println "========================= WARNING ========================="
println " Backwards compatibility tests are disabled!"
println "See ${bwc_tests_disabled_issue}"
println "==========================================================="
}
if (project.gradle.startParameter.taskNames.find { it.startsWith("checkPart") } != null) {
// Disable BWC tests for checkPart* tasks, as it's expected that these run in their own check
bwc_tests_enabled = false
}
subprojects {
ext.bwc_tests_enabled = bwc_tests_enabled
}
tasks.register("verifyBwcTestsEnabled") {
doLast {
if (bwc_tests_enabled == false) {
throw new GradleException('Bwc tests are disabled. They must be re-enabled after completing backcompat behavior backporting.')
}
}
}
tasks.register("branchConsistency") {
description 'Ensures this branch is internally consistent. For example, that versions constants match released versions.'
group 'Verification'
dependsOn ":verifyVersions", ":verifyBwcTestsEnabled"
}
allprojects {
// ignore missing javadocs
tasks.withType(Javadoc).configureEach { Javadoc javadoc ->
// the -quiet here is because of a bug in gradle, where a string option added
// by itself is not passed to the options. By adding quiet, both this option and
// the "value" -quiet are added, separated by a space. This is ok since the javadoc
// command already adds -quiet, so we are just duplicating it
// see https://discuss.gradle.org/t/add-custom-javadoc-option-that-does-not-take-an-argument/5959
javadoc.options.encoding = 'UTF8'
javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet')
}
project.afterEvaluate {
// Handle javadoc dependencies across projects. Order matters: the linksOffline for
// org.elasticsearch:elasticsearch must be the last one or all the links for the
// other packages (e.g. org.elasticsearch.client) will point to server rather than
// their own artifacts.
if (project.plugins.hasPlugin(BuildPlugin) || project.plugins.hasPlugin(PluginBuildPlugin)) {
String artifactsHost = VersionProperties.elasticsearch.endsWith("-SNAPSHOT") ? "https://snapshots.elastic.co" : "https://artifacts.elastic.co"
Closure sortClosure = { a, b -> b.group <=> a.group }
Closure depJavadocClosure = { shadowed, dep ->
if ((dep instanceof ProjectDependency) == false) {
return
}
Project upstreamProject = dep.dependencyProject
if (upstreamProject == null) {
return
}
if (shadowed) {
/*
* Include the source of shadowed upstream projects so we don't
* have to publish their javadoc.
*/
project.evaluationDependsOn(upstreamProject.path)
project.javadoc.source += upstreamProject.javadoc.source
/*
* Instead we need the upstream project's javadoc classpath so
* we don't barf on the classes that it references.
*/
project.javadoc.classpath += upstreamProject.javadoc.classpath
} else {
// Link to non-shadowed dependent projects
project.javadoc.dependsOn "${upstreamProject.path}:javadoc"
String externalLinkName = upstreamProject.archivesBaseName
String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + externalLinkName.replaceAll('\\.', '/') + '/' + dep.version
project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/"
}
}
boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin)
project.configurations.compileClasspath.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(hasShadow, c) })
project.configurations.compileOnly.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
if (hasShadow) {
// include any dependencies for shadow JAR projects that are *not* bundled in the shadow JAR
project.configurations.shadow.dependencies
.findAll()
.toSorted(sortClosure)
.each({ c -> depJavadocClosure(false, c) })
}
}
}
}
// Ensure similar tasks in dependent projects run first. The projectsEvaluated here is
// important because, while dependencies.all will pick up future dependencies,
// it is not necessarily true that the task exists in both projects at the time
// the dependency is added.
gradle.projectsEvaluated {
allprojects {
if (project.path == ':test:framework') {
// :test:framework:test cannot run before and after :server:test
return
}
tasks.matching { it.name.equals('integTest')}.configureEach {integTestTask ->
integTestTask.mustRunAfter tasks.matching { it.name.equals("test") }
}
configurations.matching { it.canBeResolved }.all { Configuration configuration ->
dependencies.matching { it instanceof ProjectDependency }.all { ProjectDependency dep ->
Project upstreamProject = dep.dependencyProject
if (project.path != upstreamProject?.path) {
for (String taskName : ['test', 'integTest']) {
project.tasks.matching { it.name == taskName }.configureEach {task ->
task.shouldRunAfter(upstreamProject.tasks.matching { upStreamTask -> upStreamTask.name == taskName })
}
}
}
}
}
}
}
// eclipse configuration
allprojects {
apply plugin: 'eclipse'
// Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
if (path != ':') {
eclipse.project.name = path
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
eclipse.project.name = eclipse.project.name.replace(':', '_')
}
}
plugins.withType(JavaBasePlugin) {
java.modularity.inferModulePath.set(false)
eclipse.classpath.file.whenMerged { classpath ->
/*
* give each source folder a unique corresponding output folder
* outside of the usual `build` folder. We can't put the build
* in the usual build folder because eclipse becomes *very* sad
* if we delete it. Which `gradlew clean` does all the time.
*/
int i = 0;
classpath.entries.findAll { it instanceof SourceFolder }.each { folder ->
i++;
folder.output = 'out/eclipse/' + i
}
// Starting with Gradle 6.7, test dependencies are not exposed by eclipse
// projects by default. This breaks project dependencies that use the `java-test-fixtures` plugin
// or dependencies on other projects' manually declared testArtifacts configurations.
// This issue is tracked in the Gradle issue tracker.
// See https://github.com/gradle/gradle/issues/14932 for further details
classpath.entries.findAll {
it instanceof ProjectDependency
}.each {
it.entryAttributes.remove('without_test_code')
}
}
}
/*
* Allow accessing com/sun/net/httpserver in projects that have
* configured forbidden apis to allow it.
*/
plugins.withType(ForbiddenApisPlugin) {
eclipse.classpath.file.whenMerged { classpath ->
if (false == forbiddenApisTest.bundledSignatures.contains('jdk-non-portable')) {
classpath.entries
.findAll { it.kind == "con" && it.toString().contains("org.eclipse.jdt.launching.JRE_CONTAINER") }
.each {
it.accessRules.add(new AccessRule("accessible", "com/sun/net/httpserver/*"))
}
}
}
}
File licenseHeaderFile
String prefix = ':x-pack'
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
prefix = prefix.replace(':', '_')
}
if (eclipse.project.name.startsWith(prefix)) {
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/elastic-license-2.0-header.txt')
} else {
licenseHeaderFile = new File(project.rootDir, 'buildSrc/src/main/resources/license-headers/sspl+elastic-license-header.txt')
}
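// The header text is spliced into the Eclipse .settings files by the replaceAll filter below, so real
// line separators must be turned into escaped \n (or \r\n on Windows) sequences; the doubled backslashes
// compensate for replaceAll treating backslashes in the replacement string specially.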
String lineSeparator = Os.isFamily(Os.FAMILY_WINDOWS) ? '\\\\r\\\\n' : '\\\\n'
String licenseHeader = licenseHeaderFile.getText('UTF-8').replace(System.lineSeparator(), lineSeparator)
tasks.register('copyEclipseSettings', Copy) {
mustRunAfter 'wipeEclipseSettings'
// TODO: "package this up" for external builds
from new File(project.rootDir, 'buildSrc/src/main/resources/eclipse.settings')
into '.settings'
filter { it.replaceAll('@@LICENSE_HEADER_TEXT@@', licenseHeader) }
}
// otherwise .settings is not nuked entirely
tasks.register('wipeEclipseSettings', Delete) {
delete '.settings'
}
tasks.named('cleanEclipse') { dependsOn 'wipeEclipseSettings' }
// otherwise the eclipse merging is *super confusing*
tasks.named('eclipse') { dependsOn 'cleanEclipse', 'copyEclipseSettings' }
}
tasks.named("wrapper").configure {
distributionType = 'ALL'
doLast {
final DistributionLocator locator = new DistributionLocator()
final GradleVersion version = GradleVersion.version(wrapper.gradleVersion)
final URI distributionUri = locator.getDistributionFor(version, wrapper.distributionType.name().toLowerCase(Locale.ENGLISH))
final URI sha256Uri = new URI(distributionUri.toString() + ".sha256")
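// Gradle publishes a companion .sha256 file next to each distribution archive; it contains just the hex digest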
final String sha256Sum = new String(sha256Uri.toURL().bytes)
wrapper.getPropertiesFile() << "distributionSha256Sum=${sha256Sum}\n"
println "Added checksum to wrapper properties"
// Update build-tools to reflect the Gradle upgrade
// TODO: we can remove this once we have tests to make sure older versions work.
project(':build-tools').file('src/main/resources/minimumGradleVersion').text = gradleVersion
println "Updated minimum Gradle Version"
}
}
gradle.projectsEvaluated {
subprojects {
/*
* Remove assemble/dependenciesInfo on all qa projects because we don't
* need to publish artifacts for them.
*/
if (project.name.equals('qa') || project.path.contains(':qa:')) {
maybeConfigure(project.tasks, 'assemble') {
it.enabled = false
}
maybeConfigure(project.tasks, 'dependenciesInfo') {
it.enabled = false
}
maybeConfigure(project.tasks, 'dependenciesGraph') {
it.enabled = false
}
}
}
// Having the same group and name for distinct projects causes Gradle to consider them equal when resolving
// dependencies, leading to hard-to-debug failures. Run a check across all projects to prevent this from happening.
// see: https://github.com/gradle/gradle/issues/847
Map coordsToProject = [:]
project.allprojects.forEach { p ->
String coords = "${p.group}:${p.name}"
if (coordsToProject.putIfAbsent(coords, p) != null) {
throw new GradleException(
"Detected that two projects: ${p.path} and ${coordsToProject[coords].path} " +
"have the same name and group: ${coords}. " +
"This doesn't currently work correctly in Gradle, see: " +
"https://github.com/gradle/gradle/issues/847"
)
}
}
}
allprojects {
tasks.register('resolveAllDependencies', org.elasticsearch.gradle.internal.ResolveAllDependencies) {
configs = project.configurations
if (project.path.contains("fixture")) {
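// fixture projects also pre-pull their Docker Compose images, so that resolving "all dependencies"
// warms those up as well (presumed intent)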
dependsOn tasks.withType(ComposePull)
}
}
// helper task to print direct dependencies of a single task
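// e.g. `./gradlew :server:testDependencies` prints the direct task dependencies of `:server:test`
// (the project/task pairing here is just an illustration)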
project.tasks.addRule("Pattern: <taskName>Dependencies") { String taskName ->
if (taskName.endsWith("Dependencies") == false) {
return
}
if (project.tasks.findByName(taskName) != null) {
return
}
String realTaskName = taskName.substring(0, taskName.length() - "Dependencies".length())
Task realTask = project.tasks.findByName(realTaskName)
if (realTask == null) {
return
}
project.tasks.register(taskName) {
doLast {
println("${realTask.path} dependencies:")
for (Task dep : realTask.getTaskDependencies().getDependencies(realTask)) {
println(" - ${dep.path}")
}
}
}
}
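// Split `check` across two umbrella tasks so they can run as separate CI checks:
// x-pack projects feed checkPart2, everything else feeds checkPart1.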
def checkPart1 = tasks.register('checkPart1')
def checkPart2 = tasks.register('checkPart2')
plugins.withId('lifecycle-base') {
if (project.path.startsWith(":x-pack:")) {
checkPart2.configure { dependsOn 'check' }
} else {
checkPart1.configure { dependsOn 'check' }
}
}
}
subprojects {
project.ext.disableTasks = { String... tasknames ->
for (String taskname : tasknames) {
project.tasks.named(taskname).configure { enabled = false }
}
}
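// Usage sketch (task names hypothetical): a subproject can call
//   disableTasks('someTask', 'anotherTask')
// to switch those tasks off wholesale.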
}
subprojects { Project subproj ->
plugins.withType(TestClustersPlugin).whenPluginAdded {
testClusters.all {
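// default the GeoIP downloader to off so test clusters don't reach out to the network during tests (presumed intent)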
systemProperty "ingest.geoip.downloader.enabled.default", "false"
}
}
}