Convert repository-hdfs to testclusters #41252

Merged 2 commits on Apr 19, 2019

6 changes: 1 addition & 5 deletions plugins/build.gradle
@@ -20,11 +20,7 @@
// only configure immediate children of plugins dir
configure(subprojects.findAll { it.parent.path == project.path }) {
group = 'org.elasticsearch.plugin'
// TODO exclude some plugins as they require features not yet supproted by testclusters
Contributor: Yay!

if (false == name in ['repository-hdfs']) {
apply plugin: 'elasticsearch.testclusters'
}

apply plugin: 'elasticsearch.testclusters'
apply plugin: 'elasticsearch.esplugin'

esplugin {
122 changes: 50 additions & 72 deletions plugins/repository-hdfs/build.gradle
@@ -18,7 +18,6 @@
*/

import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.test.ClusterConfiguration
import org.elasticsearch.gradle.test.RestIntegTestTask

import java.nio.file.Files
@@ -64,15 +63,17 @@ dependencies {
compile "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}"

hdfsFixture project(':test:fixtures:hdfs-fixture')
// Set the keytab files in the classpath so that we can access them from test code without the security manager
// freaking out.
testRuntime fileTree(dir: project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent, include: ['*.keytab'])
}

dependencyLicenses {
mapping from: /hadoop-.*/, to: 'hadoop'
}


String realm = "BUILD.ELASTIC.CO"

String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")

// Create HDFS File System Testing Fixtures for HA/Secure combinations
for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', 'secureHaHdfsFixture']) {
@@ -91,9 +92,8 @@

// If it's a secure fixture, then depend on Kerberos Fixture and principals + add the krb5conf to the JVM options
if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("-Djava.security.krb5.conf=${project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")}");
miniHDFSArgs.add("-Djava.security.krb5.conf=${krb5conf}")
}

// If it's an HA fixture, set a nameservice to use in the JVM options
if (fixtureName.equals('haHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("-Dha-nameservice=ha-hdfs")
@@ -107,70 +107,44 @@
if (fixtureName.equals('secureHdfsFixture') || fixtureName.equals('secureHaHdfsFixture')) {
miniHDFSArgs.add("hdfs/hdfs.build.elastic.co@${realm}")
miniHDFSArgs.add(
project(':test:fixtures:krb5kdc-fixture')
.ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "hdfs_hdfs.build.elastic.co.keytab")
)
}

args miniHDFSArgs.toArray()
}
}

// The following closure must execute before the afterEvaluate block in the constructor of the following integrationTest tasks:
project.afterEvaluate {
for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
ClusterConfiguration cluster = project.extensions.getByName("${integTestTaskName}Cluster") as ClusterConfiguration
cluster.dependsOn(project.bundlePlugin)

Task restIntegTestTask = project.tasks.getByName(integTestTaskName)
restIntegTestTask.clusterConfig.plugin(project.path)

// Default jvm arguments for all test clusters
String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
" " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
" " + System.getProperty('tests.jvm.argline', '')

// If it's a secure cluster, add the keytab as an extra config, and set the krb5 conf in the JVM options.
if (integTestTaskName.equals('integTestSecure') || integTestTaskName.equals('integTestSecureHa')) {
String krb5conf = project(':test:fixtures:krb5kdc-fixture').ext.krb5Conf("hdfs")
restIntegTestTask.clusterConfig.extraConfigFile(
"repository-hdfs/krb5.keytab",
"${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}"
)
jvmArgs = jvmArgs + " " + "-Djava.security.krb5.conf=${krb5conf}"

// If it's the HA + Secure tests then also set the Kerberos settings for the integration test JVM since we'll
// need to auth to HDFS to trigger namenode failovers.
if (integTestTaskName.equals('integTestSecureHa')) {
Task restIntegTestTaskRunner = project.tasks.getByName("${integTestTaskName}Runner")
restIntegTestTaskRunner.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
restIntegTestTaskRunner.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
restIntegTestTaskRunner.jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
restIntegTestTaskRunner.systemProperty (
for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
task "${integTestTaskName}"(type: RestIntegTestTask) {
description = "Runs rest tests against an elasticsearch cluster with HDFS."
dependsOn(project.bundlePlugin)
runner {
if (integTestTaskName.contains("Secure")) {
dependsOn secureHdfsFixture
systemProperty "test.krb5.principal.es", "elasticsearch@${realm}"
systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}"
jvmArgs "-Djava.security.krb5.conf=${krb5conf}"
systemProperty (
"test.krb5.keytab.hdfs",
project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab")
)
}
}

restIntegTestTask.clusterConfig.jvmArgs = jvmArgs
}
}

// Create a Integration Test suite just for HA based tests
RestIntegTestTask integTestHa = project.tasks.create('integTestHa', RestIntegTestTask.class) {
description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode."
}

// Create a Integration Test suite just for security based tests
RestIntegTestTask integTestSecure = project.tasks.create('integTestSecure', RestIntegTestTask.class) {
description = "Runs rest tests against an elasticsearch cluster with HDFS secured by MIT Kerberos."
testClusters."${integTestTaskName}" {
Contributor: If the two "secure" clusters are identical, is there any need to declare both of them? Could we get away with two test clusters, secure and non-secure, and have the secure integration tests both use the same cluster?

Contributor (author): I don't think they end up being identical (one is HA and the other non-HA), but even if they are, I don't want to go that far in this PR.

Contributor:
Yeah, I guess my question is are they actually different if we don't configure them differently? Or at least it's not obvious to me that they are different. I'm ok with them being separate. Just thought it might be a slight optimization to not have another cluster we need to setup/teardown.
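
For illustration only, not part of this PR: a minimal sketch of the kind of deduplication the thread above is pointing at. It keeps the two secure clusters separate but factors their shared settings into a closure; `configureSecure` is an invented name, and the snippet reuses only the DSL calls already present in this hunk.

// Hypothetical sketch, assuming the same task names, krb5conf, and fixture paths used in this PR.
def configureSecure = { cluster ->
    // Apply the secure settings shown in this hunk to whichever cluster is passed in
    cluster.systemProperty "java.security.krb5.conf", krb5conf
    cluster.extraConfigFile(
        "repository-hdfs/krb5.keytab",
        file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}")
    )
}

for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) {
    testClusters."${integTestTaskName}" {
        plugin(file(bundlePlugin.archiveFile))
        if (integTestTaskName.contains("Secure")) {
            configureSecure(delegate) // delegate here is the cluster being configured
        }
    }
}
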

plugin(file(bundlePlugin.archiveFile))
if (integTestTaskName.contains("Secure")) {
systemProperty "java.security.krb5.conf", krb5conf
extraConfigFile(
"repository-hdfs/krb5.keytab",
file("${project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs", "elasticsearch.keytab")}")
)
}
}
}

// Create a Integration Test suite just for HA related security based tests
RestIntegTestTask integTestSecureHa = project.tasks.create('integTestSecureHa', RestIntegTestTask.class) {
description = "Runs rest tests against an elasticsearch cluster with HDFS configured with HA Namenode and secured by MIT Kerberos."
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
@@ -199,21 +173,27 @@ if (legalPath == false) {

// Always ignore HA integration tests in the normal integration test runner, they are included below as
// part of their own HA-specific integration test tasks.
integTestRunner.exclude('**/Ha*TestSuiteIT.class')
integTest.runner {
exclude('**/Ha*TestSuiteIT.class')
}

if (fixtureSupported) {
// Check depends on the HA test. Already depends on the standard test.
project.check.dependsOn(integTestHa)

// Both standard and HA tests depend on their respective HDFS fixtures
integTestCluster.dependsOn hdfsFixture
integTestHaCluster.dependsOn haHdfsFixture
integTest.dependsOn hdfsFixture
integTestHa.dependsOn haHdfsFixture

// The normal test runner only runs the standard hdfs rest tests
integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository'
integTest.runner {
systemProperty 'tests.rest.suite', 'hdfs_repository'
}

// Only include the HA integration tests for the HA test task
integTestHaRunner.setIncludes(['**/Ha*TestSuiteIT.class'])
integTestHa.runner {
setIncludes(['**/Ha*TestSuiteIT.class'])
}
} else {
if (legalPath) {
logger.warn("hdfsFixture unsupported, please set HADOOP_HOME and put HADOOP_HOME\\bin in PATH")
@@ -222,29 +202,27 @@ if (fixtureSupported) {
}

// The normal integration test runner will just test that the plugin loads
integTestRunner.systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
integTest.runner {
systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
}
// HA fixture is unsupported. Don't run them.
integTestHa.setEnabled(false)
}

check.dependsOn(integTestSecure, integTestSecureHa)

// Fixture dependencies
integTestSecureCluster.dependsOn secureHdfsFixture
integTestSecureHaCluster.dependsOn secureHaHdfsFixture

// Set the keytab files in the classpath so that we can access them from test code without the security manager
// freaking out.
project.dependencies {
testRuntime fileTree(dir: project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("hdfs","hdfs_hdfs.build.elastic.co.keytab").parent, include: ['*.keytab'])
}

// Run just the secure hdfs rest test suite.
integTestSecureRunner.systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
integTestSecure.runner {
systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
}
// Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner.
integTestSecureRunner.exclude('**/Ha*TestSuiteIT.class')
integTestSecure.runner {
exclude('**/Ha*TestSuiteIT.class')
}
// Only include the HA integration tests for the HA test task
integTestSecureHaRunner.setIncludes(['**/Ha*TestSuiteIT.class'])
integTestSecureHa.runner {
setIncludes(['**/Ha*TestSuiteIT.class'])
}

thirdPartyAudit {
ignoreMissingClasses()