Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
multiple hdfs fixtures working
  • Loading branch information
masseyke committed Sep 13, 2021
commit 0a525d927fdec7403dbafee941c758fc2a214b76
41 changes: 27 additions & 14 deletions plugins/repository-hdfs/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -161,9 +161,9 @@ for (String fixtureName : ['hdfs' + hadoopVersion + 'Fixture', 'haHdfs' + hadoop
Set disabledIntegTestTaskNames = []

for (int hadoopVersion = minTestedHadopoVersion; hadoopVersion <= maxTestedHadopoVersion; hadoopVersion++) {
final int hadoopVer = hadoopVersion
for (String integTestTaskName : ['integTest' + hadoopVersion, 'integTestHa' + hadoopVersion, 'integTestSecure' + hadoopVersion,
'integTestSecureHa' + hadoopVersion]) {
final int hadoopVer = hadoopVersion
def testTask = tasks.register(integTestTaskName, RestIntegTestTask) {
description = "Runs rest tests against an elasticsearch cluster with HDFS" + hadoopVer
dependsOn("bundlePlugin")
Expand Down Expand Up @@ -231,17 +231,29 @@ for (int hadoopVersion = minTestedHadopoVersion; hadoopVersion <= maxTestedHadop
}
}
}

def processHadoopTestResources = tasks.register("processHadoop" + hadoopVer + "TestResources", Copy)
processHadoopTestResources.configure {
Map<String, Object> expansions = [
'hdfs_port': 10003 - (2 * hadoopVer),
'secure_hdfs_port': 10002 - (2 * hadoopVer),
]
inputs.properties(expansions)
filter("tokens" : expansions.collectEntries {k, v -> [k, v
.toString()]} /* must be a map of strings */, ReplaceTokens.class)
it.into("build/resources/test/rest-api-spec/test")
it.into("hdfs_repository_" + hadoopVer) {
from "src/test/resources/rest-api-spec/test/hdfs_repository"
}
it.into("secure_hdfs_repository_" + hadoopVer) {
from "src/test/resources/rest-api-spec/test/secure_hdfs_repository"
}
}
tasks.named("processTestResources").configure {
dependsOn (processHadoopTestResources)
}
}
// The following doesn't actually work b/c this configure block only runs once,
// not once per hadoop version, so the per-version token expansions can't be applied here
tasks.named("processTestResources").configure {
// Map<String, Object> expansions = [
// 'hdfs_port': 10003 - (2 * hadoopVer),
// 'secure_hdfs_port': 10002 - (2 * hadoopVer),
// ]
// inputs.properties(expansions)
// filter("tokens" : expansions.collectEntries {k, v -> [k, v
// .toString()]} /* must be a map of strings */, ReplaceTokens.class)
}

// Determine HDFS Fixture compatibility for the current build environment.
boolean fixtureSupported = false
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
Expand Down Expand Up @@ -287,7 +299,7 @@ for (int hadoopVersion = minTestedHadopoVersion; hadoopVersion <= maxTestedHadop
dependsOn "hdfs" + hadoopVer + "Fixture"

// The normal test runner only runs the standard hdfs rest tests
systemProperty 'tests.rest.suite', 'hdfs_repository'
systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer
}
tasks.named("integTestHa" + hadoopVer).configure {
dependsOn "haHdfs" + hadoopVer + "Fixture"
Expand All @@ -305,7 +317,7 @@ for (int hadoopVersion = minTestedHadopoVersion; hadoopVersion <= maxTestedHadop

// The normal integration test runner will just test that the plugin loads
tasks.named("integTest" + hadoopVer).configure {
systemProperty 'tests.rest.suite', 'hdfs_repository/10_basic'
systemProperty 'tests.rest.suite', 'hdfs_repository_' + hadoopVer + '/10_basic'
}
// HA fixture is unsupported. Don't run them.
tasks.named("integTestHa" + hadoopVer).configure {
Expand All @@ -319,7 +331,7 @@ for (int hadoopVersion = minTestedHadopoVersion; hadoopVersion <= maxTestedHadop

// Run just the secure hdfs rest test suite.
tasks.named("integTestSecure" + hadoopVer).configure {
systemProperty 'tests.rest.suite', 'secure_hdfs_repository'
systemProperty 'tests.rest.suite', 'secure_hdfs_repository_' + hadoopVer
// Ignore HA integration Tests. They are included below as part of integTestSecureHa test runner.
exclude('**/Ha*TestSuiteIT.class')
}
Expand All @@ -328,6 +340,7 @@ for (int hadoopVersion = minTestedHadopoVersion; hadoopVersion <= maxTestedHadop
setIncludes(['**/Ha*TestSuiteIT.class'])
}
}

tasks.named("thirdPartyAudit").configure {
ignoreMissingClasses()
ignoreViolations(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "test/repository_create"

# Get repository
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "foo/bar"

# Get repository
Expand Down Expand Up @@ -39,7 +39,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "foo/bar"

# Get repository again
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "test/repository_verify"

# Verify repository
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "test/snapshot"

# Create index
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "test/snapshot_get"

# Create index
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "/user/elasticsearch/existing/readonly-repository"
readonly: true

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%hdfs_port%"
uri: "hdfs://localhost:@hdfs_port@"
path: "test/restore"

# Create index
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/test/repository_create"
security:
principal: "[email protected]"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/foo/bar"
security:
principal: "[email protected]"
Expand Down Expand Up @@ -41,7 +41,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/foo/bar"
security:
principal: "[email protected]"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/test/repository_verify"
security:
principal: "[email protected]"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/test/snapshot"
security:
principal: "[email protected]"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/test/snapshot_get"
security:
principal: "[email protected]"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/existing/readonly-repository"
security:
principal: "[email protected]"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
body:
type: hdfs
settings:
uri: "hdfs://localhost:%secure_hdfs_port%"
uri: "hdfs://localhost:@secure_hdfs_port@"
path: "/user/elasticsearch/test/restore"
security:
principal: "[email protected]"
Expand Down