Remove obsolete sparkRunner task from hadoop-format: not triggered, no tests selected (addresses apache#23728)

Moritz Mack committed Oct 21, 2022
1 parent 3ab9507 commit ea566b7
Showing 1 changed file with 0 additions and 40 deletions.
sdks/java/io/hadoop-format/build.gradle
@@ -40,14 +40,6 @@ hadoopVersions.each {kv -> configurations.create("hadoopVersion$kv.key")}

def elastic_search_version = "7.12.0"

configurations.create("sparkRunner")
configurations.sparkRunner {
// Ban certain dependencies to prevent a StackOverflow within Spark
// because JUL -> SLF4J -> JUL, and similarly JDK14 -> SLF4J -> JDK14
exclude group: "org.slf4j", module: "jul-to-slf4j"
exclude group: "org.slf4j", module: "slf4j-jdk14"
}

// Ban dependencies from the test runtime classpath
configurations.testRuntimeClasspath {
  // Prevent a StackOverflow because of wiring LOG4J -> SLF4J -> LOG4J
@@ -115,15 +107,6 @@ dependencies {
  testRuntimeOnly library.java.slf4j_jdk14
  testRuntimeOnly project(path: ":runners:direct-java", configuration: "shadow")

delegate.add("sparkRunner", project(path: ":sdks:java:io:hadoop-format", configuration: "testRuntimeMigration"))

sparkRunner project(path: ":examples:java", configuration: "testRuntimeMigration")
sparkRunner project(path: ":examples:java:twitter", configuration: "testRuntimeMigration")
sparkRunner project(":runners:spark:2")
sparkRunner project(":sdks:java:io:hadoop-file-system")
sparkRunner library.java.spark_streaming
sparkRunner library.java.spark_core

  hadoopVersions.each {kv ->
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-common:$kv.value"
    "hadoopVersion$kv.key" "org.apache.hadoop:hadoop-mapreduce-client-core:$kv.value"
@@ -169,29 +152,6 @@ task createTargetDirectoryForCassandra() {
}
test.dependsOn createTargetDirectoryForCassandra

def runnerClass = "org.apache.beam.runners.spark.TestSparkRunner"
task sparkRunner(type: Test) {
  group = "Verification"
  def beamTestPipelineOptions = [
    "--project=hadoop-format",
    "--tempRoot=/tmp/hadoop-format/",
    "--streaming=false",
    "--runner=" + runnerClass,
    "--enableSparkMetricSinks=false",
  ]
  classpath = configurations.sparkRunner
  include "**/HadoopFormatIOSequenceFileTest.class"
  useJUnit {
    includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
  }
  forkEvery 1
  maxParallelForks 4
  systemProperty "spark.ui.enabled", "false"
  systemProperty "spark.ui.showConsoleProgress", "false"
  systemProperty "beam.spark.test.reuseSparkContext", "true"
  systemProperty "beamTestPipelineOptions", JsonOutput.toJson(beamTestPipelineOptions)
}

task hadoopVersionsTest(group: "Verification") {
description = "Runs Hadoop format tests with different Hadoop versions"
dependsOn createTaskNames(hadoopVersions, "Test")
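For context on the "no tests selected" note in the commit message: the removed task combined a class include pattern with a JUnit category filter (useJUnit { includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner' }), and Gradle's category filter only runs tests that actually carry that category annotation. The sketch below is a minimal, hypothetical JUnit 4 example (stand-in class name and marker interface, not Beam's real ValidatesRunner category) illustrating how such a filter can end up selecting zero tests.

// Minimal JUnit 4 sketch (hypothetical names; the nested marker interface stands in
// for org.apache.beam.sdk.testing.ValidatesRunner).
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertTrue;

public class CategoryFilterSketch {

  // Marker interface used as a JUnit 4 category.
  public interface ValidatesRunner {}

  // Selected by a Gradle task configured with
  // useJUnit { includeCategories 'CategoryFilterSketch$ValidatesRunner' }.
  @Category(ValidatesRunner.class)
  @Test
  public void selectedByCategoryFilter() {
    assertTrue(true);
  }

  // Silently skipped by the same task: no matching category annotation.
  @Test
  public void skippedByCategoryFilter() {
    assertTrue(true);
  }
}

If the only included test class carries no tests in the filtered category (which appears to be the situation the commit message describes), the task runs nothing at all, independent of whether it is ever triggered.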
