diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
index 81457e6845ec3..54dbc809441ac 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
@@ -97,6 +97,11 @@
       <artifactId>junit-vintage-engine</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.mockito</groupId>
+      <artifactId>mockito-junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClock.java
index 70016bfe6d02d..1b3c91e7c82a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClock.java
@@ -18,21 +18,22 @@
*/
package org.apache.hadoop.mapred;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.Assert.*;
/**
- * test Clock class
- *
+ * test Clock class
*/
public class TestClock {
- @Test (timeout=10000)
+ @Test
+ @Timeout(value = 10)
public void testClock(){
- Clock clock= new Clock();
- long templateTime=System.currentTimeMillis();
- long time=clock.getTime();
- assertEquals(templateTime, time,30);
-
+ Clock clock = new Clock();
+ long templateTime = System.currentTimeMillis();
+ long time = clock.getTime();
+ assertEquals(templateTime, time, 30);
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClusterStatus.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClusterStatus.java
index 399e972d92145..6704d565b5ffc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClusterStatus.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestClusterStatus.java
@@ -17,25 +17,29 @@
*/
package org.apache.hadoop.mapred;
-import org.junit.Assert;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
-import org.junit.Test;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestClusterStatus {
private ClusterStatus clusterStatus = new ClusterStatus();
@SuppressWarnings("deprecation")
- @Test (timeout = 10000)
- public void testGraylistedTrackers() {
- Assert.assertEquals(0, clusterStatus.getGraylistedTrackers());
- Assert.assertTrue(clusterStatus.getGraylistedTrackerNames().isEmpty());
+ @Test
+ @Timeout(value = 10)
+ public void testGrayListedTrackers() {
+ assertEquals(0, clusterStatus.getGraylistedTrackers());
+ assertTrue(clusterStatus.getGraylistedTrackerNames().isEmpty());
}
@SuppressWarnings("deprecation")
- @Test (timeout = 10000)
+ @Test
+ @Timeout(value = 10)
public void testJobTrackerState() {
- Assert.assertEquals(JobTracker.State.RUNNING,
+ assertEquals(JobTracker.State.RUNNING,
clusterStatus.getJobTrackerState());
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java
index a3e4907962134..ea2d565f4f2f7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestCounters.java
@@ -17,9 +17,12 @@
*/
package org.apache.hadoop.mapred;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.text.ParseException;
@@ -27,8 +30,6 @@
import java.util.Iterator;
import java.util.Random;
-import org.junit.Assert;
-
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.Counters.CountersExceededException;
import org.apache.hadoop.mapred.Counters.Group;
@@ -38,7 +39,7 @@
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.counters.FrameworkCounterGroup;
import org.apache.hadoop.mapreduce.counters.CounterGroupFactory.FrameworkGroupFactory;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -85,14 +86,14 @@ private Counters getEnumCounters(String[] gNames, String[] cNames) {
*/
private void testCounter(Counters counter) throws ParseException {
String compactEscapedString = counter.makeEscapedCompactString();
- assertFalse("compactEscapedString should not contain null",
- compactEscapedString.contains("null"));
+ assertFalse(compactEscapedString.contains("null"),
+ "compactEscapedString should not contain null");
Counters recoveredCounter =
Counters.fromEscapedCompactString(compactEscapedString);
// Check for recovery from string
- assertEquals("Recovered counter does not match on content",
- counter, recoveredCounter);
+ assertEquals(counter, recoveredCounter,
+ "Recovered counter does not match on content");
}
@Test
@@ -134,19 +135,19 @@ public void testCounterValue() {
long expectedValue = initValue;
Counter counter = counters.findCounter("foo", "bar");
counter.setValue(initValue);
- assertEquals("Counter value is not initialized correctly",
- expectedValue, counter.getValue());
+ assertEquals(expectedValue, counter.getValue(),
+ "Counter value is not initialized correctly");
for (int j = 0; j < NUMBER_INC; j++) {
int incValue = rand.nextInt();
counter.increment(incValue);
expectedValue += incValue;
- assertEquals("Counter value is not incremented correctly",
- expectedValue, counter.getValue());
+ assertEquals(expectedValue, counter.getValue(),
+ "Counter value is not incremented correctly");
}
expectedValue = rand.nextInt();
counter.setValue(expectedValue);
- assertEquals("Counter value is not set correctly",
- expectedValue, counter.getValue());
+ assertEquals(expectedValue, counter.getValue(),
+ "Counter value is not set correctly");
}
}
@@ -174,29 +175,28 @@ public void testWriteWithLegacyNames() {
@SuppressWarnings("deprecation")
private void checkLegacyNames(Counters counters) {
- assertEquals("New name", 1, counters.findCounter(
- TaskCounter.class.getName(), "MAP_INPUT_RECORDS").getValue());
- assertEquals("Legacy name", 1, counters.findCounter(
+ assertEquals(1, counters.findCounter(
+ TaskCounter.class.getName(), "MAP_INPUT_RECORDS").getValue(), "New name");
+ assertEquals(1, counters.findCounter(
"org.apache.hadoop.mapred.Task$Counter",
- "MAP_INPUT_RECORDS").getValue());
- assertEquals("Legacy enum", 1,
- counters.findCounter(Task.Counter.MAP_INPUT_RECORDS).getValue());
+ "MAP_INPUT_RECORDS").getValue(), "Legacy name");
+ assertEquals(1, counters.findCounter(Task.Counter.MAP_INPUT_RECORDS).getValue(), "Legacy enum");
- assertEquals("New name", 1, counters.findCounter(
- JobCounter.class.getName(), "DATA_LOCAL_MAPS").getValue());
- assertEquals("Legacy name", 1, counters.findCounter(
+ assertEquals(1, counters.findCounter(
+ JobCounter.class.getName(), "DATA_LOCAL_MAPS").getValue(), "New name");
+ assertEquals(1, counters.findCounter(
"org.apache.hadoop.mapred.JobInProgress$Counter",
- "DATA_LOCAL_MAPS").getValue());
- assertEquals("Legacy enum", 1,
- counters.findCounter(JobInProgress.Counter.DATA_LOCAL_MAPS).getValue());
+ "DATA_LOCAL_MAPS").getValue(), "Legacy name");
+ assertEquals(1,
+ counters.findCounter(JobInProgress.Counter.DATA_LOCAL_MAPS).getValue(), "Legacy enum");
- assertEquals("New name", 1, counters.findCounter(
- FileSystemCounter.class.getName(), "FILE_BYTES_READ").getValue());
- assertEquals("New name and method", 1, counters.findCounter("file",
- FileSystemCounter.BYTES_READ).getValue());
- assertEquals("Legacy name", 1, counters.findCounter(
+ assertEquals(1, counters.findCounter(
+ FileSystemCounter.class.getName(), "FILE_BYTES_READ").getValue(), "New name");
+ assertEquals(1, counters.findCounter("file",
+ FileSystemCounter.BYTES_READ).getValue(), "New name and method");
+ assertEquals(1, counters.findCounter(
"FileSystemCounters",
- "FILE_BYTES_READ").getValue());
+ "FILE_BYTES_READ").getValue(), "Legacy name");
}
@SuppressWarnings("deprecation")
@@ -266,8 +266,8 @@ public void testMakeCompactString() {
assertEquals("group1.counter1:1", counters.makeCompactString());
counters.incrCounter("group2", "counter2", 3);
String cs = counters.makeCompactString();
- assertTrue("Bad compact string",
- cs.equals(GC1 + ',' + GC2) || cs.equals(GC2 + ',' + GC1));
+ assertTrue(cs.equals(GC1 + ',' + GC2) || cs.equals(GC2 + ',' + GC1),
+ "Bad compact string");
}
@Test
@@ -321,7 +321,7 @@ private void shouldThrow(Class<? extends Exception> ecls, Runnable runnable) {
} catch (CountersExceededException e) {
return;
}
- Assert.fail("Should've thrown " + ecls.getSimpleName());
+ fail("Should've thrown " + ecls.getSimpleName());
}
public static void main(String[] args) throws IOException {
@@ -341,12 +341,12 @@ public void testFrameworkCounter() {
org.apache.hadoop.mapreduce.Counter count1 =
counterGroup.findCounter(JobCounter.NUM_FAILED_MAPS.toString());
- Assert.assertNotNull(count1);
+ assertNotNull(count1);
// Verify no exception get thrown when finding an unknown counter
org.apache.hadoop.mapreduce.Counter count2 =
counterGroup.findCounter("Unknown");
- Assert.assertNull(count2);
+ assertNull(count2);
}
@SuppressWarnings("rawtypes")
@@ -363,19 +363,19 @@ public void testTaskCounter() {
org.apache.hadoop.mapreduce.Counter count1 =
counterGroup.findCounter(
TaskCounter.PHYSICAL_MEMORY_BYTES.toString());
- Assert.assertNotNull(count1);
+ assertNotNull(count1);
count1.increment(10);
count1.increment(10);
- Assert.assertEquals(20, count1.getValue());
+ assertEquals(20, count1.getValue());
// Verify no exception get thrown when finding an unknown counter
org.apache.hadoop.mapreduce.Counter count2 =
counterGroup.findCounter(
TaskCounter.MAP_PHYSICAL_MEMORY_BYTES_MAX.toString());
- Assert.assertNotNull(count2);
+ assertNotNull(count2);
count2.increment(5);
count2.increment(10);
- Assert.assertEquals(10, count2.getValue());
+ assertEquals(10, count2.getValue());
}
@Test
@@ -385,12 +385,12 @@ public void testFilesystemCounter() {
org.apache.hadoop.mapreduce.Counter count1 =
fsGroup.findCounter("ANY_BYTES_READ");
- Assert.assertNotNull(count1);
+ assertNotNull(count1);
// Verify no exception get thrown when finding an unknown counter
org.apache.hadoop.mapreduce.Counter count2 =
fsGroup.findCounter("Unknown");
- Assert.assertNull(count2);
+ assertNull(count2);
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java
index 3f3cb24a1a5aa..e69935b956113 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapred/TestFileInputFormat.java
@@ -34,17 +34,19 @@
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Lists;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@RunWith(value = Parameterized.class)
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+
public class TestFileInputFormat {
private static final Logger LOG =
@@ -56,19 +58,18 @@ public class TestFileInputFormat {
private static FileSystem localFs;
private int numThreads;
-
- public TestFileInputFormat(int numThreads) {
- this.numThreads = numThreads;
- LOG.info("Running with numThreads: " + numThreads);
+
+ public void initTestFileInputFormat(int pNumThreads) {
+ this.numThreads = pNumThreads;
+ LOG.info("Running with numThreads: " + pNumThreads);
}
-
- @Parameters
+
public static Collection