From 93e161c1ea010d8a1fbbcf67a03e4e926388ba2a Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Fri, 14 Oct 2022 04:56:36 +0100 Subject: [PATCH 1/8] MAPREDUCE-7419. Upgrade Junit 4 to 5 in hadoop-mapreduce-client-common --- .../hadoop-mapreduce-client-common/pom.xml | 15 + .../apache/hadoop/mapred/TestJobClient.java | 47 +-- .../hadoop/mapred/TestJobClientGetJob.java | 14 +- .../TestLocalDistributedCacheManager.java | 40 +-- .../mapred/TestLocalModeWithNewApis.java | 19 +- .../mapred/TestMRWithDistributedCache.java | 136 ++++---- .../hadoop/mapreduce/TestTypeConverter.java | 90 +++--- .../hadoop/mapreduce/v2/TestRPCFactories.java | 21 +- .../mapreduce/v2/TestRecordFactory.java | 25 +- .../mapreduce/v2/api/records/TestIds.java | 34 +- .../v2/jobhistory/TestFileNameIndexUtils.java | 171 +++++----- .../v2/jobhistory/TestJobHistoryUtils.java | 104 +++--- .../hadoop/mapreduce/v2/util/TestMRApps.java | 298 ++++++++++-------- 13 files changed, 520 insertions(+), 494 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml index d5d3b8fd171e9..38e7d2756d49e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml @@ -51,6 +51,21 @@ assertj-core test + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.platform + junit-platform-launcher + test + diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java index bf37b03b61f77..959010d56768e 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java @@ -22,28 +22,31 @@ import java.util.Collection; import org.apache.hadoop.conf.Configuration; + +import static org.junit.jupiter.api.Assertions.*; + import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo; import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; -import org.junit.After; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestJobClient { final static String TEST_DIR = new File("target", TestJobClient.class.getSimpleName()).getAbsolutePath(); - @After + @AfterEach public void tearDown() { FileUtil.fullyDelete(new File(TEST_DIR)); } @Test - public void testGetClusterStatusWithLocalJobRunner() throws Exception { + void testGetClusterStatusWithLocalJobRunner() throws Exception { Configuration conf = new Configuration(); conf.set(JTConfig.JT_IPC_ADDRESS, MRConfig.LOCAL_FRAMEWORK_NAME); conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME); @@ -51,48 +54,48 @@ public void testGetClusterStatusWithLocalJobRunner() throws Exception { ClusterStatus clusterStatus = client.getClusterStatus(true); Collection activeTrackerNames = clusterStatus .getActiveTrackerNames(); - Assert.assertEquals(0, activeTrackerNames.size()); + assertEquals(0, activeTrackerNames.size()); int blacklistedTrackers = clusterStatus.getBlacklistedTrackers(); - Assert.assertEquals(0, blacklistedTrackers); + assertEquals(0, blacklistedTrackers); Collection blackListedTrackersInfo = clusterStatus 
.getBlackListedTrackersInfo(); - Assert.assertEquals(0, blackListedTrackersInfo.size()); + assertEquals(0, blackListedTrackersInfo.size()); } - @Test(timeout = 10000) - public void testIsJobDirValid() throws IOException { + @Test + @Timeout(10000) + void testIsJobDirValid() throws IOException { Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); Path testDir = new Path(TEST_DIR); fs.mkdirs(testDir); - Assert.assertFalse(JobClient.isJobDirValid(testDir, fs)); + assertFalse(JobClient.isJobDirValid(testDir, fs)); Path jobconf = new Path(testDir, "job.xml"); Path jobsplit = new Path(testDir, "job.split"); fs.create(jobconf); fs.create(jobsplit); - Assert.assertTrue(JobClient.isJobDirValid(testDir, fs)); - + assertTrue(JobClient.isJobDirValid(testDir, fs)); + fs.delete(jobconf, true); fs.delete(jobsplit, true); } - - @Test(timeout = 10000) - public void testGetStagingAreaDir() throws IOException, InterruptedException { + + @Test + @Timeout(10000) + void testGetStagingAreaDir() throws IOException, InterruptedException { Configuration conf = new Configuration(); JobClient client = new JobClient(conf); - Assert.assertTrue( - "Mismatch in paths", - client.getClusterHandle().getStagingAreaDir().toString() - .equals(client.getStagingAreaDir().toString())); + assertEquals(client.getClusterHandle().getStagingAreaDir().toString(), client.getStagingAreaDir().toString()); } /** * Asks the compiler to check if JobClient is AutoClosable. 
*/ - @Test(timeout = 10000) - public void testAutoClosable() throws IOException { + @Test + @Timeout(10000) + void testAutoClosable() throws IOException { Configuration conf = new Configuration(); try (JobClient jobClient = new JobClient(conf)) { } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java index 9ae7b6183048c..db04a75236ceb 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java @@ -18,15 +18,15 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertNotNull; - import java.io.IOException; import org.apache.hadoop.conf.Configuration; + +import static org.junit.jupiter.api.Assertions.assertNotNull; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestJobClientGetJob { @@ -42,10 +42,10 @@ private Path createTempFile(String filename, String contents) os.close(); return path; } - + @SuppressWarnings("deprecation") @Test - public void testGetRunningJobFromJobClient() throws Exception { + void testGetRunningJobFromJobClient() throws Exception { JobConf conf = new JobConf(); conf.set("mapreduce.framework.name", "local"); FileInputFormat.addInputPath(conf, createTempFile("in", "hello")); @@ -54,10 +54,10 @@ public void testGetRunningJobFromJobClient() throws Exception { FileOutputFormat.setOutputPath(conf, outputDir); JobClient jc = new JobClient(conf); RunningJob runningJob = jc.submitJob(conf); - 
assertNotNull("Running job", runningJob); + assertNotNull(runningJob, "Running job"); // Check that the running job can be retrieved by ID RunningJob newRunningJob = jc.getJob(runningJob.getID()); - assertNotNull("New running job", newRunningJob); + assertNotNull(newRunningJob, "New running job"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java index 50cc63094bd8c..36a4b830c1630 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -55,9 +55,9 @@ import org.apache.hadoop.mapreduce.MRConfig; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.util.functional.CallableRaisingIOE; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -109,7 +109,7 @@ private static void delete(File file) throws IOException { } } - @Before + @BeforeEach public void setup() throws Exception { mockfs = 
mock(FileSystem.class); localDir = new File(System.getProperty("test.build.dir", "target/test-dir"), @@ -118,7 +118,7 @@ public void setup() throws Exception { localDir.mkdirs(); } - @After + @AfterEach public void cleanup() throws Exception { delete(localDir); } @@ -143,7 +143,7 @@ public void seek(long position) {} } @Test - public void testDownload() throws Exception { + void testDownload() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); conf.setClass("fs.mock.impl", MockFileSystem.class, FileSystem.class); @@ -163,8 +163,8 @@ public void testDownload() throws Exception { when(mockfs.getFileStatus(any(Path.class))).thenAnswer(new Answer() { @Override public FileStatus answer(InvocationOnMock args) throws Throwable { - Path p = (Path)args.getArguments()[0]; - if("file.txt".equals(p.getName())) { + Path p = (Path) args.getArguments()[0]; + if ("file.txt".equals(p.getName())) { return createMockTestFileStatus(filePath); } else { throw notMocked(p); @@ -180,7 +180,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable { // anything else: FNFE when(mockfs.openFile(any(Path.class))).thenAnswer( (Answer) args -> { - Path src = (Path)args.getArguments()[0]; + Path src = (Path) args.getArguments()[0]; if ("file.txt".equals(src.getName())) { return new MockOpenFileBuilder(mockfs, src, () -> CompletableFuture.completedFuture(in)); @@ -214,7 +214,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable { * no files were probed for/opened. 
*/ @Test - public void testEmptyDownload() throws Exception { + void testEmptyDownload() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); conf.setClass("fs.mock.impl", MockFileSystem.class, FileSystem.class); @@ -228,15 +228,15 @@ public void testEmptyDownload() throws Exception { when(mockfs.getFileStatus(any(Path.class))).thenAnswer( (Answer) args -> { - Path p = (Path)args.getArguments()[0]; + Path p = (Path) args.getArguments()[0]; throw notMocked(p); }); when(mockfs.getConf()).thenReturn(conf); when(mockfs.openFile(any(Path.class))).thenAnswer( (Answer) args -> { - Path src = (Path)args.getArguments()[0]; - throw notMocked(src); + Path src = (Path) args.getArguments()[0]; + throw notMocked(src); }); conf.set(MRJobConfig.CACHE_FILES, ""); conf.set(MRConfig.LOCAL_DIR, localDir.getAbsolutePath()); @@ -253,7 +253,7 @@ public void testEmptyDownload() throws Exception { * The same file can be added to the cache twice. */ @Test - public void testDuplicateDownload() throws Exception { + void testDuplicateDownload() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); conf.setClass("fs.mock.impl", MockFileSystem.class, FileSystem.class); @@ -272,8 +272,8 @@ public void testDuplicateDownload() throws Exception { when(mockfs.getFileStatus(any(Path.class))).thenAnswer(new Answer() { @Override public FileStatus answer(InvocationOnMock args) throws Throwable { - Path p = (Path)args.getArguments()[0]; - if("file.txt".equals(p.getName())) { + Path p = (Path) args.getArguments()[0]; + if ("file.txt".equals(p.getName())) { return createMockTestFileStatus(filePath); } else { throw notMocked(p); @@ -286,7 +286,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable { new FSDataInputStream(new MockInputStream(TEST_DATA)); when(mockfs.openFile(any(Path.class))).thenAnswer( (Answer) args -> { - Path src = (Path)args.getArguments()[0]; + Path src = (Path) args.getArguments()[0]; if ("file.txt".equals(src.getName())) { 
return new MockOpenFileBuilder(mockfs, src, () -> CompletableFuture.completedFuture(in)); @@ -323,7 +323,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable { * validity of one approach over the other. */ @Test - public void testMultipleCacheSetup() throws Exception { + void testMultipleCacheSetup() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); LocalDistributedCacheManager manager = new LocalDistributedCacheManager(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java index 556f7fd716aa7..b35df051364c3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java @@ -17,7 +17,8 @@ */ package org.apache.hadoop.mapred; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.BufferedReader; import java.io.DataOutputStream; @@ -38,9 +39,9 @@ import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,18 +52,18 @@ public class TestLocalModeWithNewApis { Configuration conf; - @Before + @BeforeEach public void setUp() throws Exception { conf = 
new Configuration(); conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME); } - @After + @AfterEach public void tearDown() throws Exception { } @Test - public void testNewApis() throws Exception { + void testNewApis() throws Exception { Random r = new Random(System.currentTimeMillis()); Path tmpBaseDir = new Path("/tmp/wc-" + r.nextInt()); final Path inDir = new Path(tmpBaseDir, "input"); @@ -93,8 +94,8 @@ public void testNewApis() throws Exception { String output = readOutput(outDir, conf); assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" + - "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", output); - + "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", output); + outFs.delete(tmpBaseDir, true); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java index 62b8815e6f1fa..8180bf46fddcc 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java @@ -27,9 +27,10 @@ import java.util.jar.JarOutputStream; import java.util.zip.ZipEntry; -import org.junit.Assert; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; @@ -50,6 +51,7 @@ import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + /** * Tests the use of the * {@link 
org.apache.hadoop.mapreduce.filecache.DistributedCache} within the @@ -96,23 +98,23 @@ public void setup(TaskInputOutputContext context) FileSystem fs = LocalFileSystem.get(conf); // Check that 2 files and 2 archives are present - Assert.assertEquals(2, localFiles.length); - Assert.assertEquals(2, localArchives.length); - Assert.assertEquals(2, files.length); - Assert.assertEquals(2, archives.length); + assertEquals(2, localFiles.length); + assertEquals(2, localArchives.length); + assertEquals(2, files.length); + assertEquals(2, archives.length); // Check the file name - Assert.assertTrue(files[0].getPath().endsWith("distributed.first")); - Assert.assertTrue(files[1].getPath().endsWith("distributed.second.jar")); + assertTrue(files[0].getPath().endsWith("distributed.first")); + assertTrue(files[1].getPath().endsWith("distributed.second.jar")); // Check lengths of the files - Assert.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen()); - Assert.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1); + assertEquals(1, fs.getFileStatus(localFiles[0]).getLen()); + assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1); // Check extraction of the archive - Assert.assertTrue(fs.exists(new Path(localArchives[0], + assertTrue(fs.exists(new Path(localArchives[0], "distributed.jar.inside3"))); - Assert.assertTrue(fs.exists(new Path(localArchives[1], + assertTrue(fs.exists(new Path(localArchives[1], "distributed.jar.inside4"))); // Check the class loaders @@ -120,19 +122,20 @@ public void setup(TaskInputOutputContext context) ClassLoader cl = Thread.currentThread().getContextClassLoader(); // Both the file and the archive were added to classpath, so both // should be reachable via the class loader. 
- Assert.assertNotNull(cl.getResource("distributed.jar.inside2")); - Assert.assertNotNull(cl.getResource("distributed.jar.inside3")); - Assert.assertNull(cl.getResource("distributed.jar.inside4")); + assertNotNull(cl.getResource("distributed.jar.inside2")); + assertNotNull(cl.getResource("distributed.jar.inside3")); + assertNull(cl.getResource("distributed.jar.inside4")); // Check that the symlink for the renaming was created in the cwd; - Assert.assertTrue("symlink distributed.first.symlink doesn't exist", - symlinkFile.exists()); - Assert.assertEquals("symlink distributed.first.symlink length not 1", 1, - symlinkFile.length()); + assertTrue(symlinkFile.exists(), + "symlink distributed.first.symlink doesn't exist"); + assertEquals(1, + symlinkFile.length(), + "symlink distributed.first.symlink length not 1"); //This last one is a difference between MRv2 and MRv1 - Assert.assertTrue("second file should be symlinked too", - expectedAbsentSymlinkFile.exists()); + assertTrue(expectedAbsentSymlinkFile.exists(), + "second file should be symlinked too"); } } @@ -188,18 +191,18 @@ private void testWithConf(Configuration conf) throws IOException, /** Tests using the local job runner. */ @Test - public void testLocalJobRunner() throws Exception { + void testLocalJobRunner() throws Exception { symlinkFile.delete(); // ensure symlink is not present (e.g. 
if test is - // killed part way through) + // killed part way through) Configuration c = new Configuration(); c.set(JTConfig.JT_IPC_ADDRESS, "local"); c.set("fs.defaultFS", "file:///"); testWithConf(c); - - assertFalse("Symlink not removed by local job runner", - // Symlink target will have gone so can't use File.exists() - Arrays.asList(new File(".").list()).contains(symlinkFile.getName())); + + assertFalse(Arrays.asList(new File(".").list()).contains(symlinkFile.getName()), + // Symlink target will have gone so can't use File.exists() + "Symlink not removed by local job runner"); } private Path createTempFile(String filename, String contents) @@ -223,92 +226,93 @@ private Path makeJar(Path p, int index) throws FileNotFoundException, return p; } - @Test (timeout = 10000) - public void testDeprecatedFunctions() throws Exception { + @Test + @Timeout(10000) + void testDeprecatedFunctions() throws Exception { DistributedCache.addLocalArchives(conf, "Test Local Archives 1"); - Assert.assertEquals("Test Local Archives 1", + assertEquals("Test Local Archives 1", conf.get(DistributedCache.CACHE_LOCALARCHIVES)); - Assert.assertEquals(1, + assertEquals(1, JobContextImpl.getLocalCacheArchives(conf).length); - Assert.assertEquals("Test Local Archives 1", + assertEquals("Test Local Archives 1", JobContextImpl.getLocalCacheArchives(conf)[0].getName()); DistributedCache.addLocalArchives(conf, "Test Local Archives 2"); - Assert.assertEquals("Test Local Archives 1,Test Local Archives 2", + assertEquals("Test Local Archives 1,Test Local Archives 2", conf.get(DistributedCache.CACHE_LOCALARCHIVES)); - Assert.assertEquals(2, + assertEquals(2, JobContextImpl.getLocalCacheArchives(conf).length); - Assert.assertEquals("Test Local Archives 2", + assertEquals("Test Local Archives 2", JobContextImpl.getLocalCacheArchives(conf)[1].getName()); DistributedCache.setLocalArchives(conf, "Test Local Archives 3"); - Assert.assertEquals("Test Local Archives 3", + assertEquals("Test Local Archives 
3", conf.get(DistributedCache.CACHE_LOCALARCHIVES)); - Assert.assertEquals(1, + assertEquals(1, JobContextImpl.getLocalCacheArchives(conf).length); - Assert.assertEquals("Test Local Archives 3", + assertEquals("Test Local Archives 3", JobContextImpl.getLocalCacheArchives(conf)[0].getName()); DistributedCache.addLocalFiles(conf, "Test Local Files 1"); - Assert.assertEquals("Test Local Files 1", + assertEquals("Test Local Files 1", conf.get(DistributedCache.CACHE_LOCALFILES)); - Assert.assertEquals(1, + assertEquals(1, JobContextImpl.getLocalCacheFiles(conf).length); - Assert.assertEquals("Test Local Files 1", + assertEquals("Test Local Files 1", JobContextImpl.getLocalCacheFiles(conf)[0].getName()); DistributedCache.addLocalFiles(conf, "Test Local Files 2"); - Assert.assertEquals("Test Local Files 1,Test Local Files 2", + assertEquals("Test Local Files 1,Test Local Files 2", conf.get(DistributedCache.CACHE_LOCALFILES)); - Assert.assertEquals(2, + assertEquals(2, JobContextImpl.getLocalCacheFiles(conf).length); - Assert.assertEquals("Test Local Files 2", + assertEquals("Test Local Files 2", JobContextImpl.getLocalCacheFiles(conf)[1].getName()); DistributedCache.setLocalFiles(conf, "Test Local Files 3"); - Assert.assertEquals("Test Local Files 3", + assertEquals("Test Local Files 3", conf.get(DistributedCache.CACHE_LOCALFILES)); - Assert.assertEquals(1, + assertEquals(1, JobContextImpl.getLocalCacheFiles(conf).length); - Assert.assertEquals("Test Local Files 3", + assertEquals("Test Local Files 3", JobContextImpl.getLocalCacheFiles(conf)[0].getName()); DistributedCache.setArchiveTimestamps(conf, "1234567890"); - Assert.assertEquals(1234567890, + assertEquals(1234567890, conf.getLong(DistributedCache.CACHE_ARCHIVES_TIMESTAMPS, 0)); - Assert.assertEquals(1, + assertEquals(1, JobContextImpl.getArchiveTimestamps(conf).length); - Assert.assertEquals(1234567890, + assertEquals(1234567890, JobContextImpl.getArchiveTimestamps(conf)[0]); 
DistributedCache.setFileTimestamps(conf, "1234567890"); - Assert.assertEquals(1234567890, + assertEquals(1234567890, conf.getLong(DistributedCache.CACHE_FILES_TIMESTAMPS, 0)); - Assert.assertEquals(1, + assertEquals(1, JobContextImpl.getFileTimestamps(conf).length); - Assert.assertEquals(1234567890, + assertEquals(1234567890, JobContextImpl.getFileTimestamps(conf)[0]); DistributedCache.createAllSymlink(conf, new File("Test Job Cache Dir"), new File("Test Work Dir")); - Assert.assertNull(conf.get(DistributedCache.CACHE_SYMLINK)); - Assert.assertTrue(DistributedCache.getSymlink(conf)); + assertNull(conf.get(DistributedCache.CACHE_SYMLINK)); + assertTrue(DistributedCache.getSymlink(conf)); - Assert.assertTrue(symlinkFile.createNewFile()); + assertTrue(symlinkFile.createNewFile()); FileStatus fileStatus = DistributedCache.getFileStatus(conf, symlinkFile.toURI()); - Assert.assertNotNull(fileStatus); - Assert.assertEquals(fileStatus.getModificationTime(), + assertNotNull(fileStatus); + assertEquals(fileStatus.getModificationTime(), DistributedCache.getTimestamp(conf, symlinkFile.toURI())); - Assert.assertTrue(symlinkFile.delete()); + assertTrue(symlinkFile.delete()); Job.addCacheArchive(symlinkFile.toURI(), conf); - Assert.assertEquals(symlinkFile.toURI().toString(), + assertEquals(symlinkFile.toURI().toString(), conf.get(DistributedCache.CACHE_ARCHIVES)); - Assert.assertEquals(1, JobContextImpl.getCacheArchives(conf).length); - Assert.assertEquals(symlinkFile.toURI(), + assertEquals(1, JobContextImpl.getCacheArchives(conf).length); + assertEquals(symlinkFile.toURI(), JobContextImpl.getCacheArchives(conf)[0]); Job.addCacheFile(symlinkFile.toURI(), conf); - Assert.assertEquals(symlinkFile.toURI().toString(), + assertEquals(symlinkFile.toURI().toString(), conf.get(DistributedCache.CACHE_FILES)); - Assert.assertEquals(1, JobContextImpl.getCacheFiles(conf).length); - Assert.assertEquals(symlinkFile.toURI(), + assertEquals(1, JobContextImpl.getCacheFiles(conf).length); + 
assertEquals(symlinkFile.toURI(), JobContextImpl.getCacheFiles(conf)[0]); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java index 737918473ee62..38870bda0a7c7 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java @@ -20,6 +20,7 @@ import org.apache.hadoop.util.StringUtils; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -42,39 +43,38 @@ import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.util.Records; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; public class TestTypeConverter { @Test - public void testEnums() throws Exception { + void testEnums() throws Exception { for (YarnApplicationState applicationState : YarnApplicationState.values()) { TypeConverter.fromYarn(applicationState, FinalApplicationStatus.FAILED); } // ad hoc test of NEW_SAVING, which is newly added - Assert.assertEquals(State.PREP, TypeConverter.fromYarn( + assertEquals(State.PREP, TypeConverter.fromYarn( YarnApplicationState.NEW_SAVING, FinalApplicationStatus.FAILED)); - + for (TaskType taskType : TaskType.values()) { TypeConverter.fromYarn(taskType); } - + for (JobState jobState : JobState.values()) { TypeConverter.fromYarn(jobState); } - + for (QueueState queueState : QueueState.values()) { 
TypeConverter.fromYarn(queueState); } - + for (TaskState taskState : TaskState.values()) { TypeConverter.fromYarn(taskState); } } - + @Test - public void testFromYarn() throws Exception { + void testFromYarn() throws Exception { int appStartTime = 612354; int appFinishTime = 612355; YarnApplicationState state = YarnApplicationState.RUNNING; @@ -98,14 +98,14 @@ public void testFromYarn() throws Exception { appUsageRpt.setUsedResources(r); applicationReport.setApplicationResourceUsageReport(appUsageRpt); JobStatus jobStatus = TypeConverter.fromYarn(applicationReport, "dummy-jobfile"); - Assert.assertEquals(appStartTime, jobStatus.getStartTime()); - Assert.assertEquals(appFinishTime, jobStatus.getFinishTime()); - Assert.assertEquals(state.toString(), jobStatus.getState().toString()); - Assert.assertEquals(JobPriority.NORMAL, jobStatus.getPriority()); + assertEquals(appStartTime, jobStatus.getStartTime()); + assertEquals(appFinishTime, jobStatus.getFinishTime()); + assertEquals(state.toString(), jobStatus.getState().toString()); + assertEquals(JobPriority.NORMAL, jobStatus.getPriority()); } @Test - public void testFromYarnApplicationReport() { + void testFromYarnApplicationReport() { ApplicationId mockAppId = mock(ApplicationId.class); when(mockAppId.getClusterTimestamp()).thenReturn(12345L); when(mockAppId.getId()).thenReturn(6789); @@ -122,7 +122,7 @@ public void testFromYarnApplicationReport() { try { JobStatus status = TypeConverter.fromYarn(mockReport, jobFile); } catch (NullPointerException npe) { - Assert.fail("Type converstion from YARN fails for jobs without " + + fail("Type converstion from YARN fails for jobs without " + "ApplicationUsageReport"); } @@ -137,32 +137,32 @@ public void testFromYarnApplicationReport() { appUsageRpt.setUsedResources(r); when(mockReport.getApplicationResourceUsageReport()).thenReturn(appUsageRpt); JobStatus status = TypeConverter.fromYarn(mockReport, jobFile); - Assert.assertNotNull("fromYarn returned null status", status); - 
Assert.assertEquals("jobFile set incorrectly", "dummy-path/job.xml", status.getJobFile()); - Assert.assertEquals("queue set incorrectly", "dummy-queue", status.getQueue()); - Assert.assertEquals("trackingUrl set incorrectly", "dummy-tracking-url", status.getTrackingUrl()); - Assert.assertEquals("user set incorrectly", "dummy-user", status.getUsername()); - Assert.assertEquals("schedulingInfo set incorrectly", "dummy-tracking-url", status.getSchedulingInfo()); - Assert.assertEquals("jobId set incorrectly", 6789, status.getJobID().getId()); - Assert.assertEquals("state set incorrectly", JobStatus.State.KILLED, status.getState()); - Assert.assertEquals("needed mem info set incorrectly", 2048, status.getNeededMem()); - Assert.assertEquals("num rsvd slots info set incorrectly", 1, status.getNumReservedSlots()); - Assert.assertEquals("num used slots info set incorrectly", 3, status.getNumUsedSlots()); - Assert.assertEquals("rsvd mem info set incorrectly", 2048, status.getReservedMem()); - Assert.assertEquals("used mem info set incorrectly", 2048, status.getUsedMem()); - Assert.assertEquals("priority set incorrectly", JobPriority.HIGH, status.getPriority()); + assertNotNull(status, "fromYarn returned null status"); + assertEquals("dummy-path/job.xml", status.getJobFile(), "jobFile set incorrectly"); + assertEquals("dummy-queue", status.getQueue(), "queue set incorrectly"); + assertEquals("dummy-tracking-url", status.getTrackingUrl(), "trackingUrl set incorrectly"); + assertEquals("dummy-user", status.getUsername(), "user set incorrectly"); + assertEquals("dummy-tracking-url", status.getSchedulingInfo(), "schedulingInfo set incorrectly"); + assertEquals(6789, status.getJobID().getId(), "jobId set incorrectly"); + assertEquals(JobStatus.State.KILLED, status.getState(), "state set incorrectly"); + assertEquals(2048, status.getNeededMem(), "needed mem info set incorrectly"); + assertEquals(1, status.getNumReservedSlots(), "num rsvd slots info set incorrectly"); + 
assertEquals(3, status.getNumUsedSlots(), "num used slots info set incorrectly"); + assertEquals(2048, status.getReservedMem(), "rsvd mem info set incorrectly"); + assertEquals(2048, status.getUsedMem(), "used mem info set incorrectly"); + assertEquals(JobPriority.HIGH, status.getPriority(), "priority set incorrectly"); } @Test - public void testFromYarnQueueInfo() { + void testFromYarnQueueInfo() { org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = Records .newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class); queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED); org.apache.hadoop.mapreduce.QueueInfo returned = TypeConverter.fromYarn(queueInfo, new Configuration()); - Assert.assertEquals("queueInfo translation didn't work.", - returned.getState().toString(), - StringUtils.toLowerCase(queueInfo.getQueueState().toString())); + assertEquals(returned.getState().toString(), + StringUtils.toLowerCase(queueInfo.getQueueState().toString()), + "queueInfo translation didn't work."); } /** @@ -170,24 +170,24 @@ public void testFromYarnQueueInfo() { * queue */ @Test - public void testFromYarnQueue() { + void testFromYarnQueue() { //Define child queue org.apache.hadoop.yarn.api.records.QueueInfo child = - Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); + Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); Mockito.when(child.getQueueState()).thenReturn(QueueState.RUNNING); //Define parent queue org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = - Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); + Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); List children = - new ArrayList(); + new ArrayList(); children.add(child); //Add one child Mockito.when(queueInfo.getChildQueues()).thenReturn(children); Mockito.when(queueInfo.getQueueState()).thenReturn(QueueState.RUNNING); //Call the function we're testing org.apache.hadoop.mapreduce.QueueInfo returned = - 
TypeConverter.fromYarn(queueInfo, new Configuration()); + TypeConverter.fromYarn(queueInfo, new Configuration()); //Verify that the converted queue has the 1 child we had added assertThat(returned.getQueueChildren().size()) @@ -196,7 +196,7 @@ public void testFromYarnQueue() { } @Test - public void testFromYarnJobReport() throws Exception { + void testFromYarnJobReport() throws Exception { int jobStartTime = 612354; int jobFinishTime = 612355; JobState state = JobState.RUNNING; @@ -204,7 +204,7 @@ public void testFromYarnJobReport() throws Exception { JobReport jobReport = Records.newRecord(JobReport.class); ApplicationId applicationId = ApplicationId.newInstance(0, 0); jobId.setAppId(applicationId); - jobId.setId(0); + jobId.setId(0); jobReport.setJobId(jobId); jobReport.setJobState(state); jobReport.setStartTime(jobStartTime); @@ -212,9 +212,9 @@ public void testFromYarnJobReport() throws Exception { jobReport.setUser("TestTypeConverter-user"); jobReport.setJobPriority(Priority.newInstance(0)); JobStatus jobStatus = TypeConverter.fromYarn(jobReport, "dummy-jobfile"); - Assert.assertEquals(jobStartTime, jobStatus.getStartTime()); - Assert.assertEquals(jobFinishTime, jobStatus.getFinishTime()); - Assert.assertEquals(state.toString(), jobStatus.getState().toString()); - Assert.assertEquals(JobPriority.DEFAULT, jobStatus.getPriority()); + assertEquals(jobStartTime, jobStatus.getStartTime()); + assertEquals(jobFinishTime, jobStatus.getFinishTime()); + assertEquals(state.toString(), jobStatus.getState().toString()); + assertEquals(JobPriority.DEFAULT, jobStatus.getPriority()); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java index 200f7ac255abd..8245ede921e6e 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java @@ -22,9 +22,9 @@ import java.io.IOException; import java.net.InetSocketAddress; -import org.junit.Assert; - import org.apache.hadoop.conf.Configuration; + +import static org.junit.jupiter.api.Assertions.fail; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol; import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest; @@ -59,16 +59,15 @@ import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl; import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestRPCFactories { - - - + + @Test - public void test() { + void test() { testPbServerFactory(); - + testPbClientFactory(); } @@ -86,7 +85,7 @@ private void testPbServerFactory() { server.start(); } catch (YarnRuntimeException e) { e.printStackTrace(); - Assert.fail("Failed to crete server"); + fail("Failed to create server"); } finally { server.stop(); } @@ -112,12 +111,12 @@ private void testPbClientFactory() { client = (MRClientProtocol) RpcClientFactoryPBImpl.get().getClient(MRClientProtocol.class, 1, NetUtils.getConnectAddress(server), conf); } catch (YarnRuntimeException e) { e.printStackTrace(); - Assert.fail("Failed to crete client"); + fail("Failed to create client"); } } catch (YarnRuntimeException e) { e.printStackTrace(); - Assert.fail("Failed to crete server"); + fail("Failed to create server"); } finally { server.stop(); } diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java index 2e793220f0fa0..3c0ef96cc244a 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java @@ -18,37 +18,40 @@ package org.apache.hadoop.mapreduce.v2; -import org.junit.Assert; - import org.apache.hadoop.yarn.exceptions.YarnRuntimeException; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl; + import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl; import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup; import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.CounterGroupPBImpl; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestRecordFactory { - + @Test - public void testPbRecordFactory() { + void testPbRecordFactory() { RecordFactory pbRecordFactory = RecordFactoryPBImpl.get(); - + try { CounterGroup response = pbRecordFactory.newRecordInstance(CounterGroup.class); - Assert.assertEquals(CounterGroupPBImpl.class, response.getClass()); + assertEquals(CounterGroupPBImpl.class, response.getClass()); } catch (YarnRuntimeException e) { e.printStackTrace(); - Assert.fail("Failed to crete record"); + fail("Failed to create record"); } - + try { GetCountersRequest response = 
pbRecordFactory.newRecordInstance(GetCountersRequest.class); - Assert.assertEquals(GetCountersRequestPBImpl.class, response.getClass()); + assertEquals(GetCountersRequestPBImpl.class, response.getClass()); } catch (YarnRuntimeException e) { e.printStackTrace(); - Assert.fail("Failed to crete record"); + fail("Failed to create record"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java index b12925f297cdc..7ca43bb2ea181 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java @@ -19,18 +19,16 @@ package org.apache.hadoop.mapreduce.v2.api.records; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestIds { @Test - public void testJobId() { + void testJobId() { long ts1 = 1315890136000l; long ts2 = 1315890136001l; JobId j1 = createJobId(ts1, 2); @@ -38,9 +36,9 @@ public void testJobId() { JobId j3 = createJobId(ts2, 1); JobId j4 = createJobId(ts1, 2); - assertTrue(j1.equals(j4)); - assertFalse(j1.equals(j2)); - assertFalse(j1.equals(j3)); + assertEquals(j1, j4); + assertNotEquals(j1, j2); + assertNotEquals(j1, j3); assertTrue(j1.compareTo(j4) == 0); assertTrue(j1.compareTo(j2) > 0); @@ -56,7 +54,7 @@ public void testJobId() { } 
@Test - public void testTaskId() { + void testTaskId() { long ts1 = 1315890136000l; long ts2 = 1315890136001l; TaskId t1 = createTaskId(ts1, 1, 2, TaskType.MAP); @@ -65,10 +63,10 @@ public void testTaskId() { TaskId t4 = createTaskId(ts1, 1, 2, TaskType.MAP); TaskId t5 = createTaskId(ts2, 1, 1, TaskType.MAP); - assertTrue(t1.equals(t4)); - assertFalse(t1.equals(t2)); - assertFalse(t1.equals(t3)); - assertFalse(t1.equals(t5)); + assertEquals(t1, t4); + assertNotEquals(t1, t2); + assertNotEquals(t1, t3); + assertNotEquals(t1, t5); assertTrue(t1.compareTo(t4) == 0); assertTrue(t1.compareTo(t2) < 0); @@ -86,7 +84,7 @@ public void testTaskId() { } @Test - public void testTaskAttemptId() { + void testTaskAttemptId() { long ts1 = 1315890136000l; long ts2 = 1315890136001l; TaskAttemptId t1 = createTaskAttemptId(ts1, 2, 2, TaskType.MAP, 2); @@ -96,10 +94,10 @@ public void testTaskAttemptId() { TaskAttemptId t5 = createTaskAttemptId(ts1, 2, 1, TaskType.MAP, 3); TaskAttemptId t6 = createTaskAttemptId(ts1, 2, 2, TaskType.MAP, 2); - assertTrue(t1.equals(t6)); - assertFalse(t1.equals(t2)); - assertFalse(t1.equals(t3)); - assertFalse(t1.equals(t5)); + assertEquals(t1, t6); + assertNotEquals(t1, t2); + assertNotEquals(t1, t3); + assertNotEquals(t1, t5); assertTrue(t1.compareTo(t6) == 0); assertTrue(t1.compareTo(t2) < 0); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java index f9322d86d1757..6204460e16601 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java @@ -20,13 +20,13 @@ import java.io.IOException; import static java.nio.charset.StandardCharsets.UTF_8; +import static org.junit.jupiter.api.Assertions.*; import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.TypeConverter; import org.apache.hadoop.mapreduce.v2.api.records.JobId; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestFileNameIndexUtils { @@ -87,7 +87,7 @@ public class TestFileNameIndexUtils { private static final String JOB_START_TIME = "1317928742060"; @Test - public void testEncodingDecodingEquivalence() throws IOException { + void testEncodingDecodingEquivalence() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -105,30 +105,20 @@ public void testEncodingDecodingEquivalence() throws IOException { String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info); JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("Job id different after encoding and decoding", - info.getJobId(), parsedInfo.getJobId()); - Assert.assertEquals("Submit time different after encoding and decoding", - info.getSubmitTime(), parsedInfo.getSubmitTime()); - Assert.assertEquals("User different after encoding and decoding", - info.getUser(), parsedInfo.getUser()); - Assert.assertEquals("Job name different after encoding and decoding", - info.getJobName(), parsedInfo.getJobName()); - Assert.assertEquals("Finish time different after encoding and decoding", - info.getFinishTime(), parsedInfo.getFinishTime()); - Assert.assertEquals("Num maps different after encoding and decoding", - info.getNumMaps(), parsedInfo.getNumMaps()); - Assert.assertEquals("Num reduces different after encoding and decoding", - 
info.getNumReduces(), parsedInfo.getNumReduces()); - Assert.assertEquals("Job status different after encoding and decoding", - info.getJobStatus(), parsedInfo.getJobStatus()); - Assert.assertEquals("Queue name different after encoding and decoding", - info.getQueueName(), parsedInfo.getQueueName()); - Assert.assertEquals("Job start time different after encoding and decoding", - info.getJobStartTime(), parsedInfo.getJobStartTime()); + assertEquals(info.getJobId(), parsedInfo.getJobId(), "Job id different after encoding and decoding"); + assertEquals(info.getSubmitTime(), parsedInfo.getSubmitTime(), "Submit time different after encoding and decoding"); + assertEquals(info.getUser(), parsedInfo.getUser(), "User different after encoding and decoding"); + assertEquals(info.getJobName(), parsedInfo.getJobName(), "Job name different after encoding and decoding"); + assertEquals(info.getFinishTime(), parsedInfo.getFinishTime(), "Finish time different after encoding and decoding"); + assertEquals(info.getNumMaps(), parsedInfo.getNumMaps(), "Num maps different after encoding and decoding"); + assertEquals(info.getNumReduces(), parsedInfo.getNumReduces(), "Num reduces different after encoding and decoding"); + assertEquals(info.getJobStatus(), parsedInfo.getJobStatus(), "Job status different after encoding and decoding"); + assertEquals(info.getQueueName(), parsedInfo.getQueueName(), "Queue name different after encoding and decoding"); + assertEquals(info.getJobStartTime(), parsedInfo.getJobStartTime(), "Job start time different after encoding and decoding"); } @Test - public void testUserNamePercentEncoding() throws IOException { + void testUserNamePercentEncoding() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -144,12 +134,12 @@ public void testUserNamePercentEncoding() throws IOException { info.setJobStartTime(Long.parseLong(JOB_START_TIME)); String jobHistoryFile = 
FileNameIndexUtils.getDoneFileName(info); - Assert.assertTrue("User name not encoded correctly into job history file", - jobHistoryFile.contains(USER_NAME_WITH_DELIMITER_ESCAPE)); + assertTrue(jobHistoryFile.contains(USER_NAME_WITH_DELIMITER_ESCAPE), + "User name not encoded correctly into job history file"); } @Test - public void testTrimJobName() throws IOException { + void testTrimJobName() throws IOException { int jobNameTrimLength = 5; JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); @@ -166,12 +156,12 @@ public void testTrimJobName() throws IOException { info.setJobStartTime(Long.parseLong(JOB_START_TIME)); String jobHistoryFile = - FileNameIndexUtils.getDoneFileName(info, jobNameTrimLength); + FileNameIndexUtils.getDoneFileName(info, jobNameTrimLength); JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("Job name did not get trimmed correctly", - info.getJobName().substring(0, jobNameTrimLength), - parsedInfo.getJobName()); + assertEquals(info.getJobName().substring(0, jobNameTrimLength), + parsedInfo.getJobName(), + "Job name did not get trimmed correctly"); } /** @@ -179,7 +169,7 @@ public void testTrimJobName() throws IOException { * even if there are some multibyte characters in the job name. 
*/ @Test - public void testJobNameWithMultibyteChars() throws IOException { + void testJobNameWithMultibyteChars() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -206,17 +196,17 @@ public void testJobNameWithMultibyteChars() throws IOException { String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 50); - Assert.assertTrue(jobHistoryFile.length() <= 255); + assertTrue(jobHistoryFile.length() <= 255); String trimedJobName = jobHistoryFile.split( FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name // 3 x 16 < 50 < 3 x 17 so the length of trimedJobName should be 48 - Assert.assertEquals(48, trimedJobName.getBytes(UTF_8).length); + assertEquals(48, trimedJobName.getBytes(UTF_8).length); // validate whether trimmedJobName by testing reversibility byte[] trimedJobNameInByte = trimedJobName.getBytes(UTF_8); String reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8); - Assert.assertArrayEquals(trimedJobNameInByte, + assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8)); sb.setLength(0); @@ -231,17 +221,17 @@ public void testJobNameWithMultibyteChars() throws IOException { jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 27); - Assert.assertTrue(jobHistoryFile.length() <= 255); + assertTrue(jobHistoryFile.length() <= 255); trimedJobName = jobHistoryFile.split( FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name // 6 x 4 < 27 < 6 x 5 so the length of trimedJobName should be 24 - Assert.assertEquals(24, trimedJobName.getBytes(UTF_8).length); + assertEquals(24, trimedJobName.getBytes(UTF_8).length); // validate whether trimmedJobName by testing reversibility trimedJobNameInByte = trimedJobName.getBytes(UTF_8); reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8); - Assert.assertArrayEquals(trimedJobNameInByte, + assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8)); 
sb.setLength(0); @@ -256,17 +246,17 @@ public void testJobNameWithMultibyteChars() throws IOException { jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 40); - Assert.assertTrue(jobHistoryFile.length() <= 255); + assertTrue(jobHistoryFile.length() <= 255); trimedJobName = jobHistoryFile.split( FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name // 9 x 4 < 40 < 9 x 5 so the length of trimedJobName should be 36 - Assert.assertEquals(36, trimedJobName.getBytes(UTF_8).length); + assertEquals(36, trimedJobName.getBytes(UTF_8).length); // validate whether trimmedJobName by testing reversibility trimedJobNameInByte = trimedJobName.getBytes(UTF_8); reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8); - Assert.assertArrayEquals(trimedJobNameInByte, + assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8)); sb.setLength(0); @@ -281,29 +271,29 @@ public void testJobNameWithMultibyteChars() throws IOException { jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 49); - Assert.assertTrue(jobHistoryFile.length() <= 255); + assertTrue(jobHistoryFile.length() <= 255); trimedJobName = jobHistoryFile.split( FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name // 12 x 4 < 49 < 12 x 5 so the length of trimedJobName should be 48 - Assert.assertEquals(48, trimedJobName.getBytes(UTF_8).length); + assertEquals(48, trimedJobName.getBytes(UTF_8).length); // validate whether trimmedJobName by testing reversibility trimedJobNameInByte = trimedJobName.getBytes(UTF_8); reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8); - Assert.assertArrayEquals(trimedJobNameInByte, + assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8)); sb.setLength(0); // Test for the combination of 1 to 4 bytes UTF-8 characters sb.append('\u732B') // cat in Kanji (encoded into 3 bytes x 3 characters) - .append("[") // (encoded into 1 byte x 3 characters) - .append('\u03BB') // small lambda (encoded into 2 bytes x 3 
characters) - .append('/') // (encoded into 1 byte x 3 characters) - .append('A') // not url-encoded (1 byte x 1 character) - .append("\ud867\ude49") // flying fish in - // Kanji (encoded into 4 bytes x 3 characters) - .append('\u72AC'); // dog in Kanji (encoded into 3 bytes x 3 characters) + .append("[") // (encoded into 1 byte x 3 characters) + .append('\u03BB') // small lambda (encoded into 2 bytes x 3 characters) + .append('/') // (encoded into 1 byte x 3 characters) + .append('A') // not url-encoded (1 byte x 1 character) + .append("\ud867\ude49") // flying fish in + // Kanji (encoded into 4 bytes x 3 characters) + .append('\u72AC'); // dog in Kanji (encoded into 3 bytes x 3 characters) longJobName = sb.toString(); info.setJobName(longJobName); @@ -311,23 +301,23 @@ public void testJobNameWithMultibyteChars() throws IOException { jobHistoryFile = FileNameIndexUtils.getDoneFileName(info, 23); - Assert.assertTrue(jobHistoryFile.length() <= 255); + assertTrue(jobHistoryFile.length() <= 255); trimedJobName = jobHistoryFile.split( FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name // total size of the first 5 characters = 22 // 23 < total size of the first 6 characters - Assert.assertEquals(22, trimedJobName.getBytes(UTF_8).length); + assertEquals(22, trimedJobName.getBytes(UTF_8).length); // validate whether trimmedJobName by testing reversibility trimedJobNameInByte = trimedJobName.getBytes(UTF_8); reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8); - Assert.assertArrayEquals(trimedJobNameInByte, + assertArrayEquals(trimedJobNameInByte, reEncodedTrimedJobName.getBytes(UTF_8)); } @Test - public void testUserNamePercentDecoding() throws IOException { + void testUserNamePercentDecoding() throws IOException { String jobHistoryFile = String.format(JOB_HISTORY_FILE_FORMATTER, JOB_ID, SUBMIT_TIME, @@ -341,12 +331,11 @@ public void testUserNamePercentDecoding() throws IOException { JOB_START_TIME); JobIndexInfo info = 
FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("User name doesn't match", - USER_NAME_WITH_DELIMITER, info.getUser()); + assertEquals(USER_NAME_WITH_DELIMITER, info.getUser(), "User name doesn't match"); } @Test - public void testJobNamePercentEncoding() throws IOException { + void testJobNamePercentEncoding() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -362,12 +351,12 @@ public void testJobNamePercentEncoding() throws IOException { info.setJobStartTime(Long.parseLong(JOB_START_TIME)); String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info); - Assert.assertTrue("Job name not encoded correctly into job history file", - jobHistoryFile.contains(JOB_NAME_WITH_DELIMITER_ESCAPE)); + assertTrue(jobHistoryFile.contains(JOB_NAME_WITH_DELIMITER_ESCAPE), + "Job name not encoded correctly into job history file"); } @Test - public void testJobNamePercentDecoding() throws IOException { + void testJobNamePercentDecoding() throws IOException { String jobHistoryFile = String.format(JOB_HISTORY_FILE_FORMATTER, JOB_ID, SUBMIT_TIME, @@ -378,15 +367,14 @@ public void testJobNamePercentDecoding() throws IOException { NUM_REDUCES, JOB_STATUS, QUEUE_NAME, - JOB_START_TIME ); + JOB_START_TIME); JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("Job name doesn't match", - JOB_NAME_WITH_DELIMITER, info.getJobName()); + assertEquals(JOB_NAME_WITH_DELIMITER, info.getJobName(), "Job name doesn't match"); } @Test - public void testQueueNamePercentEncoding() throws IOException { + void testQueueNamePercentEncoding() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -402,12 +390,12 @@ public void testQueueNamePercentEncoding() throws IOException { info.setJobStartTime(Long.parseLong(JOB_START_TIME)); String jobHistoryFile 
= FileNameIndexUtils.getDoneFileName(info); - Assert.assertTrue("Queue name not encoded correctly into job history file", - jobHistoryFile.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE)); + assertTrue(jobHistoryFile.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE), + "Queue name not encoded correctly into job history file"); } @Test - public void testQueueNamePercentDecoding() throws IOException { + void testQueueNamePercentDecoding() throws IOException { String jobHistoryFile = String.format(JOB_HISTORY_FILE_FORMATTER, JOB_ID, SUBMIT_TIME, @@ -418,15 +406,14 @@ public void testQueueNamePercentDecoding() throws IOException { NUM_REDUCES, JOB_STATUS, QUEUE_NAME_WITH_DELIMITER_ESCAPE, - JOB_START_TIME ); + JOB_START_TIME); JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("Queue name doesn't match", - QUEUE_NAME_WITH_DELIMITER, info.getQueueName()); + assertEquals(QUEUE_NAME_WITH_DELIMITER, info.getQueueName(), "Queue name doesn't match"); } @Test - public void testJobStartTimeBackwardsCompatible() throws IOException{ + void testJobStartTimeBackwardsCompatible() throws IOException { String jobHistoryFile = String.format(OLD_FORMAT_BEFORE_ADD_START_TIME, JOB_ID, SUBMIT_TIME, @@ -436,13 +423,13 @@ public void testJobStartTimeBackwardsCompatible() throws IOException{ NUM_MAPS, NUM_REDUCES, JOB_STATUS, - QUEUE_NAME ); + QUEUE_NAME); JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals(info.getJobStartTime(), info.getSubmitTime()); + assertEquals(info.getJobStartTime(), info.getSubmitTime()); } @Test - public void testJobHistoryFileNameBackwardsCompatible() throws IOException { + void testJobHistoryFileNameBackwardsCompatible() throws IOException { JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -462,28 +449,20 @@ public void testJobHistoryFileNameBackwardsCompatible() throws IOException { JOB_STATUS); JobIndexInfo info = 
FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("Job id incorrect after decoding old history file", - jobId, info.getJobId()); - Assert.assertEquals("Submit time incorrect after decoding old history file", - submitTime, info.getSubmitTime()); - Assert.assertEquals("User incorrect after decoding old history file", - USER_NAME, info.getUser()); - Assert.assertEquals("Job name incorrect after decoding old history file", - JOB_NAME, info.getJobName()); - Assert.assertEquals("Finish time incorrect after decoding old history file", - finishTime, info.getFinishTime()); - Assert.assertEquals("Num maps incorrect after decoding old history file", - numMaps, info.getNumMaps()); - Assert.assertEquals("Num reduces incorrect after decoding old history file", - numReduces, info.getNumReduces()); - Assert.assertEquals("Job status incorrect after decoding old history file", - JOB_STATUS, info.getJobStatus()); - Assert.assertNull("Queue name incorrect after decoding old history file", - info.getQueueName()); + assertEquals(jobId, info.getJobId(), "Job id incorrect after decoding old history file"); + assertEquals(submitTime, info.getSubmitTime(), "Submit time incorrect after decoding old history file"); + assertEquals(USER_NAME, info.getUser(), "User incorrect after decoding old history file"); + assertEquals(JOB_NAME, info.getJobName(), "Job name incorrect after decoding old history file"); + assertEquals(finishTime, info.getFinishTime(), "Finish time incorrect after decoding old history file"); + assertEquals(numMaps, info.getNumMaps(), "Num maps incorrect after decoding old history file"); + assertEquals(numReduces, info.getNumReduces(), "Num reduces incorrect after decoding old history file"); + assertEquals(JOB_STATUS, info.getJobStatus(), "Job status incorrect after decoding old history file"); + assertNull(info.getQueueName(), + "Queue name incorrect after decoding old history file"); } @Test - public void testTrimJobNameEqualsLimitLength() throws 
IOException { + void testTrimJobNameEqualsLimitLength() throws IOException { int jobNameTrimLength = 9; JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); @@ -503,7 +482,7 @@ public void testTrimJobNameEqualsLimitLength() throws IOException { jobNameTrimLength); JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - Assert.assertEquals("Job name did not get trimmed correctly", info - .getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName()); + assertEquals(info + .getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName(), "Job name did not get trimmed correctly"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java index 447d18aa5589c..f96c31523b379 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java @@ -32,10 +32,10 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils.getConfiguredHistoryIntermediateUserDoneDirPermissions; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestJobHistoryUtils { @@ -45,9 +45,9 @@ public class TestJobHistoryUtils { @Test @SuppressWarnings("unchecked") - public void testGetHistoryDirsForCleaning() throws IOException { + void 
testGetHistoryDirsForCleaning() throws IOException { Path pRoot = new Path(TEST_DIR, "org.apache.hadoop.mapreduce.v2.jobhistory." - + "TestJobHistoryUtils.testGetHistoryDirsForCleaning"); + + "TestJobHistoryUtils.testGetHistoryDirsForCleaning"); FileContext fc = FileContext.getFileContext(); Calendar cCal = Calendar.getInstance(); int year = 2013; @@ -58,68 +58,68 @@ public void testGetHistoryDirsForCleaning() throws IOException { clearDir(fc, pRoot); Path pId00 = createPath(fc, pRoot, year, month, day, "000000"); - Path pId01 = createPath(fc, pRoot, year, month, day+1, "000001"); - Path pId02 = createPath(fc, pRoot, year, month, day-1, "000002"); - Path pId03 = createPath(fc, pRoot, year, month+1, day, "000003"); - Path pId04 = createPath(fc, pRoot, year, month+1, day+1, "000004"); - Path pId05 = createPath(fc, pRoot, year, month+1, day-1, "000005"); - Path pId06 = createPath(fc, pRoot, year, month-1, day, "000006"); - Path pId07 = createPath(fc, pRoot, year, month-1, day+1, "000007"); - Path pId08 = createPath(fc, pRoot, year, month-1, day-1, "000008"); - Path pId09 = createPath(fc, pRoot, year+1, month, day, "000009"); - Path pId10 = createPath(fc, pRoot, year+1, month, day+1, "000010"); - Path pId11 = createPath(fc, pRoot, year+1, month, day-1, "000011"); - Path pId12 = createPath(fc, pRoot, year+1, month+1, day, "000012"); - Path pId13 = createPath(fc, pRoot, year+1, month+1, day+1, "000013"); - Path pId14 = createPath(fc, pRoot, year+1, month+1, day-1, "000014"); - Path pId15 = createPath(fc, pRoot, year+1, month-1, day, "000015"); - Path pId16 = createPath(fc, pRoot, year+1, month-1, day+1, "000016"); - Path pId17 = createPath(fc, pRoot, year+1, month-1, day-1, "000017"); - Path pId18 = createPath(fc, pRoot, year-1, month, day, "000018"); - Path pId19 = createPath(fc, pRoot, year-1, month, day+1, "000019"); - Path pId20 = createPath(fc, pRoot, year-1, month, day-1, "000020"); - Path pId21 = createPath(fc, pRoot, year-1, month+1, day, "000021"); - Path pId22 
= createPath(fc, pRoot, year-1, month+1, day+1, "000022"); - Path pId23 = createPath(fc, pRoot, year-1, month+1, day-1, "000023"); - Path pId24 = createPath(fc, pRoot, year-1, month-1, day, "000024"); - Path pId25 = createPath(fc, pRoot, year-1, month-1, day+1, "000025"); - Path pId26 = createPath(fc, pRoot, year-1, month-1, day-1, "000026"); + Path pId01 = createPath(fc, pRoot, year, month, day + 1, "000001"); + Path pId02 = createPath(fc, pRoot, year, month, day - 1, "000002"); + Path pId03 = createPath(fc, pRoot, year, month + 1, day, "000003"); + Path pId04 = createPath(fc, pRoot, year, month + 1, day + 1, "000004"); + Path pId05 = createPath(fc, pRoot, year, month + 1, day - 1, "000005"); + Path pId06 = createPath(fc, pRoot, year, month - 1, day, "000006"); + Path pId07 = createPath(fc, pRoot, year, month - 1, day + 1, "000007"); + Path pId08 = createPath(fc, pRoot, year, month - 1, day - 1, "000008"); + Path pId09 = createPath(fc, pRoot, year + 1, month, day, "000009"); + Path pId10 = createPath(fc, pRoot, year + 1, month, day + 1, "000010"); + Path pId11 = createPath(fc, pRoot, year + 1, month, day - 1, "000011"); + Path pId12 = createPath(fc, pRoot, year + 1, month + 1, day, "000012"); + Path pId13 = createPath(fc, pRoot, year + 1, month + 1, day + 1, "000013"); + Path pId14 = createPath(fc, pRoot, year + 1, month + 1, day - 1, "000014"); + Path pId15 = createPath(fc, pRoot, year + 1, month - 1, day, "000015"); + Path pId16 = createPath(fc, pRoot, year + 1, month - 1, day + 1, "000016"); + Path pId17 = createPath(fc, pRoot, year + 1, month - 1, day - 1, "000017"); + Path pId18 = createPath(fc, pRoot, year - 1, month, day, "000018"); + Path pId19 = createPath(fc, pRoot, year - 1, month, day + 1, "000019"); + Path pId20 = createPath(fc, pRoot, year - 1, month, day - 1, "000020"); + Path pId21 = createPath(fc, pRoot, year - 1, month + 1, day, "000021"); + Path pId22 = createPath(fc, pRoot, year - 1, month + 1, day + 1, "000022"); + Path pId23 = createPath(fc, 
pRoot, year - 1, month + 1, day - 1, "000023"); + Path pId24 = createPath(fc, pRoot, year - 1, month - 1, day, "000024"); + Path pId25 = createPath(fc, pRoot, year - 1, month - 1, day + 1, "000025"); + Path pId26 = createPath(fc, pRoot, year - 1, month - 1, day - 1, "000026"); // non-expected names should be ignored without problems Path pId27 = createPath(fc, pRoot, "foo", "" + month, "" + day, "000027"); Path pId28 = createPath(fc, pRoot, "" + year, "foo", "" + day, "000028"); Path pId29 = createPath(fc, pRoot, "" + year, "" + month, "foo", "000029"); List dirs = JobHistoryUtils - .getHistoryDirsForCleaning(fc, pRoot, cutoff); + .getHistoryDirsForCleaning(fc, pRoot, cutoff); Collections.sort(dirs); - Assert.assertEquals(14, dirs.size()); - Assert.assertEquals(pId26.toUri().getPath(), + assertEquals(14, dirs.size()); + assertEquals(pId26.toUri().getPath(), dirs.get(0).getPath().toUri().getPath()); - Assert.assertEquals(pId24.toUri().getPath(), + assertEquals(pId24.toUri().getPath(), dirs.get(1).getPath().toUri().getPath()); - Assert.assertEquals(pId25.toUri().getPath(), + assertEquals(pId25.toUri().getPath(), dirs.get(2).getPath().toUri().getPath()); - Assert.assertEquals(pId20.toUri().getPath(), + assertEquals(pId20.toUri().getPath(), dirs.get(3).getPath().toUri().getPath()); - Assert.assertEquals(pId18.toUri().getPath(), + assertEquals(pId18.toUri().getPath(), dirs.get(4).getPath().toUri().getPath()); - Assert.assertEquals(pId19.toUri().getPath(), + assertEquals(pId19.toUri().getPath(), dirs.get(5).getPath().toUri().getPath()); - Assert.assertEquals(pId23.toUri().getPath(), + assertEquals(pId23.toUri().getPath(), dirs.get(6).getPath().toUri().getPath()); - Assert.assertEquals(pId21.toUri().getPath(), + assertEquals(pId21.toUri().getPath(), dirs.get(7).getPath().toUri().getPath()); - Assert.assertEquals(pId22.toUri().getPath(), + assertEquals(pId22.toUri().getPath(), dirs.get(8).getPath().toUri().getPath()); - Assert.assertEquals(pId08.toUri().getPath(), + 
assertEquals(pId08.toUri().getPath(), dirs.get(9).getPath().toUri().getPath()); - Assert.assertEquals(pId06.toUri().getPath(), + assertEquals(pId06.toUri().getPath(), dirs.get(10).getPath().toUri().getPath()); - Assert.assertEquals(pId07.toUri().getPath(), + assertEquals(pId07.toUri().getPath(), dirs.get(11).getPath().toUri().getPath()); - Assert.assertEquals(pId02.toUri().getPath(), + assertEquals(pId02.toUri().getPath(), dirs.get(12).getPath().toUri().getPath()); - Assert.assertEquals(pId00.toUri().getPath(), + assertEquals(pId00.toUri().getPath(), dirs.get(13).getPath().toUri().getPath()); } @@ -149,18 +149,18 @@ private Path createPath(FileContext fc, Path root, String year, String month, } @Test - public void testGetConfiguredHistoryIntermediateUserDoneDirPermissions() { + void testGetConfiguredHistoryIntermediateUserDoneDirPermissions() { Configuration conf = new Configuration(); Map parameters = ImmutableMap.of( - "775", new FsPermission(0775), - "123", new FsPermission(0773), - "-rwx", new FsPermission(0770) , - "+rwx", new FsPermission(0777) + "775", new FsPermission(0775), + "123", new FsPermission(0773), + "-rwx", new FsPermission(0770), + "+rwx", new FsPermission(0777) ); for (Map.Entry entry : parameters.entrySet()) { conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_USER_DONE_DIR_PERMISSIONS, entry.getKey()); - Assert.assertEquals(entry.getValue(), + assertEquals(entry.getValue(), getConfiguredHistoryIntermediateUserDoneDirPermissions(conf)); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java index b4952ecc4ddcd..d096a8f95c272 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java @@ -18,12 +18,7 @@ package org.apache.hadoop.mapreduce.v2.util; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -58,14 +53,15 @@ import org.apache.hadoop.yarn.api.records.LocalResourceType; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestMRApps { private static File testWorkDir = null; - @BeforeClass + @BeforeAll public static void setupTestDirs() throws IOException { testWorkDir = new File("target", TestMRApps.class.getCanonicalName()); delete(testWorkDir); @@ -73,7 +69,7 @@ public static void setupTestDirs() throws IOException { testWorkDir = testWorkDir.getAbsoluteFile(); } - @AfterClass + @AfterAll public static void cleanupTestDirs() throws IOException { if (testWorkDir != null) { delete(testWorkDir); @@ -87,29 +83,35 @@ private static void delete(File dir) throws IOException { fs.delete(p, true); } - @Test (timeout = 120000) - public void testJobIDtoString() { + @Test + @Timeout(120000) + void testJobIDtoString() { JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class); jid.setAppId(ApplicationId.newInstance(0, 0)); assertEquals("job_0_0000", MRApps.toString(jid)); } - @Test (timeout = 120000) - public void 
testToJobID() { + @Test + @Timeout(120000) + void testToJobID() { JobId jid = MRApps.toJobID("job_1_1"); assertEquals(1, jid.getAppId().getClusterTimestamp()); assertEquals(1, jid.getAppId().getId()); assertEquals(1, jid.getId()); // tests against some proto.id and not a job.id field } - @Test (timeout = 120000, expected=IllegalArgumentException.class) - public void testJobIDShort() { - MRApps.toJobID("job_0_0_0"); + @Test + @Timeout(120000) + void testJobIDShort() { + assertThrows(IllegalArgumentException.class, () -> { + MRApps.toJobID("job_0_0_0"); + }); } //TODO_get.set - @Test (timeout = 120000) - public void testTaskIDtoString() { + @Test + @Timeout(120000) + void testTaskIDtoString() { TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class); tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class)); tid.getJobId().setAppId(ApplicationId.newInstance(0, 0)); @@ -124,8 +126,9 @@ public void testTaskIDtoString() { assertEquals("task_0_0000_r_000000", MRApps.toString(tid)); } - @Test (timeout = 120000) - public void testToTaskID() { + @Test + @Timeout(120000) + void testToTaskID() { TaskId tid = MRApps.toTaskID("task_1_2_r_3"); assertEquals(1, tid.getJobId().getAppId().getClusterTimestamp()); assertEquals(2, tid.getJobId().getAppId().getId()); @@ -137,19 +140,26 @@ public void testToTaskID() { assertEquals(TaskType.MAP, tid.getTaskType()); } - @Test(timeout = 120000, expected=IllegalArgumentException.class) - public void testTaskIDShort() { - MRApps.toTaskID("task_0_0000_m"); + @Test + @Timeout(120000) + void testTaskIDShort() { + assertThrows(IllegalArgumentException.class, () -> { + MRApps.toTaskID("task_0_0000_m"); + }); } - @Test(timeout = 120000, expected=IllegalArgumentException.class) - public void testTaskIDBadType() { - MRApps.toTaskID("task_0_0000_x_000000"); + @Test + @Timeout(120000) + void testTaskIDBadType() { + assertThrows(IllegalArgumentException.class, () -> { + 
MRApps.toTaskID("task_0_0000_x_000000"); + }); } //TODO_get.set - @Test (timeout = 120000) - public void testTaskAttemptIDtoString() { + @Test + @Timeout(120000) + void testTaskAttemptIDtoString() { TaskAttemptId taid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class); taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class)); taid.getTaskId().setTaskType(TaskType.MAP); @@ -158,8 +168,9 @@ public void testTaskAttemptIDtoString() { assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid)); } - @Test (timeout = 120000) - public void testToTaskAttemptID() { + @Test + @Timeout(120000) + void testToTaskAttemptID() { TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3"); assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp()); assertEquals(1, taid.getTaskId().getJobId().getAppId().getId()); @@ -168,32 +179,36 @@ public void testToTaskAttemptID() { assertEquals(3, taid.getId()); } - @Test(timeout = 120000, expected=IllegalArgumentException.class) - public void testTaskAttemptIDShort() { - MRApps.toTaskAttemptID("attempt_0_0_0_m_0"); + @Test + @Timeout(120000) + void testTaskAttemptIDShort() { + assertThrows(IllegalArgumentException.class, () -> { + MRApps.toTaskAttemptID("attempt_0_0_0_m_0"); + }); } - @Test (timeout = 120000) - public void testGetJobFileWithUser() { + @Test + @Timeout(120000) + void testGetJobFileWithUser() { Configuration conf = new Configuration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, "/my/path/to/staging"); - String jobFile = MRApps.getJobFile(conf, "dummy-user", + String jobFile = MRApps.getJobFile(conf, "dummy-user", new JobID("dummy-job", 12345)); - assertNotNull("getJobFile results in null.", jobFile); - assertEquals("jobFile with specified user is not as expected.", - "/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile); + assertNotNull(jobFile, "getJobFile results in null."); + 
assertEquals("/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile, "jobFile with specified user is not as expected."); } - @Test (timeout = 120000) - public void testSetClasspath() throws IOException { + @Test + @Timeout(120000) + void testSetClasspath() throws IOException { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); Job job = Job.getInstance(conf); Map environment = new HashMap(); MRApps.setClasspath(environment, job.getConfiguration()); assertTrue(environment.get("CLASSPATH").startsWith( - ApplicationConstants.Environment.PWD.$$() - + ApplicationConstants.CLASS_PATH_SEPARATOR)); + ApplicationConstants.Environment.PWD.$$() + + ApplicationConstants.CLASS_PATH_SEPARATOR)); String yarnAppClasspath = job.getConfiguration().get( YarnConfiguration.YARN_APPLICATION_CLASSPATH, StringUtils.join(",", @@ -201,23 +216,24 @@ public void testSetClasspath() throws IOException { if (yarnAppClasspath != null) { yarnAppClasspath = yarnAppClasspath.replaceAll(",\\s*", - ApplicationConstants.CLASS_PATH_SEPARATOR).trim(); + ApplicationConstants.CLASS_PATH_SEPARATOR).trim(); } assertTrue(environment.get("CLASSPATH").contains(yarnAppClasspath)); - String mrAppClasspath = + String mrAppClasspath = job.getConfiguration().get( MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, MRJobConfig.DEFAULT_MAPREDUCE_CROSS_PLATFORM_APPLICATION_CLASSPATH); if (mrAppClasspath != null) { mrAppClasspath = mrAppClasspath.replaceAll(",\\s*", - ApplicationConstants.CLASS_PATH_SEPARATOR).trim(); + ApplicationConstants.CLASS_PATH_SEPARATOR).trim(); } assertTrue(environment.get("CLASSPATH").contains(mrAppClasspath)); } - - @Test (timeout = 120000) - public void testSetClasspathWithArchives () throws IOException { + + @Test + @Timeout(120000) + void testSetClasspathWithArchives() throws IOException { File testTGZ = new File(testWorkDir, "test.tgz"); FileOutputStream out = new FileOutputStream(testTGZ); out.write(0); @@ 
-227,27 +243,28 @@ public void testSetClasspathWithArchives () throws IOException { Job job = Job.getInstance(conf); conf = job.getConfiguration(); String testTGZQualifiedPath = FileSystem.getLocal(conf).makeQualified(new Path( - testTGZ.getAbsolutePath())).toString(); + testTGZ.getAbsolutePath())).toString(); conf.set(MRJobConfig.CLASSPATH_ARCHIVES, testTGZQualifiedPath); conf.set(MRJobConfig.CACHE_ARCHIVES, testTGZQualifiedPath + "#testTGZ"); Map environment = new HashMap(); MRApps.setClasspath(environment, conf); assertTrue(environment.get("CLASSPATH").startsWith( - ApplicationConstants.Environment.PWD.$$() + ApplicationConstants.CLASS_PATH_SEPARATOR)); + ApplicationConstants.Environment.PWD.$$() + ApplicationConstants.CLASS_PATH_SEPARATOR)); String confClasspath = job.getConfiguration().get( YarnConfiguration.YARN_APPLICATION_CLASSPATH, StringUtils.join(",", YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH)); if (confClasspath != null) { confClasspath = confClasspath.replaceAll(",\\s*", ApplicationConstants.CLASS_PATH_SEPARATOR) - .trim(); + .trim(); } assertTrue(environment.get("CLASSPATH").contains(confClasspath)); assertTrue(environment.get("CLASSPATH").contains("testTGZ")); } - @Test (timeout = 120000) - public void testSetClasspathWithUserPrecendence() { + @Test + @Timeout(120000) + void testSetClasspathWithUserPrecendence() { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); @@ -260,14 +277,15 @@ public void testSetClasspathWithUserPrecendence() { String env_str = env.get("CLASSPATH"); String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*", - "job.jar/classes/", "job.jar/lib/*", - ApplicationConstants.Environment.PWD.$$() + "/*")); - assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking 
effect!", - env_str.startsWith(expectedClasspath)); + "job.jar/classes/", "job.jar/lib/*", + ApplicationConstants.Environment.PWD.$$() + "/*")); + assertTrue(env_str.startsWith(expectedClasspath), + "MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking effect!"); } - @Test (timeout = 120000) - public void testSetClasspathWithNoUserPrecendence() { + @Test + @Timeout(120000) + void testSetClasspathWithNoUserPrecendence() { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false); @@ -280,15 +298,16 @@ public void testSetClasspathWithNoUserPrecendence() { String env_str = env.get("CLASSPATH"); String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList("job.jar/*", "job.jar/classes/", "job.jar/lib/*", - ApplicationConstants.Environment.PWD.$$() + "/*")); - assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in" - + " the classpath!", env_str.contains(expectedClasspath)); - assertFalse("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!", - env_str.startsWith(expectedClasspath)); + ApplicationConstants.Environment.PWD.$$() + "/*")); + assertTrue(env_str.contains(expectedClasspath), "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in" + + " the classpath!"); + assertFalse(env_str.startsWith(expectedClasspath), + "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!"); } - - @Test (timeout = 120000) - public void testSetClasspathWithJobClassloader() throws IOException { + + @Test + @Timeout(120000) + void testSetClasspathWithJobClassloader() throws IOException { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true); @@ -296,20 +315,21 @@ public void testSetClasspathWithJobClassloader() throws IOException { MRApps.setClasspath(env, 
conf); String cp = env.get("CLASSPATH"); String appCp = env.get("APP_CLASSPATH"); - assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the" - + " classpath!", cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job")); - assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!", - cp.contains("PWD")); + assertFalse(cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"), "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the" + + " classpath!"); + assertFalse(cp.contains("PWD"), + "MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!"); String expectedAppClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*", - "job.jar/classes/", "job.jar/lib/*", - ApplicationConstants.Environment.PWD.$$() + "/*")); - assertEquals("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app" - + " classpath!", expectedAppClasspath, appCp); + "job.jar/classes/", "job.jar/lib/*", + ApplicationConstants.Environment.PWD.$$() + "/*")); + assertEquals(expectedAppClasspath, appCp, "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app" + + " classpath!"); } - @Test (timeout = 3000000) - public void testSetClasspathWithFramework() throws IOException { + @Test + @Timeout(3000000) + void testSetClasspathWithFramework() throws IOException { final String FRAMEWORK_NAME = "some-framework-name"; final String FRAMEWORK_PATH = "some-framework-path#" + FRAMEWORK_NAME; Configuration conf = new Configuration(); @@ -320,9 +340,9 @@ public void testSetClasspathWithFramework() throws IOException { MRApps.setClasspath(env, conf); fail("Failed to catch framework path set without classpath change"); } catch (IllegalArgumentException e) { - assertTrue("Unexpected IllegalArgumentException", - e.getMessage().contains("Could not locate MapReduce framework name '" - + FRAMEWORK_NAME + "'")); + assertTrue(e.getMessage().contains("Could not locate 
MapReduce framework name '" + + FRAMEWORK_NAME + "'"), + "Unexpected IllegalArgumentException"); } env.clear(); @@ -335,8 +355,7 @@ public void testSetClasspathWithFramework() throws IOException { String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList(ApplicationConstants.Environment.PWD.$$(), FRAMEWORK_CLASSPATH, stdClasspath)); - assertEquals("Incorrect classpath with framework and no user precedence", - expectedClasspath, env.get("CLASSPATH")); + assertEquals(expectedClasspath, env.get("CLASSPATH"), "Incorrect classpath with framework and no user precedence"); env.clear(); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); @@ -344,37 +363,38 @@ public void testSetClasspathWithFramework() throws IOException { expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList(ApplicationConstants.Environment.PWD.$$(), stdClasspath, FRAMEWORK_CLASSPATH)); - assertEquals("Incorrect classpath with framework and user precedence", - expectedClasspath, env.get("CLASSPATH")); + assertEquals(expectedClasspath, env.get("CLASSPATH"), "Incorrect classpath with framework and user precedence"); } - @Test (timeout = 30000) - public void testSetupDistributedCacheEmpty() throws IOException { + @Test + @Timeout(30000) + void testSetupDistributedCacheEmpty() throws IOException { Configuration conf = new Configuration(); Map localResources = new HashMap(); MRApps.setupDistributedCache(conf, localResources); - assertTrue("Empty Config did not produce an empty list of resources", - localResources.isEmpty()); + assertTrue(localResources.isEmpty(), + "Empty Config did not produce an empty list of resources"); } - + @SuppressWarnings("deprecation") - @Test(timeout = 120000) - public void testSetupDistributedCacheConflicts() throws Exception { + @Test + @Timeout(120000) + void testSetupDistributedCacheConflicts() throws Exception { Configuration conf = new Configuration(); 
conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); - + URI mockUri = URI.create("mockfs://mock/"); - FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf)) + FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf)) .getRawFileSystem(); - + URI archive = new URI("mockfs://mock/tmp/something.zip#something"); Path archivePath = new Path(archive); URI file = new URI("mockfs://mock/tmp/something.txt#something"); Path filePath = new Path(file); - + when(mockFs.resolvePath(archivePath)).thenReturn(archivePath); when(mockFs.resolvePath(filePath)).thenReturn(filePath); - + Job.addCacheArchive(archive, conf); conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10"); conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10"); @@ -383,8 +403,8 @@ public void testSetupDistributedCacheConflicts() throws Exception { conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11"); conf.set(MRJobConfig.CACHE_FILES_SIZES, "11"); conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true"); - Map localResources = - new HashMap(); + Map localResources = + new HashMap(); MRApps.setupDistributedCache(conf, localResources); assertEquals(1, localResources.size()); @@ -395,32 +415,33 @@ public void testSetupDistributedCacheConflicts() throws Exception { assertEquals(10l, lr.getTimestamp()); assertEquals(LocalResourceType.ARCHIVE, lr.getType()); } - + @SuppressWarnings("deprecation") - @Test(timeout = 120000) - public void testSetupDistributedCacheConflictsFiles() throws Exception { + @Test + @Timeout(120000) + void testSetupDistributedCacheConflictsFiles() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); - + URI mockUri = URI.create("mockfs://mock/"); - FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf)) + FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf)) .getRawFileSystem(); - + URI file = new URI("mockfs://mock/tmp/something.zip#something"); Path 
filePath = new Path(file); URI file2 = new URI("mockfs://mock/tmp/something.txt#something"); Path file2Path = new Path(file2); - + when(mockFs.resolvePath(filePath)).thenReturn(filePath); when(mockFs.resolvePath(file2Path)).thenReturn(file2Path); - + Job.addCacheFile(file, conf); Job.addCacheFile(file2, conf); conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "10,11"); conf.set(MRJobConfig.CACHE_FILES_SIZES, "10,11"); conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true,true"); - Map localResources = - new HashMap(); + Map localResources = + new HashMap(); MRApps.setupDistributedCache(conf, localResources); assertEquals(1, localResources.size()); @@ -431,25 +452,26 @@ public void testSetupDistributedCacheConflictsFiles() throws Exception { assertEquals(10l, lr.getTimestamp()); assertEquals(LocalResourceType.FILE, lr.getType()); } - + @SuppressWarnings("deprecation") - @Test (timeout = 30000) - public void testSetupDistributedCache() throws Exception { + @Test + @Timeout(30000) + void testSetupDistributedCache() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); - + URI mockUri = URI.create("mockfs://mock/"); - FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf)) + FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf)) .getRawFileSystem(); - + URI archive = new URI("mockfs://mock/tmp/something.zip"); Path archivePath = new Path(archive); URI file = new URI("mockfs://mock/tmp/something.txt#something"); Path filePath = new Path(file); - + when(mockFs.resolvePath(archivePath)).thenReturn(archivePath); when(mockFs.resolvePath(filePath)).thenReturn(filePath); - + Job.addCacheArchive(archive, conf); conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10"); conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10"); @@ -458,8 +480,8 @@ public void testSetupDistributedCache() throws Exception { conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11"); 
conf.set(MRJobConfig.CACHE_FILES_SIZES, "11"); conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true"); - Map localResources = - new HashMap(); + Map localResources = + new HashMap(); MRApps.setupDistributedCache(conf, localResources); assertEquals(2, localResources.size()); LocalResource lr = localResources.get("something.zip"); @@ -482,7 +504,7 @@ public void initialize(URI name, Configuration conf) throws IOException {} } @Test - public void testLogSystemProperties() throws Exception { + void testLogSystemProperties() throws Exception { Configuration conf = new Configuration(); // test no logging conf.set(MRJobConfig.MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG, " "); @@ -502,7 +524,7 @@ public void testLogSystemProperties() throws Exception { } @Test - public void testTaskStateUI() { + void testTaskStateUI() { assertTrue(MRApps.TaskStateUI.PENDING.correspondsTo(TaskState.SCHEDULED)); assertTrue(MRApps.TaskStateUI.COMPLETED.correspondsTo(TaskState.SUCCEEDED)); assertTrue(MRApps.TaskStateUI.COMPLETED.correspondsTo(TaskState.FAILED)); @@ -527,27 +549,29 @@ public void testTaskStateUI() { }; @Test - public void testSystemClasses() { + void testSystemClasses() { final List systemClasses = Arrays.asList(StringUtils.getTrimmedStrings( - ApplicationClassLoader.SYSTEM_CLASSES_DEFAULT)); + ApplicationClassLoader.SYSTEM_CLASSES_DEFAULT)); for (String defaultXml : DEFAULT_XMLS) { - assertTrue(defaultXml + " must be system resource", - ApplicationClassLoader.isSystemClass(defaultXml, systemClasses)); + assertTrue(ApplicationClassLoader.isSystemClass(defaultXml, systemClasses), + defaultXml + " must be system resource"); } for (String klass : SYS_CLASSES) { - assertTrue(klass + " must be system class", - ApplicationClassLoader.isSystemClass(klass, systemClasses)); + assertTrue(ApplicationClassLoader.isSystemClass(klass, systemClasses), + klass + " must be system class"); } - assertFalse("/fake/Klass must not be a system class", - ApplicationClassLoader.isSystemClass("/fake/Klass", 
systemClasses)); + assertFalse(ApplicationClassLoader.isSystemClass("/fake/Klass", systemClasses), + "/fake/Klass must not be a system class"); } - @Test(expected = IllegalArgumentException.class) - public void testInvalidWebappAddress() throws Exception { - Configuration conf = new Configuration(); - conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, "19888"); - MRWebAppUtil.getApplicationWebURLOnJHSWithScheme( - conf, ApplicationId.newInstance(0, 1)); + @Test + void testInvalidWebappAddress() throws Exception { + assertThrows(IllegalArgumentException.class, () -> { + Configuration conf = new Configuration(); + conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, "19888"); + MRWebAppUtil.getApplicationWebURLOnJHSWithScheme( + conf, ApplicationId.newInstance(0, 1)); + }); } } From fa10a07476dea1e0310bfc8754ca70575ee2f232 Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Sat, 15 Oct 2022 13:15:15 +0100 Subject: [PATCH 2/8] stylecheck fix --- .../apache/hadoop/mapred/TestJobClient.java | 3 +- .../hadoop/mapreduce/TestTypeConverter.java | 3 +- .../v2/jobhistory/TestFileNameIndexUtils.java | 49 ++++++++++------- .../hadoop/mapreduce/v2/util/TestMRApps.java | 52 +++++++++---------- 4 files changed, 61 insertions(+), 46 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java index 959010d56768e..be77354a99c42 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java @@ -87,7 +87,8 @@ void testGetStagingAreaDir() throws IOException, InterruptedException { Configuration conf = new Configuration(); 
JobClient client = new JobClient(conf); - assertEquals(client.getClusterHandle().getStagingAreaDir().toString(), client.getStagingAreaDir().toString()); + assertEquals(client.getClusterHandle().getStagingAreaDir().toString(), + client.getStagingAreaDir().toString()); } /** diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java index 38870bda0a7c7..088473d291242 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java @@ -142,7 +142,8 @@ void testFromYarnApplicationReport() { assertEquals("dummy-queue", status.getQueue(), "queue set incorrectly"); assertEquals("dummy-tracking-url", status.getTrackingUrl(), "trackingUrl set incorrectly"); assertEquals("dummy-user", status.getUsername(), "user set incorrectly"); - assertEquals("dummy-tracking-url", status.getSchedulingInfo(), "schedulingInfo set incorrectly"); + assertEquals("dummy-tracking-url", status.getSchedulingInfo(), + "schedulingInfo set incorrectly"); assertEquals(6789, status.getJobID().getId(), "jobId set incorrectly"); assertEquals(JobStatus.State.KILLED, status.getState(), "state set incorrectly"); assertEquals(2048, status.getNeededMem(), "needed mem info set incorrectly"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java 
index 6204460e16601..e64ce6b4fd0e0 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java @@ -105,16 +105,26 @@ void testEncodingDecodingEquivalence() throws IOException { String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info); JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - assertEquals(info.getJobId(), parsedInfo.getJobId(), "Job id different after encoding and decoding"); - assertEquals(info.getSubmitTime(), parsedInfo.getSubmitTime(), "Submit time different after encoding and decoding"); - assertEquals(info.getUser(), parsedInfo.getUser(), "User different after encoding and decoding"); - assertEquals(info.getJobName(), parsedInfo.getJobName(), "Job name different after encoding and decoding"); - assertEquals(info.getFinishTime(), parsedInfo.getFinishTime(), "Finish time different after encoding and decoding"); - assertEquals(info.getNumMaps(), parsedInfo.getNumMaps(), "Num maps different after encoding and decoding"); - assertEquals(info.getNumReduces(), parsedInfo.getNumReduces(), "Num reduces different after encoding and decoding"); - assertEquals(info.getJobStatus(), parsedInfo.getJobStatus(), "Job status different after encoding and decoding"); - assertEquals(info.getQueueName(), parsedInfo.getQueueName(), "Queue name different after encoding and decoding"); - assertEquals(info.getJobStartTime(), parsedInfo.getJobStartTime(), "Job start time different after encoding and decoding"); + assertEquals(info.getJobId(), parsedInfo.getJobId(), + "Job id different after encoding and decoding"); + assertEquals(info.getSubmitTime(), parsedInfo.getSubmitTime(), + "Submit time different after encoding and decoding"); + 
assertEquals(info.getUser(), parsedInfo.getUser(), + "User different after encoding and decoding"); + assertEquals(info.getJobName(), parsedInfo.getJobName(), + "Job name different after encoding and decoding"); + assertEquals(info.getFinishTime(), parsedInfo.getFinishTime(), + "Finish time different after encoding and decoding"); + assertEquals(info.getNumMaps(), parsedInfo.getNumMaps(), + "Num maps different after encoding and decoding"); + assertEquals(info.getNumReduces(), parsedInfo.getNumReduces(), + "Num reduces different after encoding and decoding"); + assertEquals(info.getJobStatus(), parsedInfo.getJobStatus(), + "Job status different after encoding and decoding"); + assertEquals(info.getQueueName(), parsedInfo.getQueueName(), + "Queue name different after encoding and decoding"); + assertEquals(info.getJobStartTime(), parsedInfo.getJobStartTime(), + "Job start time different after encoding and decoding"); } @Test @@ -450,15 +460,18 @@ void testJobHistoryFileNameBackwardsCompatible() throws IOException { JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile); assertEquals(jobId, info.getJobId(), "Job id incorrect after decoding old history file"); - assertEquals(submitTime, info.getSubmitTime(), "Submit time incorrect after decoding old history file"); + assertEquals(submitTime, info.getSubmitTime(), + "Submit time incorrect after decoding old history file"); assertEquals(USER_NAME, info.getUser(), "User incorrect after decoding old history file"); assertEquals(JOB_NAME, info.getJobName(), "Job name incorrect after decoding old history file"); - assertEquals(finishTime, info.getFinishTime(), "Finish time incorrect after decoding old history file"); + assertEquals(finishTime, info.getFinishTime(), + "Finish time incorrect after decoding old history file"); assertEquals(numMaps, info.getNumMaps(), "Num maps incorrect after decoding old history file"); - assertEquals(numReduces, info.getNumReduces(), "Num reduces incorrect after decoding old 
history file"); - assertEquals(JOB_STATUS, info.getJobStatus(), "Job status incorrect after decoding old history file"); - assertNull(info.getQueueName(), - "Queue name incorrect after decoding old history file"); + assertEquals(numReduces, info.getNumReduces(), + "Num reduces incorrect after decoding old history file"); + assertEquals(JOB_STATUS, info.getJobStatus(), + "Job status incorrect after decoding old history file"); + assertNull(info.getQueueName(), "Queue name incorrect after decoding old history file"); } @Test @@ -482,7 +495,7 @@ void testTrimJobNameEqualsLimitLength() throws IOException { jobNameTrimLength); JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile); - assertEquals(info - .getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName(), "Job name did not get trimmed correctly"); + assertEquals(info.getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName(), + "Job name did not get trimmed correctly"); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java index d096a8f95c272..6da0867f41151 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java @@ -160,10 +160,12 @@ void testTaskIDBadType() { @Test @Timeout(120000) void testTaskAttemptIDtoString() { - TaskAttemptId taid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class); + TaskAttemptId taid = + RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class); 
taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class)); taid.getTaskId().setTaskType(TaskType.MAP); - taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class)); + taid.getTaskId() + .setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class)); taid.getTaskId().getJobId().setAppId(ApplicationId.newInstance(0, 0)); assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid)); } @@ -195,7 +197,8 @@ void testGetJobFileWithUser() { String jobFile = MRApps.getJobFile(conf, "dummy-user", new JobID("dummy-job", 12345)); assertNotNull(jobFile, "getJobFile results in null."); - assertEquals("/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile, "jobFile with specified user is not as expected."); + assertEquals("/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile, + "jobFile with specified user is not as expected."); } @Test @@ -299,8 +302,8 @@ void testSetClasspathWithNoUserPrecendence() { String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, Arrays.asList("job.jar/*", "job.jar/classes/", "job.jar/lib/*", ApplicationConstants.Environment.PWD.$$() + "/*")); - assertTrue(env_str.contains(expectedClasspath), "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in" - + " the classpath!"); + assertTrue(env_str.contains(expectedClasspath), + "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in" + " the classpath!"); assertFalse(env_str.startsWith(expectedClasspath), "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!"); } @@ -315,16 +318,14 @@ void testSetClasspathWithJobClassloader() throws IOException { MRApps.setClasspath(env, conf); String cp = env.get("CLASSPATH"); String appCp = env.get("APP_CLASSPATH"); - assertFalse(cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"), "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the" - + " 
classpath!"); - assertFalse(cp.contains("PWD"), - "MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!"); + assertFalse(cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"), + "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the" + " classpath!"); + assertFalse(cp.contains("PWD"), "MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!"); String expectedAppClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, - Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*", - "job.jar/classes/", "job.jar/lib/*", - ApplicationConstants.Environment.PWD.$$() + "/*")); - assertEquals(expectedAppClasspath, appCp, "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app" - + " classpath!"); + Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*", "job.jar/classes/", + "job.jar/lib/*", ApplicationConstants.Environment.PWD.$$() + "/*")); + assertEquals(expectedAppClasspath, appCp, + "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app" + " classpath!"); } @Test @@ -353,17 +354,19 @@ void testSetClasspathWithFramework() throws IOException { Arrays.asList("job.jar/*", "job.jar/classes/", "job.jar/lib/*", ApplicationConstants.Environment.PWD.$$() + "/*")); String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, - Arrays.asList(ApplicationConstants.Environment.PWD.$$(), - FRAMEWORK_CLASSPATH, stdClasspath)); - assertEquals(expectedClasspath, env.get("CLASSPATH"), "Incorrect classpath with framework and no user precedence"); + Arrays.asList(ApplicationConstants.Environment.PWD.$$(), FRAMEWORK_CLASSPATH, + stdClasspath)); + assertEquals(expectedClasspath, env.get("CLASSPATH"), + "Incorrect classpath with framework and no user precedence"); env.clear(); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); MRApps.setClasspath(env, conf); expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR, - 
Arrays.asList(ApplicationConstants.Environment.PWD.$$(), - stdClasspath, FRAMEWORK_CLASSPATH)); - assertEquals(expectedClasspath, env.get("CLASSPATH"), "Incorrect classpath with framework and user precedence"); + Arrays.asList(ApplicationConstants.Environment.PWD.$$(), stdClasspath, + FRAMEWORK_CLASSPATH)); + assertEquals(expectedClasspath, env.get("CLASSPATH"), + "Incorrect classpath with framework and user precedence"); } @Test @@ -541,12 +544,9 @@ void testTaskStateUI() { "/org/apache/hadoop/fake/Klass" }; - private static final String[] DEFAULT_XMLS = new String[] { - "core-default.xml", - "mapred-default.xml", - "hdfs-default.xml", - "yarn-default.xml" - }; + private static final String[] DEFAULT_XMLS = + new String[] {"core-default.xml", "mapred-default.xml", "hdfs-default.xml", + "yarn-default.xml"}; @Test void testSystemClasses() { From e9dea304cc02926653d6fe228abcaa54463d260a Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Tue, 16 May 2023 11:12:55 +0100 Subject: [PATCH 3/8] addressing comments --- .../test/java/org/apache/hadoop/mapred/TestJobClient.java | 8 +++++--- .../org/apache/hadoop/mapred/TestJobClientGetJob.java | 7 ++++--- .../apache/hadoop/mapred/TestMRWithDistributedCache.java | 8 ++++++-- .../org/apache/hadoop/mapreduce/TestTypeConverter.java | 4 +++- .../apache/hadoop/mapreduce/v2/api/records/TestIds.java | 7 +++++-- 5 files changed, 23 insertions(+), 11 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java index be77354a99c42..c04d16507a7f8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java @@ -23,8 +23,6 @@ import org.apache.hadoop.conf.Configuration; -import static org.junit.jupiter.api.Assertions.*; - import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -35,6 +33,10 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestJobClient { final static String TEST_DIR = new File("target", @@ -64,7 +66,7 @@ void testGetClusterStatusWithLocalJobRunner() throws Exception { @Test @Timeout(10000) - void testIsJobDirValid() throws IOException { + public void testIsJobDirValid() throws IOException { Configuration conf = new Configuration(); FileSystem fs = FileSystem.getLocal(conf); Path testDir = new Path(TEST_DIR); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java index db04a75236ceb..012e3b51a0aba 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java @@ -20,13 +20,14 @@ import java.io.IOException; -import org.apache.hadoop.conf.Configuration; +import org.junit.jupiter.api.Test; -import static org.junit.jupiter.api.Assertions.assertNotNull; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import 
org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertNotNull; public class TestJobClientGetJob { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java index 8180bf46fddcc..7cc08131fc140 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java @@ -30,8 +30,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; -import static org.junit.jupiter.api.Assertions.*; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FSDataOutputStream; @@ -52,6 +50,12 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + /** * Tests the use of the * {@link org.apache.hadoop.mapreduce.filecache.DistributedCache} within the diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java index 088473d291242..03ab5a185877a 100644 
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java @@ -20,7 +20,9 @@ import org.apache.hadoop.util.StringUtils; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java index 7ca43bb2ea181..4737eea7e4356 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java @@ -19,12 +19,15 @@ package org.apache.hadoop.mapreduce.v2.api.records; -import static org.junit.jupiter.api.Assertions.*; - import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + public class TestIds { @Test From 
7e9273de5320e458d9a67143ed35239c7705bd41 Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Tue, 16 May 2023 13:02:07 +0100 Subject: [PATCH 4/8] temp commit to run all tests --- hadoop-project/pom.xml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index bc03267d4a553..fdfe01051dca1 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -2587,3 +2587,5 @@ + + From 8cbbccc7ece955725954e2fc1870a233e5a663ed Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Wed, 17 May 2023 11:36:36 +0100 Subject: [PATCH 5/8] temp commit to trigger all tests --- LICENSE-binary | 2 ++ 1 file changed, 2 insertions(+) diff --git a/LICENSE-binary b/LICENSE-binary index 9dba36439ca78..ab5cea15dfa08 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -536,3 +536,5 @@ Public Domain ------------- aopalliance:aopalliance:1.0 + + From 3f6f8e83f2dec26c1598123418567c0b3e18895f Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Sat, 20 May 2023 13:28:42 +0100 Subject: [PATCH 6/8] Addressing missed comments --- .../apache/hadoop/mapred/TestJobClient.java | 6 +- .../hadoop/mapred/TestJobClientGetJob.java | 2 +- .../TestLocalDistributedCacheManager.java | 8 +-- .../mapred/TestLocalModeWithNewApis.java | 2 +- .../mapred/TestMRWithDistributedCache.java | 4 +- .../hadoop/mapreduce/TestTypeConverter.java | 12 ++-- .../hadoop/mapreduce/v2/TestRPCFactories.java | 2 +- .../mapreduce/v2/TestRecordFactory.java | 2 +- .../mapreduce/v2/api/records/TestIds.java | 6 +- .../v2/jobhistory/TestFileNameIndexUtils.java | 24 ++++---- .../v2/jobhistory/TestJobHistoryUtils.java | 4 +- .../hadoop/mapreduce/v2/util/TestMRApps.java | 58 ++++++++++--------- 12 files changed, 68 insertions(+), 62 deletions(-) diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java index c04d16507a7f8..6c0be34ba4386 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java @@ -48,7 +48,7 @@ public void tearDown() { } @Test - void testGetClusterStatusWithLocalJobRunner() throws Exception { + public void testGetClusterStatusWithLocalJobRunner() throws Exception { Configuration conf = new Configuration(); conf.set(JTConfig.JT_IPC_ADDRESS, MRConfig.LOCAL_FRAMEWORK_NAME); conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME); @@ -85,7 +85,7 @@ public void testIsJobDirValid() throws IOException { @Test @Timeout(10000) - void testGetStagingAreaDir() throws IOException, InterruptedException { + public void testGetStagingAreaDir() throws IOException, InterruptedException { Configuration conf = new Configuration(); JobClient client = new JobClient(conf); @@ -98,7 +98,7 @@ void testGetStagingAreaDir() throws IOException, InterruptedException { */ @Test @Timeout(10000) - void testAutoClosable() throws IOException { + public void testAutoClosable() throws IOException { Configuration conf = new Configuration(); try (JobClient jobClient = new JobClient(conf)) { } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java index 012e3b51a0aba..7c8bca7199ff3 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java @@ -46,7 +46,7 @@ private Path createTempFile(String filename, String contents) @SuppressWarnings("deprecation") @Test - void testGetRunningJobFromJobClient() throws Exception { + public void testGetRunningJobFromJobClient() throws Exception { JobConf conf = new JobConf(); conf.set("mapreduce.framework.name", "local"); FileInputFormat.addInputPath(conf, createTempFile("in", "hello")); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java index 36a4b830c1630..38aa4dd2f30b1 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java @@ -143,7 +143,7 @@ public void seek(long position) {} } @Test - void testDownload() throws Exception { + public void testDownload() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); conf.setClass("fs.mock.impl", MockFileSystem.class, FileSystem.class); @@ -214,7 +214,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable { * no files were probed for/opened. */ @Test - void testEmptyDownload() throws Exception { + public void testEmptyDownload() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); conf.setClass("fs.mock.impl", MockFileSystem.class, FileSystem.class); @@ -253,7 +253,7 @@ void testEmptyDownload() throws Exception { * The same file can be added to the cache twice. 
*/ @Test - void testDuplicateDownload() throws Exception { + public void testDuplicateDownload() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); conf.setClass("fs.mock.impl", MockFileSystem.class, FileSystem.class); @@ -323,7 +323,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable { * validity of one approach over the other. */ @Test - void testMultipleCacheSetup() throws Exception { + public void testMultipleCacheSetup() throws Exception { JobID jobId = new JobID(); JobConf conf = new JobConf(); LocalDistributedCacheManager manager = new LocalDistributedCacheManager(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java index b35df051364c3..628ff15095cc8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java @@ -63,7 +63,7 @@ public void tearDown() throws Exception { } @Test - void testNewApis() throws Exception { + public void testNewApis() throws Exception { Random r = new Random(System.currentTimeMillis()); Path tmpBaseDir = new Path("/tmp/wc-" + r.nextInt()); final Path inDir = new Path(tmpBaseDir, "input"); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java index 7cc08131fc140..5e608296471ca 100644 --- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java @@ -195,7 +195,7 @@ private void testWithConf(Configuration conf) throws IOException, /** Tests using the local job runner. */ @Test - void testLocalJobRunner() throws Exception { + public void testLocalJobRunner() throws Exception { symlinkFile.delete(); // ensure symlink is not present (e.g. if test is // killed part way through) @@ -232,7 +232,7 @@ private Path makeJar(Path p, int index) throws FileNotFoundException, @Test @Timeout(10000) - void testDeprecatedFunctions() throws Exception { + public void testDeprecatedFunctions() throws Exception { DistributedCache.addLocalArchives(conf, "Test Local Archives 1"); assertEquals("Test Local Archives 1", conf.get(DistributedCache.CACHE_LOCALARCHIVES)); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java index 03ab5a185877a..205a2330a10f8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java @@ -50,7 +50,7 @@ public class TestTypeConverter { @Test - void testEnums() throws Exception { + public void testEnums() throws Exception { for (YarnApplicationState applicationState : YarnApplicationState.values()) { TypeConverter.fromYarn(applicationState, FinalApplicationStatus.FAILED); } @@ -76,7 +76,7 @@ void testEnums() 
throws Exception { } @Test - void testFromYarn() throws Exception { + public void testFromYarn() throws Exception { int appStartTime = 612354; int appFinishTime = 612355; YarnApplicationState state = YarnApplicationState.RUNNING; @@ -107,7 +107,7 @@ void testFromYarn() throws Exception { } @Test - void testFromYarnApplicationReport() { + public void testFromYarnApplicationReport() { ApplicationId mockAppId = mock(ApplicationId.class); when(mockAppId.getClusterTimestamp()).thenReturn(12345L); when(mockAppId.getId()).thenReturn(6789); @@ -157,7 +157,7 @@ void testFromYarnApplicationReport() { } @Test - void testFromYarnQueueInfo() { + public void testFromYarnQueueInfo() { org.apache.hadoop.yarn.api.records.QueueInfo queueInfo = Records .newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class); queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED); @@ -173,7 +173,7 @@ void testFromYarnQueueInfo() { * queue */ @Test - void testFromYarnQueue() { + public void testFromYarnQueue() { //Define child queue org.apache.hadoop.yarn.api.records.QueueInfo child = Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class); @@ -199,7 +199,7 @@ void testFromYarnQueue() { } @Test - void testFromYarnJobReport() throws Exception { + public void testFromYarnJobReport() throws Exception { int jobStartTime = 612354; int jobFinishTime = 612355; JobState state = JobState.RUNNING; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java index 8245ede921e6e..8cd347671762d 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java @@ -65,7 +65,7 @@ public class TestRPCFactories { @Test - void test() { + public void test() { testPbServerFactory(); testPbClientFactory(); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java index 3c0ef96cc244a..953c7a2427aec 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java @@ -35,7 +35,7 @@ public class TestRecordFactory { @Test - void testPbRecordFactory() { + public void testPbRecordFactory() { RecordFactory pbRecordFactory = RecordFactoryPBImpl.get(); try { diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java index 4737eea7e4356..ee2d014872ecd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java @@ -31,7 +31,7 @@ public class TestIds { @Test - void testJobId() { + public void testJobId() { long ts1 = 1315890136000l; long ts2 = 1315890136001l; JobId j1 = createJobId(ts1, 2); @@ 
-57,7 +57,7 @@ void testJobId() { } @Test - void testTaskId() { + public void testTaskId() { long ts1 = 1315890136000l; long ts2 = 1315890136001l; TaskId t1 = createTaskId(ts1, 1, 2, TaskType.MAP); @@ -87,7 +87,7 @@ void testTaskId() { } @Test - void testTaskAttemptId() { + public void testTaskAttemptId() { long ts1 = 1315890136000l; long ts2 = 1315890136001l; TaskAttemptId t1 = createTaskAttemptId(ts1, 2, 2, TaskType.MAP, 2); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java index e64ce6b4fd0e0..478eefea69e67 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java @@ -87,7 +87,7 @@ public class TestFileNameIndexUtils { private static final String JOB_START_TIME = "1317928742060"; @Test - void testEncodingDecodingEquivalence() throws IOException { + public void testEncodingDecodingEquivalence() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -128,7 +128,7 @@ void testEncodingDecodingEquivalence() throws IOException { } @Test - void testUserNamePercentEncoding() throws IOException { + public void testUserNamePercentEncoding() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -149,7 +149,7 @@ void testUserNamePercentEncoding() throws IOException { } @Test - void testTrimJobName() throws 
IOException { + public void testTrimJobName() throws IOException { int jobNameTrimLength = 5; JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); @@ -179,7 +179,7 @@ void testTrimJobName() throws IOException { * even if there are some multibyte characters in the job name. */ @Test - void testJobNameWithMultibyteChars() throws IOException { + public void testJobNameWithMultibyteChars() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -327,7 +327,7 @@ void testJobNameWithMultibyteChars() throws IOException { } @Test - void testUserNamePercentDecoding() throws IOException { + public void testUserNamePercentDecoding() throws IOException { String jobHistoryFile = String.format(JOB_HISTORY_FILE_FORMATTER, JOB_ID, SUBMIT_TIME, @@ -345,7 +345,7 @@ void testUserNamePercentDecoding() throws IOException { } @Test - void testJobNamePercentEncoding() throws IOException { + public void testJobNamePercentEncoding() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -366,7 +366,7 @@ void testJobNamePercentEncoding() throws IOException { } @Test - void testJobNamePercentDecoding() throws IOException { + public void testJobNamePercentDecoding() throws IOException { String jobHistoryFile = String.format(JOB_HISTORY_FILE_FORMATTER, JOB_ID, SUBMIT_TIME, @@ -384,7 +384,7 @@ void testJobNamePercentDecoding() throws IOException { } @Test - void testQueueNamePercentEncoding() throws IOException { + public void testQueueNamePercentEncoding() throws IOException { JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -405,7 +405,7 @@ void testQueueNamePercentEncoding() throws IOException { } @Test - void testQueueNamePercentDecoding() throws IOException { + public void 
testQueueNamePercentDecoding() throws IOException { String jobHistoryFile = String.format(JOB_HISTORY_FILE_FORMATTER, JOB_ID, SUBMIT_TIME, @@ -423,7 +423,7 @@ void testQueueNamePercentDecoding() throws IOException { } @Test - void testJobStartTimeBackwardsCompatible() throws IOException { + public void testJobStartTimeBackwardsCompatible() throws IOException { String jobHistoryFile = String.format(OLD_FORMAT_BEFORE_ADD_START_TIME, JOB_ID, SUBMIT_TIME, @@ -439,7 +439,7 @@ void testJobStartTimeBackwardsCompatible() throws IOException { } @Test - void testJobHistoryFileNameBackwardsCompatible() throws IOException { + public void testJobHistoryFileNameBackwardsCompatible() throws IOException { JobID oldJobId = JobID.forName(JOB_ID); JobId jobId = TypeConverter.toYarn(oldJobId); @@ -475,7 +475,7 @@ void testJobHistoryFileNameBackwardsCompatible() throws IOException { } @Test - void testTrimJobNameEqualsLimitLength() throws IOException { + public void testTrimJobNameEqualsLimitLength() throws IOException { int jobNameTrimLength = 9; JobIndexInfo info = new JobIndexInfo(); JobID oldJobId = JobID.forName(JOB_ID); diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java index f96c31523b379..3eaf358d8c054 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java @@ -45,7 +45,7 @@ public class TestJobHistoryUtils { @Test @SuppressWarnings("unchecked") - void testGetHistoryDirsForCleaning() throws IOException { + public void 
testGetHistoryDirsForCleaning() throws IOException { Path pRoot = new Path(TEST_DIR, "org.apache.hadoop.mapreduce.v2.jobhistory." + "TestJobHistoryUtils.testGetHistoryDirsForCleaning"); FileContext fc = FileContext.getFileContext(); @@ -149,7 +149,7 @@ private Path createPath(FileContext fc, Path root, String year, String month, } @Test - void testGetConfiguredHistoryIntermediateUserDoneDirPermissions() { + public void testGetConfiguredHistoryIntermediateUserDoneDirPermissions() { Configuration conf = new Configuration(); Map parameters = ImmutableMap.of( "775", new FsPermission(0775), diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java index 6da0867f41151..a2201c9ff168e 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java @@ -18,7 +18,13 @@ package org.apache.hadoop.mapreduce.v2.util; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -85,7 +91,7 @@ private static void delete(File dir) throws IOException { @Test @Timeout(120000) - void testJobIDtoString() { + public 
void testJobIDtoString() { JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class); jid.setAppId(ApplicationId.newInstance(0, 0)); assertEquals("job_0_0000", MRApps.toString(jid)); @@ -93,7 +99,7 @@ void testJobIDtoString() { @Test @Timeout(120000) - void testToJobID() { + public void testToJobID() { JobId jid = MRApps.toJobID("job_1_1"); assertEquals(1, jid.getAppId().getClusterTimestamp()); assertEquals(1, jid.getAppId().getId()); @@ -102,7 +108,7 @@ void testToJobID() { @Test @Timeout(120000) - void testJobIDShort() { + public void testJobIDShort() { assertThrows(IllegalArgumentException.class, () -> { MRApps.toJobID("job_0_0_0"); }); @@ -111,7 +117,7 @@ void testJobIDShort() { //TODO_get.set @Test @Timeout(120000) - void testTaskIDtoString() { + public void testTaskIDtoString() { TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class); tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class)); tid.getJobId().setAppId(ApplicationId.newInstance(0, 0)); @@ -128,7 +134,7 @@ void testTaskIDtoString() { @Test @Timeout(120000) - void testToTaskID() { + public void testToTaskID() { TaskId tid = MRApps.toTaskID("task_1_2_r_3"); assertEquals(1, tid.getJobId().getAppId().getClusterTimestamp()); assertEquals(2, tid.getJobId().getAppId().getId()); @@ -142,7 +148,7 @@ void testToTaskID() { @Test @Timeout(120000) - void testTaskIDShort() { + public void testTaskIDShort() { assertThrows(IllegalArgumentException.class, () -> { MRApps.toTaskID("task_0_0000_m"); }); @@ -150,7 +156,7 @@ void testTaskIDShort() { @Test @Timeout(120000) - void testTaskIDBadType() { + public void testTaskIDBadType() { assertThrows(IllegalArgumentException.class, () -> { MRApps.toTaskID("task_0_0000_x_000000"); }); @@ -159,7 +165,7 @@ void testTaskIDBadType() { //TODO_get.set @Test @Timeout(120000) - void testTaskAttemptIDtoString() { + public void testTaskAttemptIDtoString() { TaskAttemptId taid = 
RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class); taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class)); @@ -172,7 +178,7 @@ void testTaskAttemptIDtoString() { @Test @Timeout(120000) - void testToTaskAttemptID() { + public void testToTaskAttemptID() { TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3"); assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp()); assertEquals(1, taid.getTaskId().getJobId().getAppId().getId()); @@ -183,7 +189,7 @@ void testToTaskAttemptID() { @Test @Timeout(120000) - void testTaskAttemptIDShort() { + public void testTaskAttemptIDShort() { assertThrows(IllegalArgumentException.class, () -> { MRApps.toTaskAttemptID("attempt_0_0_0_m_0"); }); @@ -191,7 +197,7 @@ void testTaskAttemptIDShort() { @Test @Timeout(120000) - void testGetJobFileWithUser() { + public void testGetJobFileWithUser() { Configuration conf = new Configuration(); conf.set(MRJobConfig.MR_AM_STAGING_DIR, "/my/path/to/staging"); String jobFile = MRApps.getJobFile(conf, "dummy-user", @@ -203,7 +209,7 @@ void testGetJobFileWithUser() { @Test @Timeout(120000) - void testSetClasspath() throws IOException { + public void testSetClasspath() throws IOException { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); Job job = Job.getInstance(conf); @@ -236,7 +242,7 @@ void testSetClasspath() throws IOException { @Test @Timeout(120000) - void testSetClasspathWithArchives() throws IOException { + public void testSetClasspathWithArchives() throws IOException { File testTGZ = new File(testWorkDir, "test.tgz"); FileOutputStream out = new FileOutputStream(testTGZ); out.write(0); @@ -267,7 +273,7 @@ void testSetClasspathWithArchives() throws IOException { @Test @Timeout(120000) - void testSetClasspathWithUserPrecendence() { + public void testSetClasspathWithUserPrecendence() { Configuration conf = new Configuration(); 
conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true); @@ -288,7 +294,7 @@ void testSetClasspathWithUserPrecendence() { @Test @Timeout(120000) - void testSetClasspathWithNoUserPrecendence() { + public void testSetClasspathWithNoUserPrecendence() { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false); @@ -310,7 +316,7 @@ void testSetClasspathWithNoUserPrecendence() { @Test @Timeout(120000) - void testSetClasspathWithJobClassloader() throws IOException { + public void testSetClasspathWithJobClassloader() throws IOException { Configuration conf = new Configuration(); conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true); conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true); @@ -330,7 +336,7 @@ void testSetClasspathWithJobClassloader() throws IOException { @Test @Timeout(3000000) - void testSetClasspathWithFramework() throws IOException { + public void testSetClasspathWithFramework() throws IOException { final String FRAMEWORK_NAME = "some-framework-name"; final String FRAMEWORK_PATH = "some-framework-path#" + FRAMEWORK_NAME; Configuration conf = new Configuration(); @@ -371,7 +377,7 @@ void testSetClasspathWithFramework() throws IOException { @Test @Timeout(30000) - void testSetupDistributedCacheEmpty() throws IOException { + public void testSetupDistributedCacheEmpty() throws IOException { Configuration conf = new Configuration(); Map localResources = new HashMap(); MRApps.setupDistributedCache(conf, localResources); @@ -382,7 +388,7 @@ void testSetupDistributedCacheEmpty() throws IOException { @SuppressWarnings("deprecation") @Test @Timeout(120000) - void testSetupDistributedCacheConflicts() throws Exception { + public void testSetupDistributedCacheConflicts() throws Exception { Configuration conf = new Configuration(); 
conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -422,7 +428,7 @@ void testSetupDistributedCacheConflicts() throws Exception { @SuppressWarnings("deprecation") @Test @Timeout(120000) - void testSetupDistributedCacheConflictsFiles() throws Exception { + public void testSetupDistributedCacheConflictsFiles() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -459,7 +465,7 @@ void testSetupDistributedCacheConflictsFiles() throws Exception { @SuppressWarnings("deprecation") @Test @Timeout(30000) - void testSetupDistributedCache() throws Exception { + public void testSetupDistributedCache() throws Exception { Configuration conf = new Configuration(); conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class); @@ -507,7 +513,7 @@ public void initialize(URI name, Configuration conf) throws IOException {} } @Test - void testLogSystemProperties() throws Exception { + public void testLogSystemProperties() throws Exception { Configuration conf = new Configuration(); // test no logging conf.set(MRJobConfig.MAPREDUCE_JVM_SYSTEM_PROPERTIES_TO_LOG, " "); @@ -527,7 +533,7 @@ void testLogSystemProperties() throws Exception { } @Test - void testTaskStateUI() { + public void testTaskStateUI() { assertTrue(MRApps.TaskStateUI.PENDING.correspondsTo(TaskState.SCHEDULED)); assertTrue(MRApps.TaskStateUI.COMPLETED.correspondsTo(TaskState.SUCCEEDED)); assertTrue(MRApps.TaskStateUI.COMPLETED.correspondsTo(TaskState.FAILED)); @@ -549,7 +555,7 @@ void testTaskStateUI() { "yarn-default.xml"}; @Test - void testSystemClasses() { + public void testSystemClasses() { final List systemClasses = Arrays.asList(StringUtils.getTrimmedStrings( ApplicationClassLoader.SYSTEM_CLASSES_DEFAULT)); @@ -566,7 +572,7 @@ void testSystemClasses() { } @Test - void testInvalidWebappAddress() throws Exception { + public void testInvalidWebappAddress() throws Exception { 
assertThrows(IllegalArgumentException.class, () -> { Configuration conf = new Configuration(); conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, "19888"); From 08d46579111cca0f694b4a5a0f007d56de743c56 Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Sun, 21 May 2023 18:29:28 +0100 Subject: [PATCH 7/8] Revert "temp commit to run all tests" This reverts commit 7e9273de5320e458d9a67143ed35239c7705bd41. --- hadoop-project/pom.xml | 2 -- 1 file changed, 2 deletions(-) diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index fdfe01051dca1..bc03267d4a553 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -2587,5 +2587,3 @@ - - From da420c10273734a8fc4c08acba3dc34b81324941 Mon Sep 17 00:00:00 2001 From: Ashutosh Gupta Date: Sun, 21 May 2023 18:30:00 +0100 Subject: [PATCH 8/8] Revert "temp commit to trigger all tests" This reverts commit 8cbbccc7ece955725954e2fc1870a233e5a663ed. --- LICENSE-binary | 2 -- 1 file changed, 2 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index ab5cea15dfa08..9dba36439ca78 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -536,5 +536,3 @@ Public Domain ------------- aopalliance:aopalliance:1.0 - -