diff --git a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
index b56fc65b25b45..33e34cb0369bf 100644
--- a/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
+++ b/hadoop-common-project/hadoop-auth/src/test/java/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
@@ -15,6 +15,9 @@
 import org.junit.Assert;
 import org.junit.Test;
+import org.junit.experimental.runners.Enclosed;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 import org.mockito.Mockito;
 
 import java.net.HttpURLConnection;
@@ -23,34 +26,43 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Collection;
+import java.util.Arrays;
 
+@RunWith(Enclosed.class)
 public class TestAuthenticatedURL {
 
+  @RunWith(Parameterized.class)
+  public static class TheParameterizedPart {
+    @Parameterized.Parameter(value = 0)
+    public String tokenStr;
+
+    @Parameterized.Parameters
+    public static Collection<Object> testData() {
+      Object[] data = new Object[] {"foo",
+          "RanDOMstrING",
+//        "",  // invalid input; fails
+          "123@456#"};
+      return Arrays.asList(data);
+    }
+
+    // PUTs #3
   @Test
   public void testToken() throws Exception {
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
     Assert.assertFalse(token.isSet());
-    token = new AuthenticatedURL.Token("foo");
+    token = new AuthenticatedURL.Token(tokenStr);
     Assert.assertTrue(token.isSet());
-    Assert.assertEquals("foo", token.toString());
-  }
-
-  @Test
-  public void testInjectToken() throws Exception {
-    HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
-    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
-    token.set("foo");
-    AuthenticatedURL.injectToken(conn, token);
-    Mockito.verify(conn).addRequestProperty(Mockito.eq("Cookie"), Mockito.anyString());
+    Assert.assertEquals(tokenStr, token.toString());
   }
 
+    // PUTs #4
   @Test
   public void testExtractTokenOK() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
 
     Mockito.when(conn.getResponseCode()).thenReturn(HttpURLConnection.HTTP_OK);
 
-    String tokenStr = "foo";
     Map<String, List<String>> headers = new HashMap<String, List<String>>();
     List<String> cookies = new ArrayList<String>();
     cookies.add(AuthenticatedURL.AUTH_COOKIE + "=" + tokenStr);
@@ -63,6 +75,26 @@ public void testExtractTokenOK() throws Exception {
     Assert.assertEquals(tokenStr, token.toString());
   }
 
+  }
+
+  public static class NotParameterizedPart {
+
+    // CUTs #1
+    @Test(expected = IllegalArgumentException.class)
+    public void testEmptyTokenFailure() {
+      new AuthenticatedURL.Token(null);
+    }
+
+    @Test
+    public void testInjectToken() throws Exception {
+      HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
+      AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+      token.set("foo");
+      AuthenticatedURL.injectToken(conn, token);
+      Mockito.verify(conn).addRequestProperty(Mockito.eq("Cookie"), Mockito.anyString());
+    }
+
+
   @Test
   public void testExtractTokenFail() throws Exception {
     HttpURLConnection conn = Mockito.mock(HttpURLConnection.class);
@@ -113,6 +145,7 @@ public void testGetAuthenticator() throws Exception {
     AuthenticatedURL aURL = new AuthenticatedURL(authenticator);
     Assert.assertEquals(authenticator, aURL.getAuthenticator());
+  }
   }
 }
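Every file in this patch follows the same shape: the class is split into a `TheParameterizedPart` run by JUnit's `Parameterized` runner and a `NotParameterizedPart` run once, tied together by the `Enclosed` runner. The sketch below is not part of the patch; class and member names are illustrative, and single-valued data rows (plain `Object` instead of `Object[]`) need JUnit 4.12 or later.

```java
// Minimal sketch of the Enclosed + Parameterized split used throughout
// this patch (illustrative names, not part of the patch itself).
import java.util.Arrays;
import java.util.Collection;

import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

@RunWith(Enclosed.class)          // runs every nested static class
public class ExampleTest {

  @RunWith(Parameterized.class)
  public static class TheParameterizedPart {
    @Parameterized.Parameter(value = 0)
    public String input;          // injected from each data row

    @Parameterized.Parameters
    public static Collection<Object> data() {
      // each element becomes one run of every @Test in this class
      return Arrays.asList(new Object[] {"foo", "RanDOMstrING"});
    }

    @Test
    public void runsOncePerRow() {
      Assert.assertNotNull(input);
    }
  }

  public static class NotParameterizedPart {
    @Test
    public void runsExactlyOnce() {
      Assert.assertTrue(true);    // ordinary test, ignores the data rows
    }
  }
}
```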
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
index cb6a1fb31e6c1..697d7685c1b8d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProvider.java
@@ -24,6 +24,9 @@
 import org.apache.hadoop.security.ProviderUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
+import org.junit.experimental.runners.Enclosed;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 import java.io.IOException;
 import java.net.URI;
@@ -35,6 +38,8 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Collection;
+import java.util.Arrays;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
@@ -42,20 +47,39 @@
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.fail;
 
+@RunWith(Enclosed.class)
 public class TestKeyProvider {
 
   private static final String CIPHER = "AES";
 
+  @RunWith(Parameterized.class)
+  public static class TheParameterizedPart {
+    @Parameterized.Parameter(value = 0)
+    public String name;
+    @Parameterized.Parameter(value = 1)
+    public Integer version;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> buildVersionName() {
+      return Arrays.asList(new Object[][] {
+          { "/a/b", 3 },
+          { "/aaa", 12 },
+          { "", 0 },
+          { "@#!&^^&$@", -1 },
+          { "@@", -99999 }
+      });
+    }
+
+    // PUTs #15
   @Test
   public void testBuildVersionName() throws Exception {
-    assertEquals("/a/b@3", KeyProvider.buildVersionName("/a/b", 3));
-    assertEquals("/aaa@12", KeyProvider.buildVersionName("/aaa", 12));
+    assertEquals(name + "@" + version, KeyProvider.buildVersionName(name, version));
   }
 
+    // PUTs #16
   @Test
   public void testParseVersionName() throws Exception {
-    assertEquals("/a/b", KeyProvider.getBaseName("/a/b@3"));
-    assertEquals("/aaa", KeyProvider.getBaseName("/aaa@112"));
+    assertEquals(name, KeyProvider.getBaseName(name + "@" + version));
     try {
       KeyProvider.getBaseName("no-slashes");
       assertTrue("should have thrown", false);
@@ -64,6 +88,10 @@ public void testParseVersionName() throws Exception {
     }
   }
 
+  }
+
+  public static class NotParameterizedPart {
+
   @Test
   public void testKeyMaterial() throws Exception {
     byte[] key1 = new byte[]{1,2,3,4};
@@ -284,6 +312,7 @@ public void testConfiguration() throws Exception {
     conf.set("a", "A");
     MyKeyProvider kp = new MyKeyProvider(conf);
     Assert.assertEquals("A", kp.getConf().get("a"));
+  }
   }
 }
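The `try { ...; assertTrue("should have thrown", false); } catch` block kept in testParseVersionName is the pre-4.13 idiom. If the project is on JUnit 4.13 or later (an assumption, not something this patch requires), the same check collapses to one assertion; `getBaseName` is the real KeyProvider method and throws `IOException` for names without a version separator.

```java
// Same check as the try/fail/catch in testParseVersionName,
// assuming JUnit 4.13+, which provides Assert.assertThrows.
import static org.junit.Assert.assertThrows;

import java.io.IOException;
import org.junit.Test;

public class ParseVersionNameSketch {
  @Test
  public void rejectsNamesWithoutVersionSeparator() {
    assertThrows(IOException.class,
        () -> KeyProvider.getBaseName("no-slashes"));
  }
}
```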
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java
index e3e20020e3242..535681dbb25f1 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestQuotaUsage.java
@@ -20,26 +20,43 @@
 import static org.junit.Assert.assertEquals;
 
 import org.junit.Test;
+import org.junit.experimental.runners.Enclosed;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
-public class TestQuotaUsage {
+import java.util.Arrays;
+import java.util.Collection;
 
-  // check the empty constructor correctly initialises the object
-  @Test
-  public void testConstructorEmpty() {
-    QuotaUsage quotaUsage = new QuotaUsage.Builder().build();
-    assertEquals("getQuota", -1, quotaUsage.getQuota());
-    assertEquals("getSpaceConsumed", 0, quotaUsage.getSpaceConsumed());
-    assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota());
-  }
+@RunWith(Enclosed.class)
+public class TestQuotaUsage {
 
+  @RunWith(Parameterized.class)
+  public static class TheParameterizedPart {
+    @Parameterized.Parameter(value = 0)
+    public long fileAndDirCount;
+    @Parameterized.Parameter(value = 1)
+    public long quota;
+    @Parameterized.Parameter(value = 2)
+    public long spaceConsumed;
+    @Parameterized.Parameter(value = 3)
+    public long spaceQuota;
+    @Parameterized.Parameter(value = 4)
+    public long SSDQuota;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> testData() {
+      Object[][] data = new Object[][] { {22222, 44444, 55555, 66666, 300000},
+          {22222, 3, 11111, 7, 444444},
+          {-1, 1, -1, 1, -1},
+          {222255555, 222256578, 1073741825, 1, 5}
+      };
+      return Arrays.asList(data);
+    }
+
+    // PUTs #19
   // check the full constructor with quota information
   @Test
   public void testConstructorWithQuota() {
-    long fileAndDirCount = 22222;
-    long quota = 44444;
-    long spaceConsumed = 55555;
-    long spaceQuota = 66666;
-
     QuotaUsage quotaUsage = new QuotaUsage.Builder().
         fileAndDirectoryCount(fileAndDirCount).quota(quota).
         spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
@@ -51,11 +68,10 @@ public void testConstructorWithQuota() {
     assertEquals("getSpaceQuota", spaceQuota, quotaUsage.getSpaceQuota());
   }
 
+    // PUTs #20
   // check the constructor with quota information
   @Test
   public void testConstructorNoQuota() {
-    long spaceConsumed = 11111;
-    long fileAndDirCount = 22222;
     QuotaUsage quotaUsage = new QuotaUsage.Builder().
         fileAndDirectoryCount(fileAndDirCount).
         spaceConsumed(spaceConsumed).build();
@@ -67,66 +83,33 @@ public void testConstructorNoQuota() {
     assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota());
   }
 
-  // check the header
-  @Test
-  public void testGetHeader() {
-    String header = " QUOTA REM_QUOTA SPACE_QUOTA "
-        + "REM_SPACE_QUOTA ";
-    assertEquals(header, QuotaUsage.getHeader());
-  }
-
+    // PUTs #21
   // check the toString method with quotas
   @Test
   public void testToStringWithQuota() {
-    long fileAndDirCount = 55555;
-    long quota = 44444;
-    long spaceConsumed = 55555;
-    long spaceQuota = 66665;
-
     QuotaUsage quotaUsage = new QuotaUsage.Builder().
         fileAndDirectoryCount(fileAndDirCount).quota(quota).
         spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
-    String expected =" 44444 -11111 66665" +
-        " 11110 ";
+    String expected = String.format(
+        QuotaUsage.QUOTA_STRING_FORMAT + QuotaUsage.SPACE_QUOTA_STRING_FORMAT,
+        quota, (quota - fileAndDirCount),
+        spaceQuota, (spaceQuota - spaceConsumed));
     assertEquals(expected, quotaUsage.toString());
   }
 
+    // PUTs #22
   // check the toString method with quotas
   @Test
   public void testToStringNoQuota() {
     QuotaUsage quotaUsage = new QuotaUsage.Builder().
-        fileAndDirectoryCount(1234).build();
+        fileAndDirectoryCount(fileAndDirCount).build();
     String expected = " none inf none" +
         " inf ";
     assertEquals(expected, quotaUsage.toString());
   }
 
-  // check the toString method with quotas
-  @Test
-  public void testToStringHumanWithQuota() {
-    long fileAndDirCount = 222255555;
-    long quota = 222256578;
-    long spaceConsumed = 1073741825;
-    long spaceQuota = 1;
-
-    QuotaUsage quotaUsage = new QuotaUsage.Builder().
-        fileAndDirectoryCount(fileAndDirCount).quota(quota).
-        spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
-    String expected = " 212.0 M 1023 1 "
-        + " -1 G ";
-    assertEquals(expected, quotaUsage.toString(true));
-  }
-
+    // PUTs #23
   // check the equality
   @Test
   public void testCompareQuotaUsage() {
-    long fileAndDirCount = 222255555;
-    long quota = 222256578;
-    long spaceConsumed = 1073741825;
-    long spaceQuota = 1;
-    long SSDspaceConsumed = 100000;
-    long SSDQuota = 300000;
-
     QuotaUsage quotaUsage1 = new QuotaUsage.Builder().
         fileAndDirectoryCount(fileAndDirCount).quota(quota).
         spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).
@@ -143,4 +126,42 @@ public void testCompareQuotaUsage() {
     assertEquals(quotaUsage1, quotaUsage2);
   }
 
+  }
+
+
+  public static class NotParameterizedPart {
+    // check the empty constructor correctly initialises the object
+    @Test
+    public void testConstructorEmpty() {
+      QuotaUsage quotaUsage = new QuotaUsage.Builder().build();
+      assertEquals("getQuota", -1, quotaUsage.getQuota());
+      assertEquals("getSpaceConsumed", 0, quotaUsage.getSpaceConsumed());
+      assertEquals("getSpaceQuota", -1, quotaUsage.getSpaceQuota());
+    }
+
+    // check the header
+    @Test
+    public void testGetHeader() {
+      String header = " QUOTA REM_QUOTA SPACE_QUOTA "
+          + "REM_SPACE_QUOTA ";
+      assertEquals(header, QuotaUsage.getHeader());
+    }
+
+    // check the toString method with quotas
+    // Can be removed #1, covered in PUTs #21
+    @Test
+    public void testToStringHumanWithQuota() {
+      long fileAndDirCount = 222255555;
+      long quota = 222256578;
+      long spaceConsumed = 1073741825;
+      long spaceQuota = 1;
+
+      QuotaUsage quotaUsage = new QuotaUsage.Builder().
+          fileAndDirectoryCount(fileAndDirCount).quota(quota).
+          spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).build();
+      String expected = " 212.0 M 1023 1 "
+          + " -1 G ";
+      assertEquals(expected, quotaUsage.toString(true));
+    }
+  }
 }
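PUTs #21 swaps the hardcoded expected string for one built from QuotaUsage's own format constants, which is what makes the test data-independent. A sketch of the arithmetic using the first data row; it assumes `QUOTA_STRING_FORMAT` and `SPACE_QUOTA_STRING_FORMAT` are visible to the test package, which the patch itself relies on.

```java
// First data row: fileAndDirCount=22222, quota=44444,
// spaceConsumed=55555, spaceQuota=66666.
long quota = 44444, fileAndDirCount = 22222;
long spaceQuota = 66666, spaceConsumed = 55555;
String expected = String.format(
    QuotaUsage.QUOTA_STRING_FORMAT + QuotaUsage.SPACE_QUOTA_STRING_FORMAT,
    quota, quota - fileAndDirCount,           // QUOTA, REM_QUOTA = 22222
    spaceQuota, spaceQuota - spaceConsumed);  // SPACE_QUOTA, REM_SPACE_QUOTA = 11111
```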
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
index 20d4f08612964..9d1b2c73f6d32 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
@@ -23,26 +23,46 @@
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
 
 import org.junit.Test;
+import org.junit.experimental.runners.Enclosed;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 /** Unit tests for ArrayWritable */
+@RunWith(Enclosed.class)
 public class TestArrayWritable {
   static class TextArrayWritable extends ArrayWritable {
     public TextArrayWritable() {
       super(Text.class);
     }
   }
-
+
+  @RunWith(Parameterized.class)
+  public static class TheParameterizedPart {
+    @Parameterized.Parameter(value = 0)
+    public Text[] elements;
+
+    @Parameterized.Parameters
+    public static Collection<Text[][]> textData() {
+      Text[][][] data = new Text[][][] { { {new Text("zero"), new Text("one"), new Text("two")} },
+          { {new Text("zero"), new Text("one")} },
+          { {new Text("")} },
+          { {new Text("$@#*(@&")} }
+      };
+      return Arrays.asList(data);
+    }
+
   /**
    * If valueClass is undefined, readFields should throw an exception indicating
-   * that the field is null. Otherwise, readFields should succeed. 
+   * that the field is null. Otherwise, readFields should succeed.
    */
+  // PUTs #1
   @Test
   public void testThrowUndefinedValueException() throws IOException {
-    // Get a buffer containing a simple text array
-    Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
     TextArrayWritable sourceArray = new TextArrayWritable();
     sourceArray.set(elements);
@@ -62,25 +82,27 @@ public void testThrowUndefinedValueException() throws IOException {
       assertEquals(destElements[i],elements[i]);
     }
   }
-
+
   /**
-   * test {@link ArrayWritable} toArray() method 
+   * test {@link ArrayWritable} toArray() method
   */
+  // PUTs #2
   @Test
   public void testArrayWritableToArray() {
-    Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
     TextArrayWritable arrayWritable = new TextArrayWritable();
     arrayWritable.set(elements);
     Object array = arrayWritable.toArray();
-
+
     assertTrue("TestArrayWritable testArrayWritableToArray error!!! ",
         array instanceof Text[]);
     Text[] destElements = (Text[]) array;
-
+
     for (int i = 0; i < elements.length; i++) {
       assertEquals(destElements[i], elements[i]);
    }
  }
-
+  }
+
+  public static class NotParameterizedPart {
   /**
    * test {@link ArrayWritable} constructor with null
    */
@@ -96,10 +118,10 @@ public void testNullArgument() {
   public void testArrayWritableStringConstructor() {
     String[] original = { "test1", "test2", "test3" };
     ArrayWritable arrayWritable = new ArrayWritable(original);
-    assertEquals("testArrayWritableStringConstructor class error!!!", 
+    assertEquals("testArrayWritableStringConstructor class error!!!",
        Text.class, arrayWritable.getValueClass());
     assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
        original, arrayWritable.toStrings());
   }
-
+  }
 }
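The three-dimensional `textData()` array looks odd at first glance. Each top-level element is one parameter row, and each row's single argument is itself a `Text[]`, which is why one extra nesting level is needed:

```java
// Shape of the data: rows -> arguments -> Text elements.
Text[][][] data = new Text[][][] {
    { {new Text("zero"), new Text("one"), new Text("two")} }, // row 0: one Text[3] argument
    { {new Text("")} }                                        // row 1: one Text[1] argument
};
Text[][] row0 = data[0];     // the argument list for run 0
Text[] elements0 = row0[0];  // bound to the public "elements" field
```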
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
index 9fbb865c6e516..730abaf66bd26 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPC.java
@@ -52,6 +52,8 @@
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.event.Level;
@@ -73,6 +75,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Collection;
 import java.util.List;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
@@ -106,6 +109,7 @@
 /** Unit tests for RPC. */
 @SuppressWarnings("deprecation")
+@RunWith(Parameterized.class)
 public class TestRPC extends TestRpcBase {
   public static final Logger LOG = LoggerFactory.getLogger(TestRPC.class);
@@ -133,7 +137,7 @@ public interface TestProtocol extends VersionedProtocol {
   public static class TestImpl implements TestProtocol {
     int fastPingCounter = 0;
-
+
     @Override
     public long getProtocolVersion(String protocol, long clientVersion) {
       return TestProtocol.versionID;
@@ -147,7 +151,7 @@ public ProtocolSignature getProtocolSignature(String protocol, long clientVersio
     @Override
     public void ping() {}
-
+
     @Override
     public void sleep(long delay) throws InterruptedException {
       Thread.sleep(delay);
@@ -344,6 +348,27 @@ public int getCloseCalled() {
   }
 
+  @Parameterized.Parameter(value = 0)
+  public int testNumHandler1; // number of method handler threads to run
+  @Parameterized.Parameter(value = 1)
+  public int testNumHandler2; // number of method handler threads to run
+  @Parameterized.Parameter(value = 2)
+  public int numberOfStopProxy; // number of times stopProxy is called on the proxy
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> data() {
+    Object[][] data = new Object[][] { { 1, 1, 0},
+        { 3, 4, 1},
+        { 999, 99999, 5},
+//      { 0, 5, 1},  // fails -> java.lang.IllegalArgumentException
+//      { 1, -1, 1}  // fails -> java.lang.IllegalArgumentException
+//      todo add assumes for above
+    };
+
+    return Arrays.asList(data);
+  }
+
+  // PUTs #24
   @Test
   public void testConfRpc() throws IOException {
     Server server = newServerBuilder(conf)
@@ -361,15 +386,15 @@ public void testConfRpc() throws IOException {
     assertEquals(confReaders, server.getNumReaders());
 
     server = newServerBuilder(conf)
-        .setNumHandlers(1).setnumReaders(3).setQueueSizePerHandler(200)
+        .setNumHandlers(testNumHandler1).setnumReaders(confReaders).setQueueSizePerHandler(confQ)
         .setVerbose(false).build();
 
-    assertEquals(3, server.getNumReaders());
-    assertEquals(200, server.getMaxQueueSize());
+    assertEquals(confReaders, server.getNumReaders());
+    assertEquals(testNumHandler1 * confQ, server.getMaxQueueSize());
 
-    server = newServerBuilder(conf).setQueueSizePerHandler(10)
-        .setNumHandlers(2).setVerbose(false).build();
-    assertEquals(2 * 10, server.getMaxQueueSize());
+    server = newServerBuilder(conf).setQueueSizePerHandler(confQ)
+        .setNumHandlers(testNumHandler2).setVerbose(false).build();
+    assertEquals(testNumHandler2 * confQ, server.getMaxQueueSize());
   }
 
   @Test
@@ -602,7 +627,7 @@ private void doRPCs(Configuration myConf, boolean expectFailure) throws Exceptio
     } else {
       assertCounter("RpcAuthorizationSuccesses", 1L, rb);
     }
-    //since we don't have authentication turned ON, we should see 
+    //since we don't have authentication turned ON, we should see
     // 0 for the authentication successes and 0 for failure
     assertCounter("RpcAuthenticationFailures", 0L, rb);
     assertCounter("RpcAuthenticationSuccesses", 0L, rb);
@@ -680,6 +705,7 @@ public void testStopMockObject() throws IOException {
     RPC.stopProxy(MockitoUtil.mockProtocol(TestProtocol.class));
   }
 
+  // PUTs #25
   @Test
   public void testStopProxy() throws IOException {
     RPC.setProtocolEngine(conf,
@@ -689,11 +715,13 @@ public void testStopProxy() throws IOException {
         StoppedProtocol.versionID, null, conf);
     StoppedInvocationHandler invocationHandler = (StoppedInvocationHandler)
         Proxy.getInvocationHandler(proxy);
-    assertEquals(0, invocationHandler.getCloseCalled());
-    RPC.stopProxy(proxy);
-    assertEquals(1, invocationHandler.getCloseCalled());
+    for (int i = 0; i < numberOfStopProxy; i++) {
+      RPC.stopProxy(proxy);
+    }
+    assertEquals(numberOfStopProxy, invocationHandler.getCloseCalled());
   }
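The commented-out rows and the "todo add assumes for above" note point at a standard fix: keep the invalid rows in the data and skip them at runtime instead of failing. A sketch with `org.junit.Assume`; its placement at the top of testConfRpc is hypothetical, not part of the patch.

```java
import org.junit.Assume;

// Skip, rather than fail, the rows where Server.Builder would throw
// IllegalArgumentException for non-positive handler counts.
Assume.assumeTrue("handler counts must be positive",
    testNumHandler1 > 0 && testNumHandler2 > 0);
```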
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoWindows.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoWindows.java
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoWindows.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestSysInfoWindows.java
+@RunWith(Enclosed.class)
 public class TestSysInfoWindows {
 
+  @RunWith(Parameterized.class)
+  public static class TheParameterizedPart {
+    private static final String COMMA = ",";
+
+    @Parameterized.Parameter(value = 0)
+    public long virtualMemorySize;
+    @Parameterized.Parameter(value = 1)
+    public long physicalMemorySize;
+    @Parameterized.Parameter(value = 2)
+    public long availableVirtualMemorySize;
+    @Parameterized.Parameter(value = 3)
+    public long availablePhysicalMemorySize;
+    @Parameterized.Parameter(value = 4)
+    public int numProcessors;
+    @Parameterized.Parameter(value = 5)
+    public long cpuFrequency;
+    @Parameterized.Parameter(value = 6)
+    public long cumulativeCpuTime;
+    @Parameterized.Parameter(value = 7)
+    public long storageBytesRead;
+    @Parameterized.Parameter(value = 8)
+    public long storageBytesWritten;
+    @Parameterized.Parameter(value = 9)
+    public long networkBytesRead;
+    @Parameterized.Parameter(value = 10)
+    public long networkBytesWritten;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> testData() {
+      Object[][] data = new Object[][] {
+          {17177038848L, 8589467648L, 15232745472L, 6400417792L, 1, 2805000L,
+              6261812L, 1234567L, 2345678L, 3456789L, 4567890L},
+          {17177038848L, 8589467648L, 15232745472L, 6400417792L, 12, 2805000L,
+              6261812L, 1234567L, 2345678L, 3456789L, 4567890L},
+      };
+      return Arrays.asList(data);
+    }
+
+    // PUTs #29
   @Test(timeout = 10000)
   public void parseSystemInfoString() {
     SysInfoWindowsMock tester = new SysInfoWindowsMock();
     tester.setSysinfoString(
-        "17177038848,8589467648,15232745472,6400417792,1,2805000,6261812," +
-        "1234567,2345678,3456789,4567890\r\n");
+        virtualMemorySize + COMMA + physicalMemorySize + COMMA + availableVirtualMemorySize + COMMA
+            + availablePhysicalMemorySize + COMMA + numProcessors + COMMA + cpuFrequency + COMMA
+            + cumulativeCpuTime + COMMA + storageBytesRead + COMMA + storageBytesWritten + COMMA
+            + networkBytesRead + COMMA + networkBytesWritten + "\r\n");
     // info str derived from windows shell command has \r\n termination
-    assertEquals(17177038848L, tester.getVirtualMemorySize());
-    assertEquals(8589467648L, tester.getPhysicalMemorySize());
-    assertEquals(15232745472L, tester.getAvailableVirtualMemorySize());
-    assertEquals(6400417792L, tester.getAvailablePhysicalMemorySize());
-    assertEquals(1, tester.getNumProcessors());
-    assertEquals(1, tester.getNumCores());
-    assertEquals(2805000L, tester.getCpuFrequency());
-    assertEquals(6261812L, tester.getCumulativeCpuTime());
-    assertEquals(1234567L, tester.getStorageBytesRead());
-    assertEquals(2345678L, tester.getStorageBytesWritten());
-    assertEquals(3456789L, tester.getNetworkBytesRead());
-    assertEquals(4567890L, tester.getNetworkBytesWritten());
+    assertEquals(virtualMemorySize, tester.getVirtualMemorySize());
+    assertEquals(physicalMemorySize, tester.getPhysicalMemorySize());
+    assertEquals(availableVirtualMemorySize, tester.getAvailableVirtualMemorySize());
+    assertEquals(availablePhysicalMemorySize, tester.getAvailablePhysicalMemorySize());
+    assertEquals(numProcessors, tester.getNumProcessors());
+    assertEquals(numProcessors, tester.getNumCores());
+    assertEquals(cpuFrequency, tester.getCpuFrequency());
+    assertEquals(cumulativeCpuTime, tester.getCumulativeCpuTime());
+    assertEquals(storageBytesRead, tester.getStorageBytesRead());
+    assertEquals(storageBytesWritten, tester.getStorageBytesWritten());
+    assertEquals(networkBytesRead, tester.getNetworkBytesRead());
+    assertEquals(networkBytesWritten, tester.getNetworkBytesWritten());
     // undef on first call
     assertEquals((float)CpuTimeTracker.UNAVAILABLE,
         tester.getCpuUsagePercentage(), 0.0);
@@ -69,27 +114,32 @@ public void parseSystemInfoString() {
         tester.getNumVCoresUsed(), 0.0);
   }
 
+    // PUTs #30
   @Test(timeout = 10000)
   public void refreshAndCpuUsage() throws InterruptedException {
     SysInfoWindowsMock tester = new SysInfoWindowsMock();
     tester.setSysinfoString(
-        "17177038848,8589467648,15232745472,6400417792,1,2805000,6261812," +
-        "1234567,2345678,3456789,4567890\r\n");
+        virtualMemorySize + COMMA + physicalMemorySize + COMMA + availableVirtualMemorySize + COMMA
+            + availablePhysicalMemorySize + COMMA + numProcessors + COMMA + cpuFrequency + COMMA
+            + cumulativeCpuTime + COMMA + storageBytesRead + COMMA + storageBytesWritten + COMMA
+            + networkBytesRead + COMMA + networkBytesWritten + "\r\n");
     // info str derived from windows shell command has \r\n termination
     tester.getAvailablePhysicalMemorySize();
     // verify information has been refreshed
-    assertEquals(6400417792L, tester.getAvailablePhysicalMemorySize());
+    assertEquals(availablePhysicalMemorySize, tester.getAvailablePhysicalMemorySize());
     assertEquals((float)CpuTimeTracker.UNAVAILABLE,
         tester.getCpuUsagePercentage(), 0.0);
     assertEquals((float)CpuTimeTracker.UNAVAILABLE,
         tester.getNumVCoresUsed(), 0.0);
 
     tester.setSysinfoString(
-        "17177038848,8589467648,15232745472,5400417792,1,2805000,6263012," +
-        "1234567,2345678,3456789,4567890\r\n");
+        virtualMemorySize + COMMA + physicalMemorySize + COMMA + availableVirtualMemorySize + COMMA
+            + (availablePhysicalMemorySize - 1000000000) + COMMA + numProcessors + COMMA + cpuFrequency + COMMA
+            + (cumulativeCpuTime + 1200) + COMMA + storageBytesRead + COMMA + storageBytesWritten + COMMA
+            + networkBytesRead + COMMA + networkBytesWritten + "\r\n");
     tester.getAvailablePhysicalMemorySize();
     // verify information has not been refreshed
-    assertEquals(6400417792L, tester.getAvailablePhysicalMemorySize());
+    assertEquals(availablePhysicalMemorySize, tester.getAvailablePhysicalMemorySize());
     assertEquals((float)CpuTimeTracker.UNAVAILABLE,
         tester.getCpuUsagePercentage(), 0.0);
     assertEquals((float)CpuTimeTracker.UNAVAILABLE,
@@ -99,15 +149,18 @@ public void refreshAndCpuUsage() throws InterruptedException {
     tester.advance(SysInfoWindows.REFRESH_INTERVAL_MS + 1);
     // verify information has been refreshed
-    assertEquals(5400417792L, tester.getAvailablePhysicalMemorySize());
-    assertEquals((6263012 - 6261812) * 100F /
-        (SysInfoWindows.REFRESH_INTERVAL_MS + 1f) / 1,
+    assertEquals(availablePhysicalMemorySize - 1000000000, tester.getAvailablePhysicalMemorySize());
+    assertEquals((1200) * 100F /
+        (SysInfoWindows.REFRESH_INTERVAL_MS + 1f) / numProcessors,
         tester.getCpuUsagePercentage(), 0.0);
-    assertEquals((6263012 - 6261812) /
+    assertEquals((1200) /
         (SysInfoWindows.REFRESH_INTERVAL_MS + 1f) / 1,
         tester.getNumVCoresUsed(), 0.0);
   }
 
+  }
+
+  public static class NotParameterizedPart {
+    // Can Be Removed #2, covered in PUTs #30
   @Test(timeout = 10000)
   public void refreshAndCpuUsageMulticore() throws InterruptedException {
     // test with 12 cores
@@ -151,5 +204,5 @@ public void errorInGetSystemInfo() {
     // call a method to refresh values
     tester.getAvailablePhysicalMemorySize();
   }
-
+  }
 }
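The long `COMMA`-concatenation above is easy to get wrong by one field; an equivalent construction with `String.join` (Java 8+) keeps the eleven fields visibly in order. A sketch using the same parameter fields:

```java
// Equivalent construction of the mock sysinfo string, one field per line.
String sysinfo = String.join(",",
    String.valueOf(virtualMemorySize),
    String.valueOf(physicalMemorySize),
    String.valueOf(availableVirtualMemorySize),
    String.valueOf(availablePhysicalMemorySize),
    String.valueOf(numProcessors),
    String.valueOf(cpuFrequency),
    String.valueOf(cumulativeCpuTime),
    String.valueOf(storageBytesRead),
    String.valueOf(storageBytesWritten),
    String.valueOf(networkBytesRead),
    String.valueOf(networkBytesWritten)) + "\r\n";
tester.setSysinfoString(sysinfo);
```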
diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsTime.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsTime.java
index a46051aaab848..c12513ebe3d03 100644
--- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsTime.java
+++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/nfs/TestNfsTime.java
@@ -22,19 +22,36 @@
 import org.apache.hadoop.nfs.NfsTime;
 import org.apache.hadoop.oncrpc.XDR;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
+import java.util.Arrays;
+import java.util.Collection;
+
+@RunWith(Parameterized.class)
 public class TestNfsTime {
+
+  @Parameterized.Parameter(value = 0)
+  public int milliseconds;
+
+  @Parameterized.Parameters
+  public static Collection<Object> testMilliSeconds() {
+    Object[] data = new Object[] {0, -1, -101000, 1001, 1000, 999999999, -4567654};
+    return Arrays.asList(data);
+  }
+
+  // PUTs #17
   @Test
   public void testConstructor() {
-    NfsTime nfstime = new NfsTime(1001);
-    Assert.assertEquals(1, nfstime.getSeconds());
-    Assert.assertEquals(1000000, nfstime.getNseconds());
+    NfsTime nfstime = new NfsTime(milliseconds);
+    Assert.assertEquals(milliseconds / 1000, nfstime.getSeconds());
+    Assert.assertEquals((milliseconds - (milliseconds / 1000) * 1000) * 1000000, nfstime.getNseconds());
   }
-
+
+  // PUTs #18
   @Test
   public void testSerializeDeserialize() {
     // Serialize NfsTime
-    NfsTime t1 = new NfsTime(1001);
+    NfsTime t1 = new NfsTime(milliseconds);
     XDR xdr = new XDR();
     t1.serialize(xdr);
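The expected-value arithmetic in PUTs #17 mirrors how NfsTime splits milliseconds into seconds and nanoseconds. Since Java's integer division and remainder both truncate toward zero, the subtraction form the patch uses and the `%` form agree even for the negative data rows:

```java
// Check that the subtraction form used in PUTs #17 equals the % form,
// including for the negative data rows.
static void checkNanosArithmetic(int milliseconds) {
  int viaSubtraction = (milliseconds - (milliseconds / 1000) * 1000) * 1000000;
  int viaModulus = (milliseconds % 1000) * 1000000;
  assert viaSubtraction == viaModulus; // e.g. -101000 -> 0; 1001 -> 1000000
}
```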
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java
index 6cb63a24306e0..9befef86b2322 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileAppend3.java
@@ -22,6 +22,8 @@
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
 
@@ -51,8 +53,13 @@
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.After;
+import org.junit.experimental.runners.Enclosed;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 
 /** This class implements some of tests posted in HADOOP-2658. */
+@RunWith(Enclosed.class)
 public class TestFileAppend3 {
   {
     DFSTestUtil.setNameNodeLogLevel(Level.ALL);
@@ -62,7 +69,6 @@ public class TestFileAppend3 {
         .event.Level.TRACE);
   }
 
-  static final long BLOCK_SIZE = 64 * 1024;
   static final short REPLICATION = 3;
   static final int DATANODE_NUM = 5;
 
@@ -80,7 +86,7 @@ public static void setUp() throws java.lang.Exception {
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(DATANODE_NUM).build();
     fs = cluster.getFileSystem();
   }
-
+
   @AfterClass
   public static void tearDown() throws Exception {
     AppendTestUtil.LOG.info("tearDown()");
@@ -88,17 +94,59 @@ public static void tearDown() throws Exception {
     if(cluster != null) cluster.shutdown();
   }
 
+  @RunWith(Parameterized.class)
+  public static class TheParameterizedPart {
+    @Parameterized.Parameter(value = 0)
+    public long BLOCK_SIZE; // > 0 and a multiple of 512 (checksum) and 4096 (buffer size)
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> testData() {
+      Object[][] data = new Object[][] { {64 * 1024}, // 65536
+//        {64 * 64},   // testTC12ForAppend2 fails: expected 2, got 9
+//        {-1},        // fails, not a multiple of 512
+//        {777},       // fails, not a multiple of 512
+//        {0},         // fails, not a multiple of 512
+//        {512},       // testTC12ForAppend2 fails: expected 2, got 63
+          {32768},     // passes
+//        {8192},      // testTC12ForAppend2 fails: expected 2, got 5
+//        {16384},     // testTC12ForAppend2 fails: expected 2, got 3
+//        {4096},      // testTC12ForAppend2 fails: expected 2, got 9
+//        {131072},    // passes, but takes really long
+//        {1024},      // testTC12ForAppend2 fails: expected 2, got 32
+      };
+      return Arrays.asList(data);
+    }
+
+    // deleting created files after tests to avoid file-already-exists exceptions in re-runs
+    @After
+    public void endTest() throws IOException {
+      // delete file, true for recursive
+      fs.delete(new Path("/TC12/foo0"), true);
+      fs.delete(new Path("/TC1/foo2"), true);
+      fs.delete(new Path("/TC1/foo"), true);
+      fs.delete(new Path("/TC2/foo"), true);
+      fs.delete(new Path("/TC2/foo2"), true);
+      fs.delete(new Path("/TC5/foo"), true);
+      fs.delete(new Path("/TC7/foo1"), true);
+      fs.delete(new Path("/TC11/foo1"), true);
+      fs.delete(new Path("/TC12/foo1"), true);
+      fs.delete(new Path("/TC5/foo2"), true);
+      fs.delete(new Path("/TC11/foo0"), true);
+      fs.delete(new Path("/TC7/foo0"), true);
+      fs.delete(new Path("/TC7/foo2"), true);
+    }
+
   /**
    * TC1: Append on block boundary.
    * @throws IOException an exception might be thrown
    */
+  // PUTs #5
   @Test
   public void testTC1() throws Exception {
     final Path p = new Path("/TC1/foo");
     System.out.println("p=" + p);
 
     //a. Create file and write one block of data. Close file.
-    final int len1 = (int)BLOCK_SIZE; 
+    final int len1 = (int)BLOCK_SIZE;
     {
       FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE);
       AppendTestUtil.write(out, 0, len1);
@@ -106,17 +154,18 @@ public void testTC1() throws Exception {
     }
 
     // Reopen file to append. Append half block of data. Close file.
-    final int len2 = (int)BLOCK_SIZE/2; 
+    final int len2 = (int)BLOCK_SIZE/2;
     {
       FSDataOutputStream out = fs.append(p);
       AppendTestUtil.write(out, len1, len2);
       out.close();
     }
-
+
     //b. Reopen file and read 1.5 blocks worth of data. Close file.
     AppendTestUtil.check(fs, p, len1 + len2);
   }
 
+  // PUTs #6
   @Test
   public void testTC1ForAppend2() throws Exception {
     final Path p = new Path("/TC1/foo2");
@@ -147,13 +196,14 @@ public void testTC1ForAppend2() throws Exception {
   * TC2: Append on non-block boundary.
   * @throws IOException an exception might be thrown
   */
+  // PUTs #7
   @Test
   public void testTC2() throws Exception {
     final Path p = new Path("/TC2/foo");
     System.out.println("p=" + p);
 
     //a. Create file with one and a half block of data. Close file.
-    final int len1 = (int)(BLOCK_SIZE + BLOCK_SIZE/2); 
+    final int len1 = (int)(BLOCK_SIZE + BLOCK_SIZE/2);
     {
       FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE);
       AppendTestUtil.write(out, 0, len1);
@@ -163,7 +213,7 @@ public void testTC2() throws Exception {
     AppendTestUtil.check(fs, p, len1);
 
     // Reopen file to append quarter block of data. Close file.
-    final int len2 = (int)BLOCK_SIZE/4; 
+    final int len2 = (int)BLOCK_SIZE/4;
     {
       FSDataOutputStream out = fs.append(p);
       AppendTestUtil.write(out, len1, len2);
@@ -174,6 +224,7 @@ public void testTC2() throws Exception {
     AppendTestUtil.check(fs, p, len1 + len2);
   }
 
+  // PUTs #8
   @Test
   public void testTC2ForAppend2() throws Exception {
     final Path p = new Path("/TC2/foo2");
@@ -212,6 +263,7 @@ public void testTC2ForAppend2() throws Exception {
   * TC5: Only one simultaneous append.
   * @throws IOException an exception might be thrown
   */
+  // PUTs #9
   @Test
   public void testTC5() throws Exception {
     final Path p = new Path("/TC5/foo");
@@ -245,9 +297,10 @@ public void testTC5() throws Exception {
     }
 
     //d. On Machine M1, close file.
-    out.close(); 
+    out.close();
   }
 
+  // PUTs #10
   @Test
   public void testTC5ForAppend2() throws Exception {
     final Path p = new Path("/TC5/foo2");
@@ -293,9 +346,9 @@ private void testTC7(boolean appendToNewBlock) throws Exception {
     final short repl = 2;
     final Path p = new Path("/TC7/foo" + (appendToNewBlock ? "0" : "1"));
     System.out.println("p=" + p);
-
+
     //a. Create file with replication factor of 2. Write half block of data. Close file.
-    final int len1 = (int)(BLOCK_SIZE/2); 
+    final int len1 = (int)(BLOCK_SIZE/2);
     {
       FSDataOutputStream out = fs.create(p, false, buffersize, repl, BLOCK_SIZE);
       AppendTestUtil.write(out, 0, len1);
@@ -317,7 +370,7 @@ private void testTC7(boolean appendToNewBlock) throws Exception {
     cluster.getMaterializedReplica(dn, blk).truncateData(0);
 
     //c. Open file in "append mode". Append a new block worth of data. Close file.
-    final int len2 = (int)BLOCK_SIZE; 
+    final int len2 = (int)BLOCK_SIZE;
     {
       FSDataOutputStream out = appendToNewBlock ?
         fs.append(p, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null) :
         fs.append(p);
@@ -328,17 +381,76 @@ private void testTC7(boolean appendToNewBlock) throws Exception {
     //d. Reopen file and read two blocks worth of data.
     AppendTestUtil.check(fs, p, len1 + len2);
   }
-
+
+  // PUTs #11
   @Test
   public void testTC7() throws Exception {
     testTC7(false);
   }
-
+
+  // PUTs #12
   @Test
   public void testTC7ForAppend2() throws Exception {
     testTC7(true);
   }
 
+  /**
+   * TC12: Append to partial CRC chunk
+   */
+  private void testTC12(boolean appendToNewBlock) throws Exception {
+    final Path p = new Path("/TC12/foo" + (appendToNewBlock ? "0" : "1"));
+    System.out.println("p=" + p);
+
+    //a. Create file with a block size of 64KB
+    //   and a default io.bytes.per.checksum of 512 bytes.
+    //   Write 25687 bytes of data. Close file.
+    final int len1 = 25687;
+    {
+      FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE);
+      AppendTestUtil.write(out, 0, len1);
+      out.close();
+    }
+
+    //b. Reopen file in "append" mode. Append another 5877 bytes of data. Close file.
+    final int len2 = 5877;
+    {
+      FSDataOutputStream out = appendToNewBlock ?
+          fs.append(p, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null) :
+          fs.append(p);
+      AppendTestUtil.write(out, len1, len2);
+      out.close();
+    }
+
+    //c. Reopen file and read 25687+5877 bytes of data from file. Close file.
+    AppendTestUtil.check(fs, p, len1 + len2);
+    if (appendToNewBlock) {
+      LocatedBlocks blks = fs.dfs.getLocatedBlocks(p.toString(), 0);
+      Assert.assertEquals(2, blks.getLocatedBlocks().size());
+      Assert.assertEquals(len1, blks.getLocatedBlocks().get(0).getBlockSize());
+      Assert.assertEquals(len2, blks.getLocatedBlocks().get(1).getBlockSize());
+      AppendTestUtil.check(fs, p, 0, len1);
+      AppendTestUtil.check(fs, p, len1, len2);
+    }
+  }
+
+  // PUTs #13
+  @Test
+  public void testTC12() throws Exception {
+    testTC12(false);
+  }
+
+  // Potential Bug #1: the expected value in testTC12's
+  // Assert.assertEquals(2, blks.getLocatedBlocks().size()) depends on
+  // BLOCK_SIZE; the expectation could be written as (31564/BLOCK_SIZE + 2),
+  // but the actual block count also changes with the block size.
+  // PUTs #14
+  @Test
+  public void testTC12ForAppend2() throws Exception {
+    testTC12(true);
+  }
+  }
+
+  public static class NotParameterizedPart {
+
+    static final long BLOCK_SIZE = 64 * 1024;
+
   /**
    * TC11: Racing rename
    */
@@ -347,7 +459,7 @@ private void testTC11(boolean appendToNewBlock) throws Exception {
     System.out.println("p=" + p);
 
     //a. Create file and write one block of data. Close file.
-    final int len1 = (int)BLOCK_SIZE; 
+    final int len1 = (int)BLOCK_SIZE;
     {
       FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE);
       AppendTestUtil.write(out, 0, len1);
@@ -358,15 +470,15 @@ private void testTC11(boolean appendToNewBlock) throws Exception {
     FSDataOutputStream out = appendToNewBlock ?
         fs.append(p, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null) :
         fs.append(p);
-    final int len2 = (int)BLOCK_SIZE/2; 
+    final int len2 = (int)BLOCK_SIZE/2;
     AppendTestUtil.write(out, len1, len2);
     out.hflush();
-
+
     //c. Rename file to file.new.
     final Path pnew = new Path(p + ".new");
     assertTrue(fs.rename(p, pnew));
 
-    //d. Close file handle that was opened in (b). 
+    //d. Close file handle that was opened in (b).
     out.close();
 
     //check block sizes
@@ -399,55 +511,6 @@ public void testTC11ForAppend2() throws Exception {
     testTC11(true);
   }
 
-  /**
-   * TC12: Append to partial CRC chunk
-   */
-  private void testTC12(boolean appendToNewBlock) throws Exception {
-    final Path p = new Path("/TC12/foo" + (appendToNewBlock ? "0" : "1"));
-    System.out.println("p=" + p);
-
-    //a. Create file with a block size of 64KB
-    //   and a default io.bytes.per.checksum of 512 bytes.
-    //   Write 25687 bytes of data. Close file.
-    final int len1 = 25687;
-    {
-      FSDataOutputStream out = fs.create(p, false, buffersize, REPLICATION, BLOCK_SIZE);
-      AppendTestUtil.write(out, 0, len1);
-      out.close();
-    }
-
-    //b. Reopen file in "append" mode. Append another 5877 bytes of data. Close file.
-    final int len2 = 5877;
-    {
-      FSDataOutputStream out = appendToNewBlock ?
-          fs.append(p, EnumSet.of(CreateFlag.APPEND, CreateFlag.NEW_BLOCK), 4096, null) :
-          fs.append(p);
-      AppendTestUtil.write(out, len1, len2);
-      out.close();
-    }
-
-    //c. Reopen file and read 25687+5877 bytes of data from file. Close file.
-    AppendTestUtil.check(fs, p, len1 + len2);
-    if (appendToNewBlock) {
-      LocatedBlocks blks = fs.dfs.getLocatedBlocks(p.toString(), 0);
-      Assert.assertEquals(2, blks.getLocatedBlocks().size());
-      Assert.assertEquals(len1, blks.getLocatedBlocks().get(0).getBlockSize());
-      Assert.assertEquals(len2, blks.getLocatedBlocks().get(1).getBlockSize());
-      AppendTestUtil.check(fs, p, 0, len1);
-      AppendTestUtil.check(fs, p, len1, len2);
-    }
-  }
-
-  @Test
-  public void testTC12() throws Exception {
-    testTC12(false);
-  }
-
-  @Test
-  public void testTC12ForAppend2() throws Exception {
-    testTC12(true);
-  }
-
   /**
    * Append to a partial CRC chunk and the first write does not fill up the
    * partial CRC trunk
@@ -458,7 +521,7 @@ private void testAppendToPartialChunk(boolean appendToNewBlock)
         + (appendToNewBlock ? "0" : "1"));
     final int fileLen = 513;
     System.out.println("p=" + p);
-
+
     byte[] fileContents = AppendTestUtil.initBuffer(fileLen);
 
     // create a new file.
@@ -592,4 +655,5 @@ public void testAppendToPartialChunk() throws IOException {
   public void testAppendToPartialChunkforAppend2() throws IOException {
     testAppendToPartialChunk(true);
   }
+  }
 }
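The failure pattern recorded in the testData() comments above (and flagged as Potential Bug #1) appears consistent with plain block-packing arithmetic rather than a defect in the data rows: with NEW_BLOCK appends, the 25687-byte create and the 5877-byte append each occupy a ceil-divided number of blocks. This is a reading of the observed counts, not something verified against HDFS internals.

```java
// Block counts reported in the data-row comments match ceil-division of
// the two write sizes used by testTC12 (25687 bytes, then 5877 bytes):
static long expectedBlockCount(long blockSize) {
  return (25687 + blockSize - 1) / blockSize    // blocks from the create
       + (5877 + blockSize - 1) / blockSize;    // blocks from the NEW_BLOCK append
}
// expectedBlockCount(4096) == 9, (8192) == 5, (16384) == 3, (1024) == 32,
// (512) == 63, (65536) == 2 -- matching every "expected 2, got N" comment.
```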
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
index 0031598da5b33..2006f57b30d84 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
@@ -28,6 +28,8 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.Arrays;
+import java.util.Collection;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -80,6 +81,8 @@
 import org.apache.hadoop.yarn.util.SystemClock;
 import org.junit.Assert;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -87,6 +90,7 @@
 import static org.assertj.core.api.Assertions.offset;
 
 @SuppressWarnings({"unchecked", "rawtypes"})
+@RunWith(Parameterized.class)
 public class TestRuntimeEstimators {
 
   private static int INITIAL_NUMBER_FREE_SLOTS = 600;
@@ -96,6 +100,27 @@ public class TestRuntimeEstimators {
   private static int MAP_TASKS = 200;
   private static int REDUCE_TASKS = 150;
 
+  @Parameterized.Parameter(value = 0)
+  public long speculativeRetryAfterNoSpeculate; // (any non negative)
+  @Parameterized.Parameter(value = 1)
+  public long speculativeRetryAfterSpeculate; // (any non negative)
+  @Parameterized.Parameter(value = 2)
+  public double speculativeCapRunningTasks; // [0-1]
+  @Parameterized.Parameter(value = 3)
+  public double speculativeCapTotalTasks; // [0-1]
+  @Parameterized.Parameter(value = 4)
+  public int speculativeMinimumAllowedTasks; // (>=3)
+
+  @Parameterized.Parameters
+  public static Collection<Object[]> testData() {
+    Object[][] data = new Object[][] { {500L, 5000L, 0.1, 0.001, 5},
+        {500L, 200L, 0.001, 0.00001, 1},
+//      {1, 1, 1, 0.001, 5},
+//      {1, 1, 0, 0, 3}  // Potential Bug #4 -> Todo: investigate
+    };
+    return Arrays.asList(data);
+  }
+
   ControlledClock clock;
 
   Job myJob;
 
@@ -121,7 +146,7 @@ public class TestRuntimeEstimators {
       = new AtomicInteger(0);
   private final AtomicLong taskTimeSavedBySpeculation
       = new AtomicLong(0L);
-
+
   private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 
   private void coreTestEstimator
@@ -146,22 +171,22 @@ public class TestRuntimeEstimators {
 
     estimator.contextualize(conf, myAppContext);
 
-    conf.setLong(MRJobConfig.SPECULATIVE_RETRY_AFTER_NO_SPECULATE, 500L);
-    conf.setLong(MRJobConfig.SPECULATIVE_RETRY_AFTER_SPECULATE, 5000L);
-    conf.setDouble(MRJobConfig.SPECULATIVECAP_RUNNING_TASKS, 0.1);
-    conf.setDouble(MRJobConfig.SPECULATIVECAP_TOTAL_TASKS, 0.001);
-    conf.setInt(MRJobConfig.SPECULATIVE_MINIMUM_ALLOWED_TASKS, 5);
+    conf.setLong(MRJobConfig.SPECULATIVE_RETRY_AFTER_NO_SPECULATE, speculativeRetryAfterNoSpeculate);
+    conf.setLong(MRJobConfig.SPECULATIVE_RETRY_AFTER_SPECULATE, speculativeRetryAfterSpeculate);
+    conf.setDouble(MRJobConfig.SPECULATIVECAP_RUNNING_TASKS,
+        speculativeCapRunningTasks);
+    conf.setDouble(MRJobConfig.SPECULATIVECAP_TOTAL_TASKS, speculativeCapTotalTasks);
+    conf.setInt(MRJobConfig.SPECULATIVE_MINIMUM_ALLOWED_TASKS, speculativeMinimumAllowedTasks);
     speculator = new DefaultSpeculator(conf, myAppContext, estimator, clock);
     Assert.assertEquals("wrong SPECULATIVE_RETRY_AFTER_NO_SPECULATE value",
-        500L, speculator.getSoonestRetryAfterNoSpeculate());
+        speculativeRetryAfterNoSpeculate, speculator.getSoonestRetryAfterNoSpeculate());
     Assert.assertEquals("wrong SPECULATIVE_RETRY_AFTER_SPECULATE value",
-        5000L, speculator.getSoonestRetryAfterSpeculate());
+        speculativeRetryAfterSpeculate, speculator.getSoonestRetryAfterSpeculate());
     assertThat(speculator.getProportionRunningTasksSpeculatable())
-        .isCloseTo(0.1, offset(0.00001));
+        .isCloseTo(speculativeCapRunningTasks, offset(0.00001));
     assertThat(speculator.getProportionTotalTasksSpeculatable())
-        .isCloseTo(0.001, offset(0.00001));
+        .isCloseTo(speculativeCapTotalTasks, offset(0.00001));
     Assert.assertEquals("wrong SPECULATIVE_MINIMUM_ALLOWED_TASKS value",
-        5, speculator.getMinimumAllowedSpeculativeTasks());
+        speculativeMinimumAllowedTasks, speculator.getMinimumAllowedSpeculativeTasks());
 
     dispatcher.register(Speculator.EventType.class, speculator);
 
@@ -248,12 +273,14 @@ public class TestRuntimeEstimators {
         expectedSpeculations, successfulSpeculations.get());
   }
 
+  // PUTs #26
   @Test
   public void testLegacyEstimator() throws Exception {
     TaskRuntimeEstimator specificEstimator = new LegacyTaskRuntimeEstimator();
     coreTestEstimator(specificEstimator, 3);
   }
 
+  // PUTs #27
   @Test
   public void testExponentialEstimator() throws Exception {
     TaskRuntimeEstimator specificEstimator
@@ -261,6 +288,7 @@ public void testExponentialEstimator() throws Exception {
     coreTestEstimator(specificEstimator, 3);
   }
 
+  // PUTs #28
   @Test
   public void testSimpleExponentialEstimator() throws Exception {
     TaskRuntimeEstimator specificEstimator