diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java
index c016ff0378957..684b1743bc475 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/CompressDecompressTester.java
@@ -17,10 +17,11 @@
  */
 package org.apache.hadoop.io.compress;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -37,13 +38,11 @@
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.log4j.Logger;
-import org.junit.Assert;
 import org.apache.hadoop.thirdparty.com.google.common.base.Joiner;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableMap;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet;
-import static org.junit.Assert.*;
 public class CompressDecompressTester {
@@ -274,12 +273,10 @@ public void assertCompression(String name, Compressor compressor,
       int maxCompressedLength = 32 + rawData.length + rawData.length/6;
       byte[] compressedResult = new byte[maxCompressedLength];
       byte[] decompressedBytes = new byte[rawData.length];
-      assertTrue(
-          joiner.join(name, "compressor.needsInput before error !!!"),
-          compressor.needsInput());
-      assertEquals(
-          joiner.join(name, "compressor.getBytesWritten before error !!!"),
-          0, compressor.getBytesWritten());
+      assertTrue(compressor.needsInput(),
+          joiner.join(name, "compressor.needsInput before error !!!"));
+      assertEquals(0, compressor.getBytesWritten(),
+          joiner.join(name, "compressor.getBytesWritten before error !!!"));
       compressor.setInput(rawData, 0, rawData.length);
       compressor.finish();
       while (!compressor.finished()) {
@@ -288,23 +285,20 @@ public void assertCompression(String name, Compressor compressor,
       }
       compressor.reset();
-      assertTrue(
-          joiner.join(name, "decompressor.needsInput() before error !!!"),
-          decompressor.needsInput());
+      assertTrue(decompressor.needsInput(),
+          joiner.join(name, "decompressor.needsInput() before error !!!"));
       decompressor.setInput(compressedResult, 0, cSize);
-      assertFalse(
-          joiner.join(name, "decompressor.needsInput() after error !!!"),
-          decompressor.needsInput());
+      assertFalse(decompressor.needsInput(),
+          joiner.join(name, "decompressor.needsInput() after error !!!"));
       while (!decompressor.finished()) {
         decompressedSize = decompressor.decompress(decompressedBytes, 0,
             decompressedBytes.length);
       }
       decompressor.reset();
-      assertEquals(joiner.join(name, " byte size not equals error !!!"),
-          rawData.length, decompressedSize);
-      assertArrayEquals(
-          joiner.join(name, " byte arrays not equals error !!!"), rawData,
-          decompressedBytes);
+      assertEquals(rawData.length, decompressedSize,
+          joiner.join(name, " byte size not equals error !!!"));
+      assertArrayEquals(rawData, decompressedBytes,
+          joiner.join(name, " byte arrays not equals error !!!"));
     }
   }),
@@ -331,17 +325,16 @@ void assertCompression(String name, Compressor compressor,
       // check compressed output
       buf = bytesOut.toByteArray();
       int emSize = emptySize.get(compressor.getClass());
-      Assert.assertEquals(
-          joiner.join(name, "empty stream compressed output size != "
-              + emSize), emSize, buf.length);
+      assertEquals(emSize, buf.length,
+          joiner.join(name, "empty stream compressed output size != " + emSize));
       // use compressed output as input for decompression
       bytesIn = new ByteArrayInputStream(buf);
       // create decompression stream
       blockDecompressorStream = new BlockDecompressorStream(bytesIn,
           decompressor, 1024);
       // no byte is available because stream was closed
-      assertEquals(joiner.join(name, " return value is not -1"), -1,
-          blockDecompressorStream.read());
+      assertEquals(-1,
+          blockDecompressorStream.read(), joiner.join(name, " return value is not -1"));
     } catch (IOException e) {
       fail(joiner.join(name, e.getMessage()));
     } finally {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBZip2Codec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBZip2Codec.java
index 9dd3215f90d5e..8b154c6a6cf04 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBZip2Codec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBZip2Codec.java
@@ -22,9 +22,9 @@
 import java.util.List;
 import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -41,8 +41,8 @@
 import static org.apache.hadoop.util.Preconditions.checkArgument;
 import static org.assertj.core.api.Assertions.assertThatNullPointerException;
 import static org.assertj.core.api.AssertionsForClassTypes.assertThatExceptionOfType;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 public final class TestBZip2Codec {
@@ -54,7 +54,7 @@ public final class TestBZip2Codec {
   private Decompressor decompressor;
   private Path tempFile;
-  @Before
+  @BeforeEach
   public void setUp() throws Exception {
     conf = new Configuration();
@@ -71,7 +71,7 @@ public void setUp() throws Exception {
     decompressor = CodecPool.getDecompressor(codec);
   }
-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     CodecPool.returnDecompressor(decompressor);
     fs.delete(tempFile, /* recursive */ false);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBlockDecompressorStream.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBlockDecompressorStream.java
index cdab772e2fcd7..558a1989d6fc5 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBlockDecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestBlockDecompressorStream.java
@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.io.compress;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -29,7 +29,7 @@
 import java.io.InputStream;
 import java.nio.ByteBuffer;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
 public class TestBlockDecompressorStream {
@@ -63,8 +63,8 @@ private void testRead(int bufLen) throws IOException {
     // check compressed output
     buf = bytesOut.toByteArray();
-    assertEquals("empty file compressed output size is not " + (bufLen + 4),
-        bufLen + 4, buf.length);
+    assertEquals(bufLen + 4, buf.length,
+        "empty file compressed output size is not " + (bufLen + 4));
     // use compressed output as input for decompression
     bytesIn = new ByteArrayInputStream(buf);
@@ -72,8 +72,8 @@ private void testRead(int bufLen) throws IOException {
     // get decompression stream
     try (BlockDecompressorStream blockDecompressorStream =
         new BlockDecompressorStream(bytesIn, new FakeDecompressor(), 1024)) {
-      assertEquals("return value is not -1",
-          -1 , blockDecompressorStream.read());
+      assertEquals(-1, blockDecompressorStream.read(),
+          "return value is not -1");
     } catch (IOException e) {
       fail("unexpected IOException : " + e);
     }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
index 5a63c06515e8b..7e97872dbfc4d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
@@ -18,13 +18,13 @@
 package org.apache.hadoop.io.compress;
 import static org.assertj.core.api.Assertions.assertThat;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -77,8 +77,9 @@
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.junit.After;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -90,7 +91,7 @@ public class TestCodec {
   private int count = 10000;
   private int seed = new Random().nextInt();
-  @After
+  @AfterEach
   public void after() {
     ZlibFactory.loadNativeZLib();
   }
@@ -114,7 +115,8 @@ public void testGzipCodec() throws IOException {
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
   }
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testBZip2Codec() throws IOException {
     Configuration conf = new Configuration();
     conf.set("io.compression.codec.bzip2.library", "java-builtin");
@@ -122,7 +124,8 @@ public void testBZip2Codec() throws IOException {
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
   }
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testBZip2NativeCodec() throws IOException {
     Configuration conf = new Configuration();
     conf.set("io.compression.codec.bzip2.library", "system-native");
@@ -216,8 +219,8 @@ private static void codecTest(Configuration conf, int seed, int count,
       deflateFilter.finish();
     }
     if (leasedCompressorsBefore > -1) {
-      assertEquals("leased compressor not returned to the codec pool",
-          leasedCompressorsBefore, CodecPool.getLeasedCompressorsCount(codec));
+      assertEquals(leasedCompressorsBefore, CodecPool.getLeasedCompressorsCount(codec),
+          "leased compressor not returned to the codec pool");
     }
     LOG.info("Finished compressing data");
@@ -247,8 +250,8 @@ private static void codecTest(Configuration conf, int seed, int count,
         RandomDatum v2 = new RandomDatum();
         k2.readFields(inflateIn);
         v2.readFields(inflateIn);
-        assertTrue("original and compressed-then-decompressed-output not equal",
-            k1.equals(k2) && v1.equals(v2));
+        assertTrue(k1.equals(k2) && v1.equals(v2),
+            "original and compressed-then-decompressed-output not equal");
         // original and compressed-then-decompressed-output have the same
         // hashCode
@@ -256,14 +259,14 @@
         m.put(k1, k1.toString());
         m.put(v1, v1.toString());
         String result = m.get(k2);
-        assertEquals("k1 and k2 hashcode not equal", result, k1.toString());
+        assertEquals(result, k1.toString(), "k1 and k2 hashcode not equal");
         result = m.get(v2);
-        assertEquals("v1 and v2 hashcode not equal", result, v1.toString());
+        assertEquals(result, v1.toString(), "v1 and v2 hashcode not equal");
       }
     }
-    assertEquals("leased decompressor not returned to the codec pool",
-        leasedDecompressorsBefore,
-        CodecPool.getLeasedDecompressorsCount(codec));
+    assertEquals(leasedDecompressorsBefore,
+        CodecPool.getLeasedDecompressorsCount(codec),
+        "leased decompressor not returned to the codec pool");
     // De-compress data byte-at-a-time
     originalData.reset(data.getData(), 0, data.getLength());
@@ -278,8 +281,8 @@ private static void codecTest(Configuration conf, int seed, int count,
     int expected;
     do {
       expected = originalIn.read();
-      assertEquals("Inflated stream read by byte does not match",
-          expected, inflateFilter.read());
+      assertEquals(expected, inflateFilter.read(),
+          "Inflated stream read by byte does not match");
     } while (expected != -1);
   }
@@ -334,7 +337,7 @@ private void testSplitableCodec(
         break;
       }
       final int seq2 = readLeadingInt(line);
-      assertEquals("Mismatched lines", seq1 + 1, seq2);
+      assertEquals(seq1 + 1, seq2, "Mismatched lines");
     }
   } finally {
     CodecPool.returnDecompressor(dcmp);
@@ -396,7 +399,7 @@ public void testCodecPoolGzipReuse() throws Exception {
     Compressor c2 = CodecPool.getCompressor(dfc);
     CodecPool.returnCompressor(c1);
     CodecPool.returnCompressor(c2);
-    assertTrue("Got mismatched ZlibCompressor", c2 != CodecPool.getCompressor(gzc));
+    assertTrue(c2 != CodecPool.getCompressor(gzc), "Got mismatched ZlibCompressor");
   }
   private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
@@ -411,7 +414,7 @@ private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
     ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
     Compressor c2 = CodecPool.getCompressor(codec, conf);
     // ensure same compressor placed earlier
-    assertTrue("Got mismatched ZlibCompressor", c1 == c2);
+    assertTrue(c1 == c2, "Got mismatched ZlibCompressor");
     ByteArrayOutputStream bos = new ByteArrayOutputStream();
     CompressionOutputStream cos = null;
     // write trivially compressable data
@@ -428,8 +431,8 @@ private static void gzipReinitTest(Configuration conf, CompressionCodec codec)
     }
     byte[] outbytes = bos.toByteArray();
     // verify data were not compressed
-    assertTrue("Compressed bytes contrary to configuration",
-        outbytes.length >= b.length);
+    assertTrue(outbytes.length >= b.length,
+        "Compressed bytes contrary to configuration");
   }
   private static void codecTestWithNOCompression (Configuration conf,
@@ -463,8 +466,8 @@ private static void codecTestWithNOCompression (Configuration conf,
     }
     byte[] outbytes = bos.toByteArray();
     // verify data were not compressed
-    assertTrue("Compressed bytes contrary to configuration(NO_COMPRESSION)",
-        outbytes.length >= b.length);
+    assertTrue(outbytes.length >= b.length,
+        "Compressed bytes contrary to configuration(NO_COMPRESSION)");
   }
   @Test
@@ -509,7 +512,8 @@ public void testSequenceFileDefaultCodec() throws IOException, ClassNotFoundExce
     sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.DefaultCodec", 1000000);
   }
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException,
       InstantiationException, IllegalAccessException {
     Configuration conf = new Configuration();
@@ -519,7 +523,8 @@ public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundExcept
     sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.BZip2Codec", 1000000);
   }
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testSequenceFileZStandardCodec() throws Exception {
     assumeTrue(ZStandardCodec.isNativeCodeLoaded());
     Configuration conf = new Configuration();
@@ -531,7 +536,8 @@ public void testSequenceFileZStandardCodec() throws Exception {
         "org.apache.hadoop.io.compress.ZStandardCodec", 1000000);
   }
-  @Test(timeout=20000)
+  @Test
+  @Timeout(value = 20)
   public void testSequenceFileBZip2NativeCodec() throws IOException,
       ClassNotFoundException, InstantiationException,
       IllegalAccessException {
@@ -667,7 +673,7 @@ private static void createMapFile(Configuration conf, FileSystem fs, Path path,
     writer.close();
   }
-  public static void main(String[] args) throws IOException {
+  /*public static void main(String[] args) throws IOException {
     int count = 10000;
     String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";
@@ -691,7 +697,7 @@ public static void main(String[] args) throws IOException {
     int seed = 0;
     // Note that exceptions will propagate out.
     codecTest(conf, seed, count, codecClass);
-  }
+  }*/
   @Test
   public void testGzipCompatibility() throws IOException {
@@ -954,9 +960,9 @@ public void testGzipCodecRead() throws IOException {
     ZlibFactory.setNativeZlibLoaded(false);
     // Ensure that the CodecPool has a BuiltInZlibInflater in it.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
-    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
-    assertTrue("ZlibFactory returned unexpected inflator",
-        zlibDecompressor instanceof BuiltInZlibInflater);
+    assertNotNull(zlibDecompressor, "zlibDecompressor is null!");
+    assertTrue(zlibDecompressor instanceof BuiltInZlibInflater,
+        "ZlibFactory returned unexpected inflator");
     CodecPool.returnDecompressor(zlibDecompressor);
     // Now create a GZip text file.
@@ -977,7 +983,7 @@ public void testGzipCodecRead() throws IOException {
     is = codec.createInputStream(is, decompressor);
     BufferedReader br = new BufferedReader(new InputStreamReader(is));
     String line = br.readLine();
-    assertEquals("Didn't get the same message back!", msg, line);
+    assertEquals(msg, line, "Didn't get the same message back!");
     br.close();
   }
@@ -986,7 +992,7 @@ private void verifyGzipFile(String filename, String msg) throws IOException {
         new GZIPInputStream(new FileInputStream(filename))));
     try {
       String line = r.readLine();
-      assertEquals("Got invalid line back from " + filename, msg, line);
+      assertEquals(msg, line, "Got invalid line back from " + filename);
     } finally {
       r.close();
       new File(filename).delete();
     }
@@ -1000,14 +1006,14 @@ public void testGzipLongOverflow() throws IOException {
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
     ZlibFactory.setNativeZlibLoaded(false);
-    assertFalse("ZlibFactory is using native libs against request",
-        ZlibFactory.isNativeZlibLoaded(conf));
+    assertFalse(ZlibFactory.isNativeZlibLoaded(conf),
+        "ZlibFactory is using native libs against request");
     // Ensure that the CodecPool has a BuiltInZlibInflater in it.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
-    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
-    assertTrue("ZlibFactory returned unexpected inflator",
-        zlibDecompressor instanceof BuiltInZlibInflater);
+    assertNotNull(zlibDecompressor, "zlibDecompressor is null!");
+    assertTrue(zlibDecompressor instanceof BuiltInZlibInflater,
+        "ZlibFactory returned unexpected inflator");
     CodecPool.returnDecompressor(zlibDecompressor);
     // Now create a GZip text file.
@@ -1034,9 +1040,9 @@ public void testGzipLongOverflow() throws IOException {
     BufferedReader br = new BufferedReader(new InputStreamReader(is));
     for (int j = 0; j < NBUF; j++) {
       int n = br.read(buf);
-      assertEquals("got wrong read length!", n, buf.length);
+      assertEquals(n, buf.length, "got wrong read length!");
       for (int i = 0; i < buf.length; i++)
-        assertEquals("got wrong byte!", buf[i], '\0');
+        assertEquals(buf[i], '\0', "got wrong byte!");
     }
     br.close();
   }
@@ -1050,24 +1056,24 @@ private void testGzipCodecWrite(boolean useNative) throws IOException {
     if (useNative) {
       assumeTrue(ZlibFactory.isNativeZlibLoaded(hadoopConf));
     } else {
-      assertFalse("ZlibFactory is using native libs against request",
-          ZlibFactory.isNativeZlibLoaded(hadoopConf));
+      assertFalse(ZlibFactory.isNativeZlibLoaded(hadoopConf),
+          "ZlibFactory is using native libs against request");
     }
     // Ensure that the CodecPool has a BuiltInZlibDeflater in it.
     Compressor zlibCompressor = ZlibFactory.getZlibCompressor(hadoopConf);
-    assertNotNull("zlibCompressor is null!", zlibCompressor);
-    assertTrue("ZlibFactory returned unexpected deflator",
-        useNative ? zlibCompressor instanceof ZlibCompressor
-            : zlibCompressor instanceof BuiltInZlibDeflater);
+    assertNotNull(zlibCompressor, "zlibCompressor is null!");
+    assertTrue(useNative ? zlibCompressor instanceof ZlibCompressor
+        : zlibCompressor instanceof BuiltInZlibDeflater,
+        "ZlibFactory returned unexpected deflator");
     CodecPool.returnCompressor(zlibCompressor);
     // Create a GZIP text file via the Compressor interface.
     CompressionCodecFactory ccf = new CompressionCodecFactory(hadoopConf);
     CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
-    assertTrue("Codec for .gz file is not GzipCodec",
-        codec instanceof GzipCodec);
+    assertTrue(codec instanceof GzipCodec,
+        "Codec for .gz file is not GzipCodec");
     final String fileName = new Path(GenericTestUtils.getTempPath(
         "testGzipCodecWrite.txt.gz")).toString();
@@ -1127,20 +1133,20 @@ public void testCodecPoolAndGzipCompressor() {
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
     ZlibFactory.setNativeZlibLoaded(false);
-    assertFalse("ZlibFactory is using native libs against request",
-        ZlibFactory.isNativeZlibLoaded(conf));
+    assertFalse(ZlibFactory.isNativeZlibLoaded(conf),
+        "ZlibFactory is using native libs against request");
     // This should give us a BuiltInZlibDeflater.
     Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
-    assertNotNull("zlibCompressor is null!", zlibCompressor);
-    assertTrue("ZlibFactory returned unexpected deflator",
-        zlibCompressor instanceof BuiltInZlibDeflater);
+    assertNotNull(zlibCompressor, "zlibCompressor is null!");
+    assertTrue(zlibCompressor instanceof BuiltInZlibDeflater,
+        "ZlibFactory returned unexpected deflator");
     // its createOutputStream() just wraps the existing stream in a
     // java.util.zip.GZIPOutputStream.
     CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
     CompressionCodec codec = ccf.getCodec(new Path("foo.gz"));
-    assertTrue("Codec for .gz file is not GzipCodec",
-        codec instanceof GzipCodec);
+    assertTrue(codec instanceof GzipCodec,
+        "Codec for .gz file is not GzipCodec");
     // make sure we don't get a null compressor
     Compressor codecCompressor = codec.createCompressor();
@@ -1177,20 +1183,20 @@ public void testCodecPoolAndGzipDecompressor() {
     // Don't use native libs for this test.
     Configuration conf = new Configuration();
     ZlibFactory.setNativeZlibLoaded(false);
-    assertFalse("ZlibFactory is using native libs against request",
-        ZlibFactory.isNativeZlibLoaded(conf));
+    assertFalse(ZlibFactory.isNativeZlibLoaded(conf),
+        "ZlibFactory is using native libs against request");
     // This should give us a BuiltInZlibInflater.
     Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
-    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
-    assertTrue("ZlibFactory returned unexpected inflator",
-        zlibDecompressor instanceof BuiltInZlibInflater);
+    assertNotNull(zlibDecompressor, "zlibDecompressor is null!");
+    assertTrue(zlibDecompressor instanceof BuiltInZlibInflater,
+        "ZlibFactory returned unexpected inflator");
     // its createOutputStream() just wraps the existing stream in a
     // java.util.zip.GZIPOutputStream.
CompressionCodecFactory ccf = new CompressionCodecFactory(conf); CompressionCodec codec = ccf.getCodec(new Path("foo.gz")); - assertTrue("Codec for .gz file is not GzipCodec", - codec instanceof GzipCodec); + assertTrue(codec instanceof GzipCodec, + "Codec for .gz file is not GzipCodec"); // make sure we don't get a null decompressor Decompressor codecDecompressor = codec.createDecompressor(); @@ -1219,7 +1225,8 @@ public void testCodecPoolAndGzipDecompressor() { } } - @Test(timeout=20000) + @Test + @Timeout(value = 20) public void testGzipCompressorWithEmptyInput() throws IOException { // don't use native libs ZlibFactory.setNativeZlibLoaded(false); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java index 7461ea36f59a3..b370e8a2f284f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecFactory.java @@ -27,10 +27,10 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.conf.Configuration; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; public class TestCodecFactory { @@ -139,13 +139,12 @@ private static CompressionCodecFactory setClasses(Class[] classes) { private static void checkCodec(String msg, Class expected, CompressionCodec actual) { if (expected == null) { - assertNull(msg, actual); + assertNull(actual, msg); } else if (actual == null) { fail(msg + " result was null"); } else { - assertEquals(msg + " unexpected codec found", - expected.getName(), - actual.getClass().getName()); + assertEquals(expected.getName(), + actual.getClass().getName(), msg + " unexpected codec found"); } } @@ -154,9 +153,9 @@ public void testFinding() { CompressionCodecFactory factory = new CompressionCodecFactory(new Configuration()); CompressionCodec codec = factory.getCodec(new Path("/tmp/foo.bar")); - assertEquals("default factory foo codec", null, codec); + assertEquals(null, codec, "default factory foo codec"); codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName()); - assertEquals("default factory foo codec", null, codec); + assertEquals(null, codec, "default factory foo codec"); codec = factory.getCodec(new Path("/tmp/foo.gz")); checkCodec("default factory for .gz", GzipCodec.class, codec); @@ -204,9 +203,9 @@ public void testFinding() { factory = setClasses(new Class[0]); // gz, bz2, snappy, lz4 are picked up by service loader, but bar isn't codec = factory.getCodec(new Path("/tmp/foo.bar")); - assertEquals("empty factory bar codec", null, codec); + assertEquals(null, codec, "empty factory bar codec"); codec = factory.getCodecByClassName(BarCodec.class.getCanonicalName()); - assertEquals("empty factory bar codec", null, codec); + assertEquals(null, codec, "empty factory bar codec"); codec = factory.getCodec(new Path("/tmp/foo.gz")); checkCodec("empty factory gz codec", GzipCodec.class, codec); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java index ac6aff7427e4a..fb21e4738159f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodecPool.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.io.compress; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -38,8 +38,9 @@ import org.apache.hadoop.io.compress.zlib.ZlibFactory; import org.apache.hadoop.test.LambdaTestUtils; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import java.util.HashSet; import java.util.Set; @@ -49,34 +50,40 @@ public class TestCodecPool { "Incorrect number of leased (de)compressors"; DefaultCodec codec; - @Before + @BeforeEach public void setup() { this.codec = new DefaultCodec(); this.codec.setConf(new Configuration()); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCompressorPoolCounts() { // Get two compressors and return them Compressor comp1 = CodecPool.getCompressor(codec); Compressor comp2 = CodecPool.getCompressor(codec); - assertEquals(LEASE_COUNT_ERR, 2, - CodecPool.getLeasedCompressorsCount(codec)); + assertEquals(2, + CodecPool.getLeasedCompressorsCount(codec), + LEASE_COUNT_ERR); CodecPool.returnCompressor(comp2); - assertEquals(LEASE_COUNT_ERR, 1, - CodecPool.getLeasedCompressorsCount(codec)); + assertEquals(1, + CodecPool.getLeasedCompressorsCount(codec), + LEASE_COUNT_ERR); CodecPool.returnCompressor(comp1); - assertEquals(LEASE_COUNT_ERR, 0, - CodecPool.getLeasedCompressorsCount(codec)); + assertEquals(0, + CodecPool.getLeasedCompressorsCount(codec), + LEASE_COUNT_ERR); CodecPool.returnCompressor(comp1); - assertEquals(LEASE_COUNT_ERR, 0, - CodecPool.getLeasedCompressorsCount(codec)); + assertEquals(0, + CodecPool.getLeasedCompressorsCount(codec), + LEASE_COUNT_ERR); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCompressorNotReturnSameInstance() { Compressor comp = CodecPool.getCompressor(codec); CodecPool.returnCompressor(comp); @@ -91,7 +98,8 @@ public void testCompressorNotReturnSameInstance() { } } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCompressorConf() throws Exception { DefaultCodec codec1 = new DefaultCodec(); Configuration conf = new Configuration(); @@ -121,28 +129,30 @@ public void testCompressorConf() throws Exception { CodecPool.returnCompressor(comp2); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDecompressorPoolCounts() { // Get two decompressors and return them Decompressor decomp1 = CodecPool.getDecompressor(codec); Decompressor decomp2 = CodecPool.getDecompressor(codec); - assertEquals(LEASE_COUNT_ERR, 2, - CodecPool.getLeasedDecompressorsCount(codec)); + assertEquals(2, + CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR); CodecPool.returnDecompressor(decomp2); - assertEquals(LEASE_COUNT_ERR, 1, - CodecPool.getLeasedDecompressorsCount(codec)); + assertEquals(1, + CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR); CodecPool.returnDecompressor(decomp1); - assertEquals(LEASE_COUNT_ERR, 0, - CodecPool.getLeasedDecompressorsCount(codec)); + 
assertEquals(0, + CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR); CodecPool.returnDecompressor(decomp1); - assertEquals(LEASE_COUNT_ERR, 0, - CodecPool.getLeasedCompressorsCount(codec)); + assertEquals(0, + CodecPool.getLeasedCompressorsCount(codec), LEASE_COUNT_ERR); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testMultiThreadedCompressorPool() throws InterruptedException { final int iterations = 4; ExecutorService threadpool = Executors.newFixedThreadPool(3); @@ -176,10 +186,12 @@ public Boolean call() throws Exception { threadpool.shutdown(); threadpool.awaitTermination(1000, TimeUnit.SECONDS); - assertEquals(LEASE_COUNT_ERR, 0, CodecPool.getLeasedCompressorsCount(codec)); + assertEquals(0, CodecPool.getLeasedCompressorsCount(codec), + LEASE_COUNT_ERR); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testMultiThreadedDecompressorPool() throws InterruptedException { final int iterations = 4; ExecutorService threadpool = Executors.newFixedThreadPool(3); @@ -213,11 +225,12 @@ public Boolean call() throws Exception { threadpool.shutdown(); threadpool.awaitTermination(1000, TimeUnit.SECONDS); - assertEquals(LEASE_COUNT_ERR, 0, - CodecPool.getLeasedDecompressorsCount(codec)); + assertEquals(0, + CodecPool.getLeasedDecompressorsCount(codec), LEASE_COUNT_ERR); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDecompressorNotReturnSameInstance() { Decompressor decomp = CodecPool.getDecompressor(codec); CodecPool.returnDecompressor(decomp); @@ -232,7 +245,8 @@ public void testDecompressorNotReturnSameInstance() { } } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDoNotPoolCompressorNotUseableAfterReturn() throws Exception { final GzipCodec gzipCodec = new GzipCodec(); @@ -252,7 +266,8 @@ public void testDoNotPoolCompressorNotUseableAfterReturn() throws Exception { () -> outputStream.write(1)); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDoNotPoolDecompressorNotUseableAfterReturn() throws Exception { final GzipCodec gzipCodec = new GzipCodec(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java index d56b4e1e6e652..f02a374d3550a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressionStreamReuse.java @@ -33,12 +33,12 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeTrue; public class TestCompressionStreamReuse { private static final Logger LOG = LoggerFactory @@ -165,9 +165,8 @@ private void resetStateTest(Configuration conf, int seed, int count, RandomDatum v2 = new RandomDatum(); k2.readFields(inflateIn); v2.readFields(inflateIn); - assertTrue( - "original and compressed-then-decompressed-output not equal", - k1.equals(k2) && v1.equals(v2)); + assertTrue(k1.equals(k2) && v1.equals(v2), + "original and 
compressed-then-decompressed-output not equal"); } LOG.info("SUCCESS! Completed checking " + count + " records"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorDecompressor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorDecompressor.java index 43cb4df1105b2..c4f73ef4743e5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorDecompressor.java @@ -26,7 +26,7 @@ import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater; import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorStream.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorStream.java index c3f10bf13caf6..9c9fcdf3e22da 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCompressorStream.java @@ -18,13 +18,14 @@ package org.apache.hadoop.io.compress; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TestCompressorStream extends CompressorStream{ +public class TestCompressorStream extends CompressorStream { private static FileOutputStream fop = null; private static File file = null; @@ -67,8 +68,8 @@ public void testClose() { catch(IOException e) { System.out.println("Expected IOException"); } - Assert.assertTrue("closed shoud be true", - ((CompressorStream)testCompressorStream).closed); + assertTrue( + ((CompressorStream)testCompressorStream).closed, "closed shoud be true"); //cleanup after test case file.delete(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestDecompressorStream.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestDecompressorStream.java index 1e9f59b7a51ee..a1d75bda25b26 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestDecompressorStream.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestDecompressorStream.java @@ -18,15 +18,15 @@ package org.apache.hadoop.io.compress; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayInputStream; import java.io.EOFException; import java.io.IOException; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestDecompressorStream { private static final String TEST_STRING = @@ -36,7 +36,7 @@ public class TestDecompressorStream { private Decompressor decompressor; private DecompressorStream decompressorStream; - @Before + 
@BeforeEach public void setUp() throws IOException { bytesIn = new ByteArrayInputStream(TEST_STRING.getBytes()); decompressor = new FakeDecompressor(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestGzipCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestGzipCodec.java index c8c1a4786e099..b0ee910ba2240 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestGzipCodec.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestGzipCodec.java @@ -32,10 +32,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; /** * Verify resettable compressor. @@ -49,7 +49,7 @@ public class TestGzipCodec { private static final String DATA2 = "It's baconnnn!!\n"; private GzipCodec codec = new GzipCodec(); - @Before + @BeforeEach public void setUp() { codec.setConf(new Configuration(false)); } @@ -68,7 +68,7 @@ public void testSingleCompress() throws IOException { byte[] buf = new byte[1024]; int len = cmpIn.read(buf); String result = new String(buf, 0, len, StandardCharsets.UTF_8); - assertEquals("Input must match output", DATA1, result); + assertEquals(DATA1, result, "Input must match output"); } // Test multi-member gzip file created via finish(), resetState(). @@ -97,7 +97,7 @@ public void testResetCompress() throws IOException { } result.append(new String(buf, 0, len, StandardCharsets.UTF_8)); } - assertEquals("Output must match input", DATA1 + DATA2, result.toString()); + assertEquals(DATA1 + DATA2, result.toString(), "Output must match input"); } // ensure all necessary methods are overwritten @@ -164,6 +164,6 @@ public void testIdempotentResetState() throws IOException { } result.append(new String(buf, 0, len, StandardCharsets.UTF_8)); } - assertEquals("Output must match input", DATA1, result.toString()); + assertEquals(DATA1, result.toString(), "Output must match input"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBZip2TextFileWriter.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBZip2TextFileWriter.java index 7d92e07f01b6a..401df759e5bf9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBZip2TextFileWriter.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBZip2TextFileWriter.java @@ -22,12 +22,12 @@ import java.io.IOException; import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public final class TestBZip2TextFileWriter { @@ -36,13 +36,13 @@ public final class TestBZip2TextFileWriter { private ByteArrayOutputStream rawOut; private BZip2TextFileWriter writer; - @Before + @BeforeEach public void setUp() throws 
Exception { rawOut = new ByteArrayOutputStream(); writer = new BZip2TextFileWriter(rawOut); } - @After + @AfterEach public void tearDown() throws Exception { rawOut = null; writer.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBzip2CompressorDecompressor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBzip2CompressorDecompressor.java index fae5ce6de40a4..348b7e77641c2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBzip2CompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/bzip2/TestBzip2CompressorDecompressor.java @@ -21,20 +21,23 @@ import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor; import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor; import org.apache.hadoop.test.MultithreadedTestUtil; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.*; import java.util.Random; -import static org.junit.Assert.*; -import static org.junit.Assume.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; public class TestBzip2CompressorDecompressor { private static final Random rnd = new Random(12345l); - @Before + @BeforeEach public void before() { assumeTrue(Bzip2Factory.isNativeBzip2Loaded(new Configuration())); } @@ -49,25 +52,24 @@ public void testCompressDecompress() { try { Bzip2Compressor compressor = new Bzip2Compressor(); Bzip2Decompressor decompressor = new Bzip2Decompressor(); - assertFalse("testBzip2CompressDecompress finished error", - compressor.finished()); + assertFalse(compressor.finished(), + "testBzip2CompressDecompress finished error"); compressor.setInput(rawData, 0, rawData.length); - assertTrue("testBzip2CompressDecompress getBytesRead before error", - compressor.getBytesRead() == 0); + assertTrue(compressor.getBytesRead() == 0, + "testBzip2CompressDecompress getBytesRead before error"); compressor.finish(); byte[] compressedResult = new byte[rawDataSize]; int cSize = compressor.compress(compressedResult, 0, rawDataSize); - assertTrue("testBzip2CompressDecompress getBytesRead after error", - compressor.getBytesRead() == rawDataSize); - assertTrue( - "testBzip2CompressDecompress compressed size no less than original size", - cSize < rawDataSize); + assertTrue(compressor.getBytesRead() == rawDataSize, + "testBzip2CompressDecompress getBytesRead after error"); + assertTrue(cSize < rawDataSize, + "testBzip2CompressDecompress compressed size no less than original size"); decompressor.setInput(compressedResult, 0, cSize); byte[] decompressedBytes = new byte[rawDataSize]; decompressor.decompress(decompressedBytes, 0, decompressedBytes.length); - assertArrayEquals("testBzip2CompressDecompress arrays not equals ", - rawData, decompressedBytes); + assertArrayEquals(rawData, decompressedBytes, + "testBzip2CompressDecompress arrays not equals "); compressor.reset(); decompressor.reset(); } catch (IOException ex) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/lz4/TestLz4CompressorDecompressor.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/lz4/TestLz4CompressorDecompressor.java index 8be5ec3d3f78f..5040271ebca2b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/lz4/TestLz4CompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/lz4/TestLz4CompressorDecompressor.java @@ -17,7 +17,11 @@ */ package org.apache.hadoop.io.compress.lz4; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; @@ -41,8 +45,7 @@ import org.apache.hadoop.io.compress.lz4.Lz4Compressor; import org.apache.hadoop.io.compress.lz4.Lz4Decompressor; import org.apache.hadoop.test.MultithreadedTestUtil; -import org.junit.Test; -import static org.junit.Assume.*; +import org.junit.jupiter.api.Test; public class TestLz4CompressorDecompressor { @@ -175,13 +178,12 @@ public void testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize() { try { Lz4Compressor compressor = new Lz4Compressor(); byte[] bytes = generate(BYTES_SIZE); - assertTrue("needsInput error !!!", compressor.needsInput()); + assertTrue(compressor.needsInput(), "needsInput error !!!"); compressor.setInput(bytes, 0, bytes.length); byte[] emptyBytes = new byte[BYTES_SIZE]; int csize = compressor.compress(emptyBytes, 0, bytes.length); - assertTrue( - "testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize error !!!", - csize != 0); + assertTrue(csize != 0, + "testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize error !!!"); } catch (Exception ex) { fail("testSetInputWithBytesSizeMoreThenDefaultLz4CompressorByfferSize ex error !!!"); } @@ -195,28 +197,27 @@ public void testCompressDecompress() { Lz4Compressor compressor = new Lz4Compressor(); try { compressor.setInput(bytes, 0, bytes.length); - assertTrue("Lz4CompressDecompress getBytesRead error !!!", - compressor.getBytesRead() > 0); - assertTrue( - "Lz4CompressDecompress getBytesWritten before compress error !!!", - compressor.getBytesWritten() == 0); + assertTrue(compressor.getBytesRead() > 0, + "Lz4CompressDecompress getBytesRead error !!!"); + assertTrue(compressor.getBytesWritten() == 0, + "Lz4CompressDecompress getBytesWritten before compress error !!!"); byte[] compressed = new byte[BYTE_SIZE]; int cSize = compressor.compress(compressed, 0, compressed.length); - assertTrue( - "Lz4CompressDecompress getBytesWritten after compress error !!!", - compressor.getBytesWritten() > 0); + assertTrue(compressor.getBytesWritten() > 0, + "Lz4CompressDecompress getBytesWritten after compress error !!!"); Lz4Decompressor decompressor = new Lz4Decompressor(); // set as input for decompressor only compressed data indicated with cSize decompressor.setInput(compressed, 0, cSize); byte[] decompressed = new byte[BYTE_SIZE]; decompressor.decompress(decompressed, 0, decompressed.length); - assertTrue("testLz4CompressDecompress finished error !!!", decompressor.finished()); + assertTrue(decompressor.finished(), "testLz4CompressDecompress finished error !!!"); assertArrayEquals(bytes, decompressed); compressor.reset(); decompressor.reset(); - assertTrue("decompressor getRemaining error !!!",decompressor.getRemaining() == 0); + 
assertTrue(decompressor.getRemaining() == 0, + "decompressor getRemaining error !!!"); } catch (Exception e) { fail("testLz4CompressDecompress ex error!!!"); } @@ -238,14 +239,14 @@ public void testCompressorDecompressorEmptyStreamLogic() { blockCompressorStream.close(); // check compressed output buf = bytesOut.toByteArray(); - assertEquals("empty stream compressed output size != 4", 4, buf.length); + assertEquals(4, buf.length, "empty stream compressed output size != 4"); // use compressed output as input for decompression bytesIn = new ByteArrayInputStream(buf); // create decompression stream blockDecompressorStream = new BlockDecompressorStream(bytesIn, new Lz4Decompressor(), 1024); // no byte is available because stream was closed - assertEquals("return value is not -1", -1, blockDecompressorStream.read()); + assertEquals(-1, blockDecompressorStream.read(), "return value is not -1"); } catch (Exception e) { fail("testCompressorDecompressorEmptyStreamLogic ex error !!!" + e.getMessage()); @@ -291,8 +292,8 @@ compressedDataBuffer, new Lz4Compressor(bufferSize), bufferSize, byte[] result = new byte[BYTE_SIZE]; inflateIn.read(result); - assertArrayEquals("original array not equals compress/decompressed array", result, - bytes); + assertArrayEquals(result, + bytes, "original array not equals compress/decompressed array"); } catch (IOException e) { fail("testLz4CompressorDecopressorLogicWithCompressionStreams ex error !!!"); } finally { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/snappy/TestSnappyCompressorDecompressor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/snappy/TestSnappyCompressorDecompressor.java index 93c24835f2206..d737b48bbbe1e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/snappy/TestSnappyCompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/snappy/TestSnappyCompressorDecompressor.java @@ -18,9 +18,10 @@ package org.apache.hadoop.io.compress.snappy; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; @@ -42,9 +43,8 @@ import org.apache.hadoop.io.compress.CompressionOutputStream; import org.apache.hadoop.io.compress.snappy.SnappyDecompressor.SnappyDirectDecompressor; import org.apache.hadoop.test.MultithreadedTestUtil; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,7 +53,7 @@ public class TestSnappyCompressorDecompressor { public static final Logger LOG = LoggerFactory.getLogger(TestSnappyCompressorDecompressor.class); - @Before + @BeforeEach public void before() { } @@ -175,11 +175,10 @@ public void testSnappyCompressDecompress() throws Exception { byte[] bytes = BytesGenerator.get(BYTE_SIZE); SnappyCompressor compressor = new SnappyCompressor(); compressor.setInput(bytes, 0, bytes.length); - assertTrue("SnappyCompressDecompress getBytesRead error !!!", - 
compressor.getBytesRead() > 0); - assertEquals( - "SnappyCompressDecompress getBytesWritten before compress error !!!", - 0, compressor.getBytesWritten()); + assertTrue(compressor.getBytesRead() > 0, + "SnappyCompressDecompress getBytesRead error !!!"); + assertEquals(0, compressor.getBytesWritten(), + "SnappyCompressDecompress getBytesWritten before compress error !!!"); // snappy compression may increase data size. // This calculation comes from "Snappy::MaxCompressedLength(size_t)" @@ -188,9 +187,8 @@ public void testSnappyCompressDecompress() throws Exception { int cSize = compressor.compress(compressed, 0, compressed.length); LOG.info("input size: {}", BYTE_SIZE); LOG.info("compressed size: {}", cSize); - assertTrue( - "SnappyCompressDecompress getBytesWritten after compress error !!!", - compressor.getBytesWritten() > 0); + assertTrue(compressor.getBytesWritten() > 0, + "SnappyCompressDecompress getBytesWritten after compress error !!!"); SnappyDecompressor decompressor = new SnappyDecompressor(); // set as input for decompressor only compressed data indicated with cSize @@ -198,13 +196,13 @@ public void testSnappyCompressDecompress() throws Exception { byte[] decompressed = new byte[BYTE_SIZE]; decompressor.decompress(decompressed, 0, decompressed.length); - assertTrue("testSnappyCompressDecompress finished error !!!", - decompressor.finished()); - Assert.assertArrayEquals(bytes, decompressed); + assertTrue(decompressor.finished(), + "testSnappyCompressDecompress finished error !!!"); + assertArrayEquals(bytes, decompressed); compressor.reset(); decompressor.reset(); - assertEquals("decompressor getRemaining error !!!", - 0, decompressor.getRemaining()); + assertEquals(0, decompressor.getRemaining(), + "decompressor getRemaining error !!!"); } @Test @@ -223,7 +221,7 @@ public void testCompressorDecompressorEmptyStreamLogic() { // check compressed output buf = bytesOut.toByteArray(); - assertEquals("empty stream compressed output size != 4", 4, buf.length); + assertEquals(4, buf.length, "empty stream compressed output size != 4"); // use compressed output as input for decompression bytesIn = new ByteArrayInputStream(buf); @@ -233,7 +231,7 @@ public void testCompressorDecompressorEmptyStreamLogic() { new SnappyDecompressor(), 1024); // no byte is available because stream was closed - assertEquals("return value is not -1", -1, blockDecompressorStream.read()); + assertEquals(-1, blockDecompressorStream.read(), "return value is not -1"); } catch (Exception e) { fail("testCompressorDecompressorEmptyStreamLogic ex error !!!" 
+ e.getMessage()); @@ -276,8 +274,8 @@ public void testSnappyBlockCompression() { len -= bufLen; } while (len > 0); } - assertTrue("testSnappyBlockCompression error !!!", - out.toByteArray().length > 0); + assertTrue(out.toByteArray().length > 0, + "testSnappyBlockCompression error !!!"); } catch (Exception ex) { fail("testSnappyBlockCompression ex error !!!"); } @@ -397,9 +395,8 @@ deCompressedDataBuffer, new SnappyDecompressor(bufferSize), byte[] result = new byte[BYTE_SIZE]; inflateIn.read(result); - Assert.assertArrayEquals( - "original array not equals compress/decompressed array", result, - bytes); + assertArrayEquals(result, bytes, + "original array not equals compress/decompressed array"); } catch (IOException e) { fail("testSnappyCompressorDecopressorLogicWithCompressionStreams ex error !!!"); } finally { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java index 25da4fe2375ed..e72ea661e9214 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zlib/TestZlibCompressorDecompressor.java @@ -17,8 +17,12 @@ */ package org.apache.hadoop.io.compress.zlib; -import static org.junit.Assert.*; -import static org.junit.Assume.*; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -39,15 +43,15 @@ import org.apache.hadoop.io.compress.zlib.ZlibDecompressor.ZlibDirectDecompressor; import org.apache.hadoop.test.MultithreadedTestUtil; import org.apache.hadoop.util.NativeCodeLoader; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableSet; public class TestZlibCompressorDecompressor { private static final Random random = new Random(12345L); - @Before + @BeforeEach public void before() { assumeTrue(ZlibFactory.isNativeZlibLoaded(new Configuration())); } @@ -115,8 +119,8 @@ public void testZlibCompressorDecompressorWithConfiguration() { fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex); } } else { - assertTrue("ZlibFactory is using native libs against request", - ZlibFactory.isNativeZlibLoaded(conf)); + assertTrue(ZlibFactory.isNativeZlibLoaded(conf), + "ZlibFactory is using native libs against request"); } } @@ -140,8 +144,8 @@ public void testZlibCompressorDecompressorWithCompressionLevels() { fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex); } } else { - assertTrue("ZlibFactory is using native libs against request", - ZlibFactory.isNativeZlibLoaded(conf)); + assertTrue(ZlibFactory.isNativeZlibLoaded(conf), + "ZlibFactory is using native libs against request"); } } @@ -154,33 +158,31 @@ public void testZlibCompressDecompress() { try { ZlibCompressor compressor = new ZlibCompressor(); ZlibDecompressor decompressor = new ZlibDecompressor(); - 
assertFalse("testZlibCompressDecompress finished error", - compressor.finished()); + assertFalse(compressor.finished(), + "testZlibCompressDecompress finished error"); compressor.setInput(rawData, 0, rawData.length); - assertTrue("testZlibCompressDecompress getBytesRead before error", - compressor.getBytesRead() == 0); + assertTrue(compressor.getBytesRead() == 0, + "testZlibCompressDecompress getBytesRead before error"); compressor.finish(); byte[] compressedResult = new byte[rawDataSize]; int cSize = compressor.compress(compressedResult, 0, rawDataSize); - assertTrue("testZlibCompressDecompress getBytesRead ather error", - compressor.getBytesRead() == rawDataSize); - assertTrue( - "testZlibCompressDecompress compressed size no less then original size", - cSize < rawDataSize); + assertTrue(compressor.getBytesRead() == rawDataSize, + "testZlibCompressDecompress getBytesRead ather error"); + assertTrue(cSize < rawDataSize, + "testZlibCompressDecompress compressed size no less then original size"); decompressor.setInput(compressedResult, 0, cSize); byte[] decompressedBytes = new byte[rawDataSize]; decompressor.decompress(decompressedBytes, 0, decompressedBytes.length); - assertArrayEquals("testZlibCompressDecompress arrays not equals ", - rawData, decompressedBytes); + assertArrayEquals(rawData, decompressedBytes, + "testZlibCompressDecompress arrays not equals "); compressor.reset(); decompressor.reset(); } catch (IOException ex) { fail("testZlibCompressDecompress ex !!!" + ex); } } - - + private void compressDecompressLoop(int rawDataSize) throws IOException { byte[] rawData = null; rawData = generate(rawDataSize); @@ -247,8 +249,8 @@ public void testZlibCompressorDecompressorSetDictionary() { checkSetDictionaryArrayIndexOutOfBoundsException(zlibDecompressor); checkSetDictionaryArrayIndexOutOfBoundsException(zlibCompressor); } else { - assertTrue("ZlibFactory is using native libs against request", - ZlibFactory.isNativeZlibLoaded(conf)); + assertTrue(ZlibFactory.isNativeZlibLoaded(conf), + "ZlibFactory is using native libs against request"); } } @@ -256,22 +258,22 @@ public void testZlibCompressorDecompressorSetDictionary() { public void testZlibFactory() { Configuration cfg = new Configuration(); - assertTrue("testZlibFactory compression level error !!!", - CompressionLevel.DEFAULT_COMPRESSION == ZlibFactory - .getCompressionLevel(cfg)); + assertTrue(CompressionLevel.DEFAULT_COMPRESSION == ZlibFactory + .getCompressionLevel(cfg), + "testZlibFactory compression level error !!!"); - assertTrue("testZlibFactory compression strategy error !!!", - CompressionStrategy.DEFAULT_STRATEGY == ZlibFactory - .getCompressionStrategy(cfg)); + assertTrue(CompressionStrategy.DEFAULT_STRATEGY == ZlibFactory + .getCompressionStrategy(cfg), + "testZlibFactory compression strategy error !!!"); ZlibFactory.setCompressionLevel(cfg, CompressionLevel.BEST_COMPRESSION); - assertTrue("testZlibFactory compression strategy error !!!", - CompressionLevel.BEST_COMPRESSION == ZlibFactory - .getCompressionLevel(cfg)); + assertTrue(CompressionLevel.BEST_COMPRESSION == ZlibFactory + .getCompressionLevel(cfg), + "testZlibFactory compression strategy error !!!"); ZlibFactory.setCompressionStrategy(cfg, CompressionStrategy.FILTERED); - assertTrue("testZlibFactory compression strategy error !!!", - CompressionStrategy.FILTERED == ZlibFactory.getCompressionStrategy(cfg)); + assertTrue(CompressionStrategy.FILTERED == ZlibFactory.getCompressionStrategy(cfg), + "testZlibFactory compression strategy error !!!"); } @@ -344,9 +346,8 
@@ private byte[] compressDecompressZlib(byte[] rawData, assertTrue(zlibDecompressor.getBytesRead() == cSize); zlibDecompressor.reset(); assertTrue(zlibDecompressor.getRemaining() == 0); - assertArrayEquals( - "testZlibCompressorDecompressorWithConfiguration array equals error", - rawData, decompressedRawData); + assertArrayEquals(rawData, decompressedRawData, + "testZlibCompressorDecompressorWithConfiguration array equals error"); return decompressedRawData; } @@ -370,10 +371,10 @@ public void testBuiltInGzipDecompressorExceptions() { fail("testBuiltInGzipDecompressorExceptions aioob error" + ex); } - assertTrue("decompresser.getBytesRead error", - decompresser.getBytesRead() == 0); - assertTrue("decompresser.getRemaining error", - decompresser.getRemaining() == 0); + assertTrue(decompresser.getBytesRead() == 0, + "decompresser.getBytesRead error"); + assertTrue(decompresser.getRemaining() == 0, + "decompresser.getRemaining error"); decompresser.reset(); decompresser.end(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java index d4c0718220a20..389efdac78005 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/zstd/TestZStandardCompressorDecompressor.java @@ -28,9 +28,9 @@ import org.apache.hadoop.io.compress.DecompressorStream; import org.apache.hadoop.io.compress.ZStandardCodec; import org.apache.hadoop.test.MultithreadedTestUtil; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; @@ -46,11 +46,12 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT; import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assumptions.assumeTrue; public class TestZStandardCompressorDecompressor { private final static char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray(); @@ -59,7 +60,7 @@ public class TestZStandardCompressorDecompressor { private static File compressedFile; private static File uncompressedFile; - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { CONFIGURATION.setInt(IO_FILE_BUFFER_SIZE_KEY, 1024 * 64); uncompressedFile = new File(TestZStandardCompressorDecompressor.class @@ -68,7 +69,7 @@ public static void beforeClass() throws Exception { .getResource("/zstd/test_file.txt.zst").toURI()); } - @Before + @BeforeEach public void before() throws Exception { assumeTrue(ZStandardCodec.isNativeCodeLoaded()); } @@ -112,71 
+113,87 @@ public void testCompressionCompressesCorrectly() throws Exception { assertArrayEquals(bytes, byteArrayOutputStream.toByteArray()); } - @Test(expected = NullPointerException.class) + @Test public void testCompressorSetInputNullPointerException() { - ZStandardCompressor compressor = new ZStandardCompressor(); - compressor.setInput(null, 0, 10); + assertThrows(NullPointerException.class, () -> { + ZStandardCompressor compressor = new ZStandardCompressor(); + compressor.setInput(null, 0, 10); + }); } //test on NullPointerException in {@code decompressor.setInput()} - @Test(expected = NullPointerException.class) + @Test public void testDecompressorSetInputNullPointerException() { - ZStandardDecompressor decompressor = - new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); - decompressor.setInput(null, 0, 10); + assertThrows(NullPointerException.class, () -> { + ZStandardDecompressor decompressor = + new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); + decompressor.setInput(null, 0, 10); + }); } //test on ArrayIndexOutOfBoundsException in {@code compressor.setInput()} - @Test(expected = ArrayIndexOutOfBoundsException.class) + @Test public void testCompressorSetInputAIOBException() { - ZStandardCompressor compressor = new ZStandardCompressor(); - compressor.setInput(new byte[] {}, -5, 10); + assertThrows(ArrayIndexOutOfBoundsException.class, () -> { + ZStandardCompressor compressor = new ZStandardCompressor(); + compressor.setInput(new byte[] {}, -5, 10); + }); } //test on ArrayIndexOutOfBoundsException in {@code decompressor.setInput()} - @Test(expected = ArrayIndexOutOfBoundsException.class) + @Test public void testDecompressorSetInputAIOUBException() { - ZStandardDecompressor decompressor = - new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); - decompressor.setInput(new byte[] {}, -5, 10); + assertThrows(ArrayIndexOutOfBoundsException.class, () -> { + ZStandardDecompressor decompressor = + new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); + decompressor.setInput(new byte[] {}, -5, 10); + }); } //test on NullPointerException in {@code compressor.compress()} - @Test(expected = NullPointerException.class) + @Test public void testCompressorCompressNullPointerException() throws Exception { - ZStandardCompressor compressor = new ZStandardCompressor(); - byte[] bytes = generate(1024 * 6); - compressor.setInput(bytes, 0, bytes.length); - compressor.compress(null, 0, 0); + assertThrows(NullPointerException.class, () -> { + ZStandardCompressor compressor = new ZStandardCompressor(); + byte[] bytes = generate(1024 * 6); + compressor.setInput(bytes, 0, bytes.length); + compressor.compress(null, 0, 0); + }); } //test on NullPointerException in {@code decompressor.decompress()} - @Test(expected = NullPointerException.class) + @Test public void testDecompressorCompressNullPointerException() throws Exception { - ZStandardDecompressor decompressor = - new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); - byte[] bytes = generate(1024 * 6); - decompressor.setInput(bytes, 0, bytes.length); - decompressor.decompress(null, 0, 0); + assertThrows(NullPointerException.class, () -> { + ZStandardDecompressor decompressor = + new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); + byte[] bytes = generate(1024 * 6); + decompressor.setInput(bytes, 0, bytes.length); + decompressor.decompress(null, 0, 0); + }); } //test on ArrayIndexOutOfBoundsException in {@code compressor.compress()} - @Test(expected = ArrayIndexOutOfBoundsException.class) + @Test public void 
testCompressorCompressAIOBException() throws Exception { - ZStandardCompressor compressor = new ZStandardCompressor(); - byte[] bytes = generate(1024 * 6); - compressor.setInput(bytes, 0, bytes.length); - compressor.compress(new byte[] {}, 0, -1); + assertThrows(ArrayIndexOutOfBoundsException.class, () -> { + ZStandardCompressor compressor = new ZStandardCompressor(); + byte[] bytes = generate(1024 * 6); + compressor.setInput(bytes, 0, bytes.length); + compressor.compress(new byte[] {}, 0, -1); + }); } //test on ArrayIndexOutOfBoundsException in decompressor.decompress() - @Test(expected = ArrayIndexOutOfBoundsException.class) + @Test public void testDecompressorCompressAIOBException() throws Exception { - ZStandardDecompressor decompressor = - new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); - byte[] bytes = generate(1024 * 6); - decompressor.setInput(bytes, 0, bytes.length); - decompressor.decompress(new byte[] {}, 0, -1); + assertThrows(ArrayIndexOutOfBoundsException.class, () -> { + ZStandardDecompressor decompressor = + new ZStandardDecompressor(IO_FILE_BUFFER_SIZE_DEFAULT); + byte[] bytes = generate(1024 * 6); + decompressor.setInput(bytes, 0, bytes.length); + decompressor.decompress(new byte[] {}, 0, -1); + }); } // test ZStandardCompressor compressor.compress() @@ -186,7 +203,7 @@ public void testSetInputWithBytesSizeMoreThenDefaultZStandardBufferSize() int bytesSize = 1024 * 2056 + 1; ZStandardCompressor compressor = new ZStandardCompressor(); byte[] bytes = generate(bytesSize); - assertTrue("needsInput error !!!", compressor.needsInput()); + assertTrue(compressor.needsInput(), "needsInput error !!!"); compressor.setInput(bytes, 0, bytes.length); byte[] emptyBytes = new byte[bytesSize]; int cSize = compressor.compress(emptyBytes, 0, bytes.length); @@ -224,8 +241,8 @@ public void testCompressorDecompressorLogicWithCompressionStreams() byte[] result = new byte[byteSize]; inflateIn.read(result); - assertArrayEquals("original array not equals compress/decompressed array", - result, bytes); + assertArrayEquals(result, bytes, + "original array not equals compress/decompressed array"); } finally { IOUtils.closeStream(inflateIn); } @@ -281,9 +298,8 @@ public void testCompressorDecompressorWithFinish() throws Exception { byte[] result = new byte[byteSize]; inflateIn.read(result); - assertArrayEquals( - "original array not equals compress/decompressed array", bytes, - result); + assertArrayEquals(bytes, result, + "original array not equals compress/decompressed array"); } finally { IOUtils.closeStream(deflateOut); IOUtils.closeStream(inflateIn); @@ -383,16 +399,16 @@ public void testZStandardCompressDecompress() throws Exception { ZStandardCompressor compressor = new ZStandardCompressor(); ZStandardDecompressor decompressor = new ZStandardDecompressor(rawDataSize); assertTrue(compressor.needsInput()); - assertFalse("testZStandardCompressDecompress finished error", - compressor.finished()); + assertFalse(compressor.finished(), + "testZStandardCompressDecompress finished error"); compressor.setInput(rawData, 0, rawData.length); compressor.finish(); byte[] compressedResult = new byte[rawDataSize]; int cSize = compressor.compress(compressedResult, 0, rawDataSize); assertEquals(rawDataSize, compressor.getBytesRead()); - assertTrue("compressed size no less then original size", - cSize < rawDataSize); + assertTrue(cSize < rawDataSize, + "compressed size not less than original size"); decompressor.setInput(compressedResult, 0, cSize); byte[] decompressedBytes = new byte[rawDataSize];
decompressor.decompress(decompressedBytes, 0, decompressedBytes.length); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java index 3e7541b8dadea..1c4eb74d0bb82 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRawCoderMapping.java @@ -34,12 +34,12 @@ import org.apache.hadoop.io.erasurecode.rawcoder.NativeXORRawDecoder; import org.apache.hadoop.io.erasurecode.rawcoder.XORRawErasureCoderFactory; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; /** * Test the codec to raw coder mapping. @@ -49,7 +49,7 @@ public class TestCodecRawCoderMapping { private static final int numDataUnit = 6; private static final int numParityUnit = 3; - @Before + @BeforeEach public void setup() { conf = new Configuration(); } @@ -64,20 +64,20 @@ public void testRSDefaultRawCoder() { RawErasureDecoder decoder = CodecUtil.createRawDecoder( conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions); if (ErasureCodeNative.isNativeCodeLoaded()) { - Assert.assertTrue(encoder instanceof NativeRSRawEncoder); - Assert.assertTrue(decoder instanceof NativeRSRawDecoder); + assertTrue(encoder instanceof NativeRSRawEncoder); + assertTrue(decoder instanceof NativeRSRawDecoder); } else { - Assert.assertTrue(encoder instanceof RSRawEncoder); - Assert.assertTrue(decoder instanceof RSRawDecoder); + assertTrue(encoder instanceof RSRawEncoder); + assertTrue(decoder instanceof RSRawDecoder); } // should return default raw coder of rs-legacy codec encoder = CodecUtil.createRawEncoder(conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions); - Assert.assertTrue(encoder instanceof RSLegacyRawEncoder); + assertTrue(encoder instanceof RSLegacyRawEncoder); decoder = CodecUtil.createRawDecoder(conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions); - Assert.assertTrue(decoder instanceof RSLegacyRawDecoder); + assertTrue(decoder instanceof RSLegacyRawDecoder); } @Test @@ -92,7 +92,7 @@ public void testDedicatedRawCoderKey() { try { CodecUtil.createRawEncoder(conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions); - Assert.fail(); + fail(); } catch (Exception e) { GenericTestUtils.assertExceptionContains( "Fail to create raw erasure encoder with given codec: rs", e); @@ -104,7 +104,7 @@ public void testDedicatedRawCoderKey() { try { CodecUtil.createRawEncoder(conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions); - Assert.fail(); + fail(); } catch (Exception e) { GenericTestUtils.assertExceptionContains( "Fail to create raw erasure encoder with given codec: rs", e); @@ -121,10 +121,10 @@ public void testFallbackCoders() { // should return default raw coder of rs codec RawErasureEncoder encoder = CodecUtil.createRawEncoder( conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions); - Assert.assertTrue(encoder instanceof RSRawEncoder); + assertTrue(encoder 
instanceof RSRawEncoder); RawErasureDecoder decoder = CodecUtil.createRawDecoder( conf, ErasureCodeConstants.RS_CODEC_NAME, coderOptions); - Assert.assertTrue(decoder instanceof RSRawDecoder); + assertTrue(decoder instanceof RSRawDecoder); } @Test @@ -134,10 +134,10 @@ public void testLegacyCodecFallback() { // should return default raw coder of rs-legacy codec RawErasureEncoder encoder = CodecUtil.createRawEncoder( conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions); - Assert.assertTrue(encoder instanceof RSLegacyRawEncoder); + assertTrue(encoder instanceof RSLegacyRawEncoder); RawErasureDecoder decoder = CodecUtil.createRawDecoder( conf, ErasureCodeConstants.RS_LEGACY_CODEC_NAME, coderOptions); - Assert.assertTrue(decoder instanceof RSLegacyRawDecoder); + assertTrue(decoder instanceof RSLegacyRawDecoder); } @Test @@ -149,10 +149,10 @@ public void testIgnoreInvalidCodec() { // should return second coder specified by IO_ERASURECODE_CODEC_CODERS RawErasureEncoder encoder = CodecUtil.createRawEncoder( conf, ErasureCodeConstants.XOR_CODEC_NAME, coderOptions); - Assert.assertTrue(encoder instanceof XORRawEncoder); + assertTrue(encoder instanceof XORRawEncoder); RawErasureDecoder decoder = CodecUtil.createRawDecoder( conf, ErasureCodeConstants.XOR_CODEC_NAME, coderOptions); - Assert.assertTrue(decoder instanceof XORRawDecoder); + assertTrue(decoder instanceof XORRawDecoder); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRegistry.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRegistry.java index 5f17024d210b0..da40f67c8e592 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRegistry.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCodecRegistry.java @@ -25,15 +25,15 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder; import org.apache.hadoop.io.erasurecode.rawcoder.XORRawErasureCoderFactory; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; import java.util.Set; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test CodecRegistry. 
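Every assertion rewrite in the hunks above follows one mechanical rule: JUnit 4's org.junit.Assert takes the failure message as the first argument, while JUnit 5's org.junit.jupiter.api.Assertions takes it as the last. A minimal sketch of the rule on a hypothetical WidgetTest (not a class in this patch):

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertTrue;

    import org.junit.jupiter.api.Test;

    public class WidgetTest {
      @Test
      public void testMessageMovesLast() {
        int expected = 4;
        int actual = 2 + 2;
        // JUnit 4: assertEquals("size mismatch", expected, actual);
        // JUnit 5: same check, message shifted to the trailing parameter.
        assertEquals(expected, actual, "size mismatch");
        // Same rule for assertTrue: condition first, message last.
        assertTrue(actual > 0, "actual should be positive");
      }
    }

The reordering cannot be done by blind search-and-replace: when all three arguments are Strings, the old order still compiles under JUnit 5 but silently treats the message as the expected value, which is why hunks like these get reviewed one by one.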
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java index 811148464b7cb..d5bf6c081aea3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestCoderBase.java @@ -26,7 +26,7 @@ import java.util.Arrays; import java.util.Random; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test base of common utilities for tests not only raw coders but also block @@ -159,7 +159,7 @@ protected void compareAndVerify(ECChunk[] erasedChunks, byte[][] recovered = toArrays(recoveredChunks); boolean result = Arrays.deepEquals(erased, recovered); if (!result) { - assertTrue("Decoding and comparing failed.", result); + assertTrue(result, "Decoding and comparing failed."); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestECSchema.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestECSchema.java index 2a3c590ae2339..327d3d56d4c39 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestECSchema.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestECSchema.java @@ -17,23 +17,19 @@ */ package org.apache.hadoop.io.erasurecode; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import java.util.HashMap; import java.util.Map; import java.util.TreeMap; -import java.util.concurrent.TimeUnit; +@Timeout(300) public class TestECSchema { - @Rule - public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS); - @Test public void testGoodSchema() { int numDataUnits = 6; @@ -60,7 +56,7 @@ public void testGoodSchema() { extraMap.put(extraOption, extraOptionValue); ECSchema sameSchema = new ECSchema(codec, numDataUnits, numParityUnits, extraMap); - assertEquals("Different constructors not equal", sameSchema, schema); + assertEquals(sameSchema, schema, "Different constructors not equal"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestErasureCodingEncodeAndDecode.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestErasureCodingEncodeAndDecode.java index e61f64e423f30..68daa9769c701 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestErasureCodingEncodeAndDecode.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/TestErasureCodingEncodeAndDecode.java @@ -21,11 +21,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.util.Random; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; public 
class TestErasureCodingEncodeAndDecode { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/codec/TestHHXORErasureCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/codec/TestHHXORErasureCodec.java index d5a338431d95d..d854da91a0941 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/codec/TestHHXORErasureCodec.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/codec/TestHHXORErasureCodec.java @@ -21,9 +21,9 @@ import org.apache.hadoop.io.erasurecode.ECSchema; import org.apache.hadoop.io.erasurecode.ErasureCodecOptions; import org.apache.hadoop.io.erasurecode.coder.ErasureCoder; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestHHXORErasureCodec { private ECSchema schema = new ECSchema("hhxor", 10, 4); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java index 753c16a4b56b4..ab1c300845319 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java @@ -26,7 +26,7 @@ import java.io.IOException; import java.lang.reflect.Constructor; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; /** * Erasure coder test base with utilities. diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java index c27672a07a3f7..e52387d873bc6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java @@ -22,7 +22,7 @@ import java.io.IOException; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java index 094ed0801e0a0..9a0461ae85eb8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHXORErasureCoder.java @@ -20,12 +20,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.erasurecode.CodecUtil; import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestHHXORErasureCoder extends TestHHErasureCoderBase { - @Before + @BeforeEach public void setup() { this.encoderClass = HHXORErasureEncoder.class; this.decoderClass = HHXORErasureDecoder.class; diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java index 726d2c1284e60..3eb8e6976be72 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestRSErasureCoder.java @@ -20,21 +20,17 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.erasurecode.CodecUtil; import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; - -import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * Test Reed-Solomon encoding and decoding. */ +@Timeout(300) public class TestRSErasureCoder extends TestErasureCoderBase { - @Rule - public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS); - @Before + @BeforeEach public void setup() { this.encoderClass = RSErasureEncoder.class; this.decoderClass = RSErasureDecoder.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java index d1ceec8121acd..5cbae07ead791 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestXORCoder.java @@ -17,22 +17,17 @@ */ package org.apache.hadoop.io.erasurecode.coder; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; - -import java.util.concurrent.TimeUnit; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * Test XOR encoding and decoding. */ +@Timeout(300) public class TestXORCoder extends TestErasureCoderBase { - @Rule - public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS); - - @Before + @BeforeEach public void setup() { this.encoderClass = XORErasureEncoder.class; this.decoderClass = XORErasureDecoder.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestCoderUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestCoderUtil.java index 126d605f316a2..04268a524e9ff 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestCoderUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestCoderUtil.java @@ -19,11 +19,12 @@ package org.apache.hadoop.io.erasurecode.rawcoder; import org.apache.hadoop.HadoopIllegalArgumentException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.nio.ByteBuffer; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; /** * Test of the utility of raw erasure coder. 
@@ -116,9 +117,11 @@ public void testFindFirstValidInput() { assertEquals(firstValidInput, inputs[8]); } - @Test(expected = HadoopIllegalArgumentException.class) + @Test public void testNoValidInput() { - byte[][] inputs = new byte[numInputs][]; - CoderUtil.findFirstValidInput(inputs); + assertThrows(HadoopIllegalArgumentException.class, () -> { + byte[][] inputs = new byte[numInputs][]; + CoderUtil.findFirstValidInput(inputs); + }); } } \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java index 06744cccc0a54..dc141fd766a52 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDecodingValidator.java @@ -20,12 +20,9 @@ import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.ErasureCodeNative; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Assert; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import java.io.IOException; import java.util.Arrays; @@ -34,17 +31,19 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; /** * Test {@link DecodingValidator} under various decoders. */ -@RunWith(Parameterized.class) public class TestDecodingValidator extends TestRawCoderBase { private DecodingValidator validator; - @Parameterized.Parameters public static Collection data() { return Arrays.asList(new Object[][] { {RSRawErasureCoderFactory.class, 6, 3, new int[]{1}, new int[]{}}, @@ -57,7 +56,7 @@ public static Collection data() { }); } - public TestDecodingValidator( + public void initTestDecodingValidator( Class factoryClass, int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) { this.encoderFactoryClass = factoryClass; @@ -66,13 +65,13 @@ public TestDecodingValidator( this.numParityUnits = numParityUnits; this.erasedDataIndexes = erasedDataIndexes; this.erasedParityIndexes = erasedParityIndexes; + setup(); } - @Before public void setup() { if (encoderFactoryClass == NativeRSRawErasureCoderFactory.class || encoderFactoryClass == NativeXORRawErasureCoderFactory.class) { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); } setAllowDump(false); } @@ -80,8 +79,12 @@ public void setup() { /** * Test if the same validator can process direct and non-direct buffers. 
*/ - @Test - public void testValidate() { + @ParameterizedTest + @MethodSource("data") + public void testValidate(Class factoryClass, + int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) { + initTestDecodingValidator(factoryClass, numDataUnits, numParityUnits, + erasedDataIndexes, erasedParityIndexes); prepare(null, numDataUnits, numParityUnits, erasedDataIndexes, erasedParityIndexes); testValidate(true); @@ -119,7 +122,7 @@ protected void performTestValidate(int chunkSize) { try { encoder.encode(dataChunks, parityChunks); } catch (Exception e) { - Assert.fail("Should not get Exception: " + e.getMessage()); + fail("Should not get Exception: " + e.getMessage()); } // decode @@ -133,7 +136,7 @@ protected void performTestValidate(int chunkSize) { try { decoder.decode(inputChunks, erasedIndexes, recoveredChunks); } catch (Exception e) { - Assert.fail("Should not get Exception: " + e.getMessage()); + fail("Should not get Exception: " + e.getMessage()); } // validate @@ -146,7 +149,7 @@ protected void performTestValidate(int chunkSize) { validator.validate(clonedInputChunks, clonedErasedIndexes, clonedRecoveredChunks); } catch (Exception e) { - Assert.fail("Should not get Exception: " + e.getMessage()); + fail("Should not get Exception: " + e.getMessage()); } // Check if input buffers' positions are moved to the end @@ -154,8 +157,8 @@ protected void performTestValidate(int chunkSize) { // Check if validator does not change recovered chunks and erased indexes verifyChunksEqual(recoveredChunks, clonedRecoveredChunks); - Assert.assertArrayEquals("Erased indexes should not be changed", - erasedIndexes, clonedErasedIndexes); + assertArrayEquals(erasedIndexes, clonedErasedIndexes, + "Erased indexes should not be changed"); // Check if validator uses correct indexes for validation List validIndexesList = @@ -167,31 +170,33 @@ protected void performTestValidate(int chunkSize) { List erasedIndexesList = IntStream.of(erasedIndexes).boxed().collect(Collectors.toList()); int newErasedIndex = validator.getNewErasedIndex(); - Assert.assertTrue( + assertTrue(newValidIndexesList.containsAll(erasedIndexesList), "Valid indexes for validation should contain" - + " erased indexes for decoding", - newValidIndexesList.containsAll(erasedIndexesList)); - Assert.assertTrue( + + " erased indexes for decoding"); + assertTrue(validIndexesList.contains(newErasedIndex), "An erased index for validation should be contained" - + " in valid indexes for decoding", - validIndexesList.contains(newErasedIndex)); - Assert.assertFalse( + + " in valid indexes for decoding"); + assertFalse(newValidIndexesList.contains(newErasedIndex), "An erased index for validation should not be contained" - + " in valid indexes for validation", - newValidIndexesList.contains(newErasedIndex)); + + " in valid indexes for validation"); } private void verifyChunksEqual(ECChunk[] chunks1, ECChunk[] chunks2) { boolean result = Arrays.deepEquals(toArrays(chunks1), toArrays(chunks2)); - assertTrue("Recovered chunks should not be changed", result); + assertTrue(result, "Recovered chunks should not be changed"); } /** * Test if validator throws {@link InvalidDecodingException} when * a decoded output buffer is polluted. 
*/ - @Test - public void testValidateWithBadDecoding() throws IOException { + @ParameterizedTest + @MethodSource("data") + public void testValidateWithBadDecoding(Class factoryClass, + int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) + throws IOException { + initTestDecodingValidator(factoryClass, numDataUnits, numParityUnits, + erasedDataIndexes, erasedParityIndexes); prepare(null, numDataUnits, numParityUnits, erasedDataIndexes, erasedParityIndexes); this.usingDirectBuffer = true; @@ -206,7 +211,7 @@ public void testValidateWithBadDecoding() throws IOException { try { encoder.encode(dataChunks, parityChunks); } catch (Exception e) { - Assert.fail("Should not get Exception: " + e.getMessage()); + fail("Should not get Exception: " + e.getMessage()); } // decode @@ -220,7 +225,7 @@ public void testValidateWithBadDecoding() throws IOException { try { decoder.decode(inputChunks, erasedIndexes, recoveredChunks); } catch (Exception e) { - Assert.fail("Should not get Exception: " + e.getMessage()); + fail("Should not get Exception: " + e.getMessage()); } // validate @@ -228,10 +233,28 @@ public void testValidateWithBadDecoding() throws IOException { polluteSomeChunk(recoveredChunks); try { validator.validate(inputChunks, erasedIndexes, recoveredChunks); - Assert.fail("Validation should fail due to bad decoding"); + fail("Validation should fail due to bad decoding"); } catch (InvalidDecodingException e) { String expected = "Failed to validate decoding"; GenericTestUtils.assertExceptionContains(expected, e); } } + + @ParameterizedTest + @MethodSource("data") + public void testIdempotentReleases(Class factoryClass, + int numDataUnits, int numParityUnits, int[] erasedDataIndexes, int[] erasedParityIndexes) { + initTestDecodingValidator(factoryClass, numDataUnits, numParityUnits, + erasedDataIndexes, erasedParityIndexes); + prepareCoders(true); + + for (int i = 0; i < 3; i++) { + encoder.release(); + decoder.release(); + } + } + + @Test + public void testIdempotentReleases() { + } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java index b936ff8b5d4ec..bb419e7103681 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assertions.fail; + import org.apache.hadoop.io.erasurecode.ECChunk; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import java.io.IOException; import java.nio.ByteBuffer; @@ -29,7 +30,7 @@ * Test dummy raw coder. 
*/ public class TestDummyRawCoder extends TestRawCoderBase { - @Before + @BeforeEach public void setup() { encoderFactoryClass = DummyRawErasureCoderFactory.class; decoderFactoryClass = DummyRawErasureCoderFactory.class; @@ -64,7 +65,7 @@ protected void testCoding(boolean usingDirectBuffer) { try { encoder.encode(dataChunks, parityChunks); } catch (IOException e) { - Assert.fail("Unexpected IOException: " + e.getMessage()); + fail("Unexpected IOException: " + e.getMessage()); } compareAndVerify(parityChunks, getEmptyChunks(parityChunks.length)); @@ -79,7 +80,7 @@ protected void testCoding(boolean usingDirectBuffer) { decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks); } catch (IOException e) { - Assert.fail("Unexpected IOException: " + e.getMessage()); + fail("Unexpected IOException: " + e.getMessage()); } compareAndVerify(recoveredChunks, getEmptyChunks(recoveredChunks.length)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java index d56045e78edbd..6d9f1f7baa67b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java @@ -17,19 +17,20 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * Test native raw Reed-solomon encoding and decoding. */ public class TestNativeRSRawCoder extends TestRSRawCoderBase { - @Before + @BeforeEach public void setup() { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); this.encoderFactoryClass = NativeRSRawErasureCoderFactory.class; this.decoderFactoryClass = NativeRSRawErasureCoderFactory.class; setAllowDump(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java index 90e94107c4502..d45a829e3d6ec 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java @@ -17,19 +17,20 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * Test NativeXOR encoding and decoding. 
*/ public class TestNativeXORRawCoder extends TestXORRawCoderBase { - @Before + @BeforeEach public void setup() { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); this.encoderFactoryClass = NativeXORRawErasureCoderFactory.class; this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class; setAllowDump(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java index c01aed95efaf9..5987f6bd61079 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSLegacyRawCoder.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test the legacy raw Reed-solomon coder implemented in Java. */ public class TestRSLegacyRawCoder extends TestRSRawCoderBase { - @Before + @BeforeEach public void setup() { this.encoderFactoryClass = RSLegacyRawErasureCoderFactory.class; this.decoderFactoryClass = RSLegacyRawErasureCoderFactory.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java index c613ee1d45f47..b45eb6dcf5d51 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoder.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test the new raw Reed-solomon coder implemented in Java. */ public class TestRSRawCoder extends TestRSRawCoderBase { - @Before + @BeforeEach public void setup() { this.encoderFactoryClass = RSRawErasureCoderFactory.class; this.decoderFactoryClass = RSRawErasureCoderFactory.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java index b03b051dd05ce..49dd1db2f7efe 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderBase.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Test base for raw Reed-solomon coders. 
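The @Test(expected = ...) rewrites earlier in this patch (the ZStandard tests and TestCoderUtil) all reduce to the same shape: JUnit 5 drops the annotation attribute and expects an explicit assertThrows around the statement that should fail. A minimal sketch under invented names (NullGuardTest is not part of this patch):

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.junit.jupiter.api.Assertions.assertThrows;

    import java.util.Objects;

    import org.junit.jupiter.api.Test;

    public class NullGuardTest {
      // JUnit 4: @Test(expected = NullPointerException.class)
      // JUnit 5: the expectation moves into the test body.
      @Test
      public void testNullIsRejected() {
        NullPointerException e = assertThrows(NullPointerException.class,
            () -> Objects.requireNonNull(null, "value must not be null"));
        // Unlike the annotation form, assertThrows returns the exception,
        // so its message can be checked as well.
        assertEquals("value must not be null", e.getMessage());
      }
    }

Besides being more explicit, this form narrows the expectation: with the annotation, an exception thrown anywhere in the method satisfied the test, whereas assertThrows only passes if the lambda body itself throws.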
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java index c39c4e0592721..e1cb9f2af5eae 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable1.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test raw Reed-solomon coder implemented in Java. */ public class TestRSRawCoderInteroperable1 extends TestRSRawCoderBase { - @Before + @BeforeEach public void setup() { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); this.encoderFactoryClass = RSRawErasureCoderFactory.class; this.decoderFactoryClass = NativeRSRawErasureCoderFactory.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java index 3c97521d1b8ec..e1ffb884f97cd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRSRawCoderInteroperable2.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test raw Reed-solomon coder implemented in Java. 
*/ public class TestRSRawCoderInteroperable2 extends TestRSRawCoderBase { - @Before + @BeforeEach public void setup() { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); this.encoderFactoryClass = NativeRSRawErasureCoderFactory.class; this.decoderFactoryClass = RSRawErasureCoderFactory.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java index eb63494507eaf..28b2936f20f3c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java @@ -17,12 +17,14 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; + import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; import org.apache.hadoop.io.erasurecode.TestCoderBase; import org.apache.hadoop.test.LambdaTestUtils; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; @@ -85,7 +87,7 @@ protected void testCodingWithBadInput(boolean usingDirectBuffer) { try { performTestCoding(baseChunkSize, false, true, false, true); - Assert.fail("Encoding test with bad input should fail"); + fail("Encoding test with bad input should fail"); } catch (Exception e) { // Expected } @@ -101,7 +103,7 @@ protected void testCodingWithBadOutput(boolean usingDirectBuffer) { try { performTestCoding(baseChunkSize, false, false, true, true); - Assert.fail("Decoding test with bad output should fail"); + fail("Decoding test with bad output should fail"); } catch (Exception e) { // Expected } @@ -133,14 +135,14 @@ void testAfterRelease() throws Exception { public void testCodingWithErasingTooMany() { try { testCoding(true); - Assert.fail("Decoding test erasing too many should fail"); + fail("Decoding test erasing too many should fail"); } catch (Exception e) { // Expected } try { testCoding(false); - Assert.fail("Decoding test erasing too many should fail"); + fail("Decoding test erasing too many should fail"); } catch (Exception e) { // Expected } @@ -182,7 +184,7 @@ private void performTestCoding(int chunkSize, boolean usingSlicedBuffer, try { encoder.encode(dataChunks, parityChunks); } catch (IOException e) { - Assert.fail("Should not get IOException: " + e.getMessage()); + fail("Should not get IOException: " + e.getMessage()); } dumpChunks("Encoded parity chunks", parityChunks); @@ -217,7 +219,7 @@ private void performTestCoding(int chunkSize, boolean usingSlicedBuffer, decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks); } catch (IOException e) { - Assert.fail("Should not get IOException: " + e.getMessage()); + fail("Should not get IOException: " + e.getMessage()); } dumpChunks("Decoded/recovered chunks", recoveredChunks); @@ -315,7 +317,7 @@ protected void testInputPosition(boolean usingDirectBuffer) { try { encoder.encode(dataChunks, parityChunks); } catch (IOException e) { - Assert.fail("Should not get IOException: " + e.getMessage()); + fail("Should not get IOException: " + e.getMessage()); } verifyBufferPositionAtEnd(dataChunks); @@ -329,7 +331,7 @@ protected void 
testInputPosition(boolean usingDirectBuffer) { decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks); } catch (IOException e) { - Assert.fail("Should not get IOException: " + e.getMessage()); + fail("Should not get IOException: " + e.getMessage()); } verifyBufferPositionAtEnd(inputChunks); } @@ -337,7 +339,7 @@ protected void testInputPosition(boolean usingDirectBuffer) { void verifyBufferPositionAtEnd(ECChunk[] inputChunks) { for (ECChunk chunk : inputChunks) { if (chunk != null) { - Assert.assertEquals(0, chunk.getBuffer().remaining()); + assertEquals(0, chunk.getBuffer().remaining()); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawErasureCoderBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawErasureCoderBenchmark.java index 3ba0260b1a79f..48d817bb7c3b3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawErasureCoderBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawErasureCoderBenchmark.java @@ -17,9 +17,10 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Tests for the raw erasure coder benchmark tool. @@ -55,7 +56,7 @@ public void testRSCoder() throws Exception { @Test public void testISALCoder() throws Exception { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); // ISA-L coder RawErasureCoderBenchmark.performBench("encode", RawErasureCoderBenchmark.CODER.ISAL_CODER, 5, 300, 64); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java index b29cd4cb22872..d24ff92ab7168 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoder.java @@ -17,14 +17,14 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test pure Java XOR encoding and decoding. */ public class TestXORRawCoder extends TestXORRawCoderBase { - @Before + @BeforeEach public void setup() { this.encoderFactoryClass = XORRawErasureCoderFactory.class; this.decoderFactoryClass = XORRawErasureCoderFactory.class; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderBase.java index 27de379f0b063..2180f3e033660 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderBase.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** * Test base for raw XOR coders. 
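Two remaining patterns in this patch are easy to miss at diff granularity: the JUnit 4 @Rule Timeout field becomes a single class-level @Timeout whose default unit is seconds (hence 300000 ms turning into @Timeout(300) in TestECSchema, TestRSErasureCoder and TestXORCoder), and @RunWith(Parameterized.class) with a constructor becomes @ParameterizedTest plus @MethodSource, as TestDecodingValidator now does. A combined sketch on an invented SumTest, not code from this patch:

    import static org.junit.jupiter.api.Assertions.assertEquals;

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.jupiter.api.Timeout;
    import org.junit.jupiter.params.ParameterizedTest;
    import org.junit.jupiter.params.provider.MethodSource;

    // JUnit 4: @Rule public Timeout globalTimeout = new Timeout(300000, TimeUnit.MILLISECONDS);
    // JUnit 5: one annotation; the unit defaults to seconds.
    @Timeout(300)
    public class SumTest {

      // Replaces the JUnit 4 @Parameterized.Parameters data method;
      // each Object[] row becomes one invocation's argument list.
      public static Collection<Object[]> data() {
        return Arrays.asList(new Object[][] {{1, 2, 3}, {2, 2, 4}});
      }

      // JUnit 4 injected the row through a constructor; JUnit 5 passes it
      // straight into the test method, one invocation per data() row.
      @ParameterizedTest
      @MethodSource("data")
      public void testSum(int a, int b, int expected) {
        assertEquals(expected, a + b);
      }
    }

Where fields set up by the old constructor are still needed across helper methods, as in TestDecodingValidator above, the constructor survives as a plain init method called at the top of each parameterized test.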
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java index 5238a8601ea20..b2c8321885691 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable1.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test raw XOR coder implemented in Java. */ public class TestXORRawCoderInteroperable1 extends TestXORRawCoderBase { - @Before + @BeforeEach public void setup() { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); this.encoderFactoryClass = XORRawErasureCoderFactory.class; this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class; setAllowDump(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java index b835107e8498a..7bf5ec2e1f53e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestXORRawCoderInteroperable2.java @@ -17,18 +17,19 @@ */ package org.apache.hadoop.io.erasurecode.rawcoder; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import org.apache.hadoop.io.erasurecode.ErasureCodeNative; -import org.junit.Assume; -import org.junit.Before; +import org.junit.jupiter.api.BeforeEach; /** * Test raw XOR coder implemented in Java. 
*/ public class TestXORRawCoderInteroperable2 extends TestXORRawCoderBase { - @Before + @BeforeEach public void setup() { - Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); + assumeTrue(ErasureCodeNative.isNativeCodeLoaded()); this.encoderFactoryClass = NativeXORRawErasureCoderFactory.class; this.decoderFactoryClass = XORRawErasureCoderFactory.class; setAllowDump(true); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestCompression.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestCompression.java index 6b4c698551359..c7d577290d1a3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestCompression.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestCompression.java @@ -18,20 +18,22 @@ package org.apache.hadoop.io.file.tfile; import org.apache.hadoop.test.LambdaTestUtils; -import org.junit.*; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; import java.io.IOException; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestCompression { - @BeforeClass + @BeforeAll public static void resetConfigBeforeAll() { Compression.Algorithm.LZO.conf.setBoolean("test.reload.lzo.codec", true); } - @AfterClass + @AfterAll public static void resetConfigAfterAll() { Compression.Algorithm.LZO.conf.setBoolean("test.reload.lzo.codec", false); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java index ea20fbeda3d05..34c430be73041 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFile.java @@ -32,12 +32,12 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; /** * test tfile features. 
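The lifecycle annotations rename one-for-one, which is the other change repeated through the TFile tests below: @Before/@After become @BeforeEach/@AfterEach, and @BeforeClass/@AfterClass become @BeforeAll/@AfterAll. One behavioral subtlety explains the otherwise odd-looking hunks later in this patch that add @BeforeEach to overriding setUp() methods: JUnit 5 runs an inherited @BeforeEach method only as long as it is not overridden, so a subclass that overrides setUp() must re-annotate it or the fixture silently stops running. A sketch of the mapping (illustrative class, not from this patch):

    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.BeforeEach;
    import org.junit.jupiter.api.Test;

    class LifecycleRenameExample {
      @BeforeAll       // was @BeforeClass; still must be static by default
      static void beforeAll() {}

      @BeforeEach      // was @Before; an overriding subclass method must
      void setUp() {}  // carry its own @BeforeEach to keep running

      @Test
      void works() {}

      @AfterEach       // was @After
      void tearDown() {}

      @AfterAll        // was @AfterClass; still must be static by default
      static void afterAll() {}
    }
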
@@ -51,13 +51,13 @@ public class TestTFile { private static final int largeVal = 3 * 1024 * 1024; private static final String localFormatter = "%010d"; - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); fs = FileSystem.get(conf); } - @After + @AfterEach public void tearDown() throws IOException { // do nothing } @@ -109,18 +109,19 @@ private int readAndCheckbytes(Scanner scanner, int start, int n) byte[] val = readValue(scanner); String keyStr = String.format(localFormatter, i); String valStr = value + keyStr; - assertTrue("bytes for keys do not match " + keyStr + " " - + new String(key), Arrays.equals(keyStr.getBytes(), key)); - assertTrue("bytes for vals do not match " + valStr + " " - + new String(val), Arrays.equals( - valStr.getBytes(), val)); + assertTrue(Arrays.equals(keyStr.getBytes(), key), + "bytes for keys do not match " + keyStr + " " + + new String(key)); + assertTrue(Arrays.equals( + valStr.getBytes(), val), "bytes for vals do not match " + valStr + " " + + new String(val)); assertTrue(scanner.advance()); key = readKey(scanner); val = readValue(scanner); - assertTrue("bytes for keys do not match", Arrays.equals( - keyStr.getBytes(), key)); - assertTrue("bytes for vals do not match", Arrays.equals( - valStr.getBytes(), val)); + assertTrue(Arrays.equals( + keyStr.getBytes(), key), "bytes for keys do not match"); + assertTrue(Arrays.equals( + valStr.getBytes(), val), "bytes for vals do not match"); assertTrue(scanner.advance()); } return (start + n); @@ -146,12 +147,12 @@ private int readLargeRecords(Scanner scanner, int start, int n) for (int i = start; i < (start + n); i++) { byte[] key = readKey(scanner); String keyStr = String.format(localFormatter, i); - assertTrue("bytes for keys do not match", Arrays.equals( - keyStr.getBytes(), key)); + assertTrue(Arrays.equals( + keyStr.getBytes(), key), "bytes for keys do not match"); scanner.advance(); key = readKey(scanner); - assertTrue("bytes for keys do not match", Arrays.equals( - keyStr.getBytes(), key)); + assertTrue(Arrays.equals( + keyStr.getBytes(), key), "bytes for keys do not match"); scanner.advance(); } return (start + n); @@ -175,9 +176,9 @@ private void readEmptyRecords(Scanner scanner, int n) throws IOException { for (int i = 0; i < n; i++) { readKey = readKey(scanner); readValue = readValue(scanner); - assertTrue("failed to match keys", Arrays.equals(readKey, key)); - assertTrue("failed to match values", Arrays.equals(readValue, value)); - assertTrue("failed to advance cursor", scanner.advance()); + assertTrue(Arrays.equals(readKey, key), "failed to match keys"); + assertTrue(Arrays.equals(readValue, value), "failed to match values"); + assertTrue(scanner.advance(), "failed to advance cursor"); } } @@ -206,10 +207,10 @@ private int readPrepWithKnownLength(Scanner scanner, int start, int n) for (int i = start; i < (start + n); i++) { String key = String.format(localFormatter, i); byte[] read = readKey(scanner); - assertTrue("keys not equal", Arrays.equals(key.getBytes(), read)); + assertTrue(Arrays.equals(key.getBytes(), read), "keys not equal"); String value = "value" + key; read = readValue(scanner); - assertTrue("values not equal", Arrays.equals(value.getBytes(), read)); + assertTrue(Arrays.equals(value.getBytes(), read), "values not equal"); scanner.advance(); } return (start + n); @@ -235,7 +236,7 @@ private int readPrepWithUnknownLength(Scanner scanner, int start, int n) for (int i = start; i < start; i++) { String key = String.format(localFormatter, i); byte[] read = 
readKey(scanner); - assertTrue("keys not equal", Arrays.equals(key.getBytes(), read)); + assertTrue(Arrays.equals(key.getBytes(), read), "keys not equal"); try { read = readValue(scanner); assertTrue(false); @@ -245,7 +246,7 @@ private int readPrepWithUnknownLength(Scanner scanner, int start, int n) } String value = "value" + key; read = readLongValue(scanner, value.getBytes().length); - assertTrue("values nto equal", Arrays.equals(read, value.getBytes())); + assertTrue(Arrays.equals(read, value.getBytes()), "values nto equal"); scanner.advance(); } return (start + n); @@ -294,11 +295,11 @@ void basicWithSomeCodec(String codec) throws IOException { Scanner scanner = reader.createScanner(); readAllRecords(scanner); scanner.seekTo(getSomeKey(50)); - assertTrue("location lookup failed", scanner.seekTo(getSomeKey(50))); + assertTrue(scanner.seekTo(getSomeKey(50)), "location lookup failed"); // read the key and see if it matches byte[] readKey = readKey(scanner); - assertTrue("seeked key does not match", Arrays.equals(getSomeKey(50), - readKey)); + assertTrue(Arrays.equals(getSomeKey(50), + readKey), "seeked key does not match"); scanner.seekTo(new byte[0]); byte[] val1 = readValue(scanner); @@ -308,19 +309,19 @@ void basicWithSomeCodec(String codec) throws IOException { // check for lowerBound scanner.lowerBound(getSomeKey(50)); - assertTrue("locaton lookup failed", scanner.currentLocation - .compareTo(reader.end()) < 0); + assertTrue(scanner.currentLocation + .compareTo(reader.end()) < 0, "locaton lookup failed"); readKey = readKey(scanner); - assertTrue("seeked key does not match", Arrays.equals(readKey, - getSomeKey(50))); + assertTrue(Arrays.equals(readKey, + getSomeKey(50)), "seeked key does not match"); // check for upper bound scanner.upperBound(getSomeKey(50)); - assertTrue("location lookup failed", scanner.currentLocation - .compareTo(reader.end()) < 0); + assertTrue(scanner.currentLocation + .compareTo(reader.end()) < 0, "location lookup failed"); readKey = readKey(scanner); - assertTrue("seeked key does not match", Arrays.equals(readKey, - getSomeKey(51))); + assertTrue(Arrays.equals(readKey, + getSomeKey(51)), "seeked key does not match"); scanner.close(); // test for a range of scanner @@ -398,8 +399,8 @@ private void readNumMetablocks(Reader reader, int n) throws IOException { DataInputStream din = reader.getMetaBlock("TfileMeta" + i); byte b[] = new byte[len]; din.readFully(b); - assertTrue("faield to match metadata", Arrays.equals( - ("something to test" + i).getBytes(), b)); + assertTrue(Arrays.equals( + ("something to test" + i).getBytes(), b), "faield to match metadata"); din.close(); } } @@ -416,7 +417,7 @@ private void someReadingWithMetaBlock(Reader reader) throws IOException { } din = reader.getMetaBlock("TFileMeta100"); int read = din.read(); - assertTrue("check for status", (read == -1)); + assertTrue((read == -1), "check for status"); din.close(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java index 7051f00213457..2661ce1018620 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileByteArrays.java @@ -17,14 +17,18 @@ package org.apache.hadoop.io.file.tfile; +import static org.junit.jupiter.api.Assertions.assertEquals; +import 
static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; import java.io.IOException; import java.util.Random; -import org.junit.Assert; - import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; @@ -36,9 +40,9 @@ import org.apache.hadoop.io.file.tfile.TFile.Reader.Location; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; /** * @@ -87,7 +91,7 @@ public void init(String compression, String comparator) { this.comparator = comparator; } - @Before + @BeforeEach public void setUp() throws IOException { path = new Path(ROOT, outputFile); fs = path.getFileSystem(conf); @@ -95,7 +99,7 @@ public void setUp() throws IOException { writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf); } - @After + @AfterEach public void tearDown() throws IOException { if (!skip) fs.delete(path, true); @@ -108,9 +112,9 @@ public void testNoDataEntry() throws IOException { closeOutput(); Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); - Assert.assertTrue(reader.isSorted()); + assertTrue(reader.isSorted()); Scanner scanner = reader.createScanner(); - Assert.assertTrue(scanner.atEnd()); + assertTrue(scanner.atEnd()); scanner.close(); reader.close(); } @@ -242,7 +246,7 @@ public void testLocate() throws IOException { locate(scanner, composeSortedKey(KEY, records1stBlock - 1).getBytes()); locate(scanner, composeSortedKey(KEY, records1stBlock).getBytes()); Location locX = locate(scanner, "keyX".getBytes()); - Assert.assertEquals(scanner.endLocation, locX); + assertEquals(scanner.endLocation, locX); scanner.close(); reader.close(); } @@ -254,7 +258,7 @@ public void testFailureWriterNotClosed() throws IOException { Reader reader = null; try { reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); - Assert.fail("Cannot read before closing the writer."); + fail("Cannot read before closing the writer."); } catch (IOException e) { // noop, expecting exceptions } finally { @@ -279,7 +283,7 @@ public void testFailureWriteMetaBlocksWithSameName() throws IOException { // add the same metablock try { writer.prepareMetaBlock("testX", Compression.Algorithm.GZ.getName()); - Assert.fail("Cannot create metablocks with the same name."); + fail("Cannot create metablocks with the same name."); } catch (Exception e) { // noop, expecting exceptions } @@ -302,11 +306,11 @@ public void testFailureGetNonExistentMetaBlock() throws IOException { Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); DataInputStream mb = reader.getMetaBlock("testX"); - Assert.assertNotNull(mb); + assertNotNull(mb); mb.close(); try { DataInputStream mbBad = reader.getMetaBlock("testY"); - Assert.fail("Error on handling non-existent metablocks."); + fail("Error on handling non-existent metablocks."); } catch (Exception e) { // noop, expecting exceptions } @@ -328,7 +332,7 @@ public void testFailureWriteRecordAfterMetaBlock() throws IOException { // add more key/value try { 
writer.append("keyY".getBytes(), "valueY".getBytes()); - Assert.fail("Cannot add key/value after start adding meta blocks."); + fail("Cannot add key/value after start adding meta blocks."); } catch (Exception e) { // noop, expecting exceptions } @@ -347,10 +351,10 @@ public void testFailureReadValueManyTimes() throws IOException { byte[] vbuf = new byte[BUF_SIZE]; int vlen = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf); - Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + 0); + assertEquals(new String(vbuf, 0, vlen), VALUE + 0); try { scanner.entry().getValue(vbuf); - Assert.fail("Cannot get the value mlutiple times."); + fail("Cannot get the value mlutiple times."); } catch (Exception e) { // noop, expecting exceptions } @@ -367,7 +371,7 @@ public void testFailureBadCompressionCodec() throws IOException { out = fs.create(path); try { writer = new Writer(out, BLOCK_SIZE, "BAD", comparator, conf); - Assert.fail("Error on handling invalid compression codecs."); + fail("Error on handling invalid compression codecs."); } catch (Exception e) { // noop, expecting exceptions // e.printStackTrace(); @@ -385,7 +389,7 @@ public void testFailureOpenEmptyFile() throws IOException { out.close(); try { new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); - Assert.fail("Error on handling empty files."); + fail("Error on handling empty files."); } catch (EOFException e) { // noop, expecting exceptions } @@ -409,7 +413,7 @@ public void testFailureOpenRandomFile() throws IOException { out.close(); try { new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); - Assert.fail("Error on handling random files."); + fail("Error on handling random files."); } catch (IOException e) { // noop, expecting exceptions } @@ -437,7 +441,7 @@ public void testFailureOutOfOrderKeys() throws IOException { try { writer.append("keyM".getBytes(), "valueM".getBytes()); writer.append("keyA".getBytes(), "valueA".getBytes()); - Assert.fail("Error on handling out of order keys."); + fail("Error on handling out of order keys."); } catch (Exception e) { // noop, expecting exceptions // e.printStackTrace(); @@ -452,7 +456,7 @@ public void testFailureNegativeOffset() throws IOException { return; try { writer.append("keyX".getBytes(), -1, 4, "valueX".getBytes(), 0, 6); - Assert.fail("Error on handling negative offset."); + fail("Error on handling negative offset."); } catch (Exception e) { // noop, expecting exceptions } @@ -469,7 +473,7 @@ public void testFailureNegativeOffset_2() throws IOException { Scanner scanner = reader.createScanner(); try { scanner.lowerBound("keyX".getBytes(), -1, 4); - Assert.fail("Error on handling negative offset."); + fail("Error on handling negative offset."); } catch (Exception e) { // noop, expecting exceptions } finally { @@ -485,7 +489,7 @@ public void testFailureNegativeLength() throws IOException { return; try { writer.append("keyX".getBytes(), 0, -1, "valueX".getBytes(), 0, 6); - Assert.fail("Error on handling negative length."); + fail("Error on handling negative length."); } catch (Exception e) { // noop, expecting exceptions } @@ -502,7 +506,7 @@ public void testFailureNegativeLength_2() throws IOException { Scanner scanner = reader.createScanner(); try { scanner.lowerBound("keyX".getBytes(), 0, -1); - Assert.fail("Error on handling negative length."); + fail("Error on handling negative length."); } catch (Exception e) { // noop, expecting exceptions } finally { @@ -525,7 +529,7 @@ public void testFailureNegativeLength_3() throws IOException { // test 
negative array offset try { scanner.seekTo("keyY".getBytes(), -1, 4); - Assert.fail("Failed to handle negative offset."); + fail("Failed to handle negative offset."); } catch (Exception e) { // noop, expecting exceptions } @@ -533,7 +537,7 @@ public void testFailureNegativeLength_3() throws IOException { // test negative array length try { scanner.seekTo("keyY".getBytes(), 0, -2); - Assert.fail("Failed to handle negative key length."); + fail("Failed to handle negative key length."); } catch (Exception e) { // noop, expecting exceptions } @@ -549,7 +553,7 @@ public void testFailureCompressionNotWorking() throws IOException { return; long rawDataSize = writeRecords(10 * records1stBlock, false); if (!compression.equalsIgnoreCase(Compression.Algorithm.NONE.getName())) { - Assert.assertTrue(out.getPos() < rawDataSize); + assertTrue(out.getPos() < rawDataSize); } closeOutput(); } @@ -564,7 +568,7 @@ public void testFailureFileWriteNotAt0Position() throws IOException { try { writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf); - Assert.fail("Failed to catch file write not at position 0."); + fail("Failed to catch file write not at position 0."); } catch (Exception e) { // noop, expecting exceptions } @@ -620,23 +624,23 @@ static void readRecords(FileSystem fs, Path path, int count, try { for (int nx = 0; nx < count; nx++, scanner.advance()) { - Assert.assertFalse(scanner.atEnd()); - // Assert.assertTrue(scanner.next()); + assertFalse(scanner.atEnd()); + // assertTrue(scanner.next()); byte[] kbuf = new byte[BUF_SIZE]; int klen = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf); - Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, + assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, nx)); byte[] vbuf = new byte[BUF_SIZE]; int vlen = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf); - Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + nx); + assertEquals(new String(vbuf, 0, vlen), VALUE + nx); } - Assert.assertTrue(scanner.atEnd()); - Assert.assertFalse(scanner.advance()); + assertTrue(scanner.atEnd()); + assertFalse(scanner.advance()); } finally { scanner.close(); reader.close(); @@ -647,7 +651,7 @@ private void checkBlockIndex(int recordIndex, int blockIndexExpected) throws IOE Reader reader = new Reader(fs.open(path), fs.getFileStatus(path).getLen(), conf); Scanner scanner = reader.createScanner(); scanner.seekTo(composeSortedKey(KEY, recordIndex).getBytes()); - Assert.assertEquals(blockIndexExpected, scanner.currentLocation + assertEquals(blockIndexExpected, scanner.currentLocation .getBlockIndex()); scanner.close(); reader.close(); @@ -665,12 +669,12 @@ private void readValueBeforeKey(int recordIndex) byte[] vbuf = new byte[BUF_SIZE]; int vlen = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf); - Assert.assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex); + assertEquals(new String(vbuf, 0, vlen), VALUE + recordIndex); byte[] kbuf = new byte[BUF_SIZE]; int klen = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf); - Assert.assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, + assertEquals(new String(kbuf, 0, klen), composeSortedKey(KEY, recordIndex)); } finally { scanner.close(); @@ -690,7 +694,7 @@ private void readKeyWithoutValue(int recordIndex) byte[] kbuf1 = new byte[BUF_SIZE]; int klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); - Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, + assertEquals(new String(kbuf1, 0, klen1), 
composeSortedKey(KEY, recordIndex)); if (scanner.advance() && !scanner.atEnd()) { @@ -698,7 +702,7 @@ private void readKeyWithoutValue(int recordIndex) byte[] kbuf2 = new byte[BUF_SIZE]; int klen2 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf2); - Assert.assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY, + assertEquals(new String(kbuf2, 0, klen2), composeSortedKey(KEY, recordIndex + 1)); } } finally { @@ -718,13 +722,13 @@ private void readValueWithoutKey(int recordIndex) byte[] vbuf1 = new byte[BUF_SIZE]; int vlen1 = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf1); - Assert.assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex); + assertEquals(new String(vbuf1, 0, vlen1), VALUE + recordIndex); if (scanner.advance() && !scanner.atEnd()) { byte[] vbuf2 = new byte[BUF_SIZE]; int vlen2 = scanner.entry().getValueLength(); scanner.entry().getValue(vbuf2); - Assert.assertEquals(new String(vbuf2, 0, vlen2), VALUE + assertEquals(new String(vbuf2, 0, vlen2), VALUE + (recordIndex + 1)); } @@ -743,17 +747,17 @@ private void readKeyManyTimes(int recordIndex) throws IOException { byte[] kbuf1 = new byte[BUF_SIZE]; int klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); - Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, + assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); - Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, + assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); klen1 = scanner.entry().getKeyLength(); scanner.entry().getKey(kbuf1); - Assert.assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, + assertEquals(new String(kbuf1, 0, klen1), composeSortedKey(KEY, recordIndex)); scanner.close(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java index 5a8b5b30fd4e9..4177d7362a1b9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparator2.java @@ -28,9 +28,9 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; public class TestTFileComparator2 { private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java index e46006296f74f..f349b58146196 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileComparators.java @@ -17,13 +17,13 @@ package org.apache.hadoop.io.file.tfile; -import java.io.IOException; +import static org.junit.jupiter.api.Assertions.fail; -import org.junit.Assert; +import java.io.IOException; -import org.junit.After; -import org.junit.Before; -import 
org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -58,7 +58,7 @@ public class TestTFileComparators { private int records1stBlock = 4480; private int records2ndBlock = 4263; - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -66,7 +66,7 @@ public void setUp() throws IOException { out = fs.create(path); } - @After + @AfterEach public void tearDown() throws IOException { fs.delete(path, true); } @@ -76,7 +76,7 @@ public void tearDown() throws IOException { public void testFailureBadComparatorNames() throws IOException { try { writer = new Writer(out, BLOCK_SIZE, compression, "badcmp", conf); - Assert.fail("Failed to catch unsupported comparator names"); + fail("Failed to catch unsupported comparator names"); } catch (Exception e) { // noop, expecting exceptions @@ -91,7 +91,7 @@ public void testFailureBadJClassNames() throws IOException { writer = new Writer(out, BLOCK_SIZE, compression, "jclass: some.non.existence.clazz", conf); - Assert.fail("Failed to catch unsupported comparator names"); + fail("Failed to catch unsupported comparator names"); } catch (Exception e) { // noop, expecting exceptions @@ -106,7 +106,7 @@ public void testFailureBadJClasses() throws IOException { writer = new Writer(out, BLOCK_SIZE, compression, "jclass:org.apache.hadoop.io.file.tfile.Chunk", conf); - Assert.fail("Failed to catch unsupported comparator names"); + fail("Failed to catch unsupported comparator names"); } catch (Exception e) { // noop, expecting exceptions diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java index ebb0fd163a9c5..a444ec59cc720 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileJClassComparatorByteArrays.java @@ -22,6 +22,7 @@ import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.WritableComparator; +import org.junit.jupiter.api.BeforeEach; /** * @@ -34,6 +35,7 @@ public class TestTFileJClassComparatorByteArrays extends TestTFileByteArrays { /** * Test non-compression codec, using the same test cases as in the ByteArrays. 
*/ + @BeforeEach @Override public void setUp() throws IOException { init(Compression.Algorithm.GZ.getName(), diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsByteArrays.java index 1b3a351cd75ea..3965b5b4ab7f8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsByteArrays.java @@ -21,11 +21,13 @@ import java.io.IOException; import org.apache.hadoop.io.file.tfile.Compression.Algorithm; +import org.junit.jupiter.api.BeforeEach; public class TestTFileLzoCodecsByteArrays extends TestTFileByteArrays { /** * Test LZO compression codec, using the same test cases as in the ByteArrays. */ + @BeforeEach @Override public void setUp() throws IOException { skip = !(Algorithm.LZO.isSupported()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java index 9436a7aa00424..7144ea619fd69 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileLzoCodecsStreams.java @@ -21,11 +21,13 @@ import java.io.IOException; import org.apache.hadoop.io.file.tfile.Compression.Algorithm; +import org.junit.jupiter.api.BeforeEach; public class TestTFileLzoCodecsStreams extends TestTFileStreams { /** * Test LZO compression codec, using the same test cases as in the ByteArrays. */ + @BeforeEach @Override public void setUp() throws IOException { skip = !(Algorithm.LZO.isSupported()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsByteArrays.java index 081c8273d7700..97e7cbf1a920d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsByteArrays.java @@ -17,12 +17,15 @@ package org.apache.hadoop.io.file.tfile; +import org.junit.jupiter.api.BeforeEach; + import java.io.IOException; public class TestTFileNoneCodecsByteArrays extends TestTFileByteArrays { /** * Test non-compression codec, using the same test cases as in the ByteArrays. 
*/ + @BeforeEach @Override public void setUp() throws IOException { init(Compression.Algorithm.NONE.getName(), "memcmp", 24, 24); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java index d7a579016d527..d3f7f64b6c09e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsJClassComparatorByteArrays.java @@ -17,6 +17,8 @@ package org.apache.hadoop.io.file.tfile; +import org.junit.jupiter.api.BeforeEach; + import java.io.IOException; /** @@ -30,6 +32,7 @@ public class TestTFileNoneCodecsJClassComparatorByteArrays extends TestTFileByte /** * Test non-compression codec, using the same test cases as in the ByteArrays. */ + @BeforeEach @Override public void setUp() throws IOException { init(Compression.Algorithm.NONE.getName(), diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsStreams.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsStreams.java index e06f51968e6fd..78d77698b686d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsStreams.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileNoneCodecsStreams.java @@ -18,12 +18,15 @@ package org.apache.hadoop.io.file.tfile; +import org.junit.jupiter.api.BeforeEach; + import java.io.IOException; public class TestTFileNoneCodecsStreams extends TestTFileStreams { /** * Test non-compression codec, using the same test cases as in the ByteArrays. 
*/ + @BeforeEach @Override public void setUp() throws IOException { init(Compression.Algorithm.NONE.getName(), "memcmp"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java index fc9273163e03b..37f7154062038 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeek.java @@ -22,9 +22,9 @@ import java.util.StringTokenizer; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.apache.commons.cli.CommandLine; @@ -60,7 +60,7 @@ public class TestTFileSeek { private DiscreteRNG keyLenGen; private KVGenerator kvGen; - @Before + @BeforeEach public void setUp() throws IOException { if (options == null) { options = new MyOptions(new String[0]); @@ -87,7 +87,7 @@ public void setUp() throws IOException { options.dictSize); } - @After + @AfterEach public void tearDown() throws IOException { fs.delete(path, true); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java index 361623cfda82d..34716720c68fb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java @@ -24,9 +24,9 @@ import java.util.StringTokenizer; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; @@ -57,7 +57,7 @@ public class TestTFileSeqFileComparison { private DateFormat formatter; byte[][] dictionary; - @Before + @BeforeEach public void setUp() throws IOException { if (options == null) { options = new MyOptions(new String[0]); @@ -84,7 +84,7 @@ private void setUpDictionary() { } } - @After + @AfterEach public void tearDown() throws IOException { // do nothing } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java index 613ae4fbcef1b..bb42914ede695 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSplit.java @@ -19,10 +19,10 @@ import java.io.IOException; import java.util.Random; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -116,10 +116,10 @@ void readRowSplits(int numSplits) throws IOException { BytesWritable value = new 
BytesWritable(); long x=startRec; while (!scanner.atEnd()) { - assertEquals("Incorrect RecNum returned by scanner", scanner.getRecordNum(), x); + assertEquals(scanner.getRecordNum(), x, "Incorrect RecNum returned by scanner"); scanner.entry().get(key, value); ++count; - assertEquals("Incorrect RecNum returned by scanner", scanner.getRecordNum(), x); + assertEquals(scanner.getRecordNum(), x, "Incorrect RecNum returned by scanner"); scanner.advance(); ++x; } @@ -147,34 +147,34 @@ void checkRecNums() throws IOException { end += (totalRecs / 2); end += (totalRecs / 2) + 1; - assertEquals("RecNum for offset=0 should be 0", 0, reader - .getRecordNumNear(0)); + assertEquals(0, reader.getRecordNumNear(0), + "RecNum for offset=0 should be 0"); for (long x : new long[] { fileLen, fileLen + 1, 2 * fileLen }) { - assertEquals("RecNum for offset>=fileLen should be total entries", - totalRecs, reader.getRecordNumNear(x)); + assertEquals(totalRecs, reader.getRecordNumNear(x), + "RecNum for offset>=fileLen should be total entries"); } for (long i = 0; i < 100; ++i) { - assertEquals("Locaton to RecNum conversion not symmetric", i, reader - .getRecordNumByLocation(reader.getLocationByRecordNum(i))); + assertEquals(i, reader.getRecordNumByLocation(reader.getLocationByRecordNum(i)), + "Locaton to RecNum conversion not symmetric"); } for (long i = 1; i < 100; ++i) { long x = totalRecs - i; - assertEquals("Locaton to RecNum conversion not symmetric", x, reader - .getRecordNumByLocation(reader.getLocationByRecordNum(x))); + assertEquals(x, reader.getRecordNumByLocation(reader.getLocationByRecordNum(x)), + "Locaton to RecNum conversion not symmetric"); } for (long i = begin; i < end; ++i) { - assertEquals("Locaton to RecNum conversion not symmetric", i, reader - .getRecordNumByLocation(reader.getLocationByRecordNum(i))); + assertEquals(i, reader.getRecordNumByLocation(reader.getLocationByRecordNum(i)), + "Locaton to RecNum conversion not symmetric"); } for (int i = 0; i < 1000; ++i) { long x = random.nextLong() % totalRecs; if (x < 0) x += totalRecs; - assertEquals("Locaton to RecNum conversion not symmetric", x, reader - .getRecordNumByLocation(reader.getLocationByRecordNum(x))); + assertEquals(x, reader.getRecordNumByLocation(reader.getLocationByRecordNum(x)), + "Locaton to RecNum conversion not symmetric"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java index a108408f507f6..664cb88ea2ef7 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileStreams.java @@ -22,12 +22,12 @@ import java.io.IOException; import java.util.Random; -import static org.junit.Assert.fail; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -68,7 +68,7 @@ public void init(String compression, String comparator) { this.comparator = comparator; } - @Before + @BeforeEach public void setUp() throws IOException { conf = 
new Configuration(); path = new Path(ROOT, outputFile); @@ -77,7 +77,7 @@ public void setUp() throws IOException { writer = new Writer(out, BLOCK_SIZE, compression, comparator, conf); } - @After + @AfterEach public void tearDown() throws IOException { if (!skip) { try { @@ -307,7 +307,7 @@ public void testFailureCloseKeyStreamManyTimesInWriter() throws IOException { } outKey.close(); outKey.close(); - assertTrue("Multiple close should have no effect.", true); + assertTrue(true, "Multiple close should have no effect."); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java index f849d538d6d61..f7a0ad5b1f345 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileUnsortedByteArrays.java @@ -19,7 +19,7 @@ import java.io.IOException; -import org.junit.After; +import org.junit.jupiter.api.AfterEach; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; @@ -29,11 +29,11 @@ import org.apache.hadoop.io.file.tfile.TFile.Writer; import org.apache.hadoop.io.file.tfile.TFile.Reader.Scanner; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; public class TestTFileUnsortedByteArrays { private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath(); @@ -64,7 +64,7 @@ public void init(String compression, String outputFile, this.records2ndBlock = numRecords2ndBlock; } - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -78,7 +78,7 @@ public void setUp() throws IOException { closeOutput(); } - @After + @AfterEach public void tearDown() throws IOException { fs.delete(path, true); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java index b7550f9d584d2..8b18464af4150 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java @@ -21,8 +21,7 @@ import java.io.IOException; import java.util.Random; -import org.junit.After; -import org.junit.Assert; +import org.junit.jupiter.api.AfterEach; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; @@ -30,10 +29,11 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestVLong { private static String ROOT = GenericTestUtils.getTestDir().getAbsolutePath(); @@ -42,7 +42,7 @@ public class TestVLong { private Path path; private String outputFile 
= "TestVLong"; - @Before + @BeforeEach public void setUp() throws IOException { conf = new Configuration(); path = new Path(ROOT, outputFile); @@ -52,7 +52,7 @@ public void setUp() throws IOException { } } - @After + @AfterEach public void tearDown() throws IOException { if (fs.exists(path)) { fs.delete(path, false); @@ -66,9 +66,9 @@ public void testVLongByte() throws IOException { Utils.writeVLong(out, i); } out.close(); - Assert.assertEquals("Incorrect encoded size", (1 << Byte.SIZE) + 96, fs + assertEquals((1 << Byte.SIZE) + 96, fs .getFileStatus( - path).getLen()); + path).getLen(), "Incorrect encoded size"); FSDataInputStream in = fs.open(path); for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; ++i) { @@ -97,36 +97,35 @@ private long writeAndVerify(int shift) throws IOException { @Test public void testVLongShort() throws IOException { long size = writeAndVerify(0); - Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 2 + assertEquals((1 << Short.SIZE) * 2 + ((1 << Byte.SIZE) - 40) - * (1 << Byte.SIZE) - 128 - 32, size); + * (1 << Byte.SIZE) - 128 - 32, size, "Incorrect encoded size"); } @Test public void testVLong3Bytes() throws IOException { long size = writeAndVerify(Byte.SIZE); - Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 3 - + ((1 << Byte.SIZE) - 32) * (1 << Byte.SIZE) - 40 - 1, size); + assertEquals((1 << Short.SIZE) * 3 + + ((1 << Byte.SIZE) - 32) * (1 << Byte.SIZE) - 40 - 1, size, "Incorrect encoded size"); } @Test public void testVLong4Bytes() throws IOException { long size = writeAndVerify(Byte.SIZE * 2); - Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 4 - + ((1 << Byte.SIZE) - 16) * (1 << Byte.SIZE) - 32 - 2, size); + assertEquals((1 << Short.SIZE) * 4 + + ((1 << Byte.SIZE) - 16) * (1 << Byte.SIZE) - 32 - 2, size, "Incorrect encoded size"); } @Test public void testVLong5Bytes() throws IOException { long size = writeAndVerify(Byte.SIZE * 3); - Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) * 6 - 256 - - 16 - 3, size); + assertEquals((1 << Short.SIZE) * 6 - 256 - 16 - 3, size, "Incorrect encoded size"); } private void verifySixOrMoreBytes(int bytes) throws IOException { long size = writeAndVerify(Byte.SIZE * (bytes - 2)); - Assert.assertEquals("Incorrect encoded size", (1 << Short.SIZE) - * (bytes + 1) - 256 - bytes + 1, size); + assertEquals((1 << Short.SIZE) + * (bytes + 1) - 256 - bytes + 1, size, "Incorrect encoded size"); } @Test diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java index c21fa443ddcc4..7d4f24efd52ce 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java @@ -61,11 +61,16 @@ import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assume.*; -import static org.junit.Assert.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import 
static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -75,18 +80,19 @@ public class TestNativeIO { static final File TEST_DIR = GenericTestUtils.getTestDir("testnativeio"); - @Before + @BeforeEach public void checkLoaded() { assumeTrue(NativeCodeLoader.isNativeCodeLoaded()); } - @Before + @BeforeEach public void setupTestDir() { FileUtil.fullyDelete(TEST_DIR); TEST_DIR.mkdirs(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFstat() throws Exception { FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testfstat")); @@ -107,8 +113,8 @@ public void testFstat() throws Exception { assertEquals(expectedOwner, owner); assertNotNull(stat.getGroup()); assertTrue(!stat.getGroup().isEmpty()); - assertEquals("Stat mode field should indicate a regular file", S_IFREG, - stat.getMode() & S_IFMT); + assertEquals(S_IFREG, + stat.getMode() & S_IFMT, "Stat mode field should indicate a regular file"); } /** @@ -117,7 +123,8 @@ public void testFstat() throws Exception { * NOTE: this test is likely to fail on RHEL 6.0 which has a non-threadsafe * implementation of getpwuid_r. */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testMultiThreadedFstat() throws Exception { assumeNotWindows(); @@ -138,8 +145,8 @@ public void run() { assertEquals(System.getProperty("user.name"), stat.getOwner()); assertNotNull(stat.getGroup()); assertTrue(!stat.getGroup().isEmpty()); - assertEquals("Stat mode field should indicate a regular file", - S_IFREG, stat.getMode() & S_IFMT); + assertEquals(S_IFREG, stat.getMode() & S_IFMT, + "Stat mode field should indicate a regular file"); } catch (Throwable t) { thrown.set(t); } @@ -160,7 +167,8 @@ public void run() { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFstatClosedFd() throws Exception { FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testfstat2")); @@ -173,7 +181,8 @@ public void testFstatClosedFd() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testStat() throws Exception { Configuration conf = new Configuration(); FileSystem fileSystem = FileSystem.getLocal(conf).getRawFileSystem(); @@ -232,7 +241,8 @@ public void testStatOnError() throws Exception { () -> NativeIO.POSIX.getStat(testInvalidFilePath)); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testMultiThreadedStat() throws Exception { Configuration conf = new Configuration(); FileSystem fileSystem = FileSystem.getLocal(conf).getRawFileSystem(); @@ -277,15 +287,16 @@ public void testMultiThreadedStatOnError() throws Exception { executorService.shutdown(); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testSetFilePointer() throws Exception { assumeWindows(); LOG.info("Set a file pointer on Windows"); try { File testfile = new File(TEST_DIR, "testSetFilePointer"); - assertTrue("Create test subject", - testfile.exists() || testfile.createNewFile()); + assertTrue(testfile.exists() || testfile.createNewFile(), + "Create test subject"); FileWriter writer = new FileWriter(testfile); try { for (int i = 0; i < 200; i++) @@ -311,7 +322,7 @@ public void testSetFilePointer() throws Exception { FileReader reader = new FileReader(fd); try { int c = reader.read(); - assertTrue("Unexpected character: " + 
c, c == 'b'); + assertTrue(c == 'b', "Unexpected character: " + c); } catch (Exception readerException) { fail("Got unexpected exception: " + readerException.getMessage()); } finally { @@ -322,15 +333,16 @@ public void testSetFilePointer() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testCreateFile() throws Exception { assumeWindows(); LOG.info("Open a file on Windows with SHARE_DELETE shared mode"); try { File testfile = new File(TEST_DIR, "testCreateFile"); - assertTrue("Create test subject", - testfile.exists() || testfile.createNewFile()); + assertTrue(testfile.exists() || testfile.createNewFile(), + "Create test subject"); FileDescriptor fd = NativeIO.Windows.createFile( testfile.getCanonicalPath(), @@ -347,7 +359,7 @@ public void testCreateFile() throws Exception { File newfile = new File(TEST_DIR, "testRenamedFile"); boolean renamed = testfile.renameTo(newfile); - assertTrue("Rename failed.", renamed); + assertTrue(renamed, "Rename failed."); fin.read(); } catch (Exception e) { @@ -363,7 +375,8 @@ public void testCreateFile() throws Exception { } /** Validate access checks on Windows */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testAccess() throws Exception { assumeWindows(); @@ -437,7 +450,8 @@ public void testAccess() throws Exception { NativeIO.Windows.AccessRight.ACCESS_EXECUTE)); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testOpenMissingWithoutCreate() throws Exception { assumeNotWindows(); @@ -452,7 +466,8 @@ public void testOpenMissingWithoutCreate() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testOpenWithCreate() throws Exception { assumeNotWindows(); @@ -484,7 +499,8 @@ public void testOpenWithCreate() throws Exception { * Test that opens and closes a file 10000 times - this would crash with * "Too many open files" if we leaked fds using this access pattern. 
*/ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testFDDoesntLeak() throws IOException { assumeNotWindows(); @@ -503,7 +519,8 @@ public void testFDDoesntLeak() throws IOException { /** * Test basic chmod operation */ - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testChmod() throws Exception { assumeNotWindows(); @@ -515,8 +532,7 @@ public void testChmod() throws Exception { } File toChmod = new File(TEST_DIR, "testChmod"); - assertTrue("Create test subject", - toChmod.exists() || toChmod.mkdir()); + assertTrue(toChmod.exists() || toChmod.mkdir(), "Create test subject"); NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777); assertPermissions(toChmod, 0777); NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000); @@ -526,7 +542,8 @@ public void testChmod() throws Exception { } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testPosixFadvise() throws Exception { assumeNotWindows(); @@ -560,7 +577,8 @@ public void testPosixFadvise() throws Exception { } } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testSyncFileRange() throws Exception { FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testSyncFileRange")); @@ -593,19 +611,22 @@ private void assertPermissions(File f, int expected) throws IOException { assertEquals(expected, perms.toShort()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetUserName() throws IOException { assumeNotWindows(); assertFalse(NativeIO.POSIX.getUserName(0).isEmpty()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testGetGroupName() throws IOException { assumeNotWindows(); assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty()); } - @Test (timeout = 30000) + @Test + @Timeout(value = 30) public void testRenameTo() throws Exception { final File TEST_DIR = GenericTestUtils.getTestDir("renameTest") ; assumeTrue(TEST_DIR.mkdirs()); @@ -614,20 +635,20 @@ public void testRenameTo() throws Exception { // Test attempting to rename a nonexistent file. try { NativeIO.renameTo(nonExistentFile, targetFile); - Assert.fail(); + fail(); } catch (NativeIOException e) { if (Path.WINDOWS) { - Assert.assertEquals( + assertEquals( String.format("The system cannot find the file specified.%n"), e.getMessage()); } else { - Assert.assertEquals(Errno.ENOENT, e.getErrno()); + assertEquals(Errno.ENOENT, e.getErrno()); } } // Test renaming a file to itself. It should succeed and do nothing. File sourceFile = new File(TEST_DIR, "source"); - Assert.assertTrue(sourceFile.createNewFile()); + assertTrue(sourceFile.createNewFile()); NativeIO.renameTo(sourceFile, sourceFile); // Test renaming a source to a destination. @@ -635,18 +656,18 @@ public void testRenameTo() throws Exception { // Test renaming a source to a path which uses a file as a directory. 
   sourceFile = new File(TEST_DIR, "source");
-    Assert.assertTrue(sourceFile.createNewFile());
+    assertTrue(sourceFile.createNewFile());
     File badTarget = new File(targetFile, "subdir");
     try {
       NativeIO.renameTo(sourceFile, badTarget);
-      Assert.fail();
+      fail();
     } catch (NativeIOException e) {
       if (Path.WINDOWS) {
-        Assert.assertEquals(
+        assertEquals(
             String.format("The parameter is incorrect.%n"),
             e.getMessage());
       } else {
-        Assert.assertEquals(Errno.ENOTDIR, e.getErrno());
+        assertEquals(Errno.ENOTDIR, e.getErrno());
       }
     }
 
@@ -655,7 +676,8 @@ public void testRenameTo() throws Exception {
     NativeIO.renameTo(sourceFile, targetFile);
   }
 
-  @Test(timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testMlock() throws Exception {
     assumeTrue(NativeIO.isAvailable());
     final File TEST_FILE = GenericTestUtils.getTestDir("testMlockFile");
@@ -689,7 +711,7 @@ public void testMlock() throws Exception {
     for (int i=0; i<fileSize; i++) {
...
-    assertTrue("Native 0_RDONLY const not set", O_RDONLY >= 0);
-    assertTrue("Native 0_WRONLY const not set", O_WRONLY >= 0);
-    assertTrue("Native 0_RDWR const not set", O_RDWR >= 0);
-    assertTrue("Native 0_CREAT const not set", O_CREAT >= 0);
-    assertTrue("Native 0_EXCL const not set", O_EXCL >= 0);
-    assertTrue("Native 0_NOCTTY const not set", O_NOCTTY >= 0);
-    assertTrue("Native 0_TRUNC const not set", O_TRUNC >= 0);
-    assertTrue("Native 0_APPEND const not set", O_APPEND >= 0);
-    assertTrue("Native 0_NONBLOCK const not set", O_NONBLOCK >= 0);
-    assertTrue("Native 0_SYNC const not set", O_SYNC >= 0);
-    assertTrue("Native S_IFMT const not set", S_IFMT >= 0);
-    assertTrue("Native S_IFIFO const not set", S_IFIFO >= 0);
-    assertTrue("Native S_IFCHR const not set", S_IFCHR >= 0);
-    assertTrue("Native S_IFDIR const not set", S_IFDIR >= 0);
-    assertTrue("Native S_IFBLK const not set", S_IFBLK >= 0);
-    assertTrue("Native S_IFREG const not set", S_IFREG >= 0);
-    assertTrue("Native S_IFLNK const not set", S_IFLNK >= 0);
-    assertTrue("Native S_IFSOCK const not set", S_IFSOCK >= 0);
-    assertTrue("Native S_ISUID const not set", S_ISUID >= 0);
-    assertTrue("Native S_ISGID const not set", S_ISGID >= 0);
-    assertTrue("Native S_ISVTX const not set", S_ISVTX >= 0);
-    assertTrue("Native S_IRUSR const not set", S_IRUSR >= 0);
-    assertTrue("Native S_IWUSR const not set", S_IWUSR >= 0);
-    assertTrue("Native S_IXUSR const not set", S_IXUSR >= 0);
-  }
-
-  @Test (timeout=10000)
+    assertTrue(O_RDONLY >= 0, "Native 0_RDONLY const not set");
+    assertTrue(O_WRONLY >= 0, "Native 0_WRONLY const not set");
+    assertTrue(O_RDWR >= 0, "Native 0_RDWR const not set");
+    assertTrue(O_CREAT >= 0, "Native 0_CREAT const not set");
+    assertTrue(O_EXCL >= 0, "Native 0_EXCL const not set");
+    assertTrue(O_NOCTTY >= 0, "Native 0_NOCTTY const not set");
+    assertTrue(O_TRUNC >= 0, "Native 0_TRUNC const not set");
+    assertTrue(O_APPEND >= 0, "Native 0_APPEND const not set");
+    assertTrue(O_NONBLOCK >= 0, "Native 0_NONBLOCK const not set");
+    assertTrue(O_SYNC >= 0, "Native 0_SYNC const not set");
+    assertTrue(S_IFMT >= 0, "Native S_IFMT const not set");
+    assertTrue(S_IFIFO >= 0, "Native S_IFIFO const not set");
+    assertTrue(S_IFCHR >= 0, "Native S_IFCHR const not set");
+    assertTrue(S_IFDIR >= 0, "Native S_IFDIR const not set");
+    assertTrue(S_IFBLK >= 0, "Native S_IFBLK const not set");
+    assertTrue(S_IFREG >= 0, "Native S_IFREG const not set");
+    assertTrue(S_IFLNK >= 0, "Native S_IFLNK const not set");
+    assertTrue(S_IFSOCK >= 0, "Native S_IFSOCK const not set");
+    assertTrue(S_ISUID >= 0, "Native S_ISUID const not set");
+    assertTrue(S_ISGID >= 0, "Native S_ISGID const not set");
+    assertTrue(S_ISVTX >= 0, "Native S_ISVTX const not set");
+    assertTrue(S_IRUSR >= 0, "Native S_IRUSR const not set");
+    assertTrue(S_IWUSR >= 0, "Native S_IWUSR const not set");
+    assertTrue(S_IXUSR >= 0, "Native S_IXUSR const not set");
+  }
+
+  @Test
+  @Timeout(value = 10)
   public void testNativeFadviseConsts() {
-    assumeTrue("Fadvise constants not supported", fadvisePossible);
-    assertTrue("Native POSIX_FADV_NORMAL const not set",
-        POSIX_FADV_NORMAL >= 0);
-    assertTrue("Native POSIX_FADV_RANDOM const not set",
-        POSIX_FADV_RANDOM >= 0);
-    assertTrue("Native POSIX_FADV_SEQUENTIAL const not set",
-        POSIX_FADV_SEQUENTIAL >= 0);
-    assertTrue("Native POSIX_FADV_WILLNEED const not set",
-        POSIX_FADV_WILLNEED >= 0);
-    assertTrue("Native POSIX_FADV_DONTNEED const not set",
-        POSIX_FADV_DONTNEED >= 0);
-    assertTrue("Native POSIX_FADV_NOREUSE const not set",
-        POSIX_FADV_NOREUSE >= 0);
-  }
-
-
-  @Test (timeout=10000)
+    assumeTrue(fadvisePossible, "Fadvise constants not supported");
+    assertTrue(POSIX_FADV_NORMAL >= 0,
+        "Native POSIX_FADV_NORMAL const not set");
+    assertTrue(POSIX_FADV_RANDOM >= 0,
+        "Native POSIX_FADV_RANDOM const not set");
+    assertTrue(POSIX_FADV_SEQUENTIAL >= 0,
+        "Native POSIX_FADV_SEQUENTIAL const not set");
+    assertTrue(POSIX_FADV_WILLNEED >= 0,
+        "Native POSIX_FADV_WILLNEED const not set");
+    assertTrue(POSIX_FADV_DONTNEED >= 0,
+        "Native POSIX_FADV_DONTNEED const not set");
+    assertTrue(POSIX_FADV_NOREUSE >= 0,
+        "Native POSIX_FADV_NOREUSE const not set");
+  }
+
+
+  @Test
+  @Timeout(value = 10)
   public void testPmemCheckParameters() {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
@@ -817,7 +844,8 @@ public void testPmemCheckParameters() {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testPmemMapMultipleFiles() {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
@@ -847,7 +875,8 @@ public void testPmemMapMultipleFiles() {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testPmemMapBigFile() {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
@@ -871,7 +900,8 @@ public void testPmemMapBigFile() {
     }
   }
 
-  @Test (timeout=10000)
+  @Test
+  @Timeout(value = 10)
   public void testPmemCopy() throws IOException {
     assumeNotWindows("Native PMDK not supported on Windows");
     // Skip testing while the build or environment does not support PMDK
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIoInit.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIoInit.java
index bdc295f252bf9..d44727b4b65b6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIoInit.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIoInit.java
@@ -17,12 +17,13 @@
  */
 package org.apache.hadoop.io.nativeio;
 
-import static org.junit.Assume.assumeTrue;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 import java.io.IOException;
 
 import org.apache.hadoop.fs.Path;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
 
 /**
  * Separate class to ensure forked Tests load the static blocks again.
@@ -40,7 +41,8 @@ public class TestNativeIoInit { * Expected: Loading these two static blocks separately should not result in * deadlock. */ - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDeadlockLinux() throws Exception { Thread one = new Thread() { @Override @@ -60,9 +62,10 @@ public void run() { two.join(); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testDeadlockWindows() throws Exception { - assumeTrue("Expected windows", Path.WINDOWS); + assumeTrue(Path.WINDOWS, "Expected windows"); Thread one = new Thread() { @Override public void run() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java index 17be5874c5771..b0e801e1f90aa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestSharedFileDescriptorFactory.java @@ -17,15 +17,19 @@ */ package org.apache.hadoop.io.nativeio; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; + import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; -import org.junit.Assert; -import org.junit.Assume; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import org.apache.commons.lang3.SystemUtils; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -39,13 +43,14 @@ public class TestSharedFileDescriptorFactory { private static final File TEST_BASE = GenericTestUtils.getTestDir(); - @Before + @BeforeEach public void setup() throws Exception { - Assume.assumeTrue(null == + assumeTrue(null == SharedFileDescriptorFactory.getLoadingFailureReason()); } - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testReadAndWrite() throws Exception { File path = new File(TEST_BASE, "testReadAndWrite"); path.mkdirs(); @@ -57,7 +62,7 @@ public void testReadAndWrite() throws Exception { FileOutputStream outStream = new FileOutputStream(inStream.getFD()); outStream.write(101); inStream.getChannel().position(0); - Assert.assertEquals(101, inStream.read()); + assertEquals(101, inStream.read()); inStream.close(); outStream.close(); FileUtil.fullyDelete(path); @@ -69,10 +74,11 @@ static private void createTempFile(String path) throws Exception { fos.close(); } - @Test(timeout=10000) + @Test + @Timeout(value = 10) public void testCleanupRemainders() throws Exception { - Assume.assumeTrue(NativeIO.isAvailable()); - Assume.assumeTrue(SystemUtils.IS_OS_UNIX); + assumeTrue(NativeIO.isAvailable()); + assumeTrue(SystemUtils.IS_OS_UNIX); File path = new File(TEST_BASE, "testCleanupRemainders"); path.mkdirs(); String remainder1 = path.getAbsolutePath() + @@ -85,12 +91,13 @@ public void testCleanupRemainders() throws Exception { new String[] { path.getAbsolutePath() }); // creating the SharedFileDescriptorFactory should have removed // the remainders - Assert.assertFalse(new File(remainder1).exists()); - Assert.assertFalse(new File(remainder2).exists()); + assertFalse(new 
File(remainder1).exists()); + assertFalse(new File(remainder2).exists()); FileUtil.fullyDelete(path); } - @Test(timeout=60000) + @Test + @Timeout(value = 60) public void testDirectoryFallbacks() throws Exception { File nonExistentPath = new File(TEST_BASE, "nonexistent"); File permissionDeniedPath = new File("/"); @@ -100,7 +107,7 @@ public void testDirectoryFallbacks() throws Exception { SharedFileDescriptorFactory.create("shm_", new String[] { nonExistentPath.getAbsolutePath(), permissionDeniedPath.getAbsolutePath() }); - Assert.fail(); + fail(); } catch (IOException e) { } SharedFileDescriptorFactory factory = @@ -108,7 +115,7 @@ public void testDirectoryFallbacks() throws Exception { new String[] { nonExistentPath.getAbsolutePath(), permissionDeniedPath.getAbsolutePath(), goodPath.getAbsolutePath() } ); - Assert.assertEquals(goodPath.getAbsolutePath(), factory.getPath()); + assertEquals(goodPath.getAbsolutePath(), factory.getPath()); FileUtil.fullyDelete(goodPath); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestConnectionRetryPolicy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestConnectionRetryPolicy.java index 05a309d52b9a2..cd7d0d440f6b1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestConnectionRetryPolicy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestConnectionRetryPolicy.java @@ -18,14 +18,16 @@ package org.apache.hadoop.io.retry; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.RpcNoSuchMethodException; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * This class mainly tests behaviors of various retry policies in connection @@ -67,7 +69,8 @@ public static RetryPolicy getDefaultRetryPolicy( ""); } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testDefaultRetryPolicyEquivalence() { RetryPolicy rp1 = null; RetryPolicy rp2 = null; @@ -103,34 +106,29 @@ public void testDefaultRetryPolicyEquivalence() { /* test enabled and different specifications */ rp1 = getDefaultRetryPolicy(true, "20000,3"); rp2 = getDefaultRetryPolicy(true, "30000,4"); - assertNotEquals("should not be equal", rp1, rp2); - assertNotEquals( - "should not have the same hash code", - rp1.hashCode(), - rp2.hashCode()); + assertNotEquals(rp1, rp2, "should not be equal"); + assertNotEquals(rp1.hashCode(), rp2.hashCode(), + "should not have the same hash code"); /* test disabled and the same specifications */ rp1 = getDefaultRetryPolicy(false, "40000,5"); rp2 = getDefaultRetryPolicy(false, "40000,5"); - assertEquals("should be equal", rp1, rp2); - assertEquals( - "should have the same hash code", - rp1, rp2); + assertEquals(rp1, rp2, "should be equal"); + assertEquals(rp1, rp2, "should have the same hash code"); /* test the disabled and different specifications */ rp1 = getDefaultRetryPolicy(false, "50000,6"); rp2 = getDefaultRetryPolicy(false, "60000,7"); - assertEquals("should be equal", rp1, rp2); - assertEquals( - "should have the same hash code", - rp1, rp2); + assertEquals(rp1, rp2, "should be equal"); + assertEquals(rp1, rp2, "should have 
the same hash code"); } public static RetryPolicy newTryOnceThenFail() { return new RetryPolicies.TryOnceThenFail(); } - @Test(timeout = 60000) + @Test + @Timeout(value = 60) public void testTryOnceThenFailEquivalence() throws Exception { final RetryPolicy rp1 = newTryOnceThenFail(); final RetryPolicy rp2 = newTryOnceThenFail(); @@ -142,11 +140,9 @@ private void verifyRetryPolicyEquivalence(RetryPolicy[] polices) { for (int i = 0; i < polices.length; i++) { for (int j = 0; j < polices.length; j++) { if (i != j) { - assertEquals("should be equal", polices[i], polices[j]); - assertEquals( - "should have the same hash code", - polices[i].hashCode(), - polices[j].hashCode()); + assertEquals(polices[i], polices[j], "should be equal"); + assertEquals(polices[i].hashCode(), + polices[j].hashCode(), "should have the same hash code"); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java index 1a934f4ed86ed..0120dbb05a941 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestDefaultRetryPolicy.java @@ -21,31 +21,28 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RetriableException; -import org.junit.Assert; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; import java.io.IOException; -import java.util.concurrent.TimeUnit; import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test the behavior of the default retry policy. */ +@Timeout(30) public class TestDefaultRetryPolicy { - @Rule - public Timeout timeout = new Timeout(30000, TimeUnit.MILLISECONDS); /** Verify FAIL < RETRY < FAILOVER_AND_RETRY. 
*/ @Test public void testRetryDecisionOrdering() throws Exception { - Assert.assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo( + assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo( RetryPolicy.RetryAction.RetryDecision.RETRY) < 0); - Assert.assertTrue(RetryPolicy.RetryAction.RetryDecision.RETRY.compareTo( + assertTrue(RetryPolicy.RetryAction.RetryDecision.RETRY.compareTo( RetryPolicy.RetryAction.RetryDecision.FAILOVER_AND_RETRY) < 0); - Assert.assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo( + assertTrue(RetryPolicy.RetryAction.RetryDecision.FAIL.compareTo( RetryPolicy.RetryAction.RetryDecision.FAILOVER_AND_RETRY) < 0); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java index 7d55fe1c13ca8..a541ea99fcfdf 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestFailoverProxy.java @@ -17,7 +17,9 @@ */ package org.apache.hadoop.io.retry; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.concurrent.CountDownLatch; @@ -26,7 +28,7 @@ import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException; import org.apache.hadoop.ipc.StandbyException; import org.apache.hadoop.util.ThreadUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestFailoverProxy { @@ -354,8 +356,8 @@ public void testExpectedIOException() { unreliable.failsIfIdentifierDoesntMatch("no-such-identifier"); fail("Should have thrown *some* exception"); } catch (Exception e) { - assertTrue("Expected IOE but got " + e.getClass(), - e instanceof IOException); + assertTrue(e instanceof IOException, + "Expected IOE but got " + e.getClass()); } } } \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java index 59b9b13fbff55..25888d67ae2ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java @@ -26,8 +26,8 @@ import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.AccessControlException; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -43,7 +43,7 @@ import javax.security.sasl.SaslException; import static org.apache.hadoop.io.retry.RetryPolicies.*; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.ArgumentMatchers.anyInt; @@ -62,7 +62,7 @@ public class TestRetryProxy { private UnreliableImplementation unreliableImpl; private RetryAction caughtRetryAction = null; - @Before + @BeforeEach public void setUp() throws Exception { 
unreliableImpl = new UnreliableImplementation(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java index d9a00090689fb..15f3e2bc91898 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestSerializationFactory.java @@ -19,10 +19,10 @@ import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.BeforeClass; -import org.junit.Test; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertNotNull; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; @@ -38,7 +38,7 @@ public class TestSerializationFactory { static Configuration conf; static SerializationFactory factory; - @BeforeClass + @BeforeAll public static void setup() throws Exception { conf = new Configuration(); factory = new SerializationFactory(conf); @@ -74,21 +74,21 @@ public void testSerializationKeyIsInvalid() { @Test public void testGetSerializer() { // Test that a valid serializer class is returned when its present - assertNotNull("A valid class must be returned for default Writable SerDe", - factory.getSerializer(Writable.class)); + assertNotNull(factory.getSerializer(Writable.class), + "A valid class must be returned for default Writable SerDe"); // Test that a null is returned when none can be found. - assertNull("A null should be returned if there are no serializers found.", - factory.getSerializer(TestSerializationFactory.class)); + assertNull(factory.getSerializer(TestSerializationFactory.class), + "A null should be returned if there are no serializers found."); } @Test public void testGetDeserializer() { // Test that a valid serializer class is returned when its present - assertNotNull("A valid class must be returned for default Writable SerDe", - factory.getDeserializer(Writable.class)); + assertNotNull(factory.getDeserializer(Writable.class), + "A valid class must be returned for default Writable SerDe"); // Test that a null is returned when none can be found. 
- assertNull("A null should be returned if there are no deserializers found", - factory.getDeserializer(TestSerializationFactory.class)); + assertNull(factory.getDeserializer(TestSerializationFactory.class), + "A null should be returned if there are no deserializers found"); } @Test @@ -96,7 +96,7 @@ public void testSerializationKeyIsTrimmed() { Configuration conf = new Configuration(); conf.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, " org.apache.hadoop.io.serializer.WritableSerialization "); SerializationFactory factory = new SerializationFactory(conf); - assertNotNull("Valid class must be returned", - factory.getSerializer(LongWritable.class)); + assertNotNull(factory.getSerializer(LongWritable.class), + "Valid class must be returned"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java index 7ef5749bfb66b..beea0cf4bb76f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/TestWritableSerialization.java @@ -31,8 +31,9 @@ import org.apache.hadoop.io.TestGenericWritable.FooGenericWritable; import org.apache.hadoop.io.WritableComparator; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; public class TestWritableSerialization { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java index 97e281ba85ea2..dc040d16e7ea0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java @@ -18,13 +18,13 @@ package org.apache.hadoop.io.serializer.avro; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.SerializationTestUtil; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TestAvroSerialization {