diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java index 9c9b75fa76e6c..2cca43dac3e8a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/AvroTestUtil.java @@ -28,7 +28,7 @@ import org.apache.avro.reflect.ReflectDatumReader; import org.apache.avro.io.DecoderFactory; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class AvroTestUtil { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java index 2f69093d2654e..56aad813baf60 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java @@ -26,16 +26,16 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.conf.*; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.assertj.core.api.Assertions.assertThat; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; /** Support for flat files of binary key/value pairs. 
*/ public class TestArrayFile { @@ -134,7 +134,7 @@ public void testArrayFileIteration() { FileSystem fs = FileSystem.get(conf); ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE, LongWritable.class, CompressionType.RECORD, defaultProgressable); - assertNotNull("testArrayFileIteration error !!!", writer); + assertNotNull(writer, "testArrayFileIteration error !!!"); for (int i = 0; i < SIZE; i++) writer.append(new LongWritable(i)); @@ -149,15 +149,15 @@ public void testArrayFileIteration() { assertThat(nextWritable.get()).isEqualTo(i); } - assertTrue("testArrayFileIteration seek error !!!", - reader.seek(new LongWritable(6))); + assertTrue(reader.seek(new LongWritable(6)), + "testArrayFileIteration seek error !!!"); nextWritable = (LongWritable) reader.next(nextWritable); assertThat(reader.key()).withFailMessage( "testArrayFileIteration error !!!").isEqualTo(7); assertThat(nextWritable).withFailMessage( "testArrayFileIteration error !!!").isEqualTo(new LongWritable(7)); - assertFalse("testArrayFileIteration error !!!", - reader.seek(new LongWritable(SIZE + 5))); + assertFalse(reader.seek(new LongWritable(SIZE + 5)), + "testArrayFileIteration error !!!"); reader.close(); } catch (Exception ex) { fail("testArrayFileWriterConstruction error !!!"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java index b75d1654511a7..df7aa6ce93c03 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayPrimitiveWritable.java @@ -22,11 +22,11 @@ import java.util.Arrays; import org.apache.hadoop.util.StringUtils; -import org.junit.Test; -import org.junit.Before; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.BeforeEach; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** Unit tests for {@link ArrayPrimitiveWritable} */ @@ -48,7 +48,7 @@ public class TestArrayPrimitiveWritable { final DataOutputBuffer out = new DataOutputBuffer(); final DataInputBuffer in = new DataInputBuffer(); - @Before + @BeforeEach public void resetBuffers() throws IOException { out.reset(); in.reset(); @@ -79,12 +79,11 @@ public void testMany() throws IOException { //validate data structures and values assertEquals(expectedResultSet.length, resultSet.length); for (int x = 0; x < resultSet.length; x++) { - assertEquals("ComponentType of array " + x, - expectedResultSet[x].getClass().getComponentType(), - resultSet[x].getClass().getComponentType()); + assertEquals(expectedResultSet[x].getClass().getComponentType(), + resultSet[x].getClass().getComponentType(), "ComponentType of array " + x); } - assertTrue("In and Out arrays didn't match values", - Arrays.deepEquals(expectedResultSet, resultSet)); + assertTrue(Arrays.deepEquals(expectedResultSet, resultSet), + "In and Out arrays didn't match values"); } @Test @@ -107,36 +106,35 @@ public void testObjectLabeling() throws IOException { //Read the int[] object as written by ObjectWritable, but //"going around" ObjectWritable String className = UTF8.readString(in); - assertEquals("The int[] written 
by ObjectWritable was not labelled as " - + "an ArrayPrimitiveWritable.Internal", - ArrayPrimitiveWritable.Internal.class.getName(), className); + assertEquals(ArrayPrimitiveWritable.Internal.class.getName(), className, + "The int[] written by ObjectWritable was not labelled as " + + "an ArrayPrimitiveWritable.Internal"); ArrayPrimitiveWritable.Internal apwi = new ArrayPrimitiveWritable.Internal(); apwi.readFields(in); - assertEquals("The ArrayPrimitiveWritable.Internal component type was corrupted", - int.class, apw.getComponentType()); - assertTrue("The int[] written by ObjectWritable as " - + "ArrayPrimitiveWritable.Internal was corrupted", - Arrays.equals(i, (int[])(apwi.get()))); + assertEquals(int.class, apw.getComponentType(), + "The ArrayPrimitiveWritable.Internal component type was corrupted"); + assertTrue(Arrays.equals(i, (int[])(apwi.get())), "The int[] written by ObjectWritable as " + + "ArrayPrimitiveWritable.Internal was corrupted"); //Read the APW object as written by ObjectWritable, but //"going around" ObjectWritable String declaredClassName = UTF8.readString(in); - assertEquals("The APW written by ObjectWritable was not labelled as " - + "declaredClass ArrayPrimitiveWritable", - ArrayPrimitiveWritable.class.getName(), declaredClassName); + assertEquals(ArrayPrimitiveWritable.class.getName(), declaredClassName, + "The APW written by ObjectWritable was not labelled as " + + "declaredClass ArrayPrimitiveWritable"); className = UTF8.readString(in); - assertEquals("The APW written by ObjectWritable was not labelled as " - + "class ArrayPrimitiveWritable", - ArrayPrimitiveWritable.class.getName(), className); + assertEquals(ArrayPrimitiveWritable.class.getName(), className, + "The APW written by ObjectWritable was not labelled as " + + "class ArrayPrimitiveWritable"); ArrayPrimitiveWritable apw2 = new ArrayPrimitiveWritable(); apw2.readFields(in); - assertEquals("The ArrayPrimitiveWritable component type was corrupted", - int.class, apw2.getComponentType()); - assertTrue("The int[] written by ObjectWritable as " - + "ArrayPrimitiveWritable was corrupted", - Arrays.equals(i, (int[])(apw2.get()))); + assertEquals(int.class, apw2.getComponentType(), + "The ArrayPrimitiveWritable component type was corrupted"); + assertTrue(Arrays.equals(i, (int[])(apw2.get())), + "The int[] written by ObjectWritable as " + + "ArrayPrimitiveWritable was corrupted"); } @Test @@ -154,13 +152,14 @@ public void testOldFormat() throws IOException { //"going around" ObjectWritable @SuppressWarnings("deprecation") String className = UTF8.readString(in); - assertEquals("The int[] written by ObjectWritable as a non-compact array " - + "was not labelled as an array of int", - i.getClass().getName(), className); + assertEquals(i.getClass().getName(), className, + "The int[] written by ObjectWritable as a non-compact array " + + "was not labelled as an array of int"); int length = in.readInt(); - assertEquals("The int[] written by ObjectWritable as a non-compact array " - + "was not expected length", i.length, length); + assertEquals(i.length, length, + "The int[] written by ObjectWritable as a non-compact array " + + "was not expected length"); int[] readValue = new int[length]; try { @@ -173,8 +172,9 @@ public void testOldFormat() throws IOException { + length + ". 
Got exception:\n" + StringUtils.stringifyException(e)); } - assertTrue("The int[] written by ObjectWritable as a non-compact array " - + "was corrupted.", Arrays.equals(i, readValue)); + assertTrue(Arrays.equals(i, readValue), + "The int[] written by ObjectWritable as a non-compact array " + + "was corrupted."); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java index 20d4f08612964..286a739e7fb59 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java @@ -18,13 +18,14 @@ package org.apache.hadoop.io; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import org.junit.Test; +import org.junit.jupiter.api.Test; /** Unit tests for ArrayWritable */ @@ -73,7 +74,8 @@ public void testArrayWritableToArray() { arrayWritable.set(elements); Object array = arrayWritable.toArray(); - assertTrue("TestArrayWritable testArrayWritableToArray error!!! ", array instanceof Text[]); + assertTrue(array instanceof Text[], + "TestArrayWritable testArrayWritableToArray error!!! "); Text[] destElements = (Text[]) array; for (int i = 0; i < elements.length; i++) { @@ -84,9 +86,11 @@ public void testArrayWritableToArray() { /** * test {@link ArrayWritable} constructor with null */ - @Test(expected = IllegalArgumentException.class) + @Test public void testNullArgument() { - new ArrayWritable((Class) null); + assertThrows(IllegalArgumentException.class, () -> { + new ArrayWritable((Class) null); + }); } /** @@ -96,10 +100,10 @@ public void testNullArgument() { public void testArrayWritableStringConstructor() { String[] original = { "test1", "test2", "test3" }; ArrayWritable arrayWritable = new ArrayWritable(original); - assertEquals("testArrayWritableStringConstructor class error!!!", - Text.class, arrayWritable.getValueClass()); - assertArrayEquals("testArrayWritableStringConstructor toString error!!!", - original, arrayWritable.toStrings()); + assertEquals(Text.class, arrayWritable.getValueClass(), + "testArrayWritableStringConstructor class error!!!"); + assertArrayEquals(original, arrayWritable.toStrings(), + "testArrayWritableStringConstructor toString error!!!"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java index a80f6e07b3878..526852e859733 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java @@ -18,7 +18,9 @@ package org.apache.hadoop.io; -import static org.mockito.Mockito.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; import java.io.IOException; import java.io.InputStream; @@ -42,13 +44,13 @@ import org.apache.hadoop.io.compress.Decompressor; import 
org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Progressable; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import org.junit.Before; -import org.junit.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; public class TestBloomMapFile { private static final Logger LOG = @@ -59,7 +61,7 @@ public class TestBloomMapFile { private static final Path TEST_DIR = new Path(TEST_ROOT, "testfile"); private static final Path TEST_FILE = new Path(TEST_ROOT, "testfile"); - @Before + @BeforeEach public void setUp() throws Exception { LocalFileSystem fs = FileSystem.getLocal(conf); if (fs.exists(TEST_ROOT) && !fs.delete(TEST_ROOT, true)) { @@ -134,8 +136,8 @@ private void checkMembershipVaryingSizedKeys(List keys) reader = new BloomMapFile.Reader(fs, qualifiedDirName.toString(), conf); Collections.reverse(keys); for (Text key : keys) { - assertTrue("False negative for existing key " + key, - reader.probablyHasKey(key)); + assertTrue(reader.probablyHasKey(key), + "False negative for existing key " + key); } reader.close(); fs.delete(qualifiedDirName, true); @@ -171,7 +173,7 @@ public void testDeleteFile() { writer = new BloomMapFile.Writer(conf, TEST_FILE, MapFile.Writer.keyClass(IntWritable.class), MapFile.Writer.valueClass(Text.class)); - assertNotNull("testDeleteFile error !!!", writer); + assertNotNull(writer, "testDeleteFile error !!!"); writer.close(); BloomMapFile.delete(fs, TEST_FILE.toString()); } catch (Exception ex) { @@ -201,8 +203,8 @@ public void testIOExceptionInWriterConstructor() { reader = new BloomMapFile.Reader(dirNameSpy, conf, MapFile.Reader.comparator(new WritableComparator(IntWritable.class))); - assertNull("testIOExceptionInWriterConstructor error !!!", - reader.getBloomFilter()); + assertNull(reader.getBloomFilter(), + "testIOExceptionInWriterConstructor error !!!"); } catch (Exception ex) { fail("unexpect ex in testIOExceptionInWriterConstructor !!!"); } finally { @@ -232,12 +234,12 @@ public void testGetBloomMapFile() { MapFile.Reader.comparator(new WritableComparator(IntWritable.class))); for (int i = 0; i < SIZE; i++) { - assertNotNull("testGetBloomMapFile error !!!", - reader.get(new IntWritable(i), new Text())); + assertNotNull(reader.get(new IntWritable(i), new Text()), + "testGetBloomMapFile error !!!"); } - assertNull("testGetBloomMapFile error !!!", - reader.get(new IntWritable(SIZE + 5), new Text())); + assertNull(reader.get(new IntWritable(SIZE + 5), new Text()), + "testGetBloomMapFile error !!!"); } catch (Exception ex) { fail("unexpect ex in testGetBloomMapFile !!!"); } finally { @@ -258,34 +260,34 @@ public void testBloomMapFileConstructors() { writer = new BloomMapFile.Writer(conf, ts, testFileName, IntWritable.class, Text.class, CompressionType.BLOCK, defaultCodec, defaultProgress); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); writer = new BloomMapFile.Writer(conf, ts, testFileName, IntWritable.class, Text.class, CompressionType.BLOCK, 
defaultProgress); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); writer = new BloomMapFile.Writer(conf, ts, testFileName, IntWritable.class, Text.class, CompressionType.BLOCK); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); writer = new BloomMapFile.Writer(conf, ts, testFileName, IntWritable.class, Text.class, CompressionType.RECORD, defaultCodec, defaultProgress); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); writer = new BloomMapFile.Writer(conf, ts, testFileName, IntWritable.class, Text.class, CompressionType.RECORD, defaultProgress); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); writer = new BloomMapFile.Writer(conf, ts, testFileName, IntWritable.class, Text.class, CompressionType.RECORD); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); writer = new BloomMapFile.Writer(conf, ts, testFileName, WritableComparator.get(Text.class), Text.class); - assertNotNull("testBloomMapFileConstructors error !!!", writer); + assertNotNull(writer, "testBloomMapFileConstructors error !!!"); writer.close(); } catch (Exception ex) { fail("testBloomMapFileConstructors error !!!"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java index 23c28fbe0706d..e5e8462cb8010 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java @@ -19,8 +19,10 @@ import java.io.IOException; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; public class TestBooleanWritable { @@ -56,15 +58,24 @@ protected DataOutputBuffer writeWritable(Writable writable) */ @Test public void testCommonMethods() { - assertTrue("testCommonMethods1 error !!!", newInstance(true).equals(newInstance(true))); - assertTrue("testCommonMethods2 error !!!", newInstance(false).equals(newInstance(false))); - assertFalse("testCommonMethods3 error !!!", newInstance(false).equals(newInstance(true))); - assertTrue("testCommonMethods4 error !!!", checkHashCode(newInstance(true), newInstance(true))); - assertFalse("testCommonMethods5 error !!! 
", checkHashCode(newInstance(true), newInstance(false))); - assertTrue("testCommonMethods6 error !!!", newInstance(true).compareTo(newInstance(false)) > 0 ); - assertTrue("testCommonMethods7 error !!!", newInstance(false).compareTo(newInstance(true)) < 0 ); - assertTrue("testCommonMethods8 error !!!", newInstance(false).compareTo(newInstance(false)) == 0 ); - assertEquals("testCommonMethods9 error !!!", "true", newInstance(true).toString()); + assertTrue(newInstance(true).equals(newInstance(true)), + "testCommonMethods1 error !!!"); + assertTrue(newInstance(false).equals(newInstance(false)), + "testCommonMethods2 error !!!"); + assertFalse(newInstance(false).equals(newInstance(true)), + "testCommonMethods3 error !!!"); + assertTrue(checkHashCode(newInstance(true), newInstance(true)), + "testCommonMethods4 error !!!"); + assertFalse(checkHashCode(newInstance(true), newInstance(false)), + "testCommonMethods5 error !!! "); + assertTrue(newInstance(true).compareTo(newInstance(false)) > 0, + "testCommonMethods6 error !!!" ); + assertTrue(newInstance(false).compareTo(newInstance(true)) < 0, + "testCommonMethods7 error !!!" ); + assertTrue(newInstance(false).compareTo(newInstance(false)) == 0, + "testCommonMethods8 error !!!" ); + assertEquals("true", newInstance(true).toString(), + "testCommonMethods9 error !!!"); } private boolean checkHashCode(BooleanWritable f, BooleanWritable s) { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java index 191fc6520624b..ea56af23b8b00 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBoundedByteArrayOutputStream.java @@ -18,11 +18,11 @@ package org.apache.hadoop.io; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.IOException; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; import java.util.Random; @@ -44,8 +44,8 @@ public void testBoundedStream() throws IOException { // Write to the stream, get the data back and check for contents stream.write(INPUT, 0, SIZE); - assertTrue("Array Contents Mismatch", - Arrays.equals(INPUT, stream.getBuffer())); + assertTrue(Arrays.equals(INPUT, stream.getBuffer()), + "Array Contents Mismatch"); // Try writing beyond end of buffer. Should throw an exception boolean caughtException = false; @@ -56,16 +56,16 @@ public void testBoundedStream() throws IOException { caughtException = true; } - assertTrue("Writing beyond limit did not throw an exception", - caughtException); + assertTrue(caughtException, + "Writing beyond limit did not throw an exception"); //Reset the stream and try, should succeed stream.reset(); - assertTrue("Limit did not get reset correctly", - (stream.getLimit() == SIZE)); + assertTrue((stream.getLimit() == SIZE), + "Limit did not get reset correctly"); stream.write(INPUT, 0, SIZE); - assertTrue("Array Contents Mismatch", - Arrays.equals(INPUT, stream.getBuffer())); + assertTrue(Arrays.equals(INPUT, stream.getBuffer()), + "Array Contents Mismatch"); // Try writing one more byte, should fail caughtException = false; @@ -78,8 +78,8 @@ public void testBoundedStream() throws IOException { // Reset the stream, but set a lower limit. 
Writing beyond // the limit should throw an exception stream.reset(SIZE - 1); - assertTrue("Limit did not get reset correctly", - (stream.getLimit() == SIZE -1)); + assertTrue((stream.getLimit() == SIZE -1), + "Limit did not get reset correctly"); caughtException = false; try { @@ -88,8 +88,8 @@ public void testBoundedStream() throws IOException { caughtException = true; } - assertTrue("Writing beyond limit did not throw an exception", - caughtException); + assertTrue(caughtException, + "Writing beyond limit did not throw an exception"); } @@ -114,8 +114,8 @@ public void testResetBuffer() throws IOException { // Write to the stream, get the data back and check for contents stream.write(INPUT, 0, SIZE); - assertTrue("Array Contents Mismatch", - Arrays.equals(INPUT, stream.getBuffer())); + assertTrue(Arrays.equals(INPUT, stream.getBuffer()), + "Array Contents Mismatch"); // Try writing beyond end of buffer. Should throw an exception boolean caughtException = false; @@ -126,17 +126,17 @@ public void testResetBuffer() throws IOException { caughtException = true; } - assertTrue("Writing beyond limit did not throw an exception", - caughtException); + assertTrue(caughtException, + "Writing beyond limit did not throw an exception"); //Reset the stream and try, should succeed byte[] newBuf = new byte[SIZE]; stream.resetBuffer(newBuf, 0, newBuf.length); - assertTrue("Limit did not get reset correctly", - (stream.getLimit() == SIZE)); + assertTrue((stream.getLimit() == SIZE), + "Limit did not get reset correctly"); stream.write(INPUT, 0, SIZE); - assertTrue("Array Contents Mismatch", - Arrays.equals(INPUT, stream.getBuffer())); + assertTrue(Arrays.equals(INPUT, stream.getBuffer()), + "Array Contents Mismatch"); // Try writing one more byte, should fail caughtException = false; @@ -145,8 +145,8 @@ public void testResetBuffer() throws IOException { } catch (Exception e) { caughtException = true; } - assertTrue("Writing beyond limit did not throw an exception", - caughtException); + assertTrue(caughtException, + "Writing beyond limit did not throw an exception"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java index 698ae32e4c176..695d02d2e1176 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.io; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; /** @@ -112,26 +112,28 @@ public void testZeroCopy() { BytesWritable zeroBuf = new BytesWritable(bytes, bytes.length); // new BytesWritable copyBuf = new BytesWritable(bytes); // old // using zero copy constructor shouldn't result in a copy - assertTrue("copy took place, backing array != array passed to constructor", - bytes == zeroBuf.getBytes()); - assertTrue("length of BW should backing byte array", zeroBuf.getLength() == bytes.length); - assertEquals("objects with same backing array should be equal", zeroBuf, copyBuf); - assertEquals("string repr of objects with same backing array should be equal", - zeroBuf.toString(), copyBuf.toString()); - assertTrue("compare order objects with 
same backing array should be equal", - zeroBuf.compareTo(copyBuf) == 0); - assertTrue("hash of objects with same backing array should be equal", - zeroBuf.hashCode() == copyBuf.hashCode()); + assertTrue(bytes == zeroBuf.getBytes(), + "copy took place, backing array != array passed to constructor"); + assertTrue(zeroBuf.getLength() == bytes.length, + "length of BW should backing byte array"); + assertEquals(zeroBuf, copyBuf, + "objects with same backing array should be equal"); + assertEquals(zeroBuf.toString(), copyBuf.toString(), + "string repr of objects with same backing array should be equal"); + assertTrue(zeroBuf.compareTo(copyBuf) == 0, + "compare order objects with same backing array should be equal"); + assertTrue(zeroBuf.hashCode() == copyBuf.hashCode(), + "hash of objects with same backing array should be equal"); // ensure expanding buffer is handled correctly // for buffers created with zero copy api byte[] buffer = new byte[bytes.length * 5]; zeroBuf.set(buffer, 0, buffer.length); // expand internal buffer zeroBuf.set(bytes, 0, bytes.length); // set back to normal contents - assertEquals("buffer created with (array, len) has bad contents", - zeroBuf, copyBuf); - assertTrue("buffer created with (array, len) has bad length", - zeroBuf.getLength() == copyBuf.getLength()); + assertEquals(zeroBuf, copyBuf, + "buffer created with (array, len) has bad contents"); + assertTrue(zeroBuf.getLength() == copyBuf.getLength(), + "buffer created with (array, len) has bad length"); } /** @@ -143,14 +145,16 @@ public void testObjectCommonMethods() { byte b = 0x9; ByteWritable bw = new ByteWritable(); bw.set(b); - assertTrue("testSetByteWritable error", bw.get() == b); - assertTrue("testSetByteWritable error < 0", bw.compareTo(new ByteWritable((byte)0xA)) < 0); - assertTrue("testSetByteWritable error > 0", bw.compareTo(new ByteWritable((byte)0x8)) > 0); - assertTrue("testSetByteWritable error == 0", bw.compareTo(new ByteWritable((byte)0x9)) == 0); - assertTrue("testSetByteWritable equals error !!!", bw.equals(new ByteWritable((byte)0x9))); - assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new ByteWritable((byte)0xA))); - assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new IntWritable(1))); - assertEquals("testSetByteWritable error ", "9", bw.toString()); + assertTrue(bw.get() == b, "testSetByteWritable error"); + assertTrue(bw.compareTo(new ByteWritable((byte)0xA)) < 0, "testSetByteWritable error < 0"); + assertTrue(bw.compareTo(new ByteWritable((byte)0x8)) > 0, "testSetByteWritable error > 0"); + assertTrue(bw.compareTo(new ByteWritable((byte)0x9)) == 0, "testSetByteWritable error == 0"); + assertTrue(bw.equals(new ByteWritable((byte)0x9)), "testSetByteWritable equals error !!!"); + assertTrue(! bw.equals(new ByteWritable((byte)0xA)), + "testSetByteWritable equals error !!!"); + assertTrue(! 
bw.equals(new IntWritable(1)), + "testSetByteWritable equals error !!!"); + assertEquals("9", bw.toString(), "testSetByteWritable error "); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDataByteBuffers.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDataByteBuffers.java index d06ebaf81e1bf..3c1c2b483463d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDataByteBuffers.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDataByteBuffers.java @@ -24,8 +24,9 @@ import java.nio.ByteBuffer; import java.util.Random; -import org.junit.Test; -import static org.junit.Assert.*; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestDataByteBuffers { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java index c15ec8caa4f6c..60e12d7bd41c3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestDefaultStringifier.java @@ -22,12 +22,12 @@ import java.util.Random; import org.apache.hadoop.conf.Configuration; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.hadoop.test.LambdaTestUtils.intercept; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestDefaultStringifier { @@ -93,8 +93,8 @@ public void testStoreLoad() throws IOException { DefaultStringifier.store(conf,text, keyName); Text claimedText = DefaultStringifier.load(conf, keyName, Text.class); - assertEquals("DefaultStringifier#load() or #store() might be flawed" - , text, claimedText); + assertEquals(text, claimedText, + "DefaultStringifier#load() or #store() might be flawed"); } @@ -114,7 +114,7 @@ public void testStoreLoadArray() throws Exception { Integer[] claimedArray = DefaultStringifier.loadArray(conf, keyName, Integer.class); for (int i = 0; i < array.length; i++) { - assertEquals("two arrays are not equal", array[i], claimedArray[i]); + assertEquals(array[i], claimedArray[i], "two arrays are not equal"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java index 11459261f5b74..6385d57d26409 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java @@ -18,10 +18,10 @@ package org.apache.hadoop.io; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; @@ -67,10 +67,9 @@ public void testSerializeAndDeserializeEmpty() throws IOException { gotException = true; } - 
assertTrue( + assertTrue(gotException, "Instantiation of empty EnumSetWritable with no element type class " - + "provided should throw exception.", - gotException); + + "provided should throw exception."); EnumSetWritable emptyFlagWritable = new EnumSetWritable(emptyFlag, TestEnumSet.class); @@ -95,10 +94,9 @@ public void testSerializeAndDeserializeNull() throws IOException { gotException = true; } - assertTrue( + assertTrue(gotException, "Instantiation of empty EnumSetWritable with no element type class " - + "provided should throw exception", - gotException); + + "provided should throw exception"); EnumSetWritable nullFlagWritable = new EnumSetWritable(null, TestEnumSet.class); @@ -136,13 +134,12 @@ public void testEnumSetWritableEquals() { EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class); EnumSetWritable eset2 = new EnumSetWritable( EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class); - assertTrue("testEnumSetWritableEquals error !!!", eset1.equals(eset2)); - assertFalse("testEnumSetWritableEquals error !!!", - eset1.equals(new EnumSetWritable(EnumSet.of( - TestEnumSet.APPEND, TestEnumSet.CREATE, TestEnumSet.OVERWRITE), - TestEnumSet.class))); - assertTrue("testEnumSetWritableEquals getElementType error !!!", eset1 - .getElementType().equals(TestEnumSet.class)); + assertTrue(eset1.equals(eset2), "testEnumSetWritableEquals error !!!"); + assertFalse(eset1.equals(new EnumSetWritable(EnumSet.of( + TestEnumSet.APPEND, TestEnumSet.CREATE, TestEnumSet.OVERWRITE), + TestEnumSet.class)), "testEnumSetWritableEquals error !!!"); + assertTrue(eset1.getElementType().equals(TestEnumSet.class), + "testEnumSetWritableEquals getElementType error !!!"); } /** @@ -165,8 +162,8 @@ public void testEnumSetWritableWriteRead() throws Exception { Iterator dstIter = result.iterator(); Iterator srcIter = srcSet.iterator(); while (dstIter.hasNext() && srcIter.hasNext()) { - assertEquals("testEnumSetWritableWriteRead error !!!", dstIter.next(), - srcIter.next()); + assertEquals(dstIter.next(), srcIter.next(), + "testEnumSetWritableWriteRead error !!!"); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java index 2f576441645d6..70f97a395b651 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestGenericWritable.java @@ -24,12 +24,12 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; -import org.junit.Before; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.fail; /** * TestCase for {@link GenericWritable} class. 
@@ -41,7 +41,7 @@ public class TestGenericWritable { public static final String CONF_TEST_KEY = "test.generic.writable"; public static final String CONF_TEST_VALUE = "dummy"; - @Before + @BeforeEach public void setUp() throws Exception { conf = new Configuration(); //set the configuration parameter @@ -100,8 +100,8 @@ public static class Baz extends Bar { public void readFields(DataInput in) throws IOException { super.readFields(in); //needs a configuration parameter - assertEquals("Configuration is not set for the wrapped object", - CONF_TEST_VALUE, getConf().get(CONF_TEST_KEY)); + assertEquals(CONF_TEST_VALUE, getConf().get(CONF_TEST_KEY), + "Configuration is not set for the wrapped object"); } @Override public void write(DataOutput out) throws IOException { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java index 51f207f97ad29..0fa638a51ebf2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestIOUtils.java @@ -18,8 +18,16 @@ package org.apache.hadoop.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.atMost; +import static org.mockito.Mockito.atLeastOnce; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; @@ -43,9 +51,7 @@ import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.LambdaTestUtils; -import org.junit.Assert; -import org.junit.Test; -import org.mockito.Mockito; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,109 +64,109 @@ public class TestIOUtils { @Test public void testCopyBytesShouldCloseStreamsWhenCloseIsTrue() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[1]); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[1]); IOUtils.copyBytes(inputStream, outputStream, 1, true); - Mockito.verify(inputStream, Mockito.atLeastOnce()).close(); - Mockito.verify(outputStream, Mockito.atLeastOnce()).close(); + verify(inputStream, atLeastOnce()).close(); + verify(outputStream, atLeastOnce()).close(); } @Test public void testCopyBytesShouldCloseInputSteamWhenOutputStreamCloseThrowsException() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[1]); - Mockito.doThrow(new IOException()).when(outputStream).close(); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[1]); + doThrow(new IOException()).when(outputStream).close(); try{ 
IOUtils.copyBytes(inputStream, outputStream, 1, true); } catch (IOException e) { } - Mockito.verify(inputStream, Mockito.atLeastOnce()).close(); - Mockito.verify(outputStream, Mockito.atLeastOnce()).close(); + verify(inputStream, atLeastOnce()).close(); + verify(outputStream, atLeastOnce()).close(); } @Test public void testCopyBytesShouldCloseInputSteamWhenOutputStreamCloseThrowsRunTimeException() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[1]); - Mockito.doThrow(new RuntimeException()).when(outputStream).close(); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[1]); + doThrow(new RuntimeException()).when(outputStream).close(); try { IOUtils.copyBytes(inputStream, outputStream, 1, true); fail("Didn't throw exception"); } catch (RuntimeException e) { } - Mockito.verify(outputStream, Mockito.atLeastOnce()).close(); + verify(outputStream, atLeastOnce()).close(); } @Test public void testCopyBytesShouldCloseInputSteamWhenInputStreamCloseThrowsRunTimeException() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[1]); - Mockito.doThrow(new RuntimeException()).when(inputStream).close(); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[1]); + doThrow(new RuntimeException()).when(inputStream).close(); try { IOUtils.copyBytes(inputStream, outputStream, 1, true); fail("Didn't throw exception"); } catch (RuntimeException e) { } - Mockito.verify(inputStream, Mockito.atLeastOnce()).close(); + verify(inputStream, atLeastOnce()).close(); } @Test public void testCopyBytesShouldNotCloseStreamsWhenCloseIsFalse() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[1]); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[1]); IOUtils.copyBytes(inputStream, outputStream, 1, false); - Mockito.verify(inputStream, Mockito.atMost(0)).close(); - Mockito.verify(outputStream, Mockito.atMost(0)).close(); + verify(inputStream, atMost(0)).close(); + verify(outputStream, atMost(0)).close(); } @Test public void testCopyBytesWithCountShouldCloseStreamsWhenCloseIsTrue() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[4096], 0, 1); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[4096], 0, 1); IOUtils.copyBytes(inputStream, outputStream, (long) 1, true); - Mockito.verify(inputStream, Mockito.atLeastOnce()).close(); - Mockito.verify(outputStream, Mockito.atLeastOnce()).close(); + verify(inputStream, atLeastOnce()).close(); + verify(outputStream, atLeastOnce()).close(); } @Test public void testCopyBytesWithCountShouldNotCloseStreamsWhenCloseIsFalse() throws Exception { - InputStream inputStream = 
Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[4096], 0, 1); + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[4096], 0, 1); IOUtils.copyBytes(inputStream, outputStream, (long) 1, false); - Mockito.verify(inputStream, Mockito.atMost(0)).close(); - Mockito.verify(outputStream, Mockito.atMost(0)).close(); + verify(inputStream, atMost(0)).close(); + verify(outputStream, atMost(0)).close(); } @Test public void testCopyBytesWithCountShouldThrowOutTheStreamClosureExceptions() throws Exception { - InputStream inputStream = Mockito.mock(InputStream.class); - OutputStream outputStream = Mockito.mock(OutputStream.class); - Mockito.doReturn(-1).when(inputStream).read(new byte[4096], 0, 1); - Mockito.doThrow(new IOException("Exception in closing the stream")).when( + InputStream inputStream = mock(InputStream.class); + OutputStream outputStream = mock(OutputStream.class); + doReturn(-1).when(inputStream).read(new byte[4096], 0, 1); + doThrow(new IOException("Exception in closing the stream")).when( outputStream).close(); try { IOUtils.copyBytes(inputStream, outputStream, (long) 1, true); fail("Should throw out the exception"); } catch (IOException e) { - assertEquals("Not throwing the expected exception.", - "Exception in closing the stream", e.getMessage()); + assertEquals("Exception in closing the stream", e.getMessage(), + "Not throwing the expected exception."); } - Mockito.verify(inputStream, Mockito.atLeastOnce()).close(); - Mockito.verify(outputStream, Mockito.atLeastOnce()).close(); + verify(inputStream, atLeastOnce()).close(); + verify(outputStream, atLeastOnce()).close(); } @Test @@ -205,14 +211,14 @@ public void testWriteFully() throws IOException { @Test public void testWrappedReadForCompressedData() throws IOException { byte[] buf = new byte[2]; - InputStream mockStream = Mockito.mock(InputStream.class); - Mockito.when(mockStream.read(buf, 0, 1)).thenReturn(1); - Mockito.when(mockStream.read(buf, 0, 2)).thenThrow( + InputStream mockStream = mock(InputStream.class); + when(mockStream.read(buf, 0, 1)).thenReturn(1); + when(mockStream.read(buf, 0, 2)).thenThrow( new java.lang.InternalError()); try { - assertEquals("Check expected value", 1, - IOUtils.wrappedReadForCompressedData(mockStream, buf, 0, 1)); + assertEquals(1, IOUtils.wrappedReadForCompressedData(mockStream, buf, 0, 1), + "Check expected value"); } catch (IOException ioe) { fail("Unexpected error while reading"); } @@ -285,14 +291,14 @@ public void testListDirectory() throws IOException { List list = IOUtils.listDirectory(dir, NoEntry3Filter.INSTANCE); for (String entry : list) { - Assert.assertTrue(entries.remove(entry)); + assertTrue(entries.remove(entry)); } - Assert.assertTrue(entries.contains("entry3")); + assertTrue(entries.contains("entry3")); list = IOUtils.listDirectory(dir, null); for (String entry : list) { entries.remove(entry); } - Assert.assertTrue(entries.isEmpty()); + assertTrue(entries.isEmpty()); } finally { FileUtils.deleteDirectory(dir); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java index e3f5df046e1df..99a8fc71898df 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMD5Hash.java @@ -18,10 +18,10 @@ package org.apache.hadoop.io; -import org.junit.Test; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.ByteArrayInputStream; @@ -90,8 +90,8 @@ public void testMD5Hash() throws Exception { assertEquals(0x0102030405060708L, orderedHash.halfDigest()); assertEquals(0xfffefdfcfbfaf9f8L, backwardHash.halfDigest()); - assertTrue("hash collision", - closeHash1.hashCode() != closeHash2.hashCode()); + assertTrue(closeHash1.hashCode() != closeHash2.hashCode(), + "hash collision"); Thread t1 = new Thread() { @Override diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java index d8a22f358adaa..876fade217250 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java @@ -41,13 +41,18 @@ import org.apache.hadoop.io.compress.Decompressor; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Progressable; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import static org.junit.Assert.*; - -import static org.mockito.Mockito.*; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; public class TestMapFile { private static final Logger LOG = LoggerFactory.getLogger(TestMapFile.class); @@ -56,11 +61,11 @@ public class TestMapFile { private static Configuration conf = new Configuration(); - @Before + @BeforeEach public void setup() throws Exception { LocalFileSystem fs = FileSystem.getLocal(conf); if (fs.exists(TEST_DIR) && !fs.delete(TEST_DIR, true)) { - Assert.fail("Can't clean up test root dir"); + fail("Can't clean up test root dir"); } fs.mkdirs(TEST_DIR); } @@ -183,7 +188,7 @@ public void testGetClosestOnCurrentApi() throws Exception { // Assert that null is returned if key is > last entry in mapfile. 
key = new Text("92"); closest = (Text) reader.getClosest(key, value); - assertNull("Not null key in testGetClosestWithNewCode", closest); + assertNull(closest, "Not null key in testGetClosestWithNewCode"); // If we were looking for the key before, we should get the last key closest = (Text) reader.getClosest(key, value, true); @@ -264,8 +269,8 @@ public void testRenameWithException() { MapFile.rename(spyFs, oldDir.toString(), newDir.toString()); fail("testRenameWithException no exception error !!!"); } catch (IOException ex) { - assertEquals("testRenameWithException invalid IOExceptionMessage !!!", - ex.getMessage(), ERROR_MESSAGE); + assertEquals(ex.getMessage(), ERROR_MESSAGE, + "testRenameWithException invalid IOExceptionMessage !!!"); } finally { IOUtils.cleanupWithLogger(LOG, writer); } @@ -291,8 +296,8 @@ public void testRenameWithFalse() { MapFile.rename(spyFs, oldDir.toString(), newDir.toString()); fail("testRenameWithException no exception error !!!"); } catch (IOException ex) { - assertTrue("testRenameWithFalse invalid IOExceptionMessage error !!!", ex - .getMessage().startsWith(ERROR_MESSAGE)); + assertTrue(ex.getMessage().startsWith(ERROR_MESSAGE), + "testRenameWithFalse invalid IOExceptionMessage error !!!"); } finally { IOUtils.cleanupWithLogger(LOG, writer); } @@ -318,8 +323,8 @@ public void testWriteWithFailDirCreation() { MapFile.Writer.valueClass(Text.class)); fail("testWriteWithFailDirCreation error !!!"); } catch (IOException ex) { - assertTrue("testWriteWithFailDirCreation ex error !!!", ex.getMessage() - .startsWith(ERROR_MESSAGE)); + assertTrue(ex.getMessage().startsWith(ERROR_MESSAGE), + "testWriteWithFailDirCreation ex error !!!"); } finally { IOUtils.cleanupWithLogger(LOG, writer); } @@ -344,8 +349,8 @@ public void testOnFinalKey() { reader = createReader(TEST_METHOD_KEY, IntWritable.class); IntWritable expectedKey = new IntWritable(0); reader.finalKey(expectedKey); - assertEquals("testOnFinalKey not same !!!", expectedKey, new IntWritable( - 9)); + assertEquals(expectedKey, new IntWritable(9), + "testOnFinalKey not same !!!"); } catch (IOException ex) { fail("testOnFinalKey error !!!"); } finally { @@ -364,10 +369,10 @@ public void testKeyValueClasses() { try { createWriter("testKeyValueClasses.mapfile", IntWritable.class, Text.class) .close(); - assertNotNull("writer key class null error !!!", - MapFile.Writer.keyClass(keyClass)); - assertNotNull("writer value class null error !!!", - MapFile.Writer.valueClass(valueClass)); + assertNotNull(MapFile.Writer.keyClass(keyClass), + "writer key class null error !!!"); + assertNotNull(MapFile.Writer.valueClass(valueClass), + "writer value class null error !!!"); } catch (IOException ex) { fail(ex.getMessage()); } @@ -446,10 +451,10 @@ public void testReaderKeyIteration() { } reader.reset(); } - assertTrue("reader seek error !!!", - reader.seek(new IntWritable(SIZE / 2))); - assertFalse("reader seek error !!!", - reader.seek(new IntWritable(SIZE * 2))); + assertTrue(reader.seek(new IntWritable(SIZE / 2)), + "reader seek error !!!"); + assertFalse(reader.seek(new IntWritable(SIZE * 2)), + "reader seek error !!!"); } catch (IOException ex) { fail("reader seek error !!!"); } finally { @@ -479,8 +484,8 @@ public void testFix() { isDeleted = indexFile.delete(); if (isDeleted) - assertTrue("testFix error !!!", - MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf) == PAIR_SIZE); + assertTrue(MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf) == PAIR_SIZE, + "testFix error !!!"); } catch (Exception ex) { 
fail("testFix error !!!"); } finally { @@ -521,9 +526,8 @@ public void testFixBlockCompress() throws Exception { Path index = new Path(dir, MapFile.INDEX_FILE_NAME); fs.rename(index, index.suffix(".orig")); - assertEquals("No of valid MapFile entries wrong", size, - MapFile.fix(fs, dir, IntWritable.class, Text.class, - false, conf)); + assertEquals(size, MapFile.fix(fs, dir, IntWritable.class, Text.class, false, conf), + "No of valid MapFile entries wrong"); reader = new MapFile.Reader(dir, conf); IntWritable key; Text val = new Text(); @@ -534,8 +538,8 @@ public void testFixBlockCompress() throws Exception { notFound++; } } - assertEquals("With MapFile.fix-ed index, could not get entries # ", - 0, notFound); + assertEquals(0, notFound, + "With MapFile.fix-ed index, could not get entries # "); } finally { IOUtils.cleanupWithLogger(null, writer, reader); if (fs.exists(dir)) { @@ -585,8 +589,8 @@ public void testDeprecatedConstructors() { reader = new MapFile.Reader(fs, path, WritableComparator.get(IntWritable.class), conf); assertNotNull(reader); - assertNotNull("reader key is null !!!", reader.getKeyClass()); - assertNotNull("reader value in null", reader.getValueClass()); + assertNotNull(reader.getKeyClass(), "reader key is null !!!"); + assertNotNull(reader.getValueClass(), "reader value in null"); } catch (IOException e) { fail(e.getMessage()); } finally { @@ -633,8 +637,8 @@ public void testPathExplosionWriterCreation() { MapFile.Writer.valueClass(IntWritable.class)); fail("fail in testPathExplosionWriterCreation !!!"); } catch (IOException ex) { - assertEquals("testPathExplosionWriterCreation ex message error !!!", - ex.getMessage(), TEST_ERROR_MESSAGE); + assertEquals(ex.getMessage(), TEST_ERROR_MESSAGE, + "testPathExplosionWriterCreation ex message error !!!"); } catch (Exception e) { fail("fail in testPathExplosionWriterCreation. 
Other ex !!!"); } finally { @@ -829,8 +833,8 @@ public void testMerge() throws Exception { Text value = startValue; IntWritable prev = new IntWritable(start); while (reader.next(key, value)) { - assertTrue("Next key should be always equal or more", - prev.get() <= key.get()); + assertTrue(prev.get() <= key.get(), + "Next key should be always equal or more"); assertEquals(expectedIterator.next().intValue(), key.get()); prev.set(key.get()); } @@ -841,8 +845,8 @@ public void testMerge() throws Exception { // inputs should be deleted for (int j = 0; j < in.length; j++) { Path path = in[j]; - assertFalse("inputs should be deleted", - path.getFileSystem(conf).exists(path)); + assertFalse(path.getFileSystem(conf).exists(path), + "inputs should be deleted"); } } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java index ecdb7f8d7dfe0..e08690d46c269 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapWritable.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.io; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -25,9 +25,9 @@ import java.io.DataOutputStream; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; /** * Tests MapWritable diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestObjectWritableProtos.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestObjectWritableProtos.java index f3012ded25bb5..f439a43700298 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestObjectWritableProtos.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestObjectWritableProtos.java @@ -17,12 +17,12 @@ */ package org.apache.hadoop.io; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import org.apache.hadoop.conf.Configuration; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.apache.hadoop.thirdparty.protobuf.DescriptorProtos; import org.apache.hadoop.thirdparty.protobuf.Message; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java index f9a5a30966419..6fdc95e2e2ac4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSecureIOUtils.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.io; -import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.junit.jupiter.api.Assumptions.assumeTrue; import java.io.File; import java.io.FileOutputStream; @@ -30,9 +30,10 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import 
org.apache.hadoop.io.nativeio.NativeIO; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestSecureIOUtils { @@ -42,7 +43,7 @@ public class TestSecureIOUtils { private static File testFilePathFadis; private static FileSystem fs; - @BeforeClass + @BeforeAll public static void makeTestFile() throws Exception { Configuration conf = new Configuration(); fs = FileSystem.getLocal(conf).getRaw(); @@ -69,14 +70,16 @@ public static void makeTestFile() throws Exception { realGroup = stat.getGroup(); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testReadUnrestricted() throws IOException { SecureIOUtils.openForRead(testFilePathIs, null, null).close(); SecureIOUtils.openFSDataInputStream(testFilePathFadis, null, null).close(); SecureIOUtils.openForRandomRead(testFilePathRaf, "r", null, null).close(); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testReadCorrectlyRestrictedWithSecurity() throws IOException { SecureIOUtils .openForRead(testFilePathIs, realOwner, realGroup).close(); @@ -86,7 +89,8 @@ public void testReadCorrectlyRestrictedWithSecurity() throws IOException { .close(); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testReadIncorrectlyRestrictedWithSecurity() throws IOException { // this will only run if libs are available assumeTrue(NativeIO.isAvailable()); @@ -129,7 +133,8 @@ public void testReadIncorrectlyRestrictedWithSecurity() throws IOException { } } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testCreateForWrite() throws IOException { try { SecureIOUtils.createForWrite(testFilePathIs, 0777); @@ -139,7 +144,7 @@ public void testCreateForWrite() throws IOException { } } - @AfterClass + @AfterAll public static void removeTestFile() throws Exception { // cleaning files for (File f : new File[] { testFilePathIs, testFilePathRaf, diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java index 8944cae70f955..f63fe057efe59 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java @@ -33,15 +33,16 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.conf.*; -import org.assertj.core.api.Assertions; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import org.mockito.Mockito; +import org.junit.jupiter.api.Test; +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,14 +67,14 @@ public void testSorterProperties() throws IOException { // Test to ensure that deprecated properties have no default // references anymore. Configuration config = new Configuration(); - assertNull("The deprecated sort memory property " + assertNull(config.get(CommonConfigurationKeys.IO_SORT_MB_KEY), + "The deprecated sort memory property " + CommonConfigurationKeys.IO_SORT_MB_KEY - + " must not exist in any core-*.xml files.", - config.get(CommonConfigurationKeys.IO_SORT_MB_KEY)); - assertNull("The deprecated sort factor property " + + " must not exist in any core-*.xml files."); + assertNull(config.get(CommonConfigurationKeys.IO_SORT_FACTOR_KEY), + "The deprecated sort factor property " + CommonConfigurationKeys.IO_SORT_FACTOR_KEY - + " must not exist in any core-*.xml files.", - config.get(CommonConfigurationKeys.IO_SORT_FACTOR_KEY)); + + " must not exist in any core-*.xml files."); // Test deprecated property honoring // Set different values for old and new property names @@ -86,10 +87,10 @@ public void testSorterProperties() throws IOException { config.setInt(CommonConfigurationKeys.SEQ_IO_SORT_FACTOR_KEY, 20); SequenceFile.Sorter sorter = new SequenceFile.Sorter( fs, Text.class, Text.class, config); - assertEquals("Deprecated memory conf must be honored over newer property", - 10*1024*1024, sorter.getMemory()); - assertEquals("Deprecated factor conf must be honored over newer property", - 10, sorter.getFactor()); + assertEquals(10 * 1024 * 1024, sorter.getMemory(), + "Deprecated memory conf must be honored over newer property"); + assertEquals(10, sorter.getFactor(), + "Deprecated factor conf must be honored over newer property"); // Test deprecated properties (graceful deprecation) config = new Configuration(); @@ -98,15 +99,14 @@ public void testSorterProperties() throws IOException { config.setInt(CommonConfigurationKeys.IO_SORT_FACTOR_KEY, 10); sorter = new SequenceFile.Sorter( fs, Text.class, Text.class, config); - assertEquals("Deprecated memory property " + assertEquals(10 * 1024 * 1024, // In bytes + sorter.getMemory(), "Deprecated memory property " + CommonConfigurationKeys.IO_SORT_MB_KEY - + " must get properly applied.", - 10*1024*1024, // In bytes - sorter.getMemory()); - assertEquals("Deprecated sort factor property " + + " must get properly applied."); + assertEquals(10, + sorter.getFactor(), "Deprecated sort factor property " + CommonConfigurationKeys.IO_SORT_FACTOR_KEY - + " must get properly applied.", - 10, sorter.getFactor()); + + " must get properly applied."); // Test regular properties (graceful deprecation) config = new Configuration(); @@ -115,15 +115,14 @@ public void testSorterProperties() throws IOException { config.setInt(CommonConfigurationKeys.SEQ_IO_SORT_FACTOR_KEY, 20); sorter = new SequenceFile.Sorter( fs, Text.class, Text.class, config); - assertEquals("Memory property " + assertEquals(20 * 1024 * 1024, // In bytes + sorter.getMemory(), "Memory property " + CommonConfigurationKeys.SEQ_IO_SORT_MB_KEY - + " must get properly applied if present.", - 20*1024*1024, // In bytes - sorter.getMemory()); - assertEquals("Merge factor property " + + " must get properly applied if present."); + assertEquals(20, sorter.getFactor(), + "Merge factor property " + CommonConfigurationKeys.SEQ_IO_SORT_FACTOR_KEY - + " must get properly applied if present.", - 20, sorter.getFactor()); + + " must get properly applied if present."); } public void compressedSeqFileTest(CompressionCodec codec) throws 
Exception { @@ -553,12 +552,12 @@ public void testClose() throws IOException { @Test public void testCreateUsesFsArg() throws Exception { FileSystem fs = FileSystem.getLocal(conf); - FileSystem spyFs = Mockito.spy(fs); + FileSystem spyFs = spy(fs); Path p = new Path(GenericTestUtils.getTempPath("testCreateUsesFSArg.seq")); SequenceFile.Writer writer = SequenceFile.createWriter( spyFs, conf, p, NullWritable.class, NullWritable.class); writer.close(); - Mockito.verify(spyFs).getDefaultReplication(p); + verify(spyFs).getDefaultReplication(p); } private static class TestFSDataInputStream extends FSDataInputStream { @@ -605,8 +604,8 @@ protected FSDataInputStream openFile(FileSystem fs, Path file, int bufferSize, l fail("IOException expected."); } catch (IOException expected) {} - assertNotNull(path + " should have been opened.", openedFile[0]); - assertTrue("InputStream for " + path + " should have been closed.", openedFile[0].isClosed()); + assertNotNull(openedFile[0], path + " should have been opened."); + assertTrue(openedFile[0].isClosed(), "InputStream for " + path + " should have been closed."); } /** @@ -743,9 +742,9 @@ public void testSequenceFileWriter() throws Exception { .getTempPath("testSequenceFileWriter.seq")); try(SequenceFile.Writer writer = SequenceFile.createWriter( fs, conf, p, LongWritable.class, Text.class)) { - Assertions.assertThat(writer.hasCapability + assertThat(writer.hasCapability (StreamCapabilities.HSYNC)).isEqualTo(true); - Assertions.assertThat(writer.hasCapability( + assertThat(writer.hasCapability( StreamCapabilities.HFLUSH)).isEqualTo(true); LongWritable key = new LongWritable(); key.set(1); @@ -755,7 +754,7 @@ public void testSequenceFileWriter() throws Exception { writer.flush(); writer.hflush(); writer.hsync(); - Assertions.assertThat(fs.getFileStatus(p).getLen()).isGreaterThan(0); + assertThat(fs.getFileStatus(p).getLen()).isGreaterThan(0); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileAppend.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileAppend.java index b31c809adeb0b..fe2ea36fc6d25 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileAppend.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileAppend.java @@ -18,9 +18,9 @@ package org.apache.hadoop.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; @@ -35,9 +35,10 @@ import org.apache.hadoop.io.compress.GzipCodec; import org.apache.hadoop.io.serializer.JavaSerializationComparator; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; public class TestSequenceFileAppend { @@ -46,7 +47,7 @@ public class TestSequenceFileAppend { private static Path ROOT_PATH = new Path(GenericTestUtils.getTestDir().getAbsolutePath()); - @BeforeClass + @BeforeAll public static void setUp() throws Exception { conf = new Configuration(); conf.set("io.serializations", @@ -55,12 +56,13 @@ public static void setUp() throws 
Exception { fs = FileSystem.get(conf); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { fs.close(); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testAppend() throws Exception { Path file = new Path(ROOT_PATH, "testseqappend.seq"); @@ -139,7 +141,8 @@ public void testAppend() throws Exception { fs.deleteOnExit(file); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testAppendRecordCompression() throws Exception { GenericTestUtils.assumeInNativeProfile(); @@ -173,7 +176,8 @@ public void testAppendRecordCompression() throws Exception { fs.deleteOnExit(file); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testAppendBlockCompression() throws Exception { GenericTestUtils.assumeInNativeProfile(); @@ -248,7 +252,8 @@ public void testAppendBlockCompression() throws Exception { fs.deleteOnExit(file); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testAppendNoneCompression() throws Exception { Path file = new Path(ROOT_PATH, "testseqappendnonecompr.seq"); fs.delete(file, true); @@ -315,7 +320,8 @@ public void testAppendNoneCompression() throws Exception { fs.deleteOnExit(file); } - @Test(timeout = 30000) + @Test + @Timeout(value = 30) public void testAppendSort() throws Exception { GenericTestUtils.assumeInNativeProfile(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java index b1c519a7085da..90fe5ab589a0b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSerialization.java @@ -25,18 +25,18 @@ import org.apache.hadoop.io.SequenceFile.Reader; import org.apache.hadoop.io.SequenceFile.Writer; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; public class TestSequenceFileSerialization { private Configuration conf; private FileSystem fs; - @Before + @BeforeEach public void setUp() throws Exception { conf = new Configuration(); conf.set("io.serializations", @@ -44,7 +44,7 @@ public void setUp() throws Exception { fs = FileSystem.getLocal(conf); } - @After + @AfterEach public void tearDown() throws Exception { fs.close(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java index 5fbb083189e8a..fa61e61c3fad8 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFileSync.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java index b6ec487458358..d3ae37afd99b6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java @@ -25,14 +25,14 @@ import org.apache.hadoop.conf.*; import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.test.GenericTestUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.fail; /** Support for flat files of binary key/value pairs. */ public class TestSetFile { @@ -67,10 +67,13 @@ public void testSetFileAccessMethods() { int size = 10; writeData(fs, size); SetFile.Reader reader = createReader(fs); - assertTrue("testSetFileWithConstruction1 error !!!", reader.next(new IntWritable(0))); + assertTrue(reader.next(new IntWritable(0)), + "testSetFileWithConstruction1 error !!!"); // don't know why reader.get(i) return i+1 - assertEquals("testSetFileWithConstruction2 error !!!", new IntWritable(size/2 + 1), reader.get(new IntWritable(size/2))); - assertNull("testSetFileWithConstruction3 error !!!", reader.get(new IntWritable(size*2))); + assertEquals(new IntWritable(size/2 + 1), reader.get(new IntWritable(size/2)), + "testSetFileWithConstruction2 error !!!"); + assertNull(reader.get(new IntWritable(size*2)), + "testSetFileWithConstruction3 error !!!"); } catch (Exception ex) { fail("testSetFileWithConstruction error !!!"); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSortedMapWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSortedMapWritable.java index 3d5bb1eab9134..c5fe5c05eff18 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSortedMapWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSortedMapWritable.java @@ -17,13 +17,14 @@ */ package org.apache.hadoop.io; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; /** * Tests SortedMapWritable @@ -118,14 +119,14 @@ public void testEqualsAndHashCode() { // Sanity checks failureReason = "SortedMapWritable couldn't be initialized. 
Got null reference"; - assertNotNull(failureReason, mapA); - assertNotNull(failureReason, mapB); + assertNotNull(mapA, failureReason); + assertNotNull(mapB, failureReason); // Basic null check - assertFalse("equals method returns true when passed null", mapA.equals(null)); + assertFalse(mapA.equals(null), "equals method returns true when passed null"); // When entry set is empty, they should be equal - assertTrue("Two empty SortedMapWritables are no longer equal", mapA.equals(mapB)); + assertTrue(mapA.equals(mapB), "Two empty SortedMapWritables are no longer equal"); // Setup Text[] keys = { @@ -143,40 +144,40 @@ public void testEqualsAndHashCode() { // entrySets are different failureReason = "Two SortedMapWritables with different data are now equal"; - assertTrue(failureReason, mapA.hashCode() != mapB.hashCode()); - assertTrue(failureReason, !mapA.equals(mapB)); - assertTrue(failureReason, !mapB.equals(mapA)); + assertTrue(mapA.hashCode() != mapB.hashCode(), failureReason); + assertTrue(!mapA.equals(mapB), failureReason); + assertTrue(!mapB.equals(mapA), failureReason); mapA.put(keys[1], values[1]); mapB.put(keys[0], values[0]); // entrySets are now same failureReason = "Two SortedMapWritables with same entry sets formed in different order are now different"; - assertEquals(failureReason, mapA.hashCode(), mapB.hashCode()); - assertTrue(failureReason, mapA.equals(mapB)); - assertTrue(failureReason, mapB.equals(mapA)); + assertEquals(mapA.hashCode(), mapB.hashCode(), failureReason); + assertTrue(mapA.equals(mapB), failureReason); + assertTrue(mapB.equals(mapA), failureReason); // Let's check if entry sets of same keys but different values mapA.put(keys[0], values[1]); mapA.put(keys[1], values[0]); failureReason = "Two SortedMapWritables with different content are now equal"; - assertTrue(failureReason, mapA.hashCode() != mapB.hashCode()); - assertTrue(failureReason, !mapA.equals(mapB)); - assertTrue(failureReason, !mapB.equals(mapA)); + assertTrue(mapA.hashCode() != mapB.hashCode(), failureReason); + assertTrue(!mapA.equals(mapB), failureReason); + assertTrue(!mapB.equals(mapA), failureReason); } - @Test(timeout = 10000) + @Test + @Timeout(value = 10) public void testPutAll() { SortedMapWritable map1 = new SortedMapWritable(); SortedMapWritable map2 = new SortedMapWritable(); map1.put(new Text("key"), new Text("value")); map2.putAll(map1); - assertEquals("map1 entries don't match map2 entries", map1, map2); - assertTrue( - "map2 doesn't have class information from map1", - map2.classToIdMap.containsKey(Text.class) - && map2.idToClassMap.containsValue(Text.class)); + assertEquals(map1, map2, "map1 entries don't match map2 entries"); + assertTrue(map2.classToIdMap.containsKey(Text.class) + && map2.idToClassMap.containsValue(Text.class), + "map2 doesn't have class information from map1"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java index 24bb1edb7a0ce..95c4abe5e3907 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java @@ -27,12 +27,12 @@ import org.apache.hadoop.constants.ConfigConstants; import org.apache.hadoop.thirdparty.com.google.common.primitives.Bytes; -import org.junit.Test; +import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; /** Unit tests for LargeUTF8. */ public class TestText { @@ -223,13 +223,11 @@ public void testCompare() throws Exception { assertEquals(ret1, ret2); - assertEquals("Equivalence of different txt objects, same content" , - 0, - txt1.compareTo(txt3)); - assertEquals("Equvalence of data output buffers", - 0, - comparator.compare(out1.getData(), 0, out3.getLength(), - out3.getData(), 0, out3.getLength())); + assertEquals(0, txt1.compareTo(txt3), + "Equivalence of different txt objects, same content"); + assertEquals(0, comparator.compare(out1.getData(), 0, out3.getLength(), + out3.getData(), 0, out3.getLength()), + "Equvalence of data output buffers"); } } @@ -263,29 +261,25 @@ public void testValidate() throws Exception { public void testClear() throws Exception { // Test lengths on an empty text object Text text = new Text(); - assertEquals( - "Actual string on an empty text object must be an empty string", - "", text.toString()); - assertEquals("Underlying byte array length must be zero", - 0, text.getBytes().length); - assertEquals("String's length must be zero", - 0, text.getLength()); - assertEquals("String's text length must be zero", - 0, text.getTextLength()); + assertEquals("", text.toString(), + "Actual string on an empty text object must be an empty string"); + assertEquals(0, text.getBytes().length, + "Underlying byte array length must be zero"); + assertEquals(0, text.getLength(), "String's length must be zero"); + assertEquals(0, text.getTextLength(), "String's text length must be zero"); // Test if clear works as intended text = new Text("abcd\u20acbdcd\u20ac"); int len = text.getLength(); text.clear(); - assertEquals("String must be empty after clear()", - "", text.toString()); - assertTrue( - "Length of the byte array must not decrease after clear()", - text.getBytes().length >= len); - assertEquals("Length of the string must be reset to 0 after clear()", - 0, text.getLength()); - assertEquals("Text length of the string must be reset to 0 after clear()", - 0, text.getTextLength()); + assertEquals("", text.toString(), + "String must be empty after clear()"); + assertTrue(text.getBytes().length >= len, + "Length of the byte array must not decrease after clear()"); + assertEquals(0, text.getLength(), + "Length of the string must be reset to 0 after clear()"); + assertEquals(0, text.getTextLength(), + "Text length of the string must be reset to 0 after clear()"); } @Test @@ -297,9 +291,9 @@ public void testTextText() throws CharacterCodingException { assertEquals(3, a.getTextLength()); assertEquals(3, b.getTextLength()); a.append("xdefgxxx".getBytes(), 1, 4); - assertEquals("modified aliased string", "abc", b.toString()); - assertEquals("appended string incorrectly", "abcdefg", a.toString()); - assertEquals("This should reflect in the lenght", 7, a.getTextLength()); + assertEquals("abc", b.toString(), "modified aliased string"); + assertEquals("abcdefg", a.toString(), "appended string incorrectly"); + assertEquals(7, a.getTextLength(), "This should reflect in the lenght"); // add an extra byte so that capacity = 10 and length = 8 a.append(new byte[]{'d'}, 0, 1); assertEquals(10, a.getBytes().length); @@ -324,7 +318,7 @@ public void run() { in.reset(out.getData(), out.getLength()); String s = 
WritableUtils.readString(in); - assertEquals("input buffer reset contents = " + name, name, s); + assertEquals(name, s, "input buffer reset contents = " + name); } catch (Exception ioe) { throw new RuntimeException(ioe); } @@ -361,11 +355,11 @@ public void testCharAt() { String line = "adsawseeeeegqewgasddga"; Text text = new Text(line); for (int i = 0; i < line.length(); i++) { - assertTrue("testCharAt error1 !!!", text.charAt(i) == line.charAt(i)); + assertTrue(text.charAt(i) == line.charAt(i), "testCharAt error1 !!!"); } - assertEquals("testCharAt error2 !!!", -1, text.charAt(-1)); - assertEquals("testCharAt error3 !!!", -1, text.charAt(100)); - } + assertEquals(-1, text.charAt(-1), "testCharAt error2 !!!"); + assertEquals(-1, text.charAt(100), "testCharAt error3 !!!"); + } /** * test {@code Text} readFields/write operations @@ -428,7 +422,7 @@ public void testBytesToCodePoint() { try { ByteBuffer bytes = ByteBuffer.wrap(new byte[] {-2, 45, 23, 12, 76, 89}); Text.bytesToCodePoint(bytes); - assertTrue("testBytesToCodePoint error !!!", bytes.position() == 6 ); + assertTrue(bytes.position() == 6, "testBytesToCodePoint error !!!"); } catch (BufferUnderflowException ex) { fail("testBytesToCodePoint unexp exception"); } catch (Exception e) { @@ -449,36 +443,36 @@ public void testbytesToCodePointWithInvalidUTF() { @Test public void testUtf8Length() { - assertEquals("testUtf8Length1 error !!!", - 1, Text.utf8Length(new String(new char[]{(char) 1}))); - assertEquals("testUtf8Length127 error !!!", - 1, Text.utf8Length(new String(new char[]{(char) 127}))); - assertEquals("testUtf8Length128 error !!!", - 2, Text.utf8Length(new String(new char[]{(char) 128}))); - assertEquals("testUtf8Length193 error !!!", - 2, Text.utf8Length(new String(new char[]{(char) 193}))); - assertEquals("testUtf8Length225 error !!!", - 2, Text.utf8Length(new String(new char[]{(char) 225}))); - assertEquals("testUtf8Length254 error !!!", - 2, Text.utf8Length(new String(new char[]{(char)254}))); + assertEquals(1, Text.utf8Length(new String(new char[]{(char) 1})), + "testUtf8Length1 error !!!"); + assertEquals(1, Text.utf8Length(new String(new char[]{(char) 127})), + "testUtf8Length127 error !!!"); + assertEquals(2, Text.utf8Length(new String(new char[]{(char) 128})), + "testUtf8Length128 error !!!"); + assertEquals(2, Text.utf8Length(new String(new char[]{(char) 193})), + "testUtf8Length193 error !!!"); + assertEquals(2, Text.utf8Length(new String(new char[]{(char) 225})), + "testUtf8Length225 error !!!"); + assertEquals(2, Text.utf8Length(new String(new char[]{(char)254})), + "testUtf8Length254 error !!!"); } @Test public void testSetBytes(){ Text a = new Text(new byte[100]); - assertEquals("testSetBytes100 getLength error !", - 100, a.getLength()); - assertEquals("testSetBytes100 getBytes.length error !", - 100, a.getBytes().length); - assertEquals("testSetBytes100 getTextLength error !", - 100, a.getTextLength()); + assertEquals(100, a.getLength(), + "testSetBytes100 getLength error !"); + assertEquals(100, a.getBytes().length, + "testSetBytes100 getBytes.length error !"); + assertEquals(100, a.getTextLength(), + "testSetBytes100 getTextLength error !"); a.set(new byte[0]); - assertEquals("testSetBytes0 getLength error !", - 0, a.getLength()); - assertEquals("testSetBytes0 getBytes.length error !", - 0, a.getBytes().length); - assertEquals("testSetBytes0 getTextLength error !", - 0, a.getTextLength()); + assertEquals(0, a.getLength(), + "testSetBytes0 getLength error !"); + assertEquals(0, a.getBytes().length, + 
"testSetBytes0 getBytes.length error !"); + assertEquals(0, a.getTextLength(), + "testSetBytes0 getTextLength error !"); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java index d09865b0be669..a74f32f0c98f5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestTextNonUTF8.java @@ -18,9 +18,9 @@ package org.apache.hadoop.io; -import org.junit.Test; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.charset.MalformedInputException; import java.util.Arrays; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java index 6899d1cdcabf7..e83309322aae0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestUTF8.java @@ -28,10 +28,10 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.StringUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; /** Unit tests for UTF8. */ @SuppressWarnings("deprecation") diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java index 3276289a39dc7..f80ef5e123574 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestVersionedWritable.java @@ -18,7 +18,7 @@ package org.apache.hadoop.io; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.io.*; import java.util.Random; diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java index 8d9f6c064a8d5..780220a8daf0a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritable.java @@ -27,12 +27,12 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.util.ReflectionUtils; -import org.junit.Test; +import org.junit.jupiter.api.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** Unit tests for Writable. 
*/ public class TestWritable { @@ -191,21 +191,23 @@ public void testShortWritableComparator() throws Exception { ShortWritable writable3 = new ShortWritable((short) 256); final String SHOULD_NOT_MATCH_WITH_RESULT_ONE = "Result should be 1, should not match the writables"; - assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE, - writable1.compareTo(writable2) == 1); - assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_ONE, WritableComparator.get( - ShortWritable.class).compare(writable1, writable2) == 1); + assertTrue(writable1.compareTo(writable2) == 1, + SHOULD_NOT_MATCH_WITH_RESULT_ONE); + assertTrue(WritableComparator.get( + ShortWritable.class).compare(writable1, writable2) == 1, + SHOULD_NOT_MATCH_WITH_RESULT_ONE); final String SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE = "Result should be -1, should not match the writables"; - assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, writable2 - .compareTo(writable1) == -1); - assertTrue(SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE, WritableComparator.get( - ShortWritable.class).compare(writable2, writable1) == -1); + assertTrue(writable2.compareTo(writable1) == -1, + SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE); + assertTrue(WritableComparator.get( + ShortWritable.class).compare(writable2, writable1) == -1, + SHOULD_NOT_MATCH_WITH_RESULT_MINUS_ONE); final String SHOULD_MATCH = "Result should be 0, should match the writables"; - assertTrue(SHOULD_MATCH, writable1.compareTo(writable1) == 0); - assertTrue(SHOULD_MATCH, WritableComparator.get(ShortWritable.class) - .compare(writable1, writable3) == 0); + assertTrue(writable1.compareTo(writable1) == 0, SHOULD_MATCH); + assertTrue(WritableComparator.get(ShortWritable.class) + .compare(writable1, writable3) == 0, SHOULD_MATCH); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java index 22f2aee62ad0a..44dc7f1a036e2 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableName.java @@ -29,10 +29,10 @@ import org.apache.hadoop.io.serializer.Serialization; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; -import org.junit.Test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; /** Unit tests for WritableName. 
*/ public class TestWritableName { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java index 57359a0b86c25..2a1a5ed5e3e31 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestWritableUtils.java @@ -20,12 +20,12 @@ import java.io.IOException; -import org.junit.Test; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.fail; public class TestWritableUtils { private static final Logger LOG =
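Taken together, the hunks above apply one mechanical conversion recipe: org.junit.jupiter imports replace the org.junit ones, lifecycle annotations are renamed (@Before/@After/@BeforeClass/@AfterClass become @BeforeEach/@AfterEach/@BeforeAll/@AfterAll), @Test(timeout = ...) becomes @Test plus @Timeout, Assume moves to Assumptions, and every assertion failure message shifts from the first argument to the last. The sketch below is a minimal, hypothetical test class written only to illustrate that converted shape; it is not part of this patch, and its class name, fixture, and assertions are invented for the example.

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

// Hypothetical class illustrating the JUnit 4 -> JUnit 5 conversions applied
// throughout the hunks above; not part of the patch itself.
public class JUnit5MigrationExample {

  private static String fixture;

  // @BeforeClass becomes @BeforeAll (likewise @AfterClass -> @AfterAll,
  // @Before -> @BeforeEach, @After -> @AfterEach).
  @BeforeAll
  public static void setUp() {
    fixture = "hadoop";
  }

  // @Test(timeout = 10000) becomes @Test plus @Timeout; the Jupiter
  // annotation defaults to seconds rather than milliseconds.
  @Test
  @Timeout(value = 10)
  public void testMessageMovesLast() {
    // JUnit 4: assertEquals("length mismatch", 6, fixture.length());
    // JUnit 5: the failure message is the trailing argument.
    assertEquals(6, fixture.length(), "length mismatch");
    assertTrue(fixture.startsWith("had"), "prefix mismatch");
  }

  @Test
  public void testAssumption() {
    // org.junit.Assume.assumeTrue -> org.junit.jupiter.api.Assumptions.assumeTrue
    assumeTrue(Boolean.getBoolean("run.optional.checks"));
    assertEquals("hadoop", fixture, "fixture content changed");
  }

  @AfterAll
  public static void tearDown() {
    fixture = null;
  }
}

The change in units explains why the timeout values shrink in the hunks above: @Test(timeout = 10000) and @Test(timeout = 30000), expressed in milliseconds, become @Timeout(value = 10) and @Timeout(value = 30), expressed in seconds.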