Skip to content

HADOOP-19425. [JDK17] Upgrade JUnit from 4 to 5 in hadoop-azure Part3. #7674

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 1 commit into
base: trunk
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,10 @@

package org.apache.hadoop.fs.azurebfs;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
import static org.junit.jupiter.api.Assumptions.assumeFalse;

import java.io.IOException;
import java.net.URI;
import java.util.Hashtable;
Expand All @@ -28,10 +32,8 @@
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

import org.assertj.core.api.Assertions;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -73,7 +75,6 @@
import static org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode.FILE_SYSTEM_NOT_FOUND;
import static org.apache.hadoop.fs.azurebfs.constants.TestConfigurationKeys.*;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.junit.Assume.assumeTrue;

/**
* Base for AzureBlobFileSystem Integration tests.
Expand Down Expand Up @@ -111,8 +112,8 @@ protected AbstractAbfsIntegrationTest() throws Exception {
// check if accountName is set using different config key
accountName = rawConfig.get(FS_AZURE_ABFS_ACCOUNT_NAME);
}
assumeTrue("Not set: " + FS_AZURE_ABFS_ACCOUNT_NAME,
accountName != null && !accountName.isEmpty());
assumeTrue(accountName != null && !accountName.isEmpty(),
"Not set: " + FS_AZURE_ABFS_ACCOUNT_NAME);

final String abfsUrl = this.getFileSystemName() + "@" + this.getAccountName();
URI defaultUri = null;
Expand Down Expand Up @@ -188,7 +189,7 @@ public TracingContext getTestTracingContext(AzureBlobFileSystem fs,
FSOperationType.TEST_OP, needsPrimaryReqId, format, null);
}

@Before
@BeforeEach
public void setup() throws Exception {
//Create filesystem first to make sure getWasbFileSystem() can return an existing filesystem.
createFileSystem();
Expand Down Expand Up @@ -221,7 +222,7 @@ public void setup() throws Exception {
}
}

@After
@AfterEach
public void teardown() throws Exception {
try {
IOUtils.closeStream(wasb);
Expand Down Expand Up @@ -565,23 +566,22 @@ protected AbfsOutputStream createAbfsOutputStreamWithFlushEnabled(
*/
protected long assertAbfsStatistics(AbfsStatistic statistic,
    long expectedValue, Map<String, Long> metricMap) {
  // JUnit 5 overload: (expected, actual, message) — the failure message is
  // the LAST argument, unlike JUnit 4 where it came first.
  assertEquals(expectedValue, (long) metricMap.get(statistic.getStatName()),
      "Mismatch in " + statistic.getStatName());
  // Return the expected value so callers can chain/accumulate assertions.
  return expectedValue;
}

/**
 * Skip the test unless the given configuration key (checking the
 * account-scoped variant first, then the plain key) has a non-empty value.
 *
 * @param conf configuration to probe
 * @param key  base configuration key to look up
 */
protected void assumeValidTestConfigPresent(final Configuration conf, final String key) {
  // Account-scoped property wins; fall back to the bare key, defaulting to "".
  String configuredValue = conf.get(accountProperty(key, accountName),
      conf.get(key, ""));
  // JUnit 5 assumption: condition first, message last.
  assumeTrue(!configuredValue.isEmpty(),
      String.format("Missing Required Test Config: %s.", key));
}

protected void assumeValidAuthConfigsPresent() {
final AuthType currentAuthType = getAuthType();
Assume.assumeFalse(
"SAS Based Authentication Not Allowed For Integration Tests",
currentAuthType == AuthType.SAS);
assumeFalse(currentAuthType == AuthType.SAS,
"SAS Based Authentication Not Allowed For Integration Tests");
if (currentAuthType == AuthType.SharedKey) {
assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_ACCOUNT_KEY);
} else {
Expand Down Expand Up @@ -612,7 +612,7 @@ public AbfsServiceType getIngressServiceType() {
* @param path path to create. Can be relative or absolute.
*/
protected void createAzCopyFolder(Path path) throws Exception {
Assume.assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB);
assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB);
assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_TEST_FIXED_SAS_TOKEN);
String sasToken = getRawConfiguration().get(FS_AZURE_TEST_FIXED_SAS_TOKEN);
AzcopyToolHelper azcopyHelper = AzcopyToolHelper.getInstance(sasToken);
Expand All @@ -624,7 +624,7 @@ protected void createAzCopyFolder(Path path) throws Exception {
* @param path path to create. Can be relative or absolute.
*/
protected void createAzCopyFile(Path path) throws Exception {
Assume.assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB);
assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB);
assumeValidTestConfigPresent(getRawConfiguration(), FS_AZURE_TEST_FIXED_SAS_TOKEN);
String sasToken = getRawConfiguration().get(FS_AZURE_TEST_FIXED_SAS_TOKEN);
AzcopyToolHelper azcopyHelper = AzcopyToolHelper.getInstance(sasToken);
Expand All @@ -642,17 +642,17 @@ private String getAzcopyAbsolutePath(Path path) throws IOException {
* Otherwise, the test will be skipped.
*/
protected void assumeBlobServiceType() {
  // JUnit 5 assumption: condition first, message last. Aborts (skips) the
  // test rather than failing it when the service type is not BLOB.
  assumeTrue(getAbfsServiceType() == AbfsServiceType.BLOB,
      "Blob service type is required for this test");
}

/**
* Utility method to assume that the test is running against a DFS service.
* Otherwise, the test will be skipped.
*/
protected void assumeDfsServiceType() {
  // JUnit 5 assumption: condition first, message last. Aborts (skips) the
  // test rather than failing it when the service type is not DFS.
  assumeTrue(getAbfsServiceType() == AbfsServiceType.DFS,
      "DFS service type is required for this test");
}

/**
Expand All @@ -670,7 +670,7 @@ protected void assumeHnsEnabled() throws IOException {
* @throws IOException if an error occurs while checking the account type.
*/
protected void assumeHnsEnabled(String errorMessage) throws IOException {
  // Skip the test unless the account reports hierarchical namespace (HNS)
  // as enabled; errorMessage is surfaced as the skip reason.
  assumeTrue(getIsNamespaceEnabled(getFileSystem()), errorMessage);
}

/**
Expand All @@ -688,7 +688,7 @@ protected void assumeHnsDisabled() throws IOException {
* @throws IOException if an error occurs while checking the account type.
*/
protected void assumeHnsDisabled(String message) throws IOException {
  // Skip the test when the account reports hierarchical namespace (HNS)
  // as enabled; message is surfaced as the skip reason.
  assumeFalse(getIsNamespaceEnabled(getFileSystem()), message);
}

/**
Expand All @@ -699,7 +699,7 @@ protected void assumeHnsDisabled(String message) throws IOException {
protected void assertPathDns(Path path) {
String expectedDns = getAbfsServiceType() == AbfsServiceType.BLOB
? ABFS_BLOB_DOMAIN_NAME : ABFS_DFS_DOMAIN_NAME;
Assertions.assertThat(path.toString())
assertThat(path.toString())
.describedAs("Path does not contain expected DNS")
.contains(expectedDns);
}
Expand Down Expand Up @@ -745,19 +745,23 @@ protected void checkFuturesForExceptions(List<Future<?>> futures, int exceptionV
protected void assumeRecoveryThroughClientTransactionID(boolean isCreate)
    throws IOException {
  // Assumes that recovery through client transaction ID is enabled.
  assumeTrue(getConfiguration().getIsClientTransactionIdEnabled(),
      "Recovery through client transaction ID is not enabled");
  // Assumes that service type is DFS.
  assumeDfsServiceType();
  // Assumes that namespace is enabled for the given AzureBlobFileSystem.
  assumeHnsEnabled();
  if (isCreate) {
    // Assume that create client is DFS client.
    assumeTrue(AbfsServiceType.DFS.equals(getIngressServiceType()),
        "Ingress service type is not DFS");
    // Assume that append blob is not enabled in DFS client.
    assumeFalse(isAppendBlobEnabled(),
        "Append blob is enabled in DFS client");
  }
}

/**
 * Skip the test when the supplied reference is null.
 *
 * @param objects object that must be present for the test to run
 */
protected void assumeNotNull(Object objects) {
  final boolean present = objects != null;
  assumeTrue(present);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,18 +18,21 @@

package org.apache.hadoop.fs.azurebfs;

import org.junit.jupiter.api.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.azure.integration.AzureTestConstants;

import static org.apache.hadoop.fs.azure.integration.AzureTestConstants.SCALE_TEST_TIMEOUT_MILLIS;
import static org.apache.hadoop.fs.azure.integration.AzureTestUtils.assumeScaleTestsEnabled;

/**
* Integration tests at bigger scale; configurable as to
* size, off by default.
*/
@Timeout(SCALE_TEST_TIMEOUT_MILLIS)
public class AbstractAbfsScaleTest extends AbstractAbfsIntegrationTest {

protected static final Logger LOG =
Expand All @@ -39,11 +42,6 @@ public AbstractAbfsScaleTest() throws Exception {
super();
}

/**
 * Supplies the per-test timeout, in milliseconds, for scale tests.
 * NOTE(review): presumably consumed by the base class's JUnit 4 Timeout
 * rule ("driven by the value returned by getTestTimeoutMillis()" per the
 * base-class Javadoc); under JUnit 5 this role moves to a class-level
 * {@code @Timeout} annotation — confirm before relying on this override.
 */
@Override
protected int getTestTimeoutMillis() {
return AzureTestConstants.SCALE_TEST_TIMEOUT_MILLIS;
}

@Override
public void setup() throws Exception {
super.setup();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,15 @@

import java.io.IOException;

import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.rules.Timeout;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.test.TestName;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.Path;

Expand All @@ -37,35 +37,30 @@
* Base class for any ABFS test with timeouts & named threads.
* This class does not attempt to bind to Azure.
*/
public class AbstractAbfsTestWithTimeout extends Assert {
@Timeout(TEST_TIMEOUT)
public class AbstractAbfsTestWithTimeout extends Assertions {
private static final Logger LOG =
LoggerFactory.getLogger(AbstractAbfsTestWithTimeout.class);

/**
 * The name of the current test method, surfaced through the JUnit 5
 * {@code TestName} extension. The JUnit 4 {@code @Rule} field of the same
 * name (a duplicate declaration, which would not compile) and the JUnit 4
 * {@code Timeout} rule are removed; timeouts are applied via the
 * class-level {@code @Timeout} annotation instead.
 */
@RegisterExtension
protected TestName methodName = new TestName();

/**
 * Name the junit thread for the class. This will be overridden
 * before the individual test methods are run.
 */
@BeforeAll
public static void nameTestThread() {
  // Static: runs once per class under JUnit 5's @BeforeAll contract.
  Thread.currentThread().setName("JUnit");
}

/**
 * Name the thread to the current test method, aiding log correlation.
 */
@BeforeEach
public void nameThread() {
  Thread.currentThread().setName("JUnit-" + methodName.getMethodName());
}
Expand Down Expand Up @@ -110,15 +105,15 @@ protected boolean validateContent(AzureBlobFileSystem fs, Path path,

while (valueOfContentAtPos != -1 && pos < lenOfOriginalByteArray) {
if (originalByteArray[pos] != valueOfContentAtPos) {
assertEquals("Mismatch in content validation at position {}", pos,
originalByteArray[pos], valueOfContentAtPos);
assertEquals(originalByteArray[pos], valueOfContentAtPos,
"Mismatch in content validation at position " + pos);
return false;
}
valueOfContentAtPos = (byte) in.read();
pos++;
}
if (valueOfContentAtPos != -1) {
assertEquals("Expected end of file", -1, valueOfContentAtPos);
assertEquals(-1, valueOfContentAtPos, "Expected end of file");
return false;
}
return true;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@

package org.apache.hadoop.fs.azurebfs;

import org.junit.Test;
import org.junit.jupiter.api.Test;

import org.apache.hadoop.security.alias.CredentialProviderFactory;
import org.apache.hadoop.conf.Configuration;
Expand All @@ -35,7 +35,7 @@ public void testIncompatibleCredentialProviderIsExcluded() throws Exception {
rawConfig.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
"jceks://abfs@[email protected]/tmp/a.jceks,jceks://file/tmp/secret.jceks");
try (AzureBlobFileSystem fs = (AzureBlobFileSystem) FileSystem.get(rawConfig)) {
assertNotNull("filesystem", fs);
assertNotNull(fs, "filesystem");
String providers = fs.getConf().get(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH);
assertEquals("jceks://file/tmp/secret.jceks", providers);
}
Expand Down
Loading