diff --git a/hbase-endpoint/pom.xml b/hbase-endpoint/pom.xml
index 7b75119f928c..da729213cb33 100644
--- a/hbase-endpoint/pom.xml
+++ b/hbase-endpoint/pom.xml
@@ -178,11 +178,6 @@
junit-jupiter-params
test
-
- org.junit.vintage
- junit-vintage-engine
- test
-
org.mockito
mockito-core
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClient.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClient.java
index 1126a073fc14..2baf5d493411 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClient.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClient.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
@@ -33,19 +32,15 @@
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ MediumTests.class, CoprocessorTests.class })
+@Tag(MediumTests.TAG)
+@Tag(CoprocessorTests.TAG)
public class TestAsyncAggregationClient {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAggregationClient.class);
-
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static TableName TABLE_NAME = TableName.valueOf("TestAsyncAggregationClient");
@@ -62,7 +57,7 @@ public class TestAsyncAggregationClient {
private static AsyncTable TABLE;
- @BeforeClass
+ @BeforeAll
public static void setUp() throws Exception {
Configuration conf = UTIL.getConfiguration();
conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
@@ -81,7 +76,7 @@ public static void setUp() throws Exception {
.collect(Collectors.toList())).get();
}
- @AfterClass
+ @AfterAll
public static void tearDown() throws Exception {
CONN.close();
UTIL.shutdownMiniCluster();
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClientWithCallbackThreadPool.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClientWithCallbackThreadPool.java
index 7b37ddfd1555..3dc23e94dc66 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClientWithCallbackThreadPool.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAggregationClientWithCallbackThreadPool.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
@@ -25,7 +25,6 @@
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.coprocessor.AsyncAggregationClient;
@@ -35,23 +34,19 @@
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
/**
* Same as TestAsyncAggregationClient, except that {@link AsyncTableImpl} is involved in addition to
* {@link RawAsyncTableImpl}. Exercises the code paths in {@link AsyncTableImpl#coprocessorService}.
*/
-@Category({ MediumTests.class, CoprocessorTests.class })
+@Tag(MediumTests.TAG)
+@Tag(CoprocessorTests.TAG)
public class TestAsyncAggregationClientWithCallbackThreadPool {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAggregationClientWithCallbackThreadPool.class);
-
private static HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static TableName TABLE_NAME = TableName.valueOf("TestAsyncAggregationClient");
@@ -70,7 +65,7 @@ public class TestAsyncAggregationClientWithCallbackThreadPool {
private static ExecutorService EXECUTOR_SERVICE;
- @BeforeClass
+ @BeforeAll
public static void setUp() throws Exception {
Configuration conf = UTIL.getConfiguration();
conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
@@ -90,7 +85,7 @@ public static void setUp() throws Exception {
.collect(Collectors.toList())).get();
}
- @AfterClass
+ @AfterAll
public static void tearDown() throws Exception {
CONN.close();
UTIL.shutdownMiniCluster();
@@ -152,5 +147,4 @@ public void testStd() throws InterruptedException, ExecutionException {
.std(TABLE, new LongColumnInterpreter(), new Scan().addColumn(CF, CQ)).get().doubleValue(),
DELTA);
}
-
}
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
index 5c201dadccaf..3dc5035e32cc 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/TestRpcControllerFactory.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.List;
@@ -28,7 +28,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -40,25 +39,20 @@
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.apache.hbase.thirdparty.com.google.common.collect.ConcurrentHashMultiset;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.common.collect.Multiset;
-@Category({ MediumTests.class, ClientTests.class })
+@Tag(MediumTests.TAG)
+@Tag(ClientTests.TAG)
public class TestRpcControllerFactory {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRpcControllerFactory.class);
-
public static class StaticRpcControllerFactory extends RpcControllerFactory {
public StaticRpcControllerFactory(Configuration conf) {
@@ -117,11 +111,8 @@ public void setPriority(TableName tn) {
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
- @Rule
- public TestName name = new TestName();
-
- @BeforeClass
- public static void setup() throws Exception {
+ @BeforeAll
+ public static void setUp() throws Exception {
// load an endpoint so we have an endpoint to test - it doesn't matter which one, but
// this is already in tests, so we can just use it.
Configuration conf = UTIL.getConfiguration();
@@ -131,8 +122,8 @@ public static void setup() throws Exception {
UTIL.startMiniCluster();
}
- @AfterClass
- public static void teardown() throws Exception {
+ @AfterAll
+ public static void tearDown() throws Exception {
UTIL.shutdownMiniCluster();
}
@@ -143,13 +134,13 @@ public static void teardown() throws Exception {
* @throws Exception on failure
*/
@Test
- public void testCountController() throws Exception {
+ public void testCountController(TestInfo testInfo) throws Exception {
Configuration conf = new Configuration(UTIL.getConfiguration());
// setup our custom controller
conf.set(RpcControllerFactory.CUSTOM_CONTROLLER_CONF_KEY,
StaticRpcControllerFactory.class.getName());
- final TableName tableName = TableName.valueOf(name.getMethodName());
+ final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName());
UTIL.createTable(tableName, fam1).close();
// change one of the connection properties so we get a new Connection with our configuration
@@ -246,6 +237,6 @@ public void testFallbackToDefaultRpcControllerFactory() {
// Should not fail
RpcControllerFactory factory = RpcControllerFactory.instantiate(conf);
assertNotNull(factory);
- assertEquals(factory.getClass(), RpcControllerFactory.class);
+ assertEquals(RpcControllerFactory.class, factory.getClass());
}
}
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/coprocessor/TestAggregationClient.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/coprocessor/TestAggregationClient.java
index 185ef8d2fcf4..6b7c8d0730d1 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/coprocessor/TestAggregationClient.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/client/coprocessor/TestAggregationClient.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hbase.client.coprocessor;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
@@ -35,19 +34,15 @@
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ MediumTests.class, CoprocessorTests.class })
+@Tag(MediumTests.TAG)
+@Tag(CoprocessorTests.TAG)
public class TestAggregationClient {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAggregationClient.class);
-
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final TableName TABLE_NAME = TableName.valueOf("TestAggregationClient");
@@ -58,7 +53,7 @@ public class TestAggregationClient {
private static Table TABLE;
- @BeforeClass
+ @BeforeAll
public static void setUp() throws Exception {
Configuration conf = UTIL.getConfiguration();
conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
@@ -69,7 +64,7 @@ public static void setUp() throws Exception {
TABLE = CONN.getTable(TABLE_NAME);
}
- @AfterClass
+ @AfterAll
public static void tearDown() throws Exception {
CONN.close();
UTIL.shutdownMiniCluster();
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorEndpoint.java
index acecfe2c1462..e4ae0044ce5a 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorEndpoint.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -27,10 +27,12 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collections;
+import java.util.function.Supplier;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
import org.apache.hadoop.hbase.client.TestAsyncAdminBase;
@@ -43,24 +45,23 @@
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, MediumTests.class })
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+
+@Tag(ClientTests.TAG)
+@Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncCoprocessorEndpoint extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncCoprocessorEndpoint.class);
private static final FileNotFoundException WHAT_TO_THROW = new FileNotFoundException("/file.txt");
private static final String DUMMY_VALUE = "val";
- @BeforeClass
+ public TestAsyncCoprocessorEndpoint(Supplier<AsyncAdmin> admin) {
+ super(admin);
+ }
+
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -73,7 +74,7 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @Test
+ @TestTemplate
public void testMasterCoprocessorService() throws Exception {
TestProtos.EchoRequestProto request =
TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
@@ -85,7 +86,7 @@ TestProtos.EchoResponseProto> coprocessorService(
assertEquals("hello", response.getMessage());
}
- @Test
+ @TestTemplate
public void testMasterCoprocessorError() throws Exception {
TestProtos.EmptyRequestProto emptyRequest = TestProtos.EmptyRequestProto.getDefaultInstance();
try {
@@ -99,7 +100,7 @@ TestProtos.EmptyResponseProto> coprocessorService(
}
}
- @Test
+ @TestTemplate
public void testRegionServerCoprocessorService() throws Exception {
final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
DummyRegionServerEndpointProtos.DummyRequest request =
@@ -113,7 +114,7 @@ DummyRegionServerEndpointProtos.DummyResponse> coprocessorService(
assertEquals(DUMMY_VALUE, response.getValue());
}
- @Test
+ @TestTemplate
public void testRegionServerCoprocessorServiceError() throws Exception {
final ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
DummyRegionServerEndpointProtos.DummyRequest request =
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorOnAllRegionServersEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorOnAllRegionServersEndpoint.java
index 018c67588029..fd49b2c33a8b 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorOnAllRegionServersEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAsyncCoprocessorOnAllRegionServersEndpoint.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -28,9 +28,11 @@
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.AsyncAdminClientUtils;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RetriesExhaustedException;
@@ -42,27 +44,26 @@
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, MediumTests.class })
+@Tag(ClientTests.TAG)
+@Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: admin = {0}")
public class TestAsyncCoprocessorOnAllRegionServersEndpoint extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncCoprocessorOnAllRegionServersEndpoint.class);
private static final String THROW_CLASS_NAME = "java.io.FileNotFoundException";
private static final String DUMMY_VALUE = "val";
private static final int NUM_SLAVES = 5;
private static final int NUM_SUCCESS_REGION_SERVERS = 3;
- @BeforeClass
+ public TestAsyncCoprocessorOnAllRegionServersEndpoint(Supplier<AsyncAdmin> admin) {
+ super(admin);
+ }
+
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -75,12 +76,12 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @TestTemplate
public void testRegionServersCoprocessorService()
throws ExecutionException, InterruptedException {
DummyRequest request = DummyRequest.getDefaultInstance();
@@ -96,7 +97,7 @@ public void testRegionServersCoprocessorService()
});
}
- @Test
+ @TestTemplate
public void testRegionServerCoprocessorsServiceAllFail()
throws ExecutionException, InterruptedException {
DummyRequest request = DummyRequest.getDefaultInstance();
@@ -113,7 +114,7 @@ public void testRegionServerCoprocessorsServiceAllFail()
});
}
- @Test
+ @TestTemplate
public void testRegionServerCoprocessorsServicePartialFail()
throws ExecutionException, InterruptedException {
DummyRequest request = DummyRequest.getDefaultInstance();
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index f66fb60da847..a94669d15e18 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
@@ -26,7 +26,6 @@
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -46,24 +45,20 @@
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TestEndpoint: test cases to verify the batch execution of coprocessor Endpoint
*/
-@Category({ CoprocessorTests.class, MediumTests.class })
+@Tag(CoprocessorTests.TAG)
+@Tag(MediumTests.TAG)
public class TestBatchCoprocessorEndpoint {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestBatchCoprocessorEndpoint.class);
-
private static final Logger LOG = LoggerFactory.getLogger(TestBatchCoprocessorEndpoint.class);
private static final TableName TEST_TABLE = TableName.valueOf("TestTable");
@@ -78,7 +73,7 @@ public class TestBatchCoprocessorEndpoint {
private static HBaseTestingUtility util = new HBaseTestingUtility();
- @BeforeClass
+ @BeforeAll
public static void setupBeforeClass() throws Exception {
// set configure to indicate which cp should be loaded
Configuration conf = util.getConfiguration();
@@ -106,7 +101,7 @@ public static void setupBeforeClass() throws Exception {
table.close();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
util.shutdownMiniCluster();
}
@@ -136,7 +131,7 @@ public void testAggregationNullResponse() throws Throwable {
for (long i = 0; i < rowSeperator2; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
table.close();
}
@@ -177,7 +172,7 @@ public void testAggregationWithReturnValue() throws Throwable {
for (long i = 0; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
results.clear();
@@ -193,7 +188,7 @@ public void testAggregationWithReturnValue() throws Throwable {
for (long i = rowSeperator1; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
table.close();
}
@@ -212,7 +207,7 @@ public void testAggregation() throws Throwable {
for (long i = 0; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
// scan: for region 2 and region 3
results = sum(table, TEST_FAMILY, TEST_QUALIFIER, ROWS[rowSeperator1], ROWS[ROWS.length - 1]);
@@ -226,7 +221,7 @@ public void testAggregation() throws Throwable {
for (long i = rowSeperator1; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
table.close();
}
@@ -272,7 +267,7 @@ public void update(byte[] region, byte[] row,
for (long i = 0; i < rowSeperator2; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
assertTrue(hasError);
table.close();
}
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 4848fe5a65c1..6833af66e58a 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
@@ -35,7 +35,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -53,22 +52,19 @@
import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test coprocessors class loading.
*/
-@Category({ CoprocessorTests.class, LargeTests.class })
+@Tag(CoprocessorTests.TAG)
+@Tag(LargeTests.TAG)
public class TestClassLoading {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestClassLoading.class);
private static final Logger LOG = LoggerFactory.getLogger(TestClassLoading.class);
private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -103,7 +99,7 @@ public Optional getMasterObserver() {
new String[] { regionCoprocessor1.getSimpleName(),
MultiRowMutationEndpoint.class.getSimpleName(), regionServerCoprocessor.getSimpleName() };
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();
@@ -122,7 +118,7 @@ public static void setUpBeforeClass() throws Exception {
cluster = TEST_UTIL.getDFSCluster();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@@ -146,14 +142,14 @@ public void testClassLoadingFromHDFS() throws Exception {
new Path(fs.getUri().toString() + Path.SEPARATOR));
String jarFileOnHDFS1 = fs.getUri().toString() + Path.SEPARATOR + jarFile1.getName();
Path pathOnHDFS1 = new Path(jarFileOnHDFS1);
- assertTrue("Copy jar file to HDFS failed.", fs.exists(pathOnHDFS1));
+ assertTrue(fs.exists(pathOnHDFS1), "Copy jar file to HDFS failed.");
LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS1);
fs.copyFromLocalFile(new Path(jarFile2.getPath()),
new Path(fs.getUri().toString() + Path.SEPARATOR));
String jarFileOnHDFS2 = fs.getUri().toString() + Path.SEPARATOR + jarFile2.getName();
Path pathOnHDFS2 = new Path(jarFileOnHDFS2);
- assertTrue("Copy jar file to HDFS failed.", fs.exists(pathOnHDFS2));
+ assertTrue(fs.exists(pathOnHDFS2), "Copy jar file to HDFS failed.");
LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS2);
// create a table that references the coprocessors
@@ -206,27 +202,27 @@ public void testClassLoadingFromHDFS() throws Exception {
}
}
- assertTrue("No region was found for table " + tableName, foundTableRegion);
- assertTrue("Class " + cpName1 + " was missing on a region", found1);
- assertTrue("Class " + cpName2 + " was missing on a region", found2);
- assertTrue("Configuration key 'k1' was missing on a region", found2_k1);
- assertTrue("Configuration key 'k2' was missing on a region", found2_k2);
- assertTrue("Configuration key 'k3' was missing on a region", found2_k3);
+ assertTrue(foundTableRegion, "No region was found for table " + tableName);
+ assertTrue(found1, "Class " + cpName1 + " was missing on a region");
+ assertTrue(found2, "Class " + cpName2 + " was missing on a region");
+ assertTrue(found2_k1, "Configuration key 'k1' was missing on a region");
+ assertTrue(found2_k2, "Configuration key 'k2' was missing on a region");
+ assertTrue(found2_k3, "Configuration key 'k3' was missing on a region");
// check if CP classloaders are cached
- assertNotNull(jarFileOnHDFS1 + " was not cached",
- CoprocessorClassLoader.getIfCached(pathOnHDFS1));
- assertNotNull(jarFileOnHDFS2 + " was not cached",
- CoprocessorClassLoader.getIfCached(pathOnHDFS2));
+ assertNotNull(CoprocessorClassLoader.getIfCached(pathOnHDFS1),
+ jarFileOnHDFS1 + " was not cached");
+ assertNotNull(CoprocessorClassLoader.getIfCached(pathOnHDFS2),
+ jarFileOnHDFS2 + " was not cached");
// two external jar used, should be one classloader per jar
- assertEquals(
- "The number of cached classloaders should be equal to the number" + " of external jar files",
- 2, CoprocessorClassLoader.getAllCached().size());
+ assertEquals(2, CoprocessorClassLoader.getAllCached().size(),
+ "The number of cached classloaders should be equal to the number" + " of external jar files");
// check if region active classloaders are shared across all RS regions
Set externalClassLoaders = new HashSet<>(CoprocessorClassLoader.getAllCached());
for (Map.Entry> regionCP : regionsActiveClassLoaders.entrySet()) {
- assertTrue("Some CP classloaders for region " + regionCP.getKey() + " are not cached."
- + " ClassLoader Cache:" + externalClassLoaders + " Region ClassLoaders:"
- + regionCP.getValue(), externalClassLoaders.containsAll(regionCP.getValue()));
+ assertTrue(externalClassLoaders.containsAll(regionCP.getValue()),
+ "Some CP classloaders for region " + regionCP.getKey() + " are not cached."
+ + " ClassLoader Cache:" + externalClassLoaders + " Region ClassLoaders:"
+ + regionCP.getValue());
}
}
@@ -256,7 +252,7 @@ public void testClassLoadingFromLocalFS() throws Exception {
found = (region.getCoprocessorHost().findCoprocessor(cpName3) != null);
}
}
- assertTrue("Class " + cpName3 + " was missing on a region", found);
+ assertTrue(found, "Class " + cpName3 + " was missing on a region");
}
@Test
@@ -281,12 +277,12 @@ public void testPrivateClassLoader() throws Exception {
Coprocessor cp = region.getCoprocessorHost().findCoprocessor(cpName4);
if (cp != null) {
found = true;
- assertEquals("Class " + cpName4 + " was not loaded by CoprocessorClassLoader",
- cp.getClass().getClassLoader().getClass(), CoprocessorClassLoader.class);
+ assertEquals(CoprocessorClassLoader.class, cp.getClass().getClassLoader().getClass(),
+ "Class " + cpName4 + " was not loaded by CoprocessorClassLoader");
}
}
}
- assertTrue("Class " + cpName4 + " was missing on a region", found);
+ assertTrue(found, "Class " + cpName4 + " was missing on a region");
}
@Test
@@ -361,16 +357,16 @@ public void testHBase3810() throws Exception {
}
}
- assertTrue("Class " + cpName1 + " was missing on a region", found_1);
- assertTrue("Class " + cpName2 + " was missing on a region", found_2);
- assertTrue("Class SimpleRegionObserver was missing on a region", found_3);
- assertTrue("Class " + cpName5 + " was missing on a region", found_5);
- assertTrue("Class " + cpName6 + " was missing on a region", found_6);
+ assertTrue(found_1, "Class " + cpName1 + " was missing on a region");
+ assertTrue(found_2, "Class " + cpName2 + " was missing on a region");
+ assertTrue(found_3, "Class SimpleRegionObserver was missing on a region");
+ assertTrue(found_5, "Class " + cpName5 + " was missing on a region");
+ assertTrue(found_6, "Class " + cpName6 + " was missing on a region");
- assertTrue("Configuration key 'k1' was missing on a region", found6_k1);
- assertTrue("Configuration key 'k2' was missing on a region", found6_k2);
- assertTrue("Configuration key 'k3' was missing on a region", found6_k3);
- assertFalse("Configuration key 'k4' wasn't configured", found6_k4);
+ assertTrue(found6_k1, "Configuration key 'k1' was missing on a region");
+ assertTrue(found6_k2, "Configuration key 'k2' was missing on a region");
+ assertTrue(found6_k3, "Configuration key 'k3' was missing on a region");
+ assertFalse(found6_k4, "Configuration key 'k4' wasn't configured");
}
@Test
@@ -396,7 +392,7 @@ void loadingClassFromLibDirInJar(String libPrefix) throws Exception {
fs.copyFromLocalFile(new Path(outerJarFile.getPath()),
new Path(fs.getUri().toString() + Path.SEPARATOR));
String jarFileOnHDFS = fs.getUri().toString() + Path.SEPARATOR + outerJarFile.getName();
- assertTrue("Copy jar file to HDFS failed.", fs.exists(new Path(jarFileOnHDFS)));
+ assertTrue(fs.exists(new Path(jarFileOnHDFS)), "Copy jar file to HDFS failed.");
LOG.info("Copied jar file to HDFS: " + jarFileOnHDFS);
// create a table that references the coprocessors
@@ -438,11 +434,11 @@ void loadingClassFromLibDirInJar(String libPrefix) throws Exception {
}
}
}
- assertTrue("Class " + cpName1 + " was missing on a region", found1);
- assertTrue("Class " + cpName2 + " was missing on a region", found2);
- assertTrue("Configuration key 'k1' was missing on a region", found2_k1);
- assertTrue("Configuration key 'k2' was missing on a region", found2_k2);
- assertTrue("Configuration key 'k3' was missing on a region", found2_k3);
+ assertTrue(found1, "Class " + cpName1 + " was missing on a region");
+ assertTrue(found2, "Class " + cpName2 + " was missing on a region");
+ assertTrue(found2_k1, "Configuration key 'k1' was missing on a region");
+ assertTrue(found2_k2, "Configuration key 'k2' was missing on a region");
+ assertTrue(found2_k3, "Configuration key 'k3' was missing on a region");
}
@Test
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
index 3c64c2099c23..0678c338d150 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpoint.java
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -31,7 +31,6 @@
import java.util.Map;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
@@ -54,22 +53,19 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TestEndpoint: test cases to verify coprocessor Endpoint
*/
-@Category({ CoprocessorTests.class, MediumTests.class })
+@Tag(CoprocessorTests.TAG)
+@Tag(MediumTests.TAG)
public class TestCoprocessorEndpoint {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCoprocessorEndpoint.class);
private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorEndpoint.class);
@@ -85,7 +81,7 @@ public class TestCoprocessorEndpoint {
private static HBaseTestingUtility util = new HBaseTestingUtility();
- @BeforeClass
+ @BeforeAll
public static void setupBeforeClass() throws Exception {
// set configure to indicate which cp should be loaded
Configuration conf = util.getConfiguration();
@@ -111,7 +107,7 @@ public static void setupBeforeClass() throws Exception {
table.close();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
util.shutdownMiniCluster();
}
@@ -151,7 +147,7 @@ public void testAggregation() throws Throwable {
for (long i = 0; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
results.clear();
@@ -166,7 +162,7 @@ public void testAggregation() throws Throwable {
for (int i = rowSeperator1; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
table.close();
}
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpointTracing.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpointTracing.java
index 007efbf56515..8fff6ea5571b 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpointTracing.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorEndpointTracing.java
@@ -30,14 +30,15 @@
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasProperty;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.protobuf.Descriptors;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
import io.opentelemetry.api.trace.StatusCode;
+import io.opentelemetry.sdk.testing.junit5.OpenTelemetryExtension;
import io.opentelemetry.sdk.trace.data.SpanData;
import java.util.List;
import java.util.Map;
@@ -47,13 +48,12 @@
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.ConnectionRule;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.ConnectionExtension;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MatcherPredicate;
-import org.apache.hadoop.hbase.MiniClusterRule;
+import org.apache.hadoop.hbase.MiniClusterExtension;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Admin;
@@ -76,19 +76,16 @@
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos.TestProtobufRpcProto;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.trace.OpenTelemetryClassRule;
-import org.apache.hadoop.hbase.trace.OpenTelemetryTestRule;
import org.apache.hadoop.hbase.trace.TraceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.hamcrest.Matcher;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.ExternalResource;
-import org.junit.rules.RuleChain;
-import org.junit.rules.TestName;
-import org.junit.rules.TestRule;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -97,18 +94,27 @@
/**
* Test cases to verify tracing coprocessor Endpoint execution
*/
-@Category({ CoprocessorTests.class, MediumTests.class })
+@Tag(CoprocessorTests.TAG)
+@Tag(MediumTests.TAG)
public class TestCoprocessorEndpointTracing {
+
private static final Logger logger =
LoggerFactory.getLogger(TestCoprocessorEndpointTracing.class);
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCoprocessorEndpointTracing.class);
+ private static final TableName TEST_TABLE =
+ TableName.valueOf(TestCoprocessorEndpointTracing.class.getSimpleName());
+ private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily");
+
+ private String testName;
+
+ @Order(1)
+ @RegisterExtension
+ private static final OpenTelemetryExtension otelExtension = OpenTelemetryExtension.create();
- private static final OpenTelemetryClassRule otelClassRule = OpenTelemetryClassRule.create();
- private static final MiniClusterRule miniclusterRule =
- MiniClusterRule.newBuilder().setConfiguration(() -> {
+ @Order(2)
+ @RegisterExtension
+ private static final MiniClusterExtension miniClusterExtension =
+ MiniClusterExtension.newBuilder().setConfiguration(() -> {
final Configuration conf = HBaseConfiguration.create();
conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 5000);
conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
@@ -117,39 +123,32 @@ public class TestCoprocessorEndpointTracing {
ProtobufCoprocessorService.class.getName());
return conf;
}).build();
- private static final ConnectionRule connectionRule = ConnectionRule.createConnectionRule(
- miniclusterRule::createConnection, miniclusterRule::createAsyncConnection);
-
- private static final class Setup extends ExternalResource {
- @Override
- protected void before() throws Throwable {
- final HBaseTestingUtility util = miniclusterRule.getTestingUtility();
- final AsyncConnection connection = connectionRule.getAsyncConnection();
- final AsyncAdmin admin = connection.getAdmin();
- final TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TEST_TABLE)
- .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();
- admin.createTable(tableDescriptor).get();
- util.waitUntilAllRegionsAssigned(TEST_TABLE);
- }
- }
- @ClassRule
- public static final TestRule testRule = RuleChain.outerRule(otelClassRule).around(miniclusterRule)
- .around(connectionRule).around(new Setup());
+ @Order(3)
+ @RegisterExtension
+ private static final ConnectionExtension connectionExtension =
+ ConnectionExtension.createConnectionExtension(miniClusterExtension::createConnection,
+ miniClusterExtension::createAsyncConnection);
- private static final TableName TEST_TABLE =
- TableName.valueOf(TestCoprocessorEndpointTracing.class.getSimpleName());
- private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily");
-
- @Rule
- public OpenTelemetryTestRule otelTestRule = new OpenTelemetryTestRule(otelClassRule);
+ @BeforeAll
+ public static void beforeAll() throws Exception {
+ final HBaseTestingUtility util = miniClusterExtension.getTestingUtility();
+ final AsyncConnection connection = connectionExtension.getAsyncConnection();
+ final AsyncAdmin admin = connection.getAdmin();
+ final TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TEST_TABLE)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY)).build();
+ admin.createTable(tableDescriptor).get();
+ util.waitUntilAllRegionsAssigned(TEST_TABLE);
+ }
- @Rule
- public TestName testName = new TestName();
+ @BeforeEach
+ public void setUp(TestInfo testInfo) throws Exception {
+ testName = testInfo.getTestMethod().get().getName();
+ }
@Test
public void traceAsyncTableEndpoint() {
- final AsyncConnection connection = connectionRule.getAsyncConnection();
+ final AsyncConnection connection = connectionExtension.getAsyncConnection();
final AsyncTable> table = connection.getTable(TEST_TABLE);
final EchoRequestProto request = EchoRequestProto.newBuilder().setMessage("hello").build();
final CompletableFuture> future = new CompletableFuture<>();
@@ -194,14 +193,14 @@ public void onError(Throwable error) {
} catch (InterruptedException | ExecutionException e) {
throw new RuntimeException(e);
}
- }, testName.getMethodName());
+ }, testName);
assertNotNull(results);
- assertTrue("coprocessor call returned no results.", MapUtils.isNotEmpty(results));
+ assertTrue(MapUtils.isNotEmpty(results), "coprocessor call returned no results.");
assertThat(results.values(), everyItem(allOf(notNullValue(), equalTo("hello"))));
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -217,7 +216,7 @@ public void onError(Throwable error) {
@Test
public void traceSyncTableEndpointCall() throws Exception {
- final Connection connection = connectionRule.getConnection();
+ final Connection connection = connectionExtension.getConnection();
try (final Table table = connection.getTable(TEST_TABLE)) {
final RpcController controller = new ServerRpcController();
final EchoRequestProto request = EchoRequestProto.newBuilder().setMessage("hello").build();
@@ -232,16 +231,16 @@ public void traceSyncTableEndpointCall() throws Exception {
} catch (Throwable t) {
throw new RuntimeException(t);
}
- }, testName.getMethodName());
+ }, testName);
assertNotNull(results);
- assertTrue("coprocessor call returned no results.", MapUtils.isNotEmpty(results));
+ assertTrue(MapUtils.isNotEmpty(results), "coprocessor call returned no results.");
assertThat(results.values(),
everyItem(allOf(notNullValue(), hasProperty("message", equalTo("hello")))));
}
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -257,7 +256,7 @@ public void traceSyncTableEndpointCall() throws Exception {
@Test
public void traceSyncTableEndpointCallAndCallback() throws Exception {
- final Connection connection = connectionRule.getConnection();
+ final Connection connection = connectionExtension.getConnection();
try (final Table table = connection.getTable(TEST_TABLE)) {
final RpcController controller = new ServerRpcController();
final EchoRequestProto request = EchoRequestProto.newBuilder().setMessage("hello").build();
@@ -273,16 +272,16 @@ public void traceSyncTableEndpointCallAndCallback() throws Exception {
} catch (Throwable t) {
throw new RuntimeException(t);
}
- }, testName.getMethodName());
+ }, testName);
assertNotNull(results);
- assertTrue("coprocessor call returned no results.", MapUtils.isNotEmpty(results));
+ assertTrue(MapUtils.isNotEmpty(results), "coprocessor call returned no results.");
assertThat(results.values(),
everyItem(allOf(notNullValue(), hasProperty("message", equalTo("hello")))));
}
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -298,7 +297,7 @@ public void traceSyncTableEndpointCallAndCallback() throws Exception {
@Test
public void traceSyncTableRegionCoprocessorRpcChannel() throws Exception {
- final Connection connection = connectionRule.getConnection();
+ final Connection connection = connectionExtension.getConnection();
try (final Table table = connection.getTable(TEST_TABLE)) {
final EchoRequestProto request = EchoRequestProto.newBuilder().setMessage("hello").build();
final EchoResponseProto response = TraceUtil.trace(() -> {
@@ -310,14 +309,14 @@ public void traceSyncTableRegionCoprocessorRpcChannel() throws Exception {
} catch (Throwable t) {
throw new RuntimeException(t);
}
- }, testName.getMethodName());
+ }, testName);
assertNotNull(response);
assertEquals("hello", response.getMessage());
}
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
/*
* This interface is really low level: it returns a Channel and expects the caller to invoke it.
@@ -333,7 +332,7 @@ public void traceSyncTableRegionCoprocessorRpcChannel() throws Exception {
@Test
public void traceSyncTableBatchEndpoint() throws Exception {
- final Connection connection = connectionRule.getConnection();
+ final Connection connection = connectionExtension.getConnection();
try (final Table table = connection.getTable(TEST_TABLE)) {
final Descriptors.MethodDescriptor descriptor =
TestProtobufRpcProto.getDescriptor().findMethodByName("echo");
@@ -345,15 +344,15 @@ public void traceSyncTableBatchEndpoint() throws Exception {
} catch (Throwable t) {
throw new RuntimeException(t);
}
- }, testName.getMethodName());
+ }, testName);
assertNotNull(response);
assertThat(response.values(),
everyItem(allOf(notNullValue(), hasProperty("message", equalTo("hello")))));
}
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -369,7 +368,7 @@ public void traceSyncTableBatchEndpoint() throws Exception {
@Test
public void traceSyncTableBatchEndpointCallback() throws Exception {
- final Connection connection = connectionRule.getConnection();
+ final Connection connection = connectionExtension.getConnection();
try (final Table table = connection.getTable(TEST_TABLE)) {
final Descriptors.MethodDescriptor descriptor =
TestProtobufRpcProto.getDescriptor().findMethodByName("echo");
@@ -382,16 +381,16 @@ public void traceSyncTableBatchEndpointCallback() throws Exception {
} catch (Throwable t) {
throw new RuntimeException(t);
}
- }, testName.getMethodName());
+ }, testName);
assertNotNull(results);
- assertTrue("coprocessor call returned no results.", MapUtils.isNotEmpty(results));
+ assertTrue(MapUtils.isNotEmpty(results), "coprocessor call returned no results.");
assertThat(results.values(),
everyItem(allOf(notNullValue(), hasProperty("message", equalTo("hello")))));
}
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -407,7 +406,7 @@ public void traceSyncTableBatchEndpointCallback() throws Exception {
@Test
public void traceAsyncAdminEndpoint() throws Exception {
- final AsyncConnection connection = connectionRule.getAsyncConnection();
+ final AsyncConnection connection = connectionExtension.getAsyncConnection();
final AsyncAdmin admin = connection.getAdmin();
final EchoRequestProto request = EchoRequestProto.newBuilder().setMessage("hello").build();
final ServiceCaller callback =
@@ -415,13 +414,13 @@ public void traceAsyncAdminEndpoint() throws Exception {
final String response = TraceUtil
.tracedFuture(() -> admin.coprocessorService(TestProtobufRpcProto::newStub, callback),
- testName.getMethodName())
+ testName)
.get().getMessage();
assertEquals("hello", response);
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -432,7 +431,7 @@ public void traceAsyncAdminEndpoint() throws Exception {
@Test
public void traceSyncAdminEndpoint() throws Exception {
- final Connection connection = connectionRule.getConnection();
+ final Connection connection = connectionExtension.getConnection();
try (final Admin admin = connection.getAdmin()) {
final TestProtobufRpcProto.BlockingInterface service =
TestProtobufRpcProto.newBlockingStub(admin.coprocessorService());
@@ -443,13 +442,13 @@ public void traceSyncAdminEndpoint() throws Exception {
} catch (ServiceException e) {
throw new RuntimeException(e);
}
- }, testName.getMethodName());
+ }, testName);
assertEquals("hello", response);
}
- final Matcher parentMatcher = allOf(hasName(testName.getMethodName()), hasEnded());
+ final Matcher parentMatcher = allOf(hasName(testName), hasEnded());
waitForAndLog(parentMatcher);
- final List spans = otelClassRule.getSpans();
+ final List spans = otelExtension.getSpans();
final SpanData testSpan =
spans.stream().filter(parentMatcher::matches).findFirst().orElseThrow(AssertionError::new);
@@ -459,10 +458,10 @@ public void traceSyncAdminEndpoint() throws Exception {
}
private void waitForAndLog(Matcher spanMatcher) {
- final Configuration conf = connectionRule.getAsyncConnection().getConfiguration();
+ final Configuration conf = connectionExtension.getAsyncConnection().getConfiguration();
Waiter.waitFor(conf, TimeUnit.SECONDS.toMillis(5),
- new MatcherPredicate<>(otelClassRule::getSpans, hasItem(spanMatcher)));
- final List spans = otelClassRule.getSpans();
+ new MatcherPredicate<>(otelExtension::getSpans, hasItem(spanMatcher)));
+ final List spans = otelExtension.getSpans();
if (logger.isDebugEnabled()) {
StringTraceRenderer renderer = new StringTraceRenderer(spans);
renderer.render(logger::debug);
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
index ceb5bcf3bcce..1be250ce2b72 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
@@ -17,13 +17,12 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import com.google.protobuf.ByteString;
import com.google.protobuf.ServiceException;
import java.io.IOException;
import java.util.Map;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -37,19 +36,16 @@
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
-
-@Category({ CoprocessorTests.class, MediumTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+
+@Tag(CoprocessorTests.TAG)
+@Tag(MediumTests.TAG)
public class TestCoprocessorTableEndpoint {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCoprocessorTableEndpoint.class);
private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily");
private static final byte[] TEST_QUALIFIER = Bytes.toBytes("TestQualifier");
@@ -61,22 +57,26 @@ public class TestCoprocessorTableEndpoint {
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
- @Rule
- public TestName name = new TestName();
+ private String methodName;
- @BeforeClass
+ @BeforeAll
public static void setupBeforeClass() throws Exception {
TEST_UTIL.startMiniCluster(2);
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
+ @BeforeEach
+ public void setUpEach(TestInfo testInfo) {
+ this.methodName = testInfo.getTestMethod().get().getName();
+ }
+
@Test
public void testCoprocessorTableEndpoint() throws Throwable {
- final TableName tableName = TableName.valueOf(name.getMethodName());
+ final TableName tableName = TableName.valueOf(methodName);
HTableDescriptor desc = new HTableDescriptor(tableName);
desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
@@ -88,7 +88,7 @@ public void testCoprocessorTableEndpoint() throws Throwable {
@Test
public void testDynamicCoprocessorTableEndpoint() throws Throwable {
- final TableName tableName = TableName.valueOf(name.getMethodName());
+ final TableName tableName = TableName.valueOf(methodName);
HTableDescriptor desc = new HTableDescriptor(tableName);
desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
@@ -167,7 +167,7 @@ private static final void verifyTable(TableName tableName) throws Throwable {
for (long i = 0; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
// scan: for region 2 and region 3
results.clear();
@@ -180,7 +180,7 @@ private static final void verifyTable(TableName tableName) throws Throwable {
for (int i = rowSeperator1; i < ROWSIZE; i++) {
expectedResult += i;
}
- assertEquals("Invalid result", expectedResult, sumResult);
+ assertEquals(expectedResult, sumResult, "Invalid result");
} finally {
table.close();
}
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestImportExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestImportExport.java
index 51592df1559a..3fc90ad703d0 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestImportExport.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestImportExport.java
@@ -18,26 +18,27 @@
package org.apache.hadoop.hbase.coprocessor;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.mapreduce.TestImportExportBase;
import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category(LargeTests.class)
-public class TestImportExport extends org.apache.hadoop.hbase.mapreduce.TestImportExport {
+@Tag(LargeTests.TAG)
+public class TestImportExport extends TestImportExportBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestImportExport.class);
-
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Throwable {
UTIL.getConfiguration().setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
org.apache.hadoop.hbase.coprocessor.Export.class.getName());
- org.apache.hadoop.hbase.mapreduce.TestImportExport.beforeClass();
+ TestImportExportBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void afterClass() throws Throwable {
+ UTIL.shutdownMiniCluster();
}
@Override
@@ -52,7 +53,7 @@ protected void runExportMain(String[] args) throws Throwable {
}
@Test
- @Ignore
+ @Disabled
@Override
public void testImport94Table() throws Throwable {
// Skip the test which is unrelated to the coprocessor.Export.
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
index da2a597f00f4..9fbb9b253b7f 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorEndpoint.java
@@ -17,8 +17,9 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -26,7 +27,6 @@
import java.io.FileNotFoundException;
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
@@ -39,25 +39,21 @@
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category({ CoprocessorTests.class, LargeTests.class })
+@Tag(CoprocessorTests.TAG)
+@Tag(LargeTests.TAG)
public class TestRegionServerCoprocessorEndpoint {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestRegionServerCoprocessorEndpoint.class);
-
public static final FileNotFoundException WHAT_TO_THROW = new FileNotFoundException("/file.txt");
private static HBaseTestingUtility TEST_UTIL = null;
private static Configuration CONF = null;
private static final String DUMMY_VALUE = "val";
- @BeforeClass
+ @BeforeAll
public static void setupBeforeClass() throws Exception {
TEST_UTIL = new HBaseTestingUtility();
CONF = TEST_UTIL.getConfiguration();
@@ -66,7 +62,7 @@ public static void setupBeforeClass() throws Exception {
TEST_UTIL.startMiniCluster();
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniCluster();
}
@@ -101,7 +97,7 @@ public void testEndpointExceptions() throws Exception {
TEST_UTIL.getAdmin().coprocessorService(serverName));
service.dummyThrow(controller,
DummyRegionServerEndpointProtos.DummyRequest.getDefaultInstance(), rpcCallback);
- assertEquals(null, rpcCallback.get());
+ assertNull(rpcCallback.get());
assertTrue(controller.failedOnException());
assertEquals(WHAT_TO_THROW.getClass().getName().trim(),
((RemoteWithExtrasException) controller.getFailedOn().getCause()).getClassName().trim());
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index 7746c6be51d7..bad387a36f31 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import com.google.protobuf.ServiceException;
import java.io.File;
@@ -33,7 +33,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -69,25 +68,20 @@
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ToolRunner;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.rules.TestName;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Iterables;
-@Category(LargeTests.class)
+@Tag(LargeTests.TAG)
public class TestSecureExport {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestSecureExport.class);
private static final Logger LOG = LoggerFactory.getLogger(TestSecureExport.class);
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
@@ -122,8 +116,7 @@ public class TestSecureExport {
private static final String CONFIDENTIAL = "confidential";
private static final String SECRET = "secret";
private static final String TOPSECRET = "topsecret";
- @Rule
- public final TestName name = new TestName();
+ private String methodName;
private static void setUpKdcServer() throws Exception {
KDC = UTIL.setupMiniKdc(KEYTAB_FILE);
@@ -170,12 +163,13 @@ private static void addLabels(final Configuration conf, final List users
getUserByLogin(USER_ADMIN).runAs(action);
}
- @Before
- public void announce() {
- LOG.info("Running " + name.getMethodName());
+ @BeforeEach
+ public void announce(TestInfo testInfo) {
+ methodName = testInfo.getTestMethod().get().getName();
+ LOG.info("Running " + methodName);
}
- @After
+ @AfterEach
public void cleanup() throws IOException {
}
@@ -189,7 +183,7 @@ private static void clearOutput(Path path) throws IOException {
/**
* Sets the security firstly for getting the correct default realm.
*/
- @BeforeClass
+ @BeforeAll
public static void beforeClass() throws Exception {
UserProvider.setUserProviderForTesting(UTIL.getConfiguration(),
HadoopSecurityEnabledUserProviderForTesting.class);
@@ -210,7 +204,7 @@ public static void beforeClass() throws Exception {
Arrays.asList(PRIVATE, CONFIDENTIAL, SECRET, TOPSECRET));
}
- @AfterClass
+ @AfterAll
public static void afterClass() throws Exception {
if (KDC != null) {
KDC.stop();
@@ -224,11 +218,10 @@ public static void afterClass() throws Exception {
*/
@Test
public void testAccessCase() throws Throwable {
- final String exportTable = name.getMethodName();
- TableDescriptor exportHtd =
- TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()))
- .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)).setOwnerString(USER_OWNER)
- .build();
+ final String exportTable = methodName;
+ TableDescriptor exportHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)).setOwnerString(USER_OWNER)
+ .build();
SecureTestUtil.createTable(UTIL, exportHtd, new byte[][] { Bytes.toBytes("s") });
SecureTestUtil.grantOnTable(UTIL, USER_RO, TableName.valueOf(exportTable), null, null,
Permission.Action.READ);
@@ -283,11 +276,11 @@ public void testAccessCase() throws Throwable {
// owned by the current user, not the hbase service user.
FileStatus outputDirFileStatus = fs.getFileStatus(new Path(openDir, "output"));
String currentUserName = User.getCurrent().getShortName();
- assertEquals("Unexpected file owner", currentUserName, outputDirFileStatus.getOwner());
+ assertEquals(currentUserName, outputDirFileStatus.getOwner(), "Unexpected file owner");
FileStatus[] outputFileStatus = fs.listStatus(new Path(openDir, "output"));
for (FileStatus fileStatus : outputFileStatus) {
- assertEquals("Unexpected file owner", currentUserName, fileStatus.getOwner());
+ assertEquals(currentUserName, fileStatus.getOwner(), "Unexpected file owner");
}
} else {
LOG.info("output directory doesn't exist. Skip check");
@@ -311,8 +304,8 @@ public void testAccessCase() throws Throwable {
@Test
- @org.junit.Ignore // See HBASE-23990
+ @org.junit.jupiter.api.Disabled // See HBASE-23990
public void testVisibilityLabels() throws IOException, Throwable {
- final String exportTable = name.getMethodName() + "_export";
- final String importTable = name.getMethodName() + "_import";
+ final String exportTable = methodName + "_export";
+ final String importTable = methodName + "_import";
final TableDescriptor exportHtd =
TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable))
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA)).setOwnerString(USER_OWNER)
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java
index 0d75f6e625a4..29888b2deab1 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/ipc/TestCoprocessorRpcUtils.java
@@ -17,24 +17,18 @@
*/
package org.apache.hadoop.hbase.ipc;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import com.google.protobuf.Descriptors;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.DummyRegionServerEndpointProtos;
import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
-@Category(SmallTests.class)
+@Tag(SmallTests.TAG)
public class TestCoprocessorRpcUtils {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestCoprocessorRpcUtils.class);
-
@Test
public void testServiceName() throws Exception {
// verify that we de-namespace build in HBase rpc services
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index 6cfeaf89a5fa..9cab130ed266 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -17,9 +17,9 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
@@ -29,7 +29,6 @@
import java.util.Collections;
import java.util.Map;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@@ -56,21 +55,18 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({ RegionServerTests.class, MediumTests.class })
+@Tag(RegionServerTests.TAG)
+@Tag(MediumTests.TAG)
public class TestServerCustomProtocol {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestServerCustomProtocol.class);
private static final Logger LOG = LoggerFactory.getLogger(TestServerCustomProtocol.class);
static final String WHOAREYOU = "Who are you?";
@@ -150,14 +146,14 @@ public Iterable getServices() {
private static HBaseTestingUtility util = new HBaseTestingUtility();
- @BeforeClass
+ @BeforeAll
public static void setupBeforeClass() throws Exception {
util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
PingHandler.class.getName());
util.startMiniCluster();
}
- @Before
+ @BeforeEach
public void before() throws Exception {
final byte[][] SPLIT_KEYS = new byte[][] { ROW_B, ROW_C };
Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
@@ -175,12 +171,12 @@ public void before() throws Exception {
table.put(putc);
}
- @After
+ @AfterEach
public void after() throws Exception {
util.deleteTable(TEST_TABLE);
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
util.shutdownMiniCluster();
}
@@ -192,7 +188,7 @@ public void testSingleProxy() throws Throwable {
// There are three regions so should get back three results.
assertEquals(3, results.size());
for (Map.Entry<byte[], String> e : results.entrySet()) {
- assertEquals("Invalid custom protocol response", "pong", e.getValue());
+ assertEquals("pong", e.getValue(), "Invalid custom protocol response");
}
hello(table, "George", HELLO + "George");
LOG.info("Did george");
@@ -239,7 +235,7 @@ private Map hello(final Table table, final String send, final St
throws ServiceException, Throwable {
Map results = hello(table, send);
for (Map.Entry<byte[], String> e : results.entrySet()) {
- assertEquals("Invalid custom protocol response", response, e.getValue());
+ assertEquals(response, e.getValue(), "Invalid custom protocol response");
}
return results;
}
@@ -355,8 +351,8 @@ public void testRowRange() throws Throwable {
assertEquals(2, results.size());
// should contain last 2 regions
HRegionLocation loc = locator.getRegionLocation(ROW_A, true);
- assertNull("Should be missing region for row aaa (prior to start row)",
- results.get(loc.getRegionInfo().getRegionName()));
+ assertNull(results.get(loc.getRegion().getRegionName()),
+ "Should be missing region for row aaa (prior to start row)");
verifyRegionResults(locator, results, ROW_B);
verifyRegionResults(locator, results, ROW_C);
@@ -367,8 +363,8 @@ public void testRowRange() throws Throwable {
verifyRegionResults(locator, results, ROW_A);
verifyRegionResults(locator, results, ROW_B);
loc = locator.getRegionLocation(ROW_C, true);
- assertNull("Should be missing region for row ccc (past stop row)",
- results.get(loc.getRegionInfo().getRegionName()));
+ assertNull(results.get(loc.getRegion().getRegionName()),
+ "Should be missing region for row ccc (past stop row)");
// test explicit start + end
results = ping(table, ROW_AB, ROW_BC);
@@ -377,8 +373,8 @@ public void testRowRange() throws Throwable {
verifyRegionResults(locator, results, ROW_A);
verifyRegionResults(locator, results, ROW_B);
loc = locator.getRegionLocation(ROW_C, true);
- assertNull("Should be missing region for row ccc (past stop row)",
- results.get(loc.getRegionInfo().getRegionName()));
+ assertNull(results.get(loc.getRegion().getRegionName()),
+ "Should be missing region for row ccc (past stop row)");
// test single region
results = ping(table, ROW_B, ROW_BC);
@@ -386,11 +382,11 @@ public void testRowRange() throws Throwable {
assertEquals(1, results.size());
verifyRegionResults(locator, results, ROW_B);
loc = locator.getRegionLocation(ROW_A, true);
- assertNull("Should be missing region for row aaa (prior to start)",
- results.get(loc.getRegionInfo().getRegionName()));
+ assertNull(results.get(loc.getRegion().getRegionName()),
+ "Should be missing region for row aaa (prior to start)");
loc = locator.getRegionLocation(ROW_C, true);
- assertNull("Should be missing region for row ccc (past stop row)",
- results.get(loc.getRegionInfo().getRegionName()));
+ assertNull(results.get(loc.getRegion().getRegionName()),
+ "Should be missing region for row ccc (past stop row)");
}
}
@@ -449,7 +445,7 @@ public void testNullReturn() throws Throwable {
public void testEmptyReturnType() throws Throwable {
try (Table table = util.getConnection().getTable(TEST_TABLE)) {
Map results = noop(table, ROW_A, ROW_C);
- assertEquals("Should have results from three regions", 3, results.size());
+ assertEquals(3, results.size(), "Should have results from three regions");
// all results should be null
for (Object v : results.values()) {
assertNull(v);
@@ -470,9 +466,9 @@ private void verifyRegionResults(RegionLocator regionLocator, Map"));
- assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
- assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false"));
- } finally {
- System.setErr(oldPrintStream);
- System.setSecurityManager(SECURITY_MANAGER);
- }
- }
-
- @Test
- public void testExportScan() throws Exception {
- int version = 100;
- long startTime = EnvironmentEdgeManager.currentTime();
- long endTime = startTime + 1;
- String prefix = "row";
- String label_0 = "label_0";
- String label_1 = "label_1";
- String[] args = { "table", "outputDir", String.valueOf(version), String.valueOf(startTime),
- String.valueOf(endTime), prefix };
- Scan scan = ExportUtils.getScanFromCommandLine(UTIL.getConfiguration(), args);
- assertEquals(version, scan.getMaxVersions());
- assertEquals(startTime, scan.getTimeRange().getMin());
- assertEquals(endTime, scan.getTimeRange().getMax());
- assertEquals(true, (scan.getFilter() instanceof PrefixFilter));
- assertEquals(0,
- Bytes.compareTo(((PrefixFilter) scan.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix)));
- String[] argsWithLabels =
- { "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + label_0 + "," + label_1, "table",
- "outputDir", String.valueOf(version), String.valueOf(startTime), String.valueOf(endTime),
- prefix };
- Configuration conf = new Configuration(UTIL.getConfiguration());
- // parse the "-D" options
- String[] otherArgs = new GenericOptionsParser(conf, argsWithLabels).getRemainingArgs();
- Scan scanWithLabels = ExportUtils.getScanFromCommandLine(conf, otherArgs);
- assertEquals(version, scanWithLabels.getMaxVersions());
- assertEquals(startTime, scanWithLabels.getTimeRange().getMin());
- assertEquals(endTime, scanWithLabels.getTimeRange().getMax());
- assertEquals(true, (scanWithLabels.getFilter() instanceof PrefixFilter));
- assertEquals(0, Bytes.compareTo(((PrefixFilter) scanWithLabels.getFilter()).getPrefix(),
- Bytes.toBytesBinary(prefix)));
- assertEquals(2, scanWithLabels.getAuthorizations().getLabels().size());
- assertEquals(label_0, scanWithLabels.getAuthorizations().getLabels().get(0));
- assertEquals(label_1, scanWithLabels.getAuthorizations().getLabels().get(1));
- }
-
- /**
- * test main method. Export should print help and call System.exit
- */
- @Test
- public void testExportMain() throws Throwable {
- PrintStream oldPrintStream = System.err;
- SecurityManager SECURITY_MANAGER = System.getSecurityManager();
- LauncherSecurityManager newSecurityManager = new LauncherSecurityManager();
- System.setSecurityManager(newSecurityManager);
- ByteArrayOutputStream data = new ByteArrayOutputStream();
- String[] args = {};
- System.setErr(new PrintStream(data));
- try {
- System.setErr(new PrintStream(data));
- runExportMain(args);
- fail("should be SecurityException");
- } catch (SecurityException e) {
- assertEquals(-1, newSecurityManager.getExitCode());
- String errMsg = data.toString();
- assertTrue(errMsg.contains("Wrong number of arguments:"));
- assertTrue(
- errMsg.contains("Usage: Export [-D ]* [ "
- + "[ []] [^[regex pattern] or [Prefix] to filter]]"));
- assertTrue(errMsg.contains("-D hbase.mapreduce.scan.column.family=,, ..."));
- assertTrue(errMsg.contains("-D hbase.mapreduce.include.deleted.rows=true"));
- assertTrue(errMsg.contains("-D hbase.client.scanner.caching=100"));
- assertTrue(errMsg.contains("-D hbase.export.scanner.batch=10"));
- assertTrue(errMsg.contains("-D hbase.export.scanner.caching=100"));
- } finally {
- System.setErr(oldPrintStream);
- System.setSecurityManager(SECURITY_MANAGER);
- }
- }
-
- /**
- * Test map method of Importer
- */
- @SuppressWarnings({ "unchecked", "rawtypes" })
- @Test
- public void testKeyValueImporter() throws Throwable {
- KeyValueImporter importer = new KeyValueImporter();
- Configuration configuration = new Configuration();
- Context ctx = mock(Context.class);
- when(ctx.getConfiguration()).thenReturn(configuration);
-
- doAnswer(new Answer<Void>() {
-
- @Override
- public Void answer(InvocationOnMock invocation) throws Throwable {
- ImmutableBytesWritable writer = invocation.getArgument(0);
- KeyValue key = invocation.getArgument(1);
- assertEquals("Key", Bytes.toString(writer.get()));
- assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
- return null;
- }
- }).when(ctx).write(any(), any());
-
- importer.setup(ctx);
- Result value = mock(Result.class);
- KeyValue[] keys = {
- new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
- Bytes.toBytes("value")),
- new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
- Bytes.toBytes("value1")) };
- when(value.rawCells()).thenReturn(keys);
- importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx);
-
- }
-
- /**
- * Test addFilterAndArguments method of Import This method set couple parameters into
- * Configuration
- */
- @Test
- public void testAddFilterAndArguments() throws IOException {
- Configuration configuration = new Configuration();
-
- List<String> args = new ArrayList<>();
- args.add("param1");
- args.add("param2");
-
- Import.addFilterAndArguments(configuration, FilterBase.class, args);
- assertEquals("org.apache.hadoop.hbase.filter.FilterBase",
- configuration.get(Import.FILTER_CLASS_CONF_KEY));
- assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
- }
-
- @Test
- public void testDurability() throws Throwable {
- // Create an export table.
- String exportTableName = name.getMethodName() + "export";
- try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) {
-
- // Insert some data
- Put put = new Put(ROW1);
- put.addColumn(FAMILYA, QUAL, now, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
- exportTable.put(put);
-
- put = new Put(ROW2);
- put.addColumn(FAMILYA, QUAL, now, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
- put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
- exportTable.put(put);
-
- // Run the export
- String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000" };
- assertTrue(runExport(args));
-
- // Create the table for import
- String importTableName = name.getMethodName() + "import1";
- Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
-
- // Register the wal listener for the import table
- RegionInfo region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
- .getRegions(importTable.getName()).get(0).getRegionInfo();
- TableWALActionListener walListener = new TableWALActionListener(region);
- WAL wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
- wal.registerWALActionsListener(walListener);
-
- // Run the import with SKIP_WAL
- args = new String[] { "-D" + Import.WAL_DURABILITY + "=" + Durability.SKIP_WAL.name(),
- importTableName, FQ_OUTPUT_DIR };
- assertTrue(runImport(args));
- // Assert that the wal is not visisted
- assertTrue(!walListener.isWALVisited());
- // Ensure that the count is 2 (only one version of key value is obtained)
- assertTrue(getCount(importTable, null) == 2);
-
- // Run the import with the default durability option
- importTableName = name.getMethodName() + "import2";
- importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
- region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
- .getRegions(importTable.getName()).get(0).getRegionInfo();
- wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
- walListener = new TableWALActionListener(region);
- wal.registerWALActionsListener(walListener);
- args = new String[] { importTableName, FQ_OUTPUT_DIR };
- assertTrue(runImport(args));
- // Assert that the wal is visisted
- assertTrue(walListener.isWALVisited());
- // Ensure that the count is 2 (only one version of key value is obtained)
- assertTrue(getCount(importTable, null) == 2);
- }
- }
-
- /**
- * This listens to the {@link #visitLogEntryBeforeWrite(RegionInfo, WALKey, WALEdit)} to identify
- * that an entry is written to the Write Ahead Log for the given table.
- */
- private static class TableWALActionListener implements WALActionsListener {
-
- private RegionInfo regionInfo;
- private boolean isVisited = false;
-
- public TableWALActionListener(RegionInfo region) {
- this.regionInfo = region;
- }
-
- @Override
- public void visitLogEntryBeforeWrite(RegionInfo info, WALKey logKey, WALEdit logEdit) {
- if (
- logKey.getTableName().getNameAsString()
- .equalsIgnoreCase(this.regionInfo.getTable().getNameAsString()) && (!logEdit.isMetaEdit())
- ) {
- isVisited = true;
- }
- }
-
- public boolean isWALVisited() {
- return isVisited;
- }
- }
-
- /**
- * Add cell tags to delete mutations, run export and import tool and verify that tags are present
- * in import table also.
- * @throws Throwable throws Throwable.
- */
- @Test
- public void testTagsAddition() throws Throwable {
- final TableName exportTable = TableName.valueOf(name.getMethodName());
- TableDescriptor desc = TableDescriptorBuilder.newBuilder(exportTable)
- .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
- .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
- .setCoprocessor(MetadataController.class.getName()).build();
- UTIL.getAdmin().createTable(desc);
-
- Table exportT = UTIL.getConnection().getTable(exportTable);
-
- // Add first version of QUAL
- Put p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- exportT.put(p);
-
- // Add Delete family marker
- Delete d = new Delete(ROW1, now + 3);
- // Add test attribute to delete mutation.
- d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
- exportT.delete(d);
-
- // Run export tool with KeyValueCodecWithTags as Codec. This will ensure that export tool
- // will use KeyValueCodecWithTags.
- String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true",
- // This will make sure that codec will encode and decode tags in rpc call.
- "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
- exportTable.getNameAsString(), FQ_OUTPUT_DIR, "1000", // max number of key versions per key to
- // export
- };
- assertTrue(runExport(args));
- // Assert tag exists in exportTable
- checkWhetherTagExists(exportTable, true);
-
- // Create an import table with MetadataController.
- final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
- TableDescriptor importTableDesc = TableDescriptorBuilder.newBuilder(importTable)
- .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
- .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
- .setCoprocessor(MetadataController.class.getName()).build();
- UTIL.getAdmin().createTable(importTableDesc);
-
- // Run import tool.
- args = new String[] {
- // This will make sure that codec will encode and decode tags in rpc call.
- "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
- importTable.getNameAsString(), FQ_OUTPUT_DIR };
- assertTrue(runImport(args));
- // Make sure that tags exists in imported table.
- checkWhetherTagExists(importTable, true);
- }
-
- private void checkWhetherTagExists(TableName table, boolean tagExists) throws IOException {
- List<Cell> values = new ArrayList<>();
- for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
- Scan scan = new Scan();
- // Make sure to set rawScan to true so that we will get Delete Markers.
- scan.setRaw(true);
- scan.readAllVersions();
- scan.withStartRow(ROW1);
- // Need to use RegionScanner instead of table#getScanner since the latter will
- // not return tags since it will go through rpc layer and remove tags intentionally.
- RegionScanner scanner = region.getScanner(scan);
- scanner.next(values);
- if (!values.isEmpty()) {
- break;
- }
- }
- boolean deleteFound = false;
- for (Cell cell : values) {
- if (PrivateCellUtil.isDelete(cell.getType().getCode())) {
- deleteFound = true;
- List<Tag> tags = PrivateCellUtil.getTags(cell);
- // If tagExists flag is true then validate whether tag contents are as expected.
- if (tagExists) {
- Assert.assertEquals(1, tags.size());
- for (Tag tag : tags) {
- Assert.assertEquals(TEST_TAG, Tag.getValueAsString(tag));
- }
- } else {
- // If tagExists flag is disabled then check for 0 size tags.
- assertEquals(0, tags.size());
- }
- }
- }
- Assert.assertTrue(deleteFound);
- }
-
- /*
- * This co-proc will add a cell tag to delete mutation.
- */
- public static class MetadataController implements RegionCoprocessor, RegionObserver {
- @Override
- public Optional<RegionObserver> getRegionObserver() {
- return Optional.of(this);
- }
-
- @Override
- public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> c,
- MiniBatchOperationInProgress<Mutation> miniBatchOp) throws IOException {
- if (c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) {
- return;
- }
- for (int i = 0; i < miniBatchOp.size(); i++) {
- Mutation m = miniBatchOp.getOperation(i);
- if (!(m instanceof Delete)) {
- continue;
- }
- byte[] sourceOpAttr = m.getAttribute(TEST_ATTR);
- if (sourceOpAttr == null) {
- continue;
- }
- Tag sourceOpTag = new ArrayBackedTag(TEST_TAG_TYPE, sourceOpAttr);
- List<Cell> updatedCells = new ArrayList<>();
- for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
- Cell cell = cellScanner.current();
- List<Tag> tags = PrivateCellUtil.getTags(cell);
- tags.add(sourceOpTag);
- Cell updatedCell = PrivateCellUtil.createCell(cell, tags);
- updatedCells.add(updatedCell);
- }
- m.getFamilyCellMap().clear();
- // Clear and add new Cells to the Mutation.
- for (Cell cell : updatedCells) {
- Delete d = (Delete) m;
- d.add(cell);
- }
- }
- }
- }
-
- /**
- * Set hbase.client.rpc.codec and hbase.client.default.rpc.codec both to empty string This means
- * it will use no Codec. Make sure that we don't return Tags in response.
- * @throws Exception Exception
- */
- @Test
- public void testTagsWithEmptyCodec() throws Exception {
- TableName tableName = TableName.valueOf(name.getMethodName());
- TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName)
- .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
- .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
- .setCoprocessor(MetadataController.class.getName()).build();
- UTIL.getAdmin().createTable(tableDesc);
- Configuration conf = new Configuration(UTIL.getConfiguration());
- conf.set(RPC_CODEC_CONF_KEY, "");
- conf.set(DEFAULT_CODEC_CLASS, "");
- try (Connection connection = ConnectionFactory.createConnection(conf);
- Table table = connection.getTable(tableName)) {
- // Add first version of QUAL
- Put p = new Put(ROW1);
- p.addColumn(FAMILYA, QUAL, now, QUAL);
- table.put(p);
-
- // Add Delete family marker
- Delete d = new Delete(ROW1, now + 3);
- // Add test attribute to delete mutation.
- d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
- table.delete(d);
-
- // Since RPC_CODEC_CONF_KEY and DEFAULT_CODEC_CLASS is set to empty, it will use
- // empty Codec and it shouldn't encode/decode tags.
- Scan scan = new Scan().withStartRow(ROW1).setRaw(true);
- ResultScanner scanner = table.getScanner(scan);
- int count = 0;
- Result result;
- while ((result = scanner.next()) != null) {
- List<Cell> cells = result.listCells();
- assertEquals(2, cells.size());
- Cell cell = cells.get(0);
- assertTrue(CellUtil.isDelete(cell));
- List<Tag> tags = PrivateCellUtil.getTags(cell);
- assertEquals(0, tags.size());
- count++;
- }
- assertEquals(1, count);
- } finally {
- UTIL.deleteTable(tableName);
- }
- }
}
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExportBase.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExportBase.java
new file mode 100644
index 000000000000..a91c2e36dcb0
--- /dev/null
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExportBase.java
@@ -0,0 +1,980 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.mapreduce;
+
+import static org.apache.hadoop.hbase.HConstants.RPC_CODEC_CONF_KEY;
+import static org.apache.hadoop.hbase.ipc.RpcClient.DEFAULT_CODEC_CLASS;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.ArrayBackedTag;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellScanner;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.ExtendedCell;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.KeepDeletedCells;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.Tag;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.Delete;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.ResultScanner;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionObserver;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hadoop.hbase.filter.PrefixFilter;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.mapreduce.Import.KeyValueImporter;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.regionserver.RegionScanner;
+import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.hadoop.hbase.util.LauncherSecurityManager;
+import org.apache.hadoop.hbase.wal.WAL;
+import org.apache.hadoop.hbase.wal.WALEdit;
+import org.apache.hadoop.hbase.wal.WALKey;
+import org.apache.hadoop.mapreduce.Mapper.Context;
+import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.util.ToolRunner;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInfo;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.stubbing.Answer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Base class for testing Import/Export. Shared logic without @BeforeAll/@AfterAll to allow
+ * subclasses to manage their own lifecycle.
+ */
+public class TestImportExportBase {
+
+ private static final Logger LOG = LoggerFactory.getLogger(TestImportExportBase.class);
+ protected static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+ private static final byte[] ROW1 = Bytes.toBytesBinary("\\x32row1");
+ private static final byte[] ROW2 = Bytes.toBytesBinary("\\x32row2");
+ private static final byte[] ROW3 = Bytes.toBytesBinary("\\x32row3");
+ private static final String FAMILYA_STRING = "a";
+ private static final String FAMILYB_STRING = "b";
+ private static final byte[] FAMILYA = Bytes.toBytes(FAMILYA_STRING);
+ private static final byte[] FAMILYB = Bytes.toBytes(FAMILYB_STRING);
+ private static final byte[] QUAL = Bytes.toBytes("q");
+ private static final String OUTPUT_DIR = "outputdir";
+ private static String FQ_OUTPUT_DIR;
+ private static final String EXPORT_BATCH_SIZE = "100";
+
+ private static final long now = EnvironmentEdgeManager.currentTime();
+ private final TableName EXPORT_TABLE = TableName.valueOf("export_table");
+ private final TableName IMPORT_TABLE = TableName.valueOf("import_table");
+ public static final byte TEST_TAG_TYPE = (byte) (Tag.CUSTOM_TAG_TYPE_RANGE + 1);
+ public static final String TEST_ATTR = "source_op";
+ public static final String TEST_TAG = "test_tag";
+
+ protected String name;
+
+ @BeforeEach
+ public void announce(TestInfo testInfo) {
+ name = testInfo.getTestMethod().get().getName();
+ LOG.info("Running {}", name);
+ }
+
+ @AfterEach
+ public void cleanup() throws Throwable {
+ FileSystem fs = FileSystem.get(UTIL.getConfiguration());
+ fs.delete(new Path(OUTPUT_DIR), true);
+ if (UTIL.getAdmin().tableExists(EXPORT_TABLE)) {
+ UTIL.deleteTable(EXPORT_TABLE);
+ }
+ if (UTIL.getAdmin().tableExists(IMPORT_TABLE)) {
+ UTIL.deleteTable(IMPORT_TABLE);
+ }
+ }
+
+ protected static void setUpBeforeClass() throws Exception {
+ // Up the handlers; this test needs more than usual.
+ UTIL.getConfiguration().setInt(HConstants.REGION_SERVER_HIGH_PRIORITY_HANDLER_COUNT, 10);
+ UTIL.startMiniCluster();
+ FQ_OUTPUT_DIR =
+ new Path(OUTPUT_DIR).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString();
+ }
+
+ /**
+ * Runs an export job with the specified command line args
+ * @return true if job completed successfully
+ */
+ protected boolean runExport(String[] args) throws Throwable {
+    // need to make a copy of the configuration to make sure different temp dirs are used.
+ int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Export(), args);
+ return status == 0;
+ }
+
+ protected void runExportMain(String[] args) throws Throwable {
+ Export.main(args);
+ }
+
+ /**
+ * Runs an import job with the specified command line args
+ * @return true if job completed successfully
+ */
+ boolean runImport(String[] args) throws Throwable {
+    // need to make a copy of the configuration to make sure different temp dirs are used.
+ int status = ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args);
+ return status == 0;
+ }
+
+ /**
+ * Test simple replication case with column mapping
+ */
+ @Test
+ public void testSimpleCase() throws Throwable {
+ try (Table t = UTIL.createTable(TableName.valueOf(name), FAMILYA, 3);) {
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ t.put(p);
+ p = new Put(ROW2);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ t.put(p);
+ p = new Put(ROW3);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ t.put(p);
+ }
+
+ String[] args = new String[] {
+ // Only export row1 & row2.
+ "-D" + TableInputFormat.SCAN_ROW_START + "=\\x32row1",
+ "-D" + TableInputFormat.SCAN_ROW_STOP + "=\\x32row3", name, FQ_OUTPUT_DIR, "1000", // max
+ // number
+ // of key
+ // versions
+ // per key
+ // to
+ // export
+ };
+ assertTrue(runExport(args));
+
+ final String IMPORT_TABLE = name + "import";
+ try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), FAMILYB, 3);) {
+ args =
+ new String[] { "-D" + Import.CF_RENAME_PROP + "=" + FAMILYA_STRING + ":" + FAMILYB_STRING,
+ IMPORT_TABLE, FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+
+ Get g = new Get(ROW1);
+ g.setMaxVersions();
+ Result r = t.get(g);
+ assertEquals(3, r.size());
+ g = new Get(ROW2);
+ g.setMaxVersions();
+ r = t.get(g);
+ assertEquals(3, r.size());
+ g = new Get(ROW3);
+ r = t.get(g);
+ assertEquals(0, r.size());
+ }
+ }
+
+ /**
+ * Test export hbase:meta table
+ */
+ @Test
+ public void testMetaExport() throws Throwable {
+ String[] args =
+ new String[] { TableName.META_TABLE_NAME.getNameAsString(), FQ_OUTPUT_DIR, "1", "0", "0" };
+ assertTrue(runExport(args));
+ }
+
+ /**
+ * Test import data from 0.94 exported file
+ */
+ @Test
+ public void testImport94Table() throws Throwable {
+ final String name = "exportedTableIn94Format";
+ URL url = TestImportExport.class.getResource(name);
+ File f = new File(url.toURI());
+ if (!f.exists()) {
+ LOG.warn("FAILED TO FIND " + f + "; skipping out on test");
+ return;
+ }
+ assertTrue(f.exists());
+ LOG.info("FILE=" + f);
+ Path importPath = new Path(f.toURI());
+ FileSystem fs = FileSystem.get(UTIL.getConfiguration());
+ fs.copyFromLocalFile(importPath, new Path(FQ_OUTPUT_DIR + Path.SEPARATOR + name));
+ String IMPORT_TABLE = name;
+ try (Table t = UTIL.createTable(TableName.valueOf(IMPORT_TABLE), Bytes.toBytes("f1"), 3);) {
+ String[] args = new String[] { "-Dhbase.import.version=0.94", IMPORT_TABLE, FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+ // @formatter:off
+ // exportedTableIn94Format contains 5 rows
+ // ROW COLUMN+CELL
+ // r1 column=f1:c1, timestamp=1383766761171, value=val1
+ // r2 column=f1:c1, timestamp=1383766771642, value=val2
+ // r3 column=f1:c1, timestamp=1383766777615, value=val3
+ // r4 column=f1:c1, timestamp=1383766785146, value=val4
+ // r5 column=f1:c1, timestamp=1383766791506, value=val5
+ // @formatter:on
+ assertEquals(5, UTIL.countRows(t));
+ }
+ }
+
+ /**
+ * Test export scanner batching
+ */
+ @Test
+ public void testExportScannerBatching() throws Throwable {
+ TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(name))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(1).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+ try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
+
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+ t.put(p);
+ // added scanner batching arg.
+ String[] args = new String[] { "-D" + ExportUtils.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE,
+ name, FQ_OUTPUT_DIR };
+ assertTrue(runExport(args));
+
+ FileSystem fs = FileSystem.get(UTIL.getConfiguration());
+ fs.delete(new Path(FQ_OUTPUT_DIR), true);
+ }
+ }
+
+ @Test
+ public void testWithDeletes() throws Throwable {
+ TableDescriptor desc = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf(name)).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+ try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
+
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+ p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+ t.put(p);
+
+ Delete d = new Delete(ROW1, now + 3);
+ t.delete(d);
+ d = new Delete(ROW1);
+ d.addColumns(FAMILYA, QUAL, now + 2);
+ t.delete(d);
+ }
+
+ String[] args =
+ new String[] { "-D" + ExportUtils.RAW_SCAN + "=true", name, FQ_OUTPUT_DIR, "1000", // max
+ // number
+ // of key
+ // versions
+ // per key
+ // to
+ // export
+ };
+ assertTrue(runExport(args));
+
+ final String IMPORT_TABLE = name + "import";
+ desc = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf(IMPORT_TABLE)).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+ try (Table t = UTIL.getConnection().getTable(desc.getTableName());) {
+ args = new String[] { IMPORT_TABLE, FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+
+ Scan s = new Scan();
+ s.setMaxVersions();
+ s.setRaw(true);
+ ResultScanner scanner = t.getScanner(s);
+ Result r = scanner.next();
+ Cell[] res = r.rawCells();
+ assertTrue(PrivateCellUtil.isDeleteFamily(res[0]));
+ assertEquals(now + 4, res[1].getTimestamp());
+ assertEquals(now + 3, res[2].getTimestamp());
+ assertTrue(CellUtil.isDelete(res[3]));
+ assertEquals(now + 2, res[4].getTimestamp());
+ assertEquals(now + 1, res[5].getTimestamp());
+ assertEquals(now, res[6].getTimestamp());
+ }
+ }
+
+ @Test
+ public void testWithMultipleDeleteFamilyMarkersOfSameRowSameFamily() throws Throwable {
+ final TableName exportTable = TableName.valueOf(name);
+ TableDescriptor desc = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf(name)).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+
+ Table exportT = UTIL.getConnection().getTable(exportTable);
+
+ // Add first version of QUAL
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ exportT.put(p);
+
+ // Add Delete family marker
+ Delete d = new Delete(ROW1, now + 3);
+ exportT.delete(d);
+
+ // Add second version of QUAL
+ p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now + 5, "s".getBytes());
+ exportT.put(p);
+
+ // Add second Delete family marker
+ d = new Delete(ROW1, now + 7);
+ exportT.delete(d);
+
+ String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true",
+ exportTable.getNameAsString(), FQ_OUTPUT_DIR, "1000", // max number of key versions per key to
+ // export
+ };
+ assertTrue(runExport(args));
+
+ final String importTable = name + "import";
+ desc = TableDescriptorBuilder
+ .newBuilder(TableName.valueOf(importTable)).setColumnFamily(ColumnFamilyDescriptorBuilder
+ .newBuilder(FAMILYA).setMaxVersions(5).setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+
+ Table importT = UTIL.getConnection().getTable(TableName.valueOf(importTable));
+ args = new String[] { importTable, FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+
+ Scan s = new Scan();
+ s.setMaxVersions();
+ s.setRaw(true);
+
+ ResultScanner importedTScanner = importT.getScanner(s);
+ Result importedTResult = importedTScanner.next();
+
+ ResultScanner exportedTScanner = exportT.getScanner(s);
+ Result exportedTResult = exportedTScanner.next();
+ try {
+ Result.compareResults(exportedTResult, importedTResult);
+ } catch (Throwable e) {
+      fail("Original and imported tables data comparison failed with error:" + e.getMessage());
+ } finally {
+ exportT.close();
+ importT.close();
+ }
+ }
+
+ /**
+ * Create a simple table, run an Export Job on it, Import with filtering on, verify counts,
+ * attempt with invalid values.
+ */
+ @Test
+ public void testWithFilter() throws Throwable {
+ // Create simple table to export
+ TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(name))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+ Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
+
+ Put p1 = new Put(ROW1);
+ p1.addColumn(FAMILYA, QUAL, now, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+ p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+
+ // Having another row would actually test the filter.
+ Put p2 = new Put(ROW2);
+ p2.addColumn(FAMILYA, QUAL, now, QUAL);
+
+ exportTable.put(Arrays.asList(p1, p2));
+
+ // Export the simple table
+ String[] args = new String[] { name, FQ_OUTPUT_DIR, "1000" };
+ assertTrue(runExport(args));
+
+ // Import to a new table
+ final String IMPORT_TABLE = name + "import";
+ desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(IMPORT_TABLE))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+
+ Table importTable = UTIL.getConnection().getTable(desc.getTableName());
+ args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + PrefixFilter.class.getName(),
+ "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1), IMPORT_TABLE, FQ_OUTPUT_DIR,
+ "1000" };
+ assertTrue(runImport(args));
+
+ // get the count of the source table for that time range
+ PrefixFilter filter = new PrefixFilter(ROW1);
+ int count = getCount(exportTable, filter);
+
+ assertEquals(count, getCount(importTable, null),
+ "Unexpected row count between export and import tables");
+
+ // and then test that a broken command doesn't bork everything - easier here because we don't
+ // need to re-run the export job
+
+ args = new String[] { "-D" + Import.FILTER_CLASS_CONF_KEY + "=" + Filter.class.getName(),
+ "-D" + Import.FILTER_ARGS_CONF_KEY + "=" + Bytes.toString(ROW1) + "", name, FQ_OUTPUT_DIR,
+ "1000" };
+ assertFalse(runImport(args));
+
+ // cleanup
+ exportTable.close();
+ importTable.close();
+ }
+
+ /**
+ * Create a simple table, run an Export Job on it, Import with bulk output and enable largeResult
+ */
+ @Test
+ public void testBulkImportAndLargeResult() throws Throwable {
+ // Create simple table to export
+ TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(name))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+ Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
+
+ Put p1 = new Put(ROW1);
+ p1.addColumn(FAMILYA, QUAL, now, QUAL);
+
+ // Having another row would actually test the filter.
+ Put p2 = new Put(ROW2);
+ p2.addColumn(FAMILYA, QUAL, now, QUAL);
+
+ exportTable.put(Arrays.asList(p1, p2));
+
+ // Export the simple table
+ String[] args = new String[] { name, FQ_OUTPUT_DIR, "1000" };
+ assertTrue(runExport(args));
+
+ // Import to a new table
+ final String IMPORT_TABLE = name + "import";
+ desc = TableDescriptorBuilder.newBuilder(TableName.valueOf(IMPORT_TABLE))
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5).build())
+ .build();
+ UTIL.getAdmin().createTable(desc);
+
+ String O_OUTPUT_DIR =
+ new Path(OUTPUT_DIR + 1).makeQualified(FileSystem.get(UTIL.getConfiguration())).toString();
+
+ args = new String[] { "-D" + Import.BULK_OUTPUT_CONF_KEY + "=" + O_OUTPUT_DIR,
+ "-D" + Import.HAS_LARGE_RESULT + "=" + true, IMPORT_TABLE, FQ_OUTPUT_DIR, "1000" };
+ assertTrue(runImport(args));
+ }
+
+ /**
+ * Count the number of keyvalues in the specified table with the given filter
+ * @param table the table to scan
+ * @return the number of keyvalues found
+ */
+ private int getCount(Table table, Filter filter) throws IOException {
+ Scan scan = new Scan();
+ scan.setFilter(filter);
+ ResultScanner results = table.getScanner(scan);
+ int count = 0;
+ for (Result res : results) {
+ count += res.size();
+ }
+ results.close();
+ return count;
+ }
+
+ /**
+ * test main method. Import should print help and call System.exit
+ */
+ @Test
+ public void testImportMain() throws Throwable {
+ PrintStream oldPrintStream = System.err;
+ SecurityManager SECURITY_MANAGER = System.getSecurityManager();
+ LauncherSecurityManager newSecurityManager = new LauncherSecurityManager();
+ System.setSecurityManager(newSecurityManager);
+ ByteArrayOutputStream data = new ByteArrayOutputStream();
+ String[] args = {};
+ System.setErr(new PrintStream(data));
+ try {
+ System.setErr(new PrintStream(data));
+ Import.main(args);
+ fail("should be SecurityException");
+ } catch (SecurityException e) {
+ assertEquals(-1, newSecurityManager.getExitCode());
+ assertTrue(data.toString().contains("Wrong number of arguments:"));
+ assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
+ assertTrue(data.toString().contains("-Dimport.filter.class="));
+ assertTrue(data.toString().contains("-Dimport.bulk.output=/path/for/output"));
+ assertTrue(data.toString().contains("-Dmapreduce.reduce.speculative=false"));
+ } finally {
+ System.setErr(oldPrintStream);
+ System.setSecurityManager(SECURITY_MANAGER);
+ }
+ }
+
+ @Test
+ public void testExportScan() throws Exception {
+ int version = 100;
+ long startTime = EnvironmentEdgeManager.currentTime();
+ long endTime = startTime + 1;
+ String prefix = "row";
+ String label_0 = "label_0";
+ String label_1 = "label_1";
+ String[] args = { "table", "outputDir", String.valueOf(version), String.valueOf(startTime),
+ String.valueOf(endTime), prefix };
+ Scan scan = ExportUtils.getScanFromCommandLine(UTIL.getConfiguration(), args);
+ assertEquals(version, scan.getMaxVersions());
+ assertEquals(startTime, scan.getTimeRange().getMin());
+ assertEquals(endTime, scan.getTimeRange().getMax());
+ assertEquals(true, (scan.getFilter() instanceof PrefixFilter));
+ assertEquals(0,
+ Bytes.compareTo(((PrefixFilter) scan.getFilter()).getPrefix(), Bytes.toBytesBinary(prefix)));
+ String[] argsWithLabels =
+ { "-D " + ExportUtils.EXPORT_VISIBILITY_LABELS + "=" + label_0 + "," + label_1, "table",
+ "outputDir", String.valueOf(version), String.valueOf(startTime), String.valueOf(endTime),
+ prefix };
+ Configuration conf = new Configuration(UTIL.getConfiguration());
+ // parse the "-D" options
+ String[] otherArgs = new GenericOptionsParser(conf, argsWithLabels).getRemainingArgs();
+ Scan scanWithLabels = ExportUtils.getScanFromCommandLine(conf, otherArgs);
+ assertEquals(version, scanWithLabels.getMaxVersions());
+ assertEquals(startTime, scanWithLabels.getTimeRange().getMin());
+ assertEquals(endTime, scanWithLabels.getTimeRange().getMax());
+ assertEquals(true, (scanWithLabels.getFilter() instanceof PrefixFilter));
+ assertEquals(0, Bytes.compareTo(((PrefixFilter) scanWithLabels.getFilter()).getPrefix(),
+ Bytes.toBytesBinary(prefix)));
+ assertEquals(2, scanWithLabels.getAuthorizations().getLabels().size());
+ assertEquals(label_0, scanWithLabels.getAuthorizations().getLabels().get(0));
+ assertEquals(label_1, scanWithLabels.getAuthorizations().getLabels().get(1));
+ }
+
+ /**
+ * test main method. Export should print help and call System.exit
+ */
+ @Test
+ public void testExportMain() throws Throwable {
+ PrintStream oldPrintStream = System.err;
+ SecurityManager SECURITY_MANAGER = System.getSecurityManager();
+ LauncherSecurityManager newSecurityManager = new LauncherSecurityManager();
+ System.setSecurityManager(newSecurityManager);
+ ByteArrayOutputStream data = new ByteArrayOutputStream();
+ String[] args = {};
+ System.setErr(new PrintStream(data));
+ try {
+ System.setErr(new PrintStream(data));
+ runExportMain(args);
+ fail("should be SecurityException");
+ } catch (SecurityException e) {
+ assertEquals(-1, newSecurityManager.getExitCode());
+ String errMsg = data.toString();
+ assertTrue(errMsg.contains("Wrong number of arguments:"));
+ assertTrue(
+ errMsg.contains("Usage: Export [-D ]* [ "
+ + "[ []] [^[regex pattern] or [Prefix] to filter]]"));
+ assertTrue(errMsg.contains("-D hbase.mapreduce.scan.column.family=,, ..."));
+ assertTrue(errMsg.contains("-D hbase.mapreduce.include.deleted.rows=true"));
+ assertTrue(errMsg.contains("-D hbase.client.scanner.caching=100"));
+ assertTrue(errMsg.contains("-D hbase.export.scanner.batch=10"));
+ assertTrue(errMsg.contains("-D hbase.export.scanner.caching=100"));
+ } finally {
+ System.setErr(oldPrintStream);
+ System.setSecurityManager(SECURITY_MANAGER);
+ }
+ }
+
+ /**
+ * Test map method of Importer
+ */
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ @Test
+ public void testKeyValueImporter() throws Throwable {
+ KeyValueImporter importer = new KeyValueImporter();
+ Configuration configuration = new Configuration();
+ Context ctx = mock(Context.class);
+ when(ctx.getConfiguration()).thenReturn(configuration);
+
+ doAnswer(new Answer() {
+
+ @Override
+ public Void answer(InvocationOnMock invocation) throws Throwable {
+ ImmutableBytesWritable writer = invocation.getArgument(0);
+ KeyValue key = invocation.getArgument(1);
+ assertEquals("Key", Bytes.toString(writer.get()));
+ assertEquals("row", Bytes.toString(CellUtil.cloneRow(key)));
+ return null;
+ }
+ }).when(ctx).write(any(), any());
+
+ importer.setup(ctx);
+ Result value = mock(Result.class);
+ KeyValue[] keys = {
+ new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
+ Bytes.toBytes("value")),
+ new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"), Bytes.toBytes("qualifier"),
+ Bytes.toBytes("value1")) };
+ when(value.rawCells()).thenReturn(keys);
+ importer.map(new ImmutableBytesWritable(Bytes.toBytes("Key")), value, ctx);
+
+ }
+
+ /**
+ * Test addFilterAndArguments method of Import This method set couple parameters into
+ * Configuration
+ */
+ @Test
+ public void testAddFilterAndArguments() throws IOException {
+ Configuration configuration = new Configuration();
+
+ List args = new ArrayList<>();
+ args.add("param1");
+ args.add("param2");
+
+ Import.addFilterAndArguments(configuration, FilterBase.class, args);
+ assertEquals("org.apache.hadoop.hbase.filter.FilterBase",
+ configuration.get(Import.FILTER_CLASS_CONF_KEY));
+ assertEquals("param1,param2", configuration.get(Import.FILTER_ARGS_CONF_KEY));
+ }
+
+ @Test
+ public void testDurability() throws Throwable {
+ // Create an export table.
+ String exportTableName = name + "export";
+ try (Table exportTable = UTIL.createTable(TableName.valueOf(exportTableName), FAMILYA, 3);) {
+
+ // Insert some data
+ Put put = new Put(ROW1);
+ put.addColumn(FAMILYA, QUAL, now, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ exportTable.put(put);
+
+ put = new Put(ROW2);
+ put.addColumn(FAMILYA, QUAL, now, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+ put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+ exportTable.put(put);
+
+ // Run the export
+ String[] args = new String[] { exportTableName, FQ_OUTPUT_DIR, "1000" };
+ assertTrue(runExport(args));
+
+ // Create the table for import
+ String importTableName = name + "import1";
+ Table importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
+
+ // Register the wal listener for the import table
+ RegionInfo region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
+ .getRegions(importTable.getName()).get(0).getRegionInfo();
+ TableWALActionListener walListener = new TableWALActionListener(region);
+ WAL wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
+ wal.registerWALActionsListener(walListener);
+
+ // Run the import with SKIP_WAL
+ args = new String[] { "-D" + Import.WAL_DURABILITY + "=" + Durability.SKIP_WAL.name(),
+ importTableName, FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+      // Assert that the wal is not visited
+ assertTrue(!walListener.isWALVisited());
+ // Ensure that the count is 2 (only one version of key value is obtained)
+ assertTrue(getCount(importTable, null) == 2);
+
+ // Run the import with the default durability option
+ importTableName = name + "import2";
+ importTable = UTIL.createTable(TableName.valueOf(importTableName), FAMILYA, 3);
+ region = UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer()
+ .getRegions(importTable.getName()).get(0).getRegionInfo();
+ wal = UTIL.getMiniHBaseCluster().getRegionServer(0).getWAL(region);
+ walListener = new TableWALActionListener(region);
+ wal.registerWALActionsListener(walListener);
+ args = new String[] { importTableName, FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+      // Assert that the wal is visited
+ assertTrue(walListener.isWALVisited());
+ // Ensure that the count is 2 (only one version of key value is obtained)
+ assertTrue(getCount(importTable, null) == 2);
+ }
+ }
+
+ /**
+ * This listens to the {@link #visitLogEntryBeforeWrite(RegionInfo, WALKey, WALEdit)} to identify
+ * that an entry is written to the Write Ahead Log for the given table.
+ */
+ private static class TableWALActionListener implements WALActionsListener {
+
+ private RegionInfo regionInfo;
+ private boolean isVisited = false;
+
+ public TableWALActionListener(RegionInfo region) {
+ this.regionInfo = region;
+ }
+
+ @Override
+ public void visitLogEntryBeforeWrite(RegionInfo info, WALKey logKey, WALEdit logEdit) {
+ if (
+ logKey.getTableName().getNameAsString()
+ .equalsIgnoreCase(this.regionInfo.getTable().getNameAsString()) && (!logEdit.isMetaEdit())
+ ) {
+ isVisited = true;
+ }
+ }
+
+ public boolean isWALVisited() {
+ return isVisited;
+ }
+ }
+
+ /**
+ * Add cell tags to delete mutations, run export and import tool and verify that tags are present
+ * in import table also.
+ * @throws Throwable throws Throwable.
+ */
+ @Test
+ public void testTagsAddition() throws Throwable {
+ final TableName exportTable = TableName.valueOf(name);
+ TableDescriptor desc = TableDescriptorBuilder.newBuilder(exportTable)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
+ .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .setCoprocessor(MetadataController.class.getName()).build();
+ UTIL.getAdmin().createTable(desc);
+
+ Table exportT = UTIL.getConnection().getTable(exportTable);
+
+ // Add first version of QUAL
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ exportT.put(p);
+
+ // Add Delete family marker
+ Delete d = new Delete(ROW1, now + 3);
+ // Add test attribute to delete mutation.
+ d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
+ exportT.delete(d);
+
+ // Run export tool with KeyValueCodecWithTags as Codec. This will ensure that export tool
+ // will use KeyValueCodecWithTags.
+ String[] args = new String[] { "-D" + ExportUtils.RAW_SCAN + "=true",
+ // This will make sure that codec will encode and decode tags in rpc call.
+ "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+ exportTable.getNameAsString(), FQ_OUTPUT_DIR, "1000", // max number of key versions per key to
+ // export
+ };
+ assertTrue(runExport(args));
+ // Assert tag exists in exportTable
+ checkWhetherTagExists(exportTable, true);
+
+ // Create an import table with MetadataController.
+ final TableName importTable = TableName.valueOf("importWithTestTagsAddition");
+ TableDescriptor importTableDesc = TableDescriptorBuilder.newBuilder(importTable)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
+ .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .setCoprocessor(MetadataController.class.getName()).build();
+ UTIL.getAdmin().createTable(importTableDesc);
+
+ // Run import tool.
+ args = new String[] {
+ // This will make sure that codec will encode and decode tags in rpc call.
+ "-Dhbase.client.rpc.codec=org.apache.hadoop.hbase.codec.KeyValueCodecWithTags",
+ importTable.getNameAsString(), FQ_OUTPUT_DIR };
+ assertTrue(runImport(args));
+ // Make sure that tags exists in imported table.
+ checkWhetherTagExists(importTable, true);
+ }
+
+ private void checkWhetherTagExists(TableName table, boolean tagExists) throws IOException {
+ List values = new ArrayList<>();
+ for (HRegion region : UTIL.getHBaseCluster().getRegions(table)) {
+ Scan scan = new Scan();
+ // Make sure to set rawScan to true so that we will get Delete Markers.
+ scan.setRaw(true);
+ scan.readAllVersions();
+ scan.withStartRow(ROW1);
+ // Need to use RegionScanner instead of table#getScanner, since the latter goes
+ // through the RPC layer, which intentionally strips tags from the response.
+ RegionScanner scanner = region.getScanner(scan);
+ scanner.next(values);
+ if (!values.isEmpty()) {
+ break;
+ }
+ }
+ boolean deleteFound = false;
+ for (Cell cell : values) {
+ if (PrivateCellUtil.isDelete(cell.getType().getCode())) {
+ deleteFound = true;
+ List tags = PrivateCellUtil.getTags(cell);
+ // If tagExists flag is true then validate whether tag contents are as expected.
+ if (tagExists) {
+ assertEquals(1, tags.size());
+ for (Tag tag : tags) {
+ assertEquals(TEST_TAG, Tag.getValueAsString(tag));
+ }
+ } else {
+ // If tagExists flag is disabled then check for 0 size tags.
+ assertEquals(0, tags.size());
+ }
+ }
+ }
+ assertTrue(deleteFound);
+ }
+
+ /*
+ * This coprocessor adds a cell tag to each delete mutation.
+ */
+ public static class MetadataController implements RegionCoprocessor, RegionObserver {
+ @Override
+ public Optional getRegionObserver() {
+ return Optional.of(this);
+ }
+
+ @Override
+ public void preBatchMutate(ObserverContext c,
+ MiniBatchOperationInProgress miniBatchOp) throws IOException {
+ if (c.getEnvironment().getRegion().getRegionInfo().getTable().isSystemTable()) {
+ return;
+ }
+ for (int i = 0; i < miniBatchOp.size(); i++) {
+ Mutation m = miniBatchOp.getOperation(i);
+ if (!(m instanceof Delete)) {
+ continue;
+ }
+ byte[] sourceOpAttr = m.getAttribute(TEST_ATTR);
+ if (sourceOpAttr == null) {
+ continue;
+ }
+ Tag sourceOpTag = new ArrayBackedTag(TEST_TAG_TYPE, sourceOpAttr);
+ List updatedCells = new ArrayList<>();
+ for (CellScanner cellScanner = m.cellScanner(); cellScanner.advance();) {
+ Cell cell = cellScanner.current();
+ List tags = PrivateCellUtil.getTags(cell);
+ tags.add(sourceOpTag);
+ Cell updatedCell = PrivateCellUtil.createCell((ExtendedCell) cell, tags);
+ updatedCells.add(updatedCell);
+ }
+ m.getFamilyCellMap().clear();
+ // Clear and add new Cells to the Mutation.
+ for (Cell cell : updatedCells) {
+ Delete d = (Delete) m;
+ d.add(cell);
+ }
+ }
+ }
+ }
+
+ /**
+ * Set hbase.client.rpc.codec and hbase.client.default.rpc.codec both to empty string This means
+ * it will use no Codec. Make sure that we don't return Tags in response.
+ * @throws Exception Exception
+ */
+ @Test
+ public void testTagsWithEmptyCodec() throws Exception {
+ TableName tableName = TableName.valueOf(name);
+ TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName)
+ .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(5)
+ .setKeepDeletedCells(KeepDeletedCells.TRUE).build())
+ .setCoprocessor(MetadataController.class.getName()).build();
+ UTIL.getAdmin().createTable(tableDesc);
+ Configuration conf = new Configuration(UTIL.getConfiguration());
+ conf.set(RPC_CODEC_CONF_KEY, "");
+ conf.set(DEFAULT_CODEC_CLASS, "");
+ try (Connection connection = ConnectionFactory.createConnection(conf);
+ Table table = connection.getTable(tableName)) {
+ // Add first version of QUAL
+ Put p = new Put(ROW1);
+ p.addColumn(FAMILYA, QUAL, now, QUAL);
+ table.put(p);
+
+ // Add Delete family marker
+ Delete d = new Delete(ROW1, now + 3);
+ // Add test attribute to delete mutation.
+ d.setAttribute(TEST_ATTR, Bytes.toBytes(TEST_TAG));
+ table.delete(d);
+
+ // Since RPC_CODEC_CONF_KEY and DEFAULT_CODEC_CLASS is set to empty, it will use
+ // empty Codec and it shouldn't encode/decode tags.
+ Scan scan = new Scan().withStartRow(ROW1).setRaw(true);
+ ResultScanner scanner = table.getScanner(scan);
+ int count = 0;
+ Result result;
+ while ((result = scanner.next()) != null) {
+ List cells = result.listCells();
+ assertEquals(2, cells.size());
+ Cell cell = cells.get(0);
+ assertTrue(CellUtil.isDelete(cell));
+ List tags = PrivateCellUtil.getTags(cell);
+ assertEquals(0, tags.size());
+ count++;
+ }
+ assertEquals(1, count);
+ } finally {
+ UTIL.deleteTable(tableName);
+ }
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionExtension.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionExtension.java
new file mode 100644
index 000000000000..e9bb50fac5de
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionExtension.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.util.concurrent.CompletableFuture;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.client.Connection;
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.Extension;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+/**
+ * An {@link Extension} that manages the lifecycle of an instance of {@link AsyncConnection}.
+ * <p>
+ * Use in combination with {@link MiniClusterExtension}, for example:
+ *
+ *
+ * {
+ * public class TestMyClass {
+ *
+ * @Order(1)
+ * @RegisterExtension
+ * private static final MiniClusterExtension miniClusterExtension =
+ * MiniClusterExtension.newBuilder().build();
+ *
+ * @Order(2)
+ * @RegisterExtension
+ * private static final ConnectionExtension connectionExtension =
+ * ConnectionExtension.createAsyncConnectionExtension(
+ * miniClusterExtension::createAsyncConnection);
+ * }
+ *
+ */
+public final class ConnectionExtension implements BeforeAllCallback, AfterAllCallback {
+
+ private final Supplier connectionSupplier;
+ private final Supplier> asyncConnectionSupplier;
+
+ private Connection connection;
+ private AsyncConnection asyncConnection;
+
+ public static ConnectionExtension
+ createConnectionExtension(final Supplier connectionSupplier) {
+ return new ConnectionExtension(connectionSupplier, null);
+ }
+
+ public static ConnectionExtension createAsyncConnectionExtension(
+ final Supplier> asyncConnectionSupplier) {
+ return new ConnectionExtension(null, asyncConnectionSupplier);
+ }
+
+ public static ConnectionExtension createConnectionExtension(
+ final Supplier connectionSupplier,
+ final Supplier> asyncConnectionSupplier) {
+ return new ConnectionExtension(connectionSupplier, asyncConnectionSupplier);
+ }
+
+ private ConnectionExtension(final Supplier connectionSupplier,
+ final Supplier> asyncConnectionSupplier) {
+ this.connectionSupplier = connectionSupplier;
+ this.asyncConnectionSupplier = asyncConnectionSupplier;
+ }
+
+ public Connection getConnection() {
+ if (connection == null) {
+ throw new IllegalStateException(
+ "ConnectionExtension not initialized with a synchronous connection.");
+ }
+ return connection;
+ }
+
+ public AsyncConnection getAsyncConnection() {
+ if (asyncConnection == null) {
+ throw new IllegalStateException(
+ "ConnectionExtension not initialized with an asynchronous connection.");
+ }
+ return asyncConnection;
+ }
+
+ @Override
+ public void beforeAll(ExtensionContext context) {
+ if (connectionSupplier != null) {
+ this.connection = connectionSupplier.get();
+ }
+ if (asyncConnectionSupplier != null) {
+ this.asyncConnection = asyncConnectionSupplier.get().join();
+ }
+ }
+
+ @Override
+ public void afterAll(ExtensionContext context) {
+ CompletableFuture closeConnection = CompletableFuture.runAsync(() -> {
+ if (this.connection != null) {
+ try {
+ connection.close();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ CompletableFuture closeAsyncConnection = CompletableFuture.runAsync(() -> {
+ if (this.asyncConnection != null) {
+ try {
+ asyncConnection.close();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ });
+ CompletableFuture.allOf(closeConnection, closeAsyncConnection).join();
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionRule.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionRule.java
index 9945ad7d68d6..bad2b498ce38 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionRule.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/ConnectionRule.java
@@ -46,7 +46,14 @@
* }
* }
*
+ *
+ * @deprecated Use {@link ConnectionExtension} instead, once we finish the migration of JUnit5,
+ * which means we do not need {@link ConnectionRule} any more, we can remove these
+ * dependencies, see
+ * HBASE-23671 for more
+ * details.
*/
+@Deprecated
public final class ConnectionRule extends ExternalResource {
private final Supplier connectionSupplier;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterExtension.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterExtension.java
new file mode 100644
index 000000000000..8be469f6fa5b
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterExtension.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+
+import java.io.IOException;
+import java.util.concurrent.CompletableFuture;
+import java.util.function.Supplier;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.client.AsyncConnection;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.junit.jupiter.api.extension.AfterAllCallback;
+import org.junit.jupiter.api.extension.BeforeAllCallback;
+import org.junit.jupiter.api.extension.Extension;
+import org.junit.jupiter.api.extension.ExtensionContext;
+
+/**
+ * An {@link Extension} that manages an instance of the {@link MiniHBaseCluster}. Built on top of an
+ * instance of {@link HBaseTestingUtility}, so be wary of intermixing direct use of that class with
+ * this Extension.
+ *
+ * Use in combination with {@link ConnectionExtension}, for example:
+ *
+ *
+ * {
+ * @code
+ * public class TestMyClass {
+ *
+ * @RegisterExtension
+ * public static final MiniClusterExtension miniClusterExtension =
+ * MiniClusterExtension.newBuilder().build();
+ *
+ * @RegisterExtension
+ * public final ConnectionExtension connectionExtension = ConnectionExtension
+ * .createAsyncConnectionExtension(miniClusterExtension::createAsyncConnection);
+ * }
+ * }
+ *
+ */
+public final class MiniClusterExtension implements BeforeAllCallback, AfterAllCallback {
+
+ /**
+ * A builder for fluent composition of a new {@link MiniClusterExtension}.
+ */
+ public static final class Builder {
+
+ private StartMiniClusterOption miniClusterOption;
+ private Configuration conf;
+
+ /**
+ * Use the provided {@link StartMiniClusterOption} to construct the {@link MiniHBaseCluster}.
+ */
+ public Builder setMiniClusterOption(final StartMiniClusterOption miniClusterOption) {
+ this.miniClusterOption = miniClusterOption;
+ return this;
+ }
+
+ /**
+ * Seed the underlying {@link HBaseTestingUtility} with the provided {@link Configuration}.
+ */
+ public Builder setConfiguration(final Configuration conf) {
+ this.conf = conf;
+ return this;
+ }
+
+ public Builder setConfiguration(final Supplier supplier) {
+ return setConfiguration(supplier.get());
+ }
+
+ public MiniClusterExtension build() {
+ return new MiniClusterExtension(conf,
+ miniClusterOption != null ? miniClusterOption : StartMiniClusterOption.builder().build());
+ }
+ }
+
+ /**
+ * The underlying {@link HBaseTestingUtility} instance that backs this extension.
+ */
+ private final HBaseTestingUtility testingUtility;
+ private final StartMiniClusterOption miniClusterOptions;
+
+ private MiniHBaseCluster miniCluster;
+
+ private MiniClusterExtension(final Configuration conf,
+ final StartMiniClusterOption miniClusterOptions) {
+ this.testingUtility = new HBaseTestingUtility(conf);
+ this.miniClusterOptions = miniClusterOptions;
+ }
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ /**
+ * Returns the underlying instance of {@link HBaseTestingUtility}
+ */
+ public HBaseTestingUtility getTestingUtility() {
+ return testingUtility;
+ }
+
+ /**
+ * Create a {@link Connection} to the managed {@link MiniHBaseCluster}. It's up to the caller to
+ * {@link Connection#close() close()} the connection when finished.
+ */
+ public Connection createConnection() {
+ if (miniCluster == null) {
+ throw new IllegalStateException("test cluster not initialized");
+ }
+ try {
+ return ConnectionFactory.createConnection(miniCluster.getConf());
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Create a {@link AsyncConnection} to the managed {@link MiniHBaseCluster}. It's up to the caller
+ * to {@link AsyncConnection#close() close()} the connection when finished.
+ */
+ public CompletableFuture createAsyncConnection() {
+ if (miniCluster == null) {
+ throw new IllegalStateException("test cluster not initialized");
+ }
+ return ConnectionFactory.createAsyncConnection(miniCluster.getConf());
+ }
+
+ @Override
+ public void beforeAll(ExtensionContext context) throws Exception {
+ miniCluster = testingUtility.startMiniCluster(miniClusterOptions);
+ }
+
+ @Override
+ public void afterAll(ExtensionContext context) {
+ try {
+ testingUtility.shutdownMiniCluster();
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterRule.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterRule.java
index 5919a088a77e..2d035c87235f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterRule.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MiniClusterRule.java
@@ -49,7 +49,14 @@
* }
* }
*
+ *
+ * @deprecated Use {@link MiniClusterExtension} instead, once we finish the migration of JUnit5,
+ * which means we do not need {@link MiniClusterRule} any more, we can remove these
+ * dependencies, see
+ * HBASE-23671 for more
+ * details.
*/
+@Deprecated
public final class MiniClusterRule extends ExternalResource {
/**
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAccessControlAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAccessControlAdminApi.java
index 07204eac2614..6585bd5a6bac 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAccessControlAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAccessControlAdminApi.java
@@ -17,13 +17,14 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.List;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.access.GetUserPermissionsRequest;
@@ -34,24 +35,23 @@
import org.apache.hadoop.hbase.security.access.UserPermission;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, SmallTests.class })
+@Tag(ClientTests.TAG)
+@Tag(SmallTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: admin = {0}")
public class TestAsyncAccessControlAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAccessControlAdminApi.class);
+ public TestAsyncAccessControlAdminApi(Supplier admin) {
+ super(admin);
+ }
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
SecureTestUtil.enableSecurity(TEST_UTIL.getConfiguration());
TEST_UTIL.startMiniCluster(1);
@@ -59,7 +59,12 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @Test
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void test() throws Exception {
TableName tableName = TableName.valueOf("test-table");
String userName1 = "user1";
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
index e20e70509c36..77beca2d008d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminBase.java
@@ -20,25 +20,20 @@
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ForkJoinPool;
import java.util.function.Supplier;
import java.util.regex.Pattern;
+import java.util.stream.Stream;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Rule;
-import org.junit.rules.TestName;
-import org.junit.runners.Parameterized.Parameter;
-import org.junit.runners.Parameterized.Parameters;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.params.provider.Arguments;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -58,9 +53,12 @@ public abstract class TestAsyncAdminBase extends AbstractTestUpdateConfiguration
protected static AsyncConnection ASYNC_CONN;
protected AsyncAdmin admin;
- @Parameter
public Supplier getAdmin;
+ public TestAsyncAdminBase(Supplier admin) {
+ this.getAdmin = admin;
+ }
+
private static AsyncAdmin getRawAsyncAdmin() {
return ASYNC_CONN.getAdmin();
}
@@ -69,17 +67,13 @@ private static AsyncAdmin getAsyncAdmin() {
return ASYNC_CONN.getAdmin(ForkJoinPool.commonPool());
}
- @Parameters
- public static List params() {
- return Arrays.asList(new Supplier>[] { TestAsyncAdminBase::getRawAsyncAdmin },
- new Supplier>[] { TestAsyncAdminBase::getAsyncAdmin });
+ public static Stream parameters() {
+ return Stream.of(Arguments.of((Supplier) TestAsyncAdminBase::getRawAsyncAdmin),
+ Arguments.of((Supplier) TestAsyncAdminBase::getAsyncAdmin));
}
- @Rule
- public TestName testName = new TestName();
protected TableName tableName;
- @BeforeClass
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -91,20 +85,19 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @AfterClass
public static void tearDownAfterClass() throws Exception {
Closeables.close(ASYNC_CONN, true);
TEST_UTIL.shutdownMiniCluster();
}
- @Before
- public void setUp() throws Exception {
+ @BeforeEach
+ public void setUp(TestInfo testInfo) throws Exception {
admin = getAdmin.get();
- String methodName = testName.getMethodName();
+ String methodName = testInfo.getTestMethod().get().getName();
tableName = TableName.valueOf(methodName.substring(0, methodName.length() - 3));
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
admin.listTableNames(Pattern.compile(tableName.getNameAsString() + ".*"), false)
.whenCompleteAsync((tables, err) -> {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminClearMasterStubCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminClearMasterStubCache.java
index 6f73b4dec36a..460dcc4ecce7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminClearMasterStubCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminClearMasterStubCache.java
@@ -17,12 +17,13 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.net.Socket;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.ServerName;
@@ -30,36 +31,46 @@
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.FutureUtils;
import org.apache.hadoop.hbase.util.JVMClusterUtil.MasterThread;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Testcase for HBASE-29214
*/
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, MediumTests.class })
+@Tag(ClientTests.TAG)
+@Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncAdminClearMasterStubCache extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAdminClearMasterStubCache.class);
+ public TestAsyncAdminClearMasterStubCache(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Before
+ @BeforeEach
public void waitMasterReady() throws Exception {
assertTrue(TEST_UTIL.getHBaseCluster().waitForActiveAndReadyMaster(30000));
}
- @After
+ @AfterEach
public void clearPortConfig() {
TEST_UTIL.getHBaseCluster().getConf().setInt(HConstants.MASTER_PORT, 0);
}
- @Test
+ @TestTemplate
public void testClearMasterStubCache() throws Exception {
// cache master stub
assertNotNull(FutureUtils.get(admin.getClusterMetrics()));
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminMasterSwitch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminMasterSwitch.java
index f8a8f0ed5079..4d1251d0f16c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminMasterSwitch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminMasterSwitch.java
@@ -17,33 +17,44 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.EnumSet;
+import java.util.function.Supplier;
import org.apache.hadoop.hbase.ClusterMetrics;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Testcase for HBASE-22135.
*/
-@RunWith(Parameterized.class)
-@Category({ MediumTests.class, ClientTests.class })
+@Tag(MediumTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncAdminMasterSwitch extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAdminMasterSwitch.class);
+ public TestAsyncAdminMasterSwitch(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Test
+ @TestTemplate
public void testSwitch() throws IOException, InterruptedException {
assertEquals(TEST_UTIL.getHBaseCluster().getRegionServerThreads().size(),
admin.getClusterMetrics(EnumSet.of(ClusterMetrics.Option.SERVERS_NAME)).join()
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminModifyStoreFileTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminModifyStoreFileTracker.java
index fb6f55c4df0f..9aaa9e043950 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminModifyStoreFileTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminModifyStoreFileTracker.java
@@ -17,14 +17,15 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.IOException;
+import java.util.function.Supplier;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -34,24 +35,34 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FutureUtils;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncAdminModifyStoreFileTracker extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAdminModifyStoreFileTracker.class);
-
private static final String SRC_IMPL = "hbase.store.file-tracker.migration.src.impl";
private static final String DST_IMPL = "hbase.store.file-tracker.migration.dst.impl";
+ public TestAsyncAdminModifyStoreFileTracker(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
private void verifyModifyTableResult(TableName tableName, byte[] family, byte[] qual, byte[] row,
byte[] value, String sft) throws IOException {
TableDescriptor td = admin.getDescriptor(tableName).join();
@@ -64,7 +75,7 @@ private void verifyModifyTableResult(TableName tableName, byte[] family, byte[]
}
}
- @Test
+ @TestTemplate
public void testModifyTableStoreFileTracker() throws IOException {
byte[] family = Bytes.toBytes("info");
byte[] qual = Bytes.toBytes("q");
@@ -123,7 +134,7 @@ private void verifyModifyColumnFamilyResult(TableName tableName, byte[] family,
}
}
- @Test
+ @TestTemplate
public void testModifyColumnFamilyStoreFileTracker() throws IOException {
byte[] family = Bytes.toBytes("info");
byte[] qual = Bytes.toBytes("q");
@@ -173,7 +184,7 @@ public void testModifyColumnFamilyStoreFileTracker() throws IOException {
StoreFileTrackerFactory.Trackers.DEFAULT.name());
}
- @Test
+ @TestTemplate
public void testModifyStoreFileTrackerError() throws IOException {
byte[] family = Bytes.toBytes("info");
TEST_UTIL.createTable(tableName, family).close();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminWithRegionReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminWithRegionReplicas.java
index 033401cdc32f..b4feb8f06d40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminWithRegionReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncAdminWithRegionReplicas.java
@@ -19,15 +19,16 @@
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
@@ -35,22 +36,21 @@
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncAdminWithRegionReplicas extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncAdminWithRegionReplicas.class);
+ public TestAsyncAdminWithRegionReplicas(Supplier admin) {
+ super(admin);
+ }
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TestAsyncAdminBase.setUpBeforeClass();
HBaseTestingUtility.setReplicas(TEST_UTIL.getAdmin(), TableName.META_TABLE_NAME, 3);
@@ -60,6 +60,11 @@ public static void setUpBeforeClass() throws Exception {
}
}
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
private void testMoveNonDefaultReplica(TableName tableName)
throws InterruptedException, ExecutionException {
AsyncTableRegionLocator locator = ASYNC_CONN.getRegionLocator(tableName);
@@ -74,7 +79,7 @@ private void testMoveNonDefaultReplica(TableName tableName)
locator.getRegionLocation(HConstants.EMPTY_START_ROW, 2, true).get());
}
- @Test
+ @TestTemplate
public void testMoveNonDefaultReplica()
throws InterruptedException, ExecutionException, IOException {
createTableWithDefaultConf(tableName, 3);
@@ -82,7 +87,7 @@ public void testMoveNonDefaultReplica()
testMoveNonDefaultReplica(TableName.META_TABLE_NAME);
}
- @Test
+ @TestTemplate
public void testSplitNonDefaultReplica()
throws InterruptedException, ExecutionException, IOException {
createTableWithDefaultConf(tableName, 3);
@@ -100,7 +105,7 @@ public void testSplitNonDefaultReplica()
}
}
- @Test
+ @TestTemplate
public void testMergeNonDefaultReplicas()
throws InterruptedException, ExecutionException, IOException {
byte[][] splitRows = new byte[][] { Bytes.toBytes(0) };
@@ -128,14 +133,14 @@ public void testMergeNonDefaultReplicas()
}
}
- @Test
+ @TestTemplate
public void testCloneTableSchema() throws IOException, InterruptedException, ExecutionException {
createTableWithDefaultConf(tableName, 3);
admin.cloneTableSchema(tableName, TableName.valueOf(tableName.getNameAsString() + "_new"), true)
.get();
}
- @Test
+ @TestTemplate
public void testGetTableRegions() throws InterruptedException, ExecutionException, IOException {
List metaRegions = admin.getRegions(TableName.META_TABLE_NAME).get();
assertEquals(3, metaRegions.size());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
index 30cb95d80d16..fac6b7ef06fb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
@@ -28,10 +28,11 @@
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
+import java.util.function.Supplier;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.ClusterMetrics;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.RegionMetrics;
@@ -44,25 +45,24 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, LargeTests.class })
+@Tag(ClientTests.TAG)
+@Tag(LargeTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncClusterAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncClusterAdminApi.class);
+ public TestAsyncClusterAdminApi(Supplier admin) {
+ super(admin);
+ }
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
setUpConfigurationFiles(TEST_UTIL);
@@ -77,13 +77,18 @@ public static void setUpBeforeClass() throws Exception {
addResourceToRegionServerConfiguration(TEST_UTIL);
}
- @Test
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void testGetMasterInfoPort() throws Exception {
assertEquals(TEST_UTIL.getHBaseCluster().getMaster().getInfoServer().getPort(),
(int) admin.getMasterInfoPort().get());
}
- @Test
+ @TestTemplate
public void testRegionServerOnlineConfigChange() throws Exception {
replaceHBaseSiteXML();
admin.getRegionServers().get().forEach(server -> admin.updateConfiguration(server).join());
@@ -97,7 +102,7 @@ public void testRegionServerOnlineConfigChange() throws Exception {
restoreHBaseSiteXML();
}
- @Test
+ @TestTemplate
public void testMasterOnlineConfigChange() throws Exception {
replaceHBaseSiteXML();
ServerName master = admin.getMaster().get();
@@ -114,7 +119,7 @@ public void testMasterOnlineConfigChange() throws Exception {
restoreHBaseSiteXML();
}
- @Test
+ @TestTemplate
public void testAllClusterOnlineConfigChange() throws IOException {
replaceHBaseSiteXML();
admin.updateConfiguration().join();
@@ -134,7 +139,7 @@ public void testAllClusterOnlineConfigChange() throws IOException {
restoreHBaseSiteXML();
}
- @Test
+ @TestTemplate
public void testRollWALWALWriter() throws Exception {
setUpforLogRolling();
String className = this.getClass().getName();
@@ -214,7 +219,7 @@ private HRegionServer startAndWriteData(TableName tableName, byte[] value) throw
return regionServer;
}
- @Test
+ @TestTemplate
public void testGetRegionLoads() throws Exception {
// Turn off the balancer
admin.balancerSwitch(false).join();
@@ -257,7 +262,7 @@ public void testGetRegionLoads() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testGetRegionServers() throws Exception {
List serverNames = new ArrayList<>(admin.getRegionServers(true).get());
assertEquals(2, serverNames.size());
@@ -280,8 +285,8 @@ public void testGetRegionServers() throws Exception {
private void compareRegionLoads(Collection regionLoadCluster,
Collection regionLoads) {
- assertEquals("No of regionLoads from clusterStatus and regionloads from RS doesn't match",
- regionLoadCluster.size(), regionLoads.size());
+ assertEquals(regionLoadCluster.size(), regionLoads.size(),
+ "No of regionLoads from clusterStatus and regionloads from RS doesn't match");
for (RegionMetrics loadCluster : regionLoadCluster) {
boolean matched = false;
@@ -291,22 +296,23 @@ private void compareRegionLoads(Collection regionLoadCluster,
continue;
}
}
- assertTrue("The contents of region load from cluster and server should match", matched);
+ assertTrue(matched, "The contents of region load from cluster and server should match");
}
}
private void checkRegionsAndRegionLoads(Collection regions,
Collection regionLoads) {
- assertEquals("No of regions and regionloads doesn't match", regions.size(), regionLoads.size());
+ assertEquals(regions.size(), regionLoads.size(), "No of regions and regionloads doesn't match");
Map regionLoadMap = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
for (RegionMetrics regionLoad : regionLoads) {
regionLoadMap.put(regionLoad.getRegionName(), regionLoad);
}
for (RegionInfo info : regions) {
- assertTrue("Region not in regionLoadMap region:" + info.getRegionNameAsString()
- + " regionMap: " + regionLoadMap, regionLoadMap.containsKey(info.getRegionName()));
+ assertTrue(regionLoadMap.containsKey(info.getRegionName()),
+ "Region not in regionLoadMap region:" + info.getRegionNameAsString() + " regionMap: "
+ + regionLoadMap);
}
}
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
index 40cb0a66c77d..513aad374d6a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncClusterAdminApi2.java
@@ -18,37 +18,40 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.api.TestTemplate;
import org.apache.hbase.thirdparty.com.google.common.io.Closeables;
/**
* Only used to test stopMaster/stopRegionServer/shutdown methods.
*/
-@Category({ ClientTests.class, MediumTests.class })
+@Tag(ClientTests.TAG)
+@Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncClusterAdminApi2 extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncClusterAdminApi2.class);
+ public TestAsyncClusterAdminApi2(Supplier admin) {
+ super(admin);
+ }
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -56,27 +59,27 @@ public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(START_LOG_ERRORS_AFTER_COUNT_KEY, 0);
}
- @AfterClass
+ @AfterAll
public static void tearDownAfterClass() throws Exception {
// do nothing
}
- @Before
+ @BeforeEach
@Override
- public void setUp() throws Exception {
+ public void setUp(TestInfo testInfo) throws Exception {
TEST_UTIL.startMiniCluster(3);
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
admin = ASYNC_CONN.getAdmin();
}
- @After
+ @AfterEach
@Override
public void tearDown() throws Exception {
Closeables.close(ASYNC_CONN, true);
TEST_UTIL.shutdownMiniCluster();
}
- @Test
+ @TestTemplate
public void testStop() throws Exception {
HRegionServer rs = TEST_UTIL.getMiniHBaseCluster().getRegionServer(0);
assertFalse(rs.isStopped());
@@ -89,7 +92,7 @@ public void testStop() throws Exception {
assertTrue(master.isStopped());
}
- @Test
+ @TestTemplate
public void testShutdown() throws Exception {
TEST_UTIL.getMiniHBaseCluster().getMasterThreads().forEach(thread -> {
assertFalse(thread.getMaster().isStopped());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
index 659aa0d05c68..5bc1aff3a2f3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncDecommissionAdminApi.java
@@ -17,34 +17,45 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.ClusterMetrics.Option;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, MediumTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+
+@Tag(ClientTests.TAG)
+@Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncDecommissionAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncDecommissionAdminApi.class);
+ public TestAsyncDecommissionAdminApi(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Test
+ @TestTemplate
public void testAsyncDecommissionRegionServers() throws Exception {
admin.balancerSwitch(false, true);
List decommissionedRegionServers = admin.listDecommissionedRegionServers().get();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
index 2df8f269d2c6..13f75ecfc00e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncNamespaceAdminApi.java
@@ -18,14 +18,15 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.concurrent.Callable;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -38,29 +39,28 @@
import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous namespace admin operations.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncNamespaceAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncNamespaceAdminApi.class);
-
private String prefix = "TestNamespace";
private static HMaster master;
private static ZKNamespaceManager zkNamespaceManager;
- @BeforeClass
+ public TestAsyncNamespaceAdminApi(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -74,7 +74,12 @@ public static void setUpBeforeClass() throws Exception {
LOG.info("Done initializing cluster");
}
- @Test
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void testCreateAndDelete() throws Exception {
String testName = "testCreateAndDelete";
String nsName = prefix + "_" + testName;
@@ -98,7 +103,7 @@ public boolean evaluate() throws Exception {
assertNull(zkNamespaceManager.get(nsName));
}
- @Test
+ @TestTemplate
public void testDeleteReservedNS() throws Exception {
boolean exceptionCaught = false;
try {
@@ -120,7 +125,7 @@ public void testDeleteReservedNS() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testNamespaceOperations() throws Exception {
admin.createNamespace(NamespaceDescriptor.create(prefix + "ns1").build()).join();
admin.createNamespace(NamespaceDescriptor.create(prefix + "ns2").build()).join();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
index 4cc9d1275da6..c4f25453a295 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncProcedureAdminApi.java
@@ -18,14 +18,15 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadLocalRandom;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.master.snapshot.SnapshotManager;
import org.apache.hadoop.hbase.procedure.ProcedureManagerHost;
@@ -34,26 +35,24 @@
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.Assert;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous procedure admin operations.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncProcedureAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncProcedureAdminApi.class);
+ public TestAsyncProcedureAdminApi(Supplier admin) {
+ super(admin);
+ }
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -68,7 +67,12 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @Test
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void testExecProcedure() throws Exception {
String snapshotString = "offlineTableSnapshot";
try {
@@ -89,21 +93,21 @@ public void testExecProcedure() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testExecProcedureWithRet() throws Exception {
byte[] result = admin.execProcedureWithReturn(SimpleMasterProcedureManager.SIMPLE_SIGNATURE,
"myTest2", new HashMap<>()).get();
- assertArrayEquals("Incorrect return data from execProcedure",
- Bytes.toBytes(SimpleMasterProcedureManager.SIMPLE_DATA), result);
+ assertArrayEquals(Bytes.toBytes(SimpleMasterProcedureManager.SIMPLE_DATA), result,
+ "Incorrect return data from execProcedure");
}
- @Test
+ @TestTemplate
public void listProcedure() throws Exception {
String procList = admin.getProcedures().get();
assertTrue(procList.startsWith("["));
}
- @Test
+ @TestTemplate
public void isProcedureFinished() throws Exception {
boolean failed = false;
try {
@@ -111,10 +115,10 @@ public void isProcedureFinished() throws Exception {
} catch (Exception e) {
failed = true;
}
- Assert.assertTrue(failed);
+ assertTrue(failed);
}
- @Test
+ @TestTemplate
public void abortProcedure() throws Exception {
long procId = ThreadLocalRandom.current().nextLong();
boolean abortResult = admin.abortProcedure(procId, true).get();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java
index ab18244af191..26ff7f83a9fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncQuotaAdminApi.java
@@ -18,13 +18,14 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.quotas.QuotaCache;
@@ -37,22 +38,21 @@
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-@Category({ ClientTests.class, MediumTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
+
+@Tag(ClientTests.TAG)
+@Tag(MediumTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncQuotaAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncQuotaAdminApi.class);
+ public TestAsyncQuotaAdminApi(Supplier admin) {
+ super(admin);
+ }
- @BeforeClass
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setBoolean(QuotaUtil.QUOTA_CONF_KEY, true);
TEST_UTIL.getConfiguration().setInt(QuotaCache.REFRESH_CONF_KEY, 2000);
@@ -65,7 +65,12 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @Test
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void testThrottleType() throws Exception {
String userName = User.getCurrent().getShortName();
@@ -101,7 +106,7 @@ public void testThrottleType() throws Exception {
assertNumResults(0, null);
}
- @Test
+ @TestTemplate
public void testQuotaRetrieverFilter() throws Exception {
TableName[] tables = new TableName[] { TableName.valueOf("T0"), TableName.valueOf("T01"),
TableName.valueOf("NS0:T2"), };
@@ -181,7 +186,7 @@ public void testQuotaRetrieverFilter() throws Exception {
assertNumResults(0, null);
}
- @Test
+ @TestTemplate
public void testSwitchRpcThrottle() throws Exception {
CompletableFuture future1 = ASYNC_CONN.getAdmin().switchRpcThrottle(true);
assertEquals(true, future1.get().booleanValue());
@@ -189,7 +194,7 @@ public void testSwitchRpcThrottle() throws Exception {
assertEquals(true, future2.get().booleanValue());
}
- @Test
+ @TestTemplate
public void testSwitchExceedThrottleQuota() throws Exception {
AsyncAdmin admin = ASYNC_CONN.getAdmin();
assertEquals(false, admin.exceedThrottleQuotaSwitch(false).get().booleanValue());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
index d3b4f51e1d21..400b9a87678c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi.java
@@ -19,11 +19,11 @@
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.util.ArrayList;
@@ -32,9 +32,10 @@
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
@@ -52,25 +53,36 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.JVMClusterUtil;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous region admin operations.
* @see TestAsyncRegionAdminApi2 This test and it used to be joined it was taking longer than our
* ten minute timeout so they were split.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncRegionAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncRegionAdminApi.class);
- @Test
+ public TestAsyncRegionAdminApi(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void testAssignRegionAndUnassignRegion() throws Exception {
createTableWithDefaultConf(tableName);
@@ -126,7 +138,7 @@ RegionInfo createTableAndGetOneRegion(final TableName tableName)
}
}
- @Test
+ @TestTemplate
public void testGetRegionByStateOfTable() throws Exception {
RegionInfo hri = createTableAndGetOneRegion(tableName);
@@ -139,7 +151,7 @@ public void testGetRegionByStateOfTable() throws Exception {
.anyMatch(r -> RegionInfo.COMPARATOR.compare(r, hri) == 0));
}
- @Test
+ @TestTemplate
public void testMoveRegion() throws Exception {
admin.balancerSwitch(false).join();
@@ -178,7 +190,7 @@ public void testMoveRegion() throws Exception {
admin.balancerSwitch(true).join();
}
- @Test
+ @TestTemplate
public void testGetOnlineRegions() throws Exception {
createTableAndGetOneRegion(tableName);
AtomicInteger regionServerCount = new AtomicInteger(0);
@@ -196,7 +208,7 @@ public void testGetOnlineRegions() throws Exception {
regionServerCount.get());
}
- @Test
+ @TestTemplate
public void testFlushTableAndRegion() throws Exception {
RegionInfo hri = createTableAndGetOneRegion(tableName);
ServerName serverName = TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager()
@@ -247,7 +259,7 @@ private void waitUntilMobCompactionFinished(TableName tableName)
assertEquals(CompactionState.NONE, state);
}
- @Test
+ @TestTemplate
public void testCompactMob() throws Exception {
ColumnFamilyDescriptor columnDescriptor = ColumnFamilyDescriptorBuilder
.newBuilder(Bytes.toBytes("mob")).setMobEnabled(true).setMobThreshold(0).build();
@@ -268,7 +280,7 @@ public void testCompactMob() throws Exception {
waitUntilMobCompactionFinished(tableName);
}
- @Test
+ @TestTemplate
public void testCompactRegionServer() throws Exception {
byte[][] families = { Bytes.toBytes("f1"), Bytes.toBytes("f2"), Bytes.toBytes("f3") };
createTableWithDefaultConf(tableName, null, families);
@@ -297,7 +309,7 @@ public void testCompactRegionServer() throws Exception {
assertEquals(3, countAfterMajorCompaction);
}
- @Test
+ @TestTemplate
public void testCompactionSwitchStates() throws Exception {
// Create a table with regions
byte[] family = Bytes.toBytes("family");
@@ -312,20 +324,20 @@ public void testCompactionSwitchStates() throws Exception {
admin.compactionSwitch(true, new ArrayList<>());
Map<ServerName, Boolean> pairs = listCompletableFuture.get();
for (Map.Entry<ServerName, Boolean> p : pairs.entrySet()) {
- assertEquals("Default compaction state, expected=enabled actual=disabled", true,
- p.getValue());
+ assertEquals(true, p.getValue(),
+ "Default compaction state, expected=enabled actual=disabled");
}
CompletableFuture<Map<ServerName, Boolean>> listCompletableFuture1 =
admin.compactionSwitch(false, new ArrayList<>());
Map<ServerName, Boolean> pairs1 = listCompletableFuture1.get();
for (Map.Entry<ServerName, Boolean> p : pairs1.entrySet()) {
- assertEquals("Last compaction state, expected=enabled actual=disabled", true, p.getValue());
+ assertEquals(true, p.getValue(), "Last compaction state, expected=enabled actual=disabled");
}
CompletableFuture<Map<ServerName, Boolean>> listCompletableFuture2 =
admin.compactionSwitch(true, new ArrayList<>());
Map<ServerName, Boolean> pairs2 = listCompletableFuture2.get();
for (Map.Entry<ServerName, Boolean> p : pairs2.entrySet()) {
- assertEquals("Last compaction state, expected=disabled actual=enabled", false, p.getValue());
+ assertEquals(false, p.getValue(), "Last compaction state, expected=disabled actual=enabled");
}
ServerName serverName = TEST_UTIL.getHBaseCluster().getRegionServer(0).getServerName();
List<ServerName> serverNameList = new ArrayList<>();
@@ -335,18 +347,18 @@ public void testCompactionSwitchStates() throws Exception {
Map<ServerName, Boolean> pairs3 = listCompletableFuture3.get();
assertEquals(pairs3.entrySet().size(), 1);
for (Map.Entry<ServerName, Boolean> p : pairs3.entrySet()) {
- assertEquals("Last compaction state, expected=enabled actual=disabled", true, p.getValue());
+ assertEquals(true, p.getValue(), "Last compaction state, expected=enabled actual=disabled");
}
CompletableFuture<Map<ServerName, Boolean>> listCompletableFuture4 =
admin.compactionSwitch(true, serverNameList);
Map<ServerName, Boolean> pairs4 = listCompletableFuture4.get();
assertEquals(pairs4.entrySet().size(), 1);
for (Map.Entry<ServerName, Boolean> p : pairs4.entrySet()) {
- assertEquals("Last compaction state, expected=disabled actual=enabled", false, p.getValue());
+ assertEquals(false, p.getValue(), "Last compaction state, expected=disabled actual=enabled");
}
}
- @Test
+ @TestTemplate
public void testCompact() throws Exception {
compactionTest(TableName.valueOf("testCompact1"), 15, CompactionState.MINOR, false);
compactionTest(TableName.valueOf("testCompact2"), 15, CompactionState.MINOR, true);
@@ -454,12 +466,12 @@ private void compactionTest(final TableName tableName, final int flushes,
if (expectedState == CompactionState.MAJOR) {
assertEquals(1, countAfterSingleFamily);
} else {
- assertTrue("" + countAfterSingleFamily, 1 <= countAfterSingleFamily);
+ assertTrue(1 <= countAfterSingleFamily, "" + countAfterSingleFamily);
}
}
}
- @Test
+ @TestTemplate
public void testNonExistentTableCompaction() {
testNonExistentTableCompaction(CompactionState.MINOR);
testNonExistentTableCompaction(CompactionState.MAJOR);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi2.java
index 44106fb4ccf0..5f8367f07258 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncRegionAdminApi2.java
@@ -20,19 +20,20 @@
import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
+import java.util.function.Supplier;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
@@ -42,27 +43,37 @@
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous region admin operations.
* @see TestAsyncRegionAdminApi This test and it used to be joined it was taking longer than our ten
* minute timeout so they were split.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncRegionAdminApi2 extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncRegionAdminApi2.class);
+ public TestAsyncRegionAdminApi2(Supplier<AsyncAdmin> admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Test
+ @TestTemplate
public void testGetRegionLocation() throws Exception {
RawAsyncHBaseAdmin rawAdmin = (RawAsyncHBaseAdmin) ASYNC_CONN.getAdmin();
TEST_UTIL.createMultiRegionTable(tableName, HConstants.CATALOG_FAMILY);
@@ -76,7 +87,7 @@ public void testGetRegionLocation() throws Exception {
assertTrue(Bytes.equals(regionName, location.getRegion().getRegionName()));
}
- @Test
+ @TestTemplate
public void testSplitSwitch() throws Exception {
createTableWithDefaultConf(tableName);
byte[][] families = { FAMILY };
@@ -106,8 +117,8 @@ public void testSplitSwitch() throws Exception {
assertTrue(originalCount < count);
}
- @Test
- @Ignore
+ @TestTemplate
+ @Disabled
// It was ignored in TestSplitOrMergeStatus, too
public void testMergeSwitch() throws Exception {
createTableWithDefaultConf(tableName);
@@ -125,8 +136,8 @@ public void testMergeSwitch() throws Exception {
while ((postSplitCount = admin.getRegions(tableName).get().size()) == originalCount) {
Threads.sleep(100);
}
- assertTrue("originalCount=" + originalCount + ", postSplitCount=" + postSplitCount,
- originalCount != postSplitCount);
+ assertTrue(originalCount != postSplitCount,
+ "originalCount=" + originalCount + ", postSplitCount=" + postSplitCount);
// Merge switch is off so merge should NOT succeed.
assertTrue(admin.mergeSwitch(false).get());
@@ -134,7 +145,7 @@ public void testMergeSwitch() throws Exception {
assertTrue(regions.size() > 1);
admin.mergeRegions(regions.get(0).getRegionName(), regions.get(1).getRegionName(), true).join();
int count = admin.getRegions(tableName).get().size();
- assertTrue("postSplitCount=" + postSplitCount + ", count=" + count, postSplitCount == count);
+ assertTrue(postSplitCount == count, "postSplitCount=" + postSplitCount + ", count=" + count);
// Merge switch is on so merge should succeed.
assertFalse(admin.mergeSwitch(true).get());
@@ -154,7 +165,7 @@ private void initSplitMergeSwitch() throws Exception {
assertTrue(admin.isMergeEnabled().get());
}
- @Test
+ @TestTemplate
public void testMergeRegions() throws Exception {
byte[][] splitRows = new byte[][] { Bytes.toBytes("3"), Bytes.toBytes("6") };
createTableWithDefaultConf(tableName, splitRows);
@@ -200,7 +211,7 @@ public void testMergeRegions() throws Exception {
assertEquals(1, regionLocations.size());
}
- @Test
+ @TestTemplate
public void testMergeRegionsInvalidRegionCount() throws Exception {
byte[][] splitRows = new byte[][] { Bytes.toBytes("3"), Bytes.toBytes("6") };
createTableWithDefaultConf(tableName, splitRows);
@@ -224,7 +235,7 @@ public void testMergeRegionsInvalidRegionCount() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testSplitTable() throws Exception {
initSplitMergeSwitch();
splitTest(TableName.valueOf("testSplitTable"), 3000, false, null);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.java
index 59d9e1d8dec8..0c71d74c25ed 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApi.java
@@ -21,12 +21,12 @@
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.startsWith;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.util.ArrayList;
@@ -37,8 +37,9 @@
import java.util.Set;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
+import java.util.function.Supplier;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
import org.apache.hadoop.hbase.ServerName;
@@ -52,31 +53,30 @@
import org.apache.hadoop.hbase.replication.regionserver.HBaseInterClusterReplicationEndpoint;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
-import org.junit.After;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous replication admin operations.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncReplicationAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncReplicationAdminApi.class);
-
private final String ID_ONE = "1";
private static String KEY_ONE;
private final String ID_TWO = "2";
private static String KEY_TWO;
- @BeforeClass
+ public TestAsyncReplicationAdminApi(Supplier<AsyncAdmin> admin) {
+ super(admin);
+ }
+
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -88,7 +88,12 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN = ConnectionFactory.createAsyncConnection(TEST_UTIL.getConfiguration()).get();
}
- @After
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @AfterEach
public void clearPeerAndQueues() throws IOException, ReplicationException {
try {
admin.removeReplicationPeer(ID_ONE).join();
@@ -107,7 +112,7 @@ public void clearPeerAndQueues() throws IOException, ReplicationException {
}
}
- @Test
+ @TestTemplate
public void testAddRemovePeer() throws Exception {
ReplicationPeerConfig rpc1 = new ReplicationPeerConfig();
rpc1.setClusterKey(KEY_ONE);
@@ -141,7 +146,7 @@ public void testAddRemovePeer() throws Exception {
assertEquals(0, admin.listReplicationPeers().get().size());
}
- @Test
+ @TestTemplate
public void testPeerConfig() throws Exception {
ReplicationPeerConfig config = new ReplicationPeerConfig();
config.setClusterKey(KEY_ONE);
@@ -159,7 +164,7 @@ public void testPeerConfig() throws Exception {
admin.removeReplicationPeer(ID_ONE).join();
}
- @Test
+ @TestTemplate
public void testEnableDisablePeer() throws Exception {
ReplicationPeerConfig rpc1 = new ReplicationPeerConfig();
rpc1.setClusterKey(KEY_ONE);
@@ -175,7 +180,7 @@ public void testEnableDisablePeer() throws Exception {
admin.removeReplicationPeer(ID_ONE).join();
}
- @Test
+ @TestTemplate
public void testAppendPeerTableCFs() throws Exception {
ReplicationPeerConfig rpc1 = new ReplicationPeerConfig();
rpc1.setClusterKey(KEY_ONE);
@@ -208,8 +213,8 @@ public void testAppendPeerTableCFs() throws Exception {
admin.appendReplicationPeerTableCFs(ID_ONE, tableCFs).join();
result = admin.getReplicationPeerConfig(ID_ONE).get().getTableCFsMap();
assertEquals(2, result.size());
- assertTrue("Should contain t1", result.containsKey(tableName1));
- assertTrue("Should contain t2", result.containsKey(tableName2));
+ assertTrue(result.containsKey(tableName1), "Should contain t1");
+ assertTrue(result.containsKey(tableName2), "Should contain t2");
assertNull(result.get(tableName1));
assertNull(result.get(tableName2));
@@ -220,9 +225,9 @@ public void testAppendPeerTableCFs() throws Exception {
admin.appendReplicationPeerTableCFs(ID_ONE, tableCFs).join();
result = admin.getReplicationPeerConfig(ID_ONE).get().getTableCFsMap();
assertEquals(3, result.size());
- assertTrue("Should contain t1", result.containsKey(tableName1));
- assertTrue("Should contain t2", result.containsKey(tableName2));
- assertTrue("Should contain t3", result.containsKey(tableName3));
+ assertTrue(result.containsKey(tableName1), "Should contain t1");
+ assertTrue(result.containsKey(tableName2), "Should contain t2");
+ assertTrue(result.containsKey(tableName3), "Should contain t3");
assertNull(result.get(tableName1));
assertNull(result.get(tableName2));
assertEquals(1, result.get(tableName3).size());
@@ -236,10 +241,10 @@ public void testAppendPeerTableCFs() throws Exception {
admin.appendReplicationPeerTableCFs(ID_ONE, tableCFs).join();
result = admin.getReplicationPeerConfig(ID_ONE).get().getTableCFsMap();
assertEquals(4, result.size());
- assertTrue("Should contain t1", result.containsKey(tableName1));
- assertTrue("Should contain t2", result.containsKey(tableName2));
- assertTrue("Should contain t3", result.containsKey(tableName3));
- assertTrue("Should contain t4", result.containsKey(tableName4));
+ assertTrue(result.containsKey(tableName1), "Should contain t1");
+ assertTrue(result.containsKey(tableName2), "Should contain t2");
+ assertTrue(result.containsKey(tableName3), "Should contain t3");
+ assertTrue(result.containsKey(tableName4), "Should contain t4");
assertNull(result.get(tableName1));
assertNull(result.get(tableName2));
assertEquals(1, result.get(tableName3).size());
@@ -258,7 +263,7 @@ public void testAppendPeerTableCFs() throws Exception {
admin.appendReplicationPeerTableCFs(ID_ONE, tableCFs).join();
result = admin.getReplicationPeerConfig(ID_ONE).get().getTableCFsMap();
assertEquals(5, result.size());
- assertTrue("Should contain t5", result.containsKey(tableName5));
+ assertTrue(result.containsKey(tableName5), "Should contain t5");
// null means replication all cfs of tab5
assertNull(result.get(tableName5));
@@ -272,14 +277,14 @@ public void testAppendPeerTableCFs() throws Exception {
admin.appendReplicationPeerTableCFs(ID_ONE, tableCFs).join();
result = admin.getReplicationPeerConfig(ID_ONE).get().getTableCFsMap();
assertEquals(6, result.size());
- assertTrue("Should contain t6", result.containsKey(tableName6));
+ assertTrue(result.containsKey(tableName6), "Should contain t6");
// null means replication all cfs of tab6
assertNull(result.get(tableName6));
admin.removeReplicationPeer(ID_ONE).join();
}
- @Test
+ @TestTemplate
public void testRemovePeerTableCFs() throws Exception {
ReplicationPeerConfig rpc1 = new ReplicationPeerConfig();
rpc1.setClusterKey(KEY_ONE);
@@ -319,8 +324,8 @@ public void testRemovePeerTableCFs() throws Exception {
Map<TableName, List<String>> result =
admin.getReplicationPeerConfig(ID_ONE).get().getTableCFsMap();
assertEquals(2, result.size());
- assertTrue("Should contain t1", result.containsKey(tableName1));
- assertTrue("Should contain t2", result.containsKey(tableName2));
+ assertTrue(result.containsKey(tableName1), "Should contain t1");
+ assertTrue(result.containsKey(tableName2), "Should contain t2");
assertNull(result.get(tableName1));
assertEquals(1, result.get(tableName2).size());
assertEquals("cf1", result.get(tableName2).get(0));
@@ -365,7 +370,7 @@ public void testRemovePeerTableCFs() throws Exception {
admin.removeReplicationPeer(ID_ONE);
}
- @Test
+ @TestTemplate
public void testSetPeerNamespaces() throws Exception {
String ns1 = "ns1";
String ns2 = "ns2";
@@ -401,7 +406,7 @@ public void testSetPeerNamespaces() throws Exception {
admin.removeReplicationPeer(ID_ONE).join();
}
- @Test
+ @TestTemplate
public void testNamespacesAndTableCfsConfigConflict() throws Exception {
String ns1 = "ns1";
String ns2 = "ns2";
@@ -451,7 +456,7 @@ public void testNamespacesAndTableCfsConfigConflict() throws Exception {
admin.removeReplicationPeer(ID_ONE).join();
}
- @Test
+ @TestTemplate
public void testPeerBandwidth() throws Exception {
ReplicationPeerConfig rpc = new ReplicationPeerConfig();
rpc.setClusterKey(KEY_ONE);
@@ -467,7 +472,7 @@ public void testPeerBandwidth() throws Exception {
admin.removeReplicationPeer(ID_ONE).join();
}
- @Test
+ @TestTemplate
public void testInvalidClusterKey() throws InterruptedException {
try {
admin.addReplicationPeer(ID_ONE,
@@ -478,7 +483,7 @@ public void testInvalidClusterKey() throws InterruptedException {
}
}
- @Test
+ @TestTemplate
public void testClusterKeyWithTrailingSpace() throws Exception {
admin.addReplicationPeer(ID_ONE,
ReplicationPeerConfig.newBuilder().setClusterKey(KEY_ONE + " ").build()).get();
@@ -486,7 +491,7 @@ public void testClusterKeyWithTrailingSpace() throws Exception {
assertEquals(KEY_ONE, clusterKey);
}
- @Test
+ @TestTemplate
public void testInvalidReplicationEndpoint() throws InterruptedException {
try {
admin.addReplicationPeer(ID_ONE,
@@ -498,7 +503,7 @@ public void testInvalidReplicationEndpoint() throws InterruptedException {
}
}
- @Test
+ @TestTemplate
public void testSetReplicationEndpoint() throws InterruptedException, ExecutionException {
// make sure that we do not need to set cluster key when we use customized ReplicationEndpoint
admin
@@ -519,10 +524,10 @@ public void testSetReplicationEndpoint() throws InterruptedException, ExecutionE
}
}
- /*
+ /**
* Tests that admin api throws ReplicationPeerNotFoundException if peer doesn't exist.
*/
- @Test
+ @TestTemplate
public void testReplicationPeerNotFoundException() throws InterruptedException {
String dummyPeer = "dummy_peer";
try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
index 80a640dde9c2..78ffb0c815ef 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncReplicationAdminApiWithClusters.java
@@ -18,21 +18,21 @@
package org.apache.hadoop.hbase.client;
import static org.apache.hadoop.hbase.client.AsyncProcess.START_LOG_ERRORS_AFTER_COUNT_KEY;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
-import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ForkJoinPool;
+import java.util.function.Supplier;
import java.util.regex.Pattern;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
@@ -41,26 +41,20 @@
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous replication admin operations when more than 1 cluster
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncReplicationAdminApiWithClusters extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncReplicationAdminApiWithClusters.class);
-
private final static String ID_SECOND = "2";
private static HBaseTestingUtility TEST_UTIL2;
@@ -68,7 +62,11 @@ public class TestAsyncReplicationAdminApiWithClusters extends TestAsyncAdminBase
private static AsyncAdmin admin2;
private static AsyncConnection connection;
- @BeforeClass
+ public TestAsyncReplicationAdminApiWithClusters(Supplier<AsyncAdmin> admin) {
+ super(admin);
+ }
+
+ @BeforeAll
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 60000);
TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 120000);
@@ -90,13 +88,14 @@ public static void setUpBeforeClass() throws Exception {
ASYNC_CONN.getAdmin().addReplicationPeer(ID_SECOND, rpc).join();
}
- @AfterClass
- public static void clearUp() throws IOException {
+ @AfterAll
+ public static void clearUp() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
connection.close();
}
@Override
- @After
+ @AfterEach
public void tearDown() throws Exception {
Pattern pattern = Pattern.compile(tableName.getNameAsString() + ".*");
cleanupTables(admin, pattern);
@@ -124,7 +123,7 @@ private void createTableWithDefaultConf(AsyncAdmin admin, TableName tableName) {
admin.createTable(builder.build()).join();
}
- @Test
+ @TestTemplate
public void testEnableAndDisableTableReplication() throws Exception {
// default replication scope is local
createTableWithDefaultConf(tableName);
@@ -141,7 +140,7 @@ public void testEnableAndDisableTableReplication() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testEnableReplicationWhenSlaveClusterDoesntHaveTable() throws Exception {
// Only create table in source cluster
createTableWithDefaultConf(tableName);
@@ -150,7 +149,7 @@ public void testEnableReplicationWhenSlaveClusterDoesntHaveTable() throws Except
assertTrue(admin2.tableExists(tableName).get());
}
- @Test
+ @TestTemplate
public void testEnableReplicationWhenTableDescriptorIsNotSameInClusters() throws Exception {
createTableWithDefaultConf(admin, tableName);
createTableWithDefaultConf(admin2, tableName);
@@ -179,7 +178,7 @@ public void testEnableReplicationWhenTableDescriptorIsNotSameInClusters() throws
}
}
- @Test
+ @TestTemplate
public void testDisableReplicationForNonExistingTable() throws Exception {
try {
admin.disableTableReplication(tableName).join();
@@ -188,7 +187,7 @@ public void testDisableReplicationForNonExistingTable() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testEnableReplicationForNonExistingTable() throws Exception {
try {
admin.enableTableReplication(tableName).join();
@@ -197,7 +196,7 @@ public void testEnableReplicationForNonExistingTable() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testDisableReplicationWhenTableNameAsNull() throws Exception {
try {
admin.disableTableReplication(null).join();
@@ -206,7 +205,7 @@ public void testDisableReplicationWhenTableNameAsNull() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testEnableReplicationWhenTableNameAsNull() throws Exception {
try {
admin.enableTableReplication(null).join();
@@ -219,15 +218,15 @@ public void testEnableReplicationWhenTableNameAsNull() throws Exception {
* Test enable table replication should create table only in user explicit specified table-cfs.
* HBASE-14717
*/
- @Test
+ @TestTemplate
public void testEnableReplicationForExplicitSetTableCfs() throws Exception {
TableName tableName2 = TableName.valueOf(tableName.getNameAsString() + "2");
// Only create table in source cluster
createTableWithDefaultConf(tableName);
createTableWithDefaultConf(tableName2);
- assertFalse("Table should not exists in the peer cluster", admin2.tableExists(tableName).get());
- assertFalse("Table should not exists in the peer cluster",
- admin2.tableExists(tableName2).get());
+ assertFalse(admin2.tableExists(tableName).get(), "Table should not exists in the peer cluster");
+ assertFalse(admin2.tableExists(tableName2).get(),
+ "Table should not exists in the peer cluster");
Map<TableName, List<String>> tableCfs = new HashMap<>();
tableCfs.put(tableName, null);
@@ -238,17 +237,16 @@ public void testEnableReplicationForExplicitSetTableCfs() throws Exception {
// Only add tableName to replication peer config
admin.updateReplicationPeerConfig(ID_SECOND, rpc).join();
admin.enableTableReplication(tableName2).join();
- assertFalse("Table should not be created if user has set table cfs explicitly for the "
- + "peer and this is not part of that collection", admin2.tableExists(tableName2).get());
+ assertFalse(admin2.tableExists(tableName2).get(), "Table should not be created if user "
+ + "has set table cfs explicitly for the peer and this is not part of that collection");
// Add tableName2 to replication peer config, too
tableCfs.put(tableName2, null);
rpc.setTableCFsMap(tableCfs);
admin.updateReplicationPeerConfig(ID_SECOND, rpc).join();
admin.enableTableReplication(tableName2).join();
- assertTrue(
- "Table should be created if user has explicitly added table into table cfs collection",
- admin2.tableExists(tableName2).get());
+ assertTrue(admin2.tableExists(tableName2).get(),
+ "Table should be created if user has explicitly added table into table cfs collection");
} finally {
rpc.setTableCFsMap(null);
rpc.setReplicateAllUserTables(true);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
index 70cd00981340..4fa6a2603a04 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncSnapshotAdminApi.java
@@ -17,49 +17,61 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
+import java.util.function.Supplier;
import java.util.regex.Pattern;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.After;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestInfo;
+import org.junit.jupiter.api.TestTemplate;
+
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncSnapshotAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncSnapshotAdminApi.class);
-
private static final Pattern MATCH_ALL = Pattern.compile(".*");
String snapshotName1 = "snapshotName1";
String snapshotName2 = "snapshotName2";
String snapshotName3 = "snapshotName3";
- @After
+ public TestAsyncSnapshotAdminApi(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @AfterEach
public void cleanup() throws Exception {
admin.deleteSnapshots(MATCH_ALL).get();
admin.listTableNames().get().forEach(t -> admin.disableTable(t).join());
admin.listTableNames().get().forEach(t -> admin.deleteTable(t).join());
}
- @Test
+ @TestTemplate
public void testTakeSnapshot() throws Exception {
Admin syncAdmin = TEST_UTIL.getAdmin();
@@ -88,7 +100,7 @@ public void testTakeSnapshot() throws Exception {
assertEquals(SnapshotType.FLUSH, snapshots.get(1).getType());
}
- @Test
+ @TestTemplate
public void testCloneSnapshot() throws Exception {
TableName tableName2 = TableName.valueOf("testCloneSnapshot2");
Admin syncAdmin = TEST_UTIL.getAdmin();
@@ -138,7 +150,7 @@ private void assertResult(TableName tableName, int expectedRowCount) throws IOEx
}
}
- @Test
+ @TestTemplate
public void testRestoreSnapshot() throws Exception {
Table table = TEST_UTIL.createTable(tableName, Bytes.toBytes("f1"));
for (int i = 0; i < 3000; i++) {
@@ -162,8 +174,9 @@ public void testRestoreSnapshot() throws Exception {
assertResult(tableName, 3000);
}
- @Test
- public void testListSnapshots() throws Exception {
+ @TestTemplate
+ public void testListSnapshots(TestInfo testInfo) throws Exception {
+ tableName = TableName.valueOf(testInfo.getTestMethod().get().getName());
Table table = TEST_UTIL.createTable(tableName, Bytes.toBytes("f1"));
for (int i = 0; i < 3000; i++) {
table.put(new Put(Bytes.toBytes(i)).addColumn(Bytes.toBytes("f1"), Bytes.toBytes("cq"),
@@ -191,7 +204,7 @@ public void testListSnapshots() throws Exception {
.size());
}
- @Test
+ @TestTemplate
public void testDeleteSnapshots() throws Exception {
Table table = TEST_UTIL.createTable(tableName, Bytes.toBytes("f1"));
for (int i = 0; i < 3000; i++) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
index 93f4362b9ff3..429c52321c3e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi.java
@@ -19,10 +19,10 @@
import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.ArrayList;
import java.util.HashMap;
@@ -31,8 +31,9 @@
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletionException;
+import java.util.function.Supplier;
import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
@@ -44,11 +45,10 @@
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous table admin operations.
@@ -56,23 +56,34 @@
* minute timeout so they were split.
* @see TestAsyncTableAdminApi3 Another split out from this class so each runs under ten minutes.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncTableAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncTableAdminApi.class);
+ public TestAsyncTableAdminApi(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Test
+ @TestTemplate
public void testCreateTable() throws Exception {
List tables = admin.listTableDescriptors().get();
int numTables = tables.size();
createTableWithDefaultConf(tableName);
tables = admin.listTableDescriptors().get();
assertEquals(numTables + 1, tables.size());
- assertTrue("Table must be enabled.", TEST_UTIL.getHBaseCluster().getMaster()
- .getTableStateManager().isTableState(tableName, TableState.State.ENABLED));
+ assertTrue(TEST_UTIL.getHBaseCluster().getMaster().getTableStateManager()
+ .isTableState(tableName, TableState.State.ENABLED), "Table must be enabled.");
assertEquals(TableState.State.ENABLED, getStateFromMeta(tableName));
}
@@ -83,26 +94,26 @@ static TableState.State getStateFromMeta(TableName table) throws Exception {
return state.get().getState();
}
- @Test
+ @TestTemplate
public void testCreateTableNumberOfRegions() throws Exception {
AsyncTable metaTable = ASYNC_CONN.getTable(META_TABLE_NAME);
createTableWithDefaultConf(tableName);
List regionLocations =
AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName).get();
- assertEquals("Table should have only 1 region", 1, regionLocations.size());
+ assertEquals(1, regionLocations.size(), "Table should have only 1 region");
final TableName tableName2 = TableName.valueOf(tableName.getNameAsString() + "_2");
createTableWithDefaultConf(tableName2, new byte[][] { new byte[] { 42 } });
regionLocations = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName2).get();
- assertEquals("Table should have only 2 region", 2, regionLocations.size());
+ assertEquals(2, regionLocations.size(), "Table should have only 2 region");
final TableName tableName3 = TableName.valueOf(tableName.getNameAsString() + "_3");
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName3);
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY));
admin.createTable(builder.build(), Bytes.toBytes("a"), Bytes.toBytes("z"), 3).join();
regionLocations = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName3).get();
- assertEquals("Table should have only 3 region", 3, regionLocations.size());
+ assertEquals(3, regionLocations.size(), "Table should have only 3 region");
final TableName tableName4 = TableName.valueOf(tableName.getNameAsString() + "_4");
builder = TableDescriptorBuilder.newBuilder(tableName4);
@@ -119,10 +130,10 @@ public void testCreateTableNumberOfRegions() throws Exception {
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY));
admin.createTable(builder.build(), new byte[] { 1 }, new byte[] { 127 }, 16).join();
regionLocations = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName5).get();
- assertEquals("Table should have 16 region", 16, regionLocations.size());
+ assertEquals(16, regionLocations.size(), "Table should have 16 region");
}
- @Test
+ @TestTemplate
public void testCreateTableWithRegions() throws Exception {
byte[][] splitKeys = { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 }, new byte[] { 3, 3, 3 },
new byte[] { 4, 4, 4 }, new byte[] { 5, 5, 5 }, new byte[] { 6, 6, 6 },
@@ -132,16 +143,15 @@ public void testCreateTableWithRegions() throws Exception {
createTableWithDefaultConf(tableName, splitKeys);
boolean tableAvailable = admin.isTableAvailable(tableName, splitKeys).get();
- assertTrue("Table should be created with splitKyes + 1 rows in META", tableAvailable);
+ assertTrue(tableAvailable, "Table should be created with splitKyes + 1 rows in META");
AsyncTable metaTable = ASYNC_CONN.getTable(META_TABLE_NAME);
List regions =
AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName).get();
Iterator hris = regions.iterator();
- assertEquals(
- "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size(),
- expectedRegions, regions.size());
+ assertEquals(expectedRegions, regions.size(),
+ "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size());
System.err.println("Found " + regions.size() + " regions");
RegionInfo hri;
@@ -195,9 +205,8 @@ public void testCreateTableWithRegions() throws Exception {
admin.createTable(builder.build(), startKey, endKey, expectedRegions).join();
regions = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName2).get();
- assertEquals(
- "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size(),
- expectedRegions, regions.size());
+ assertEquals(expectedRegions, regions.size(),
+ "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size());
System.err.println("Found " + regions.size() + " regions");
hris = regions.iterator();
@@ -247,9 +256,8 @@ public void testCreateTableWithRegions() throws Exception {
admin.createTable(builder.build(), startKey, endKey, expectedRegions).join();
regions = AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName3).get();
- assertEquals(
- "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size(),
- expectedRegions, regions.size());
+ assertEquals(expectedRegions, regions.size(),
+ "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size());
System.err.println("Found " + regions.size() + " regions");
if (tablesOnMaster) {
// This don't work if master is not carrying regions. FIX. TODO.
@@ -289,7 +297,7 @@ private void verifyRoundRobinDistribution(List regions, int exp
});
}
- @Test
+ @TestTemplate
public void testCreateTableWithOnlyEmptyStartRow() throws Exception {
byte[][] splitKeys = new byte[1][];
splitKeys[0] = HConstants.EMPTY_BYTE_ARRAY;
@@ -301,7 +309,7 @@ public void testCreateTableWithOnlyEmptyStartRow() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testCreateTableWithEmptyRowInTheSplitKeys() throws Exception {
byte[][] splitKeys = new byte[3][];
splitKeys[0] = "region1".getBytes();
@@ -315,7 +323,7 @@ public void testCreateTableWithEmptyRowInTheSplitKeys() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testDeleteTable() throws Exception {
createTableWithDefaultConf(tableName);
assertTrue(admin.tableExists(tableName).get());
@@ -324,12 +332,12 @@ public void testDeleteTable() throws Exception {
assertFalse(admin.tableExists(tableName).get());
}
- @Test
+ @TestTemplate
public void testTruncateTable() throws Exception {
testTruncateTable(tableName, false);
}
- @Test
+ @TestTemplate
public void testTruncateTablePreservingSplits() throws Exception {
testTruncateTable(tableName, true);
}
@@ -364,13 +372,13 @@ private void testTruncateTable(final TableName tableName, boolean preserveSplits
}
}
- @Test
+ @TestTemplate
public void testCloneTableSchema() throws Exception {
final TableName newTableName = TableName.valueOf(tableName.getNameAsString() + "_new");
testCloneTableSchema(tableName, newTableName, false);
}
- @Test
+ @TestTemplate
public void testCloneTableSchemaPreservingSplits() throws Exception {
final TableName newTableName = TableName.valueOf(tableName.getNameAsString() + "_new");
testCloneTableSchema(tableName, newTableName, true);
@@ -396,8 +404,8 @@ private void testCloneTableSchema(final TableName tableName, final TableName new
admin.createTable(tableDesc, splitKeys).join();
assertEquals(NUM_REGIONS, TEST_UTIL.getHBaseCluster().getRegions(tableName).size());
- assertTrue("Table should be created with splitKyes + 1 rows in META",
- admin.isTableAvailable(tableName, splitKeys).get());
+ assertTrue(admin.isTableAvailable(tableName, splitKeys).get(),
+ "Table should be created with splitKyes + 1 rows in META");
// Clone & Verify
admin.cloneTableSchema(tableName, newTableName, preserveSplits).join();
@@ -414,14 +422,14 @@ private void testCloneTableSchema(final TableName tableName, final TableName new
if (preserveSplits) {
assertEquals(NUM_REGIONS, TEST_UTIL.getHBaseCluster().getRegions(newTableName).size());
- assertTrue("New table should be created with splitKyes + 1 rows in META",
- admin.isTableAvailable(newTableName, splitKeys).get());
+ assertTrue(admin.isTableAvailable(newTableName, splitKeys).get(),
+ "New table should be created with splitKyes + 1 rows in META");
} else {
assertEquals(1, TEST_UTIL.getHBaseCluster().getRegions(newTableName).size());
}
}
- @Test
+ @TestTemplate
public void testCloneTableSchemaWithNonExistentSourceTable() throws Exception {
final TableName newTableName = TableName.valueOf(tableName.getNameAsString() + "_new");
// test for non-existent source table
@@ -433,7 +441,7 @@ public void testCloneTableSchemaWithNonExistentSourceTable() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testCloneTableSchemaWithExistentDestinationTable() throws Exception {
final TableName newTableName = TableName.valueOf(tableName.getNameAsString() + "_new");
byte[] FAMILY_0 = Bytes.toBytes("cf0");
@@ -448,7 +456,7 @@ public void testCloneTableSchemaWithExistentDestinationTable() throws Exception
}
}
- @Test
+ @TestTemplate
public void testIsTableAvailableWithInexistantTable() throws Exception {
final TableName newTableName = TableName.valueOf(tableName.getNameAsString() + "_new");
// test for inexistant table
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java
index 347b355acc85..7675a4091ff8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi2.java
@@ -17,15 +17,16 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.Optional;
import java.util.Set;
+import java.util.function.Supplier;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.testclassification.ClientTests;
@@ -35,26 +36,36 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.junit.Assert;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous table admin operations
* @see TestAsyncTableAdminApi This test and it used to be joined it was taking longer than our ten
* minute timeout so they were split.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncTableAdminApi2 extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncTableAdminApi2.class);
+ public TestAsyncTableAdminApi2(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Test
+ @TestTemplate
public void testDisableCatalogTable() throws Exception {
try {
this.admin.disableTable(TableName.META_TABLE_NAME).join();
@@ -66,7 +77,7 @@ public void testDisableCatalogTable() throws Exception {
createTableWithDefaultConf(tableName);
}
- @Test
+ @TestTemplate
public void testAddColumnFamily() throws Exception {
// Create a table with two families
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
@@ -81,7 +92,7 @@ public void testAddColumnFamily() throws Exception {
verifyTableDescriptor(tableName, FAMILY_0, FAMILY_1);
}
- @Test
+ @TestTemplate
public void testAddSameColumnFamilyTwice() throws Exception {
// Create a table with one families
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
@@ -104,7 +115,7 @@ public void testAddSameColumnFamilyTwice() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testModifyColumnFamily() throws Exception {
TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName);
ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(FAMILY_0);
@@ -124,7 +135,7 @@ public void testModifyColumnFamily() throws Exception {
assertTrue(hcfd.getBlocksize() == newBlockSize);
}
- @Test
+ @TestTemplate
public void testModifyNonExistingColumnFamily() throws Exception {
TableDescriptorBuilder tdBuilder = TableDescriptorBuilder.newBuilder(tableName);
ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(FAMILY_0);
@@ -146,7 +157,7 @@ public void testModifyNonExistingColumnFamily() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testDeleteColumnFamily() throws Exception {
// Create a table with two families
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
@@ -162,7 +173,7 @@ public void testDeleteColumnFamily() throws Exception {
verifyTableDescriptor(tableName, FAMILY_0);
}
- @Test
+ @TestTemplate
public void testDeleteSameColumnFamilyTwice() throws Exception {
// Create a table with two families
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
@@ -205,20 +216,20 @@ private void verifyTableDescriptor(final TableDescriptor htd, final TableName ta
assertEquals(tableName, htd.getTableName());
assertEquals(families.length, htdFamilies.size());
for (byte[] familyName : families) {
- assertTrue("Expected family " + Bytes.toString(familyName), htdFamilies.contains(familyName));
+ assertTrue(htdFamilies.contains(familyName), "Expected family " + Bytes.toString(familyName));
}
}
- @Test
+ @TestTemplate
public void testTableAvailableWithRandomSplitKeys() throws Exception {
createTableWithDefaultConf(tableName);
byte[][] splitKeys = new byte[1][];
splitKeys = new byte[][] { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 } };
boolean tableAvailable = admin.isTableAvailable(tableName, splitKeys).get();
- assertFalse("Table should be created with 1 row in META", tableAvailable);
+ assertFalse(tableAvailable, "Table should be created with 1 row in META");
}
- @Test
+ @TestTemplate
public void testCompactionTimestamps() throws Exception {
createTableWithDefaultConf(tableName);
AsyncTable> table = ASYNC_CONN.getTable(tableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java
index b55518892c12..775e5f894ded 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableAdminApi3.java
@@ -21,19 +21,20 @@
import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
+import java.util.function.Supplier;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.AsyncMetaTableAccessor;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDescriptor;
@@ -41,25 +42,36 @@
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Class to test asynchronous table admin operations.
* @see TestAsyncTableAdminApi2 This test and it used to be joined it was taking longer than our ten
* minute timeout so they were split.
*/
-@RunWith(Parameterized.class)
-@Category({ LargeTests.class, ClientTests.class })
+@Tag(LargeTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncTableAdminApi3 extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncTableAdminApi3.class);
- @Test
+ public TestAsyncTableAdminApi3(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
+
+ @TestTemplate
public void testTableExist() throws Exception {
boolean exist;
exist = admin.tableExists(tableName).get();
@@ -74,7 +86,7 @@ public void testTableExist() throws Exception {
assertTrue(exist);
}
- @Test
+ @TestTemplate
public void testListTables() throws Exception {
int numTables = admin.listTableDescriptors().get().size();
final TableName tableName1 = TableName.valueOf(tableName.getNameAsString() + "1");
@@ -96,7 +108,7 @@ public void testListTables() throws Exception {
break;
}
}
- assertTrue("Not found: " + tables[i], found);
+ assertTrue(found, "Not found: " + tables[i]);
}
List tableNames = admin.listTableNames().get();
@@ -110,7 +122,7 @@ public void testListTables() throws Exception {
break;
}
}
- assertTrue("Not found: " + tables[i], found);
+ assertTrue(found, "Not found: " + tables[i]);
}
tableNames = new ArrayList(tables.length + 1);
@@ -124,8 +136,8 @@ public void testListTables() throws Exception {
size = tableDescs.size();
assertEquals(tables.length + 1, size);
for (int i = 0, j = 0; i < tables.length && j < size; i++, j++) {
- assertTrue("tableName should be equal in order",
- tableDescs.get(j).getTableName().equals(tables[i]));
+ assertTrue(tableDescs.get(j).getTableName().equals(tables[i]),
+ "tableName should be equal in order");
}
assertTrue(tableDescs.get(size - 1).getTableName().equals(TableName.META_TABLE_NAME));
@@ -135,12 +147,12 @@ public void testListTables() throws Exception {
}
tableDescs = admin.listTableDescriptors(true).get();
- assertTrue("Not found system tables", tableDescs.size() > 0);
+ assertTrue(tableDescs.size() > 0, "Not found system tables");
tableNames = admin.listTableNames(true).get();
- assertTrue("Not found system tables", tableNames.size() > 0);
+ assertTrue(tableNames.size() > 0, "Not found system tables");
}
- @Test
+ @TestTemplate
public void testGetTableDescriptor() throws Exception {
byte[][] families = { FAMILY, FAMILY_0, FAMILY_1 };
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
@@ -157,7 +169,7 @@ public void testGetTableDescriptor() throws Exception {
assertEquals(0, TableDescriptor.COMPARATOR.compare(desc, confirmedHtd));
}
- @Test
+ @TestTemplate
public void testDisableAndEnableTable() throws Exception {
createTableWithDefaultConf(tableName);
AsyncTable> table = ASYNC_CONN.getTable(tableName);
@@ -172,8 +184,8 @@ public void testDisableAndEnableTable() throws Exception {
table.get(get).get();
this.admin.disableTable(tableName).join();
- assertTrue("Table must be disabled.", TEST_UTIL.getHBaseCluster().getMaster()
- .getTableStateManager().isTableState(tableName, TableState.State.DISABLED));
+ assertTrue(TEST_UTIL.getHBaseCluster().getMaster().getTableStateManager()
+ .isTableState(tableName, TableState.State.DISABLED), "Table must be disabled.");
assertEquals(TableState.State.DISABLED, TestAsyncTableAdminApi.getStateFromMeta(tableName));
// Test that table is disabled
@@ -194,8 +206,8 @@ public void testDisableAndEnableTable() throws Exception {
}
assertTrue(ok);
this.admin.enableTable(tableName).join();
- assertTrue("Table must be enabled.", TEST_UTIL.getHBaseCluster().getMaster()
- .getTableStateManager().isTableState(tableName, TableState.State.ENABLED));
+ assertTrue(TEST_UTIL.getHBaseCluster().getMaster().getTableStateManager()
+ .isTableState(tableName, TableState.State.ENABLED), "Table must be enabled.");
assertEquals(TableState.State.ENABLED, TestAsyncTableAdminApi.getStateFromMeta(tableName));
// Test that table is enabled
@@ -215,7 +227,7 @@ public void testDisableAndEnableTable() throws Exception {
}
}
- @Test
+ @TestTemplate
public void testDisableAndEnableTables() throws Exception {
final TableName tableName1 = TableName.valueOf(tableName.getNameAsString() + "1");
final TableName tableName2 = TableName.valueOf(tableName.getNameAsString() + "2");
@@ -279,7 +291,7 @@ public void testDisableAndEnableTables() throws Exception {
assertEquals(TableState.State.ENABLED, TestAsyncTableAdminApi.getStateFromMeta(tableName2));
}
- @Test
+ @TestTemplate
public void testEnableTableRetainAssignment() throws Exception {
byte[][] splitKeys = { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 }, new byte[] { 3, 3, 3 },
new byte[] { 4, 4, 4 }, new byte[] { 5, 5, 5 }, new byte[] { 6, 6, 6 },
@@ -290,9 +302,8 @@ public void testEnableTableRetainAssignment() throws Exception {
AsyncTable metaTable = ASYNC_CONN.getTable(META_TABLE_NAME);
List regions =
AsyncMetaTableAccessor.getTableHRegionLocations(metaTable, tableName).get();
- assertEquals(
- "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size(),
- expectedRegions, regions.size());
+ assertEquals(expectedRegions, regions.size(),
+ "Tried to create " + expectedRegions + " regions " + "but only found " + regions.size());
// Disable table.
admin.disableTable(tableName).join();
@@ -306,7 +317,7 @@ public void testEnableTableRetainAssignment() throws Exception {
assertTrue(regions2.containsAll(regions));
}
- @Test
+ @TestTemplate
public void testIsTableEnabledAndDisabled() throws Exception {
createTableWithDefaultConf(tableName);
assertTrue(admin.isTableEnabled(tableName).get());
@@ -320,7 +331,7 @@ public void testIsTableEnabledAndDisabled() throws Exception {
assertFalse(admin.isTableDisabled(TableName.META_TABLE_NAME).get());
}
- @Test
+ @TestTemplate
public void testIsTableAvailable() throws Exception {
createTableWithDefaultConf(tableName);
TEST_UTIL.waitTableAvailable(tableName);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncToolAdminApi.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncToolAdminApi.java
index 1bab0b89a4a3..d00af13183fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncToolAdminApi.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestAsyncToolAdminApi.java
@@ -17,29 +17,40 @@
*/
package org.apache.hadoop.hbase.client;
-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
-import org.apache.hadoop.hbase.HBaseClassTestRule;
+import java.util.function.Supplier;
+import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate;
import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.junit.ClassRule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Tag;
+import org.junit.jupiter.api.TestTemplate;
/**
* Test the admin operations for Balancer, Normalizer, CleanerChore, and CatalogJanitor.
*/
-@RunWith(Parameterized.class)
-@Category({ MediumTests.class, ClientTests.class })
+@Tag(MediumTests.TAG)
+@Tag(ClientTests.TAG)
+@HBaseParameterizedTestTemplate(name = "{index}: policy = {0}")
public class TestAsyncToolAdminApi extends TestAsyncAdminBase {
- @ClassRule
- public static final HBaseClassTestRule CLASS_RULE =
- HBaseClassTestRule.forClass(TestAsyncToolAdminApi.class);
+ public TestAsyncToolAdminApi(Supplier admin) {
+ super(admin);
+ }
+
+ @BeforeAll
+ public static void setUpBeforeClass() throws Exception {
+ TestAsyncAdminBase.setUpBeforeClass();
+ }
+
+ @AfterAll
+ public static void tearDownAfterClass() throws Exception {
+ TestAsyncAdminBase.tearDownAfterClass();
+ }
- @Test
+ @TestTemplate
public void testBalancer() throws Exception {
boolean initialState = admin.isBalancerEnabled().get();
@@ -62,7 +73,7 @@ public void testBalancer() throws Exception {
assertEquals(initialState, admin.isBalancerEnabled().get());
}
- @Test
+ @TestTemplate
public void testNormalizer() throws Exception {
boolean initialState = admin.isNormalizerEnabled().get();
@@ -85,7 +96,7 @@ public void testNormalizer() throws Exception {
assertEquals(initialState, admin.isNormalizerEnabled().get());
}
- @Test
+ @TestTemplate
public void testCleanerChore() throws Exception {
boolean initialState = admin.isCleanerChoreEnabled().get();
@@ -108,7 +119,7 @@ public void testCleanerChore() throws Exception {
assertEquals(initialState, admin.isCleanerChoreEnabled().get());
}
- @Test
+ @TestTemplate
public void testCatalogJanitor() throws Exception {
boolean initialState = admin.isCatalogJanitorEnabled().get();