From c2cd0bd8b0eb9307614d47f2ebecf40faf37eb16 Mon Sep 17 00:00:00 2001
From: jiaguodongF <272436024@qq.com>
Date: Fri, 16 Jul 2021 20:31:05 +0800
Subject: [PATCH] HDFS-16119. start balancer with parameters -hotBlockTimeInterval xxx is invalid

---
 .../hdfs/server/balancer/TestBalancer.java | 46 +++++++++++++++++++
 1 file changed, 46 insertions(+)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
index 9af8ade0ea50c..5c9f925bb4bab 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java
@@ -38,6 +38,7 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
 
+import java.lang.reflect.Field;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.junit.AfterClass;
 import static org.junit.Assert.assertEquals;
@@ -67,6 +68,7 @@
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.commons.lang3.StringUtils;
+import org.junit.Assert;
 import org.junit.Before;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -1219,6 +1221,50 @@ public void testBalancerCliParseBlockpools() {
     assertEquals(1, p.getBlockPools().size());
   }
 
+  @Test
+  public void testBalancerCliParseHotBlockTimeInterval() {
+    String[] parameters = new String[]{"-hotBlockTimeInterval", "1000"};
+    BalancerParameters p = Balancer.Cli.parse(parameters);
+    assertEquals(1000, p.getHotBlockTimeInterval());
+  }
+
+  @Test
+  public void testBalancerDispatchHotBlockTimeInterval() {
+    String[] parameters = new String[]{"-hotBlockTimeInterval", "1000"};
+    BalancerParameters p = Balancer.Cli.parse(parameters);
+    Configuration conf = new HdfsConfiguration();
+    initConf(conf);
+    try {
+      cluster = new MiniDFSCluster
+          .Builder(conf)
+          .numDataNodes(0)
+          .setNNRedundancyConsiderLoad(false)
+          .build();
+      cluster.getConfiguration(0).setInt(DFSConfigKeys.DFS_REPLICATION_KEY,
+          DFSConfigKeys.DFS_REPLICATION_DEFAULT);
+      conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY,
+          DFSConfigKeys.DFS_REPLICATION_DEFAULT);
+      cluster.waitClusterUp();
+      cluster.waitActive();
+      Collection<URI> namenodes = DFSUtil.getInternalNsRpcUris(conf);
+      List<NameNodeConnector> connectors = NameNodeConnector.newNameNodeConnectors(namenodes,
+          Balancer.class.getSimpleName(), Balancer.BALANCER_ID_PATH, conf,
+          BalancerParameters.DEFAULT.getMaxIdleIteration());
+
+      Balancer run = new Balancer(connectors.get(0), p, new HdfsConfiguration());
+      Field field = run.getClass().getDeclaredField("dispatcher");
+      field.setAccessible(true);
+      Object dispatcher = field.get(run);
+
+      Field field1 = dispatcher.getClass().getDeclaredField("hotBlockTimeInterval");
+      field1.setAccessible(true);
+      Object hotBlockTimeInterval = field1.get(dispatcher);
+      assertEquals(1000, Long.parseLong(String.valueOf(hotBlockTimeInterval)));
+    } catch (Exception e) {
+      Assert.fail(e.getMessage());
+    }
+  }
+
   /**
    * Verify balancer exits 0 on success.
    */