diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
index 2c4a386de591c..3a74f835f4ad8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeMetrics.java
@@ -821,13 +821,12 @@ public Boolean get() {
   @Test
   public void testDataNodeDatasetLockMetrics() throws IOException {
     Configuration conf = new HdfsConfiguration();
-    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
-    try {
+    try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build()) {
       FileSystem fs = cluster.getFileSystem();
       // Create and read a 1 byte file
       Path tmpfile = new Path("/tmp.txt");
       DFSTestUtil.createFile(fs, tmpfile,
-          (long)1, (short)1, 1L);
+        (long)1, (short)1, 1L);
       DFSTestUtil.readFile(fs, tmpfile);
       List datanodes = cluster.getDataNodes();
       assertEquals(datanodes.size(), 1);
@@ -835,10 +834,6 @@ public void testDataNodeDatasetLockMetrics() throws IOException {
       MetricsRecordBuilder rb = getMetrics(datanode.getMetrics().name());
       assertCounterGt("AcquireDatasetWriteLockNumOps", (long)1, rb);
       assertCounterGt("AcquireDatasetReadLockNumOps", (long)1, rb);
-    } finally {
-      if (cluster != null) {
-        cluster.shutdown();
-      }
     }
   }
 }