
Commit 56230df

HDDS-11463. Improve Some Code.
1 parent 0390295

File tree

3 files changed: +70 -10 lines changed

hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/ScmAdmin.java

Lines changed: 2 additions & 1 deletion

@@ -41,7 +41,8 @@
     TransferScmLeaderSubCommand.class,
     DeletedBlocksTxnCommands.class,
     DecommissionScmSubcommand.class,
-    RotateKeySubCommand.class
+    RotateKeySubCommand.class,
+    VolumeFailureSubCommand.class,
 })
 @MetaInfServices(SubcommandWithParent.class)
 public class ScmAdmin extends GenericCli implements SubcommandWithParent {
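
The ScmAdmin change is plain picocli wiring: adding VolumeFailureSubCommand.class to the parent command's subcommands list is all it takes for `ozone admin scm` to dispatch to the new command by name. A minimal, self-contained sketch of the same registration pattern; the command names ("demo-admin", "volumefailure") are illustrative, since the diff does not show the name VolumeFailureSubCommand declares in its own @Command annotation:

    // Sketch of picocli subcommand registration; all names here are demo names.
    import picocli.CommandLine;
    import picocli.CommandLine.Command;

    @Command(name = "demo-admin",
        subcommands = {
            VolumeFailureDemo.class  // registered just like VolumeFailureSubCommand above
        })
    class DemoAdmin implements Runnable {
      @Override
      public void run() {
        // No subcommand given: print usage, as admin parent commands usually do.
        new CommandLine(this).usage(System.out);
      }

      public static void main(String[] args) {
        // picocli matches the first argument "volumefailure" and runs VolumeFailureDemo.
        System.exit(new CommandLine(new DemoAdmin()).execute(args));
      }
    }

    @Command(name = "volumefailure", description = "List datanode volume failures (demo)")
    class VolumeFailureDemo implements Runnable {
      @Override
      public void run() {
        System.out.println("volume failure report goes here");
      }
    }
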
hadoop-ozone/tools/src/main/java/org/apache/hadoop/ozone/admin/scm/VolumeFailureSubCommand.java

Lines changed: 33 additions & 4 deletions
@@ -15,19 +15,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.scm.cli.datanode;
+package org.apache.hadoop.ozone.admin.scm;
 
 import org.apache.hadoop.hdds.cli.HddsVersionProvider;
 import org.apache.hadoop.hdds.scm.cli.ScmSubcommand;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
 import org.apache.hadoop.hdds.scm.datanode.VolumeFailureInfo;
 import org.apache.hadoop.hdds.server.JsonUtils;
+import org.apache.hadoop.ozone.utils.FormattingCLIUtils;
 import org.apache.hadoop.util.StringUtils;
 import picocli.CommandLine.Command;
 import picocli.CommandLine.Option;
 
 import java.io.IOException;
 import java.text.SimpleDateFormat;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Locale;
 
@@ -44,17 +46,44 @@ public class VolumeFailureSubCommand extends ScmSubcommand {
       description = "Format output as JSON")
   private boolean json;
 
-  private SimpleDateFormat sdf = new SimpleDateFormat(
+  // Display it in TABLE format.
+  @Option(names = { "--table" },
+      defaultValue = "false",
+      description = "Format output as Table")
+  private boolean table;
+
+  private static final SimpleDateFormat sdf = new SimpleDateFormat(
       "EEE MMM dd HH:mm:ss Z yyyy", Locale.ENGLISH);
-
+
+  private static final String DATANODE_VOLUME_FAILURES_TITLE = "Datanode Volume Failures";
+
+  private static final List<String> DATANODE_VOLUME_FAILURES_HEADER = Arrays.asList(
+      "Node", "Volume Name", "Capacity Lost", "Failure Date");
+
   @Override
-  protected void execute(ScmClient client) throws IOException {
+  public void execute(ScmClient client) throws IOException {
     List<VolumeFailureInfo> volumeFailureInfos = client.getVolumeFailureInfos();
+
     if (json) {
       System.out.print(
           JsonUtils.toJsonStringWithDefaultPrettyPrinter(volumeFailureInfos));
       return;
     }
+
+    if (table) {
+      FormattingCLIUtils formattingCLIUtils = new FormattingCLIUtils(DATANODE_VOLUME_FAILURES_TITLE)
+          .addHeaders(DATANODE_VOLUME_FAILURES_HEADER);
+      for (VolumeFailureInfo info : volumeFailureInfos) {
+        String capacityLost = StringUtils.byteDesc(info.getCapacityLost());
+        String failureDate = sdf.format(info.getFailureDate());
+        String[] values = new String[]{info.getNode(), info.getVolumeName(),
+            capacityLost, failureDate};
+        formattingCLIUtils.addLine(values);
+      }
+      System.out.println(formattingCLIUtils.render());
+      return;
+    }
+
     System.out.printf("Datanode Volume Failures (%d Volumes)%n%n", volumeFailureInfos.size());
     volumeFailureInfos.forEach(this::printInfo);
   }
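
In the table branch, FormattingCLIUtils is used as a fluent builder: construct it with a title, add one header row, append a line per VolumeFailureInfo, then render() the finished table. Its implementation is not part of this diff, so the following is only a rough stand-in with the same surface (title constructor, fluent addHeaders(List) and addLine(String[]), render()); the real class in org.apache.hadoop.ozone.utils may draw fuller borders:

    // Assumed-internals stand-in for the FormattingCLIUtils API shape used above.
    import java.util.ArrayList;
    import java.util.List;

    class AsciiTableSketch {
      private final String title;
      private List<String> headers = new ArrayList<>();
      private final List<String[]> rows = new ArrayList<>();

      AsciiTableSketch(String title) {
        this.title = title;
      }

      AsciiTableSketch addHeaders(List<String> newHeaders) {
        headers = new ArrayList<>(newHeaders);
        return this;  // fluent, matching the .addHeaders(...) chaining above
      }

      AsciiTableSketch addLine(String[] values) {
        rows.add(values.clone());
        return this;
      }

      String render() {
        // Each column is as wide as its widest cell, header included.
        int[] widths = new int[headers.size()];
        for (int c = 0; c < widths.length; c++) {
          widths[c] = headers.get(c).length();
          for (String[] row : rows) {
            widths[c] = Math.max(widths[c], row[c].length());
          }
        }
        StringBuilder sb = new StringBuilder(title).append('\n');
        appendRow(sb, headers.toArray(new String[0]), widths);
        for (String[] row : rows) {
          appendRow(sb, row, widths);
        }
        return sb.toString();
      }

      private static void appendRow(StringBuilder sb, String[] cells, int[] widths) {
        for (int c = 0; c < cells.length; c++) {
          sb.append(String.format("%-" + widths[c] + "s  ", cells[c]));
        }
        sb.append('\n');
      }
    }

With the headers defined above, each rendered row carries the node, the volume name, the capacity lost as a human-readable size (StringUtils.byteDesc), and the failure date formatted by the shared SimpleDateFormat.
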
hadoop-ozone/tools/src/test/java/org/apache/hadoop/ozone/scm/TestVolumeFailureSubCommand.java

Lines changed: 35 additions & 5 deletions
@@ -15,13 +15,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hdds.scm.cli.datanode;
+package org.apache.hadoop.ozone.scm;
 
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
+import org.apache.hadoop.hdds.protocol.DatanodeDetails;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.client.ScmClient;
+import org.apache.hadoop.ozone.admin.scm.VolumeFailureSubCommand;
 import org.apache.hadoop.hdds.scm.datanode.VolumeFailureInfo;
 import org.apache.hadoop.util.Time;
 import org.junit.jupiter.api.AfterEach;
@@ -36,7 +35,11 @@
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.ThreadLocalRandom;
 
+import static org.apache.hadoop.hdds.protocol.DatanodeDetails.Port.Name.ALL_PORTS;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -71,6 +74,16 @@ public void testCorrectJsonValuesInReport() throws IOException {
     cmd.execute(scmClient);
   }
 
+  @Test
+  public void testCorrectTableValuesInReport() throws IOException {
+    ScmClient scmClient = mock(ScmClient.class);
+    when(scmClient.getVolumeFailureInfos()).thenAnswer(invocation -> getUsageProto());
+
+    CommandLine c = new CommandLine(cmd);
+    c.parseArgs("--table");
+    cmd.execute(scmClient);
+  }
+
   private List<VolumeFailureInfo> getUsageProto() {
     List<VolumeFailureInfo> result = new ArrayList<>();
     for (int i = 0; i < 5; i++) {
@@ -90,6 +103,23 @@ private List<VolumeFailureInfo> getUsageProto() {
   }
 
   private HddsProtos.DatanodeDetailsProto createDatanodeDetails() {
-    return MockDatanodeDetails.randomDatanodeDetails().getProtoBufMessage();
+    Random random = ThreadLocalRandom.current();
+    String ipAddress = random.nextInt(256)
+        + "." + random.nextInt(256)
+        + "." + random.nextInt(256)
+        + "." + random.nextInt(256);
+
+    DatanodeDetails.Builder dn = DatanodeDetails.newBuilder()
+        .setUuid(UUID.randomUUID())
+        .setHostName("localhost" + "-" + ipAddress)
+        .setIpAddress(ipAddress)
+        .setPersistedOpState(HddsProtos.NodeOperationalState.IN_SERVICE)
+        .setPersistedOpStateExpiry(0);
+
+    for (DatanodeDetails.Port.Name name : ALL_PORTS) {
+      dn.addPort(DatanodeDetails.newPort(name, 0));
+    }
+
+    return dn.build().getProtoBufMessage();
   }
 }
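
The new test drives option parsing the same way the shell would: c.parseArgs("--table") flips the @Option boolean on cmd before execute(scmClient) runs, and the mocked ScmClient feeds it five synthetic VolumeFailureInfo entries. The retained imports (StandardCharsets, AfterEach) fit the usual stdout-capture harness such CLI tests rely on to inspect printed output; a sketch of that pattern under JUnit 5, with illustrative names, since the harness itself is outside this diff:

    // Illustrative stdout-capture harness (not part of the patch): redirect
    // System.out before each test so the printed report can be inspected,
    // then restore it afterwards.
    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;

    import org.junit.jupiter.api.AfterEach;
    import org.junit.jupiter.api.BeforeEach;

    class StdoutCaptureSketch {
      private final ByteArrayOutputStream outContent = new ByteArrayOutputStream();
      private PrintStream originalOut;

      @BeforeEach
      void redirectStdout() throws UnsupportedEncodingException {
        originalOut = System.out;
        System.setOut(new PrintStream(outContent, false, StandardCharsets.UTF_8.name()));
      }

      @AfterEach
      void restoreStdout() {
        System.setOut(originalOut);
      }

      // After a test calls cmd.execute(scmClient), the captured report is here.
      String captured() {
        return new String(outContent.toByteArray(), StandardCharsets.UTF_8);
      }
    }

A test could then assert, for example, that captured() contains the "Capacity Lost" header cell when --table is set.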
