  @Test
  public void testInputsConvert() {

    // Only commas and blanks: expect an empty list.
    String name01 = " , ,";
    List<String> test01 = CommandsUtils.inputsConvert(name01);
    List<String> testDef01 = new ArrayList<String>(0);
    System.out.println(test01.toString());
    assertEquals(test01, testDef01);

    // A single non-blank token among blanks: expect a one-element list.
    String name02 = " , ,.";
    List<String> test02 = CommandsUtils.inputsConvert(name02);
    List<String> testDef02 = new ArrayList<String>(0);
    testDef02.add(".");
    System.out.println(test02.toString());
    assertEquals(test02, testDef02);

    // Tokens with surrounding whitespace: expect them trimmed, in order.
    String name03 = " a, b,c";
    List<String> test03 = CommandsUtils.inputsConvert(name03);
    List<String> testDef03 = new ArrayList<String>(0);
    testDef03.add("a");
    testDef03.add("b");
    testDef03.add("c");
    System.out.println(test03.toString());
    assertEquals(test03, testDef03);
  }
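
  // A hedged extra case, not in the original suite: the three cases above
  // suggest inputsConvert trims whitespace and drops empty tokens, so
  // consecutive or blank-only commas should collapse. The expected list is
  // an assumption inferred from that behavior.
  @Test
  public void testInputsConvertDropsEmptyTokens() {
    List<String> converted = CommandsUtils.inputsConvert("a,,b, ,c");
    List<String> expected = new ArrayList<String>(0);
    expected.add("a");
    expected.add("b");
    expected.add("c");
    assertEquals(converted, expected);
  }
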
  @Test
  public void testIsBlank() {
    assertTrue(CommandsUtils.isBlank(null));
    assertTrue(CommandsUtils.isBlank(""));
    assertTrue(CommandsUtils.isBlank(" "));
    assertFalse(CommandsUtils.isBlank("12345"));
    assertFalse(CommandsUtils.isBlank(" 12345 "));
  }
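
  // A hedged sketch assuming isBlank treats tabs and newlines like spaces
  // (Character.isWhitespace semantics, as commons-lang's StringUtils.isBlank
  // does); if the implementation only checks for the space character, these
  // assertions would need adjusting.
  @Test
  public void testIsBlankWithOtherWhitespace() {
    assertTrue(CommandsUtils.isBlank("\t"));
    assertTrue(CommandsUtils.isBlank("\n"));
  }
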
  @Test
  public void testRemoveProperties() {
    Properties properties = new Properties();
    properties.put("Cookie", "JSESSIONID=2AAF431F59ACEE1CC68B43C87772C54F");
    CommandsUtils.writeProperties(properties, Constants.PROPERTY_FILE);
    removeProperties(Constants.PROPERTY_FILE, "Cookie");
    properties = CommandsUtils.readProperties(Constants.PROPERTY_FILE);
    assertNotNull(properties);
    assertEquals(properties.getProperty("Cookie"), null);
  }
  private void removeProperties(String propertiesFilePath, String... propertyNames) {
    if (propertyNames.length > 0) {
      Properties properties = CommandsUtils.readProperties(propertiesFilePath);
      if (properties != null && !properties.isEmpty()) {
        for (String propertyName : propertyNames) {
          properties.remove(propertyName);
        }
        FileOutputStream fos = null;
        try {
          fos = new FileOutputStream(propertiesFilePath);
          properties.store(fos, "");
        } catch (IOException e) {
          // ignore: FileNotFoundException is an IOException, and the test's
          // assertions will surface a failed write
        } finally {
          if (fos != null) {
            try {
              fos.close();
            } catch (IOException e) {
              // ignore close failure
            }
          }
        }
      }
    }
  }
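
  // A minimal round-trip sketch using only the write/read pair exercised
  // above. It assumes writeProperties persists the given key into
  // Constants.PROPERTY_FILE and that readProperties returns what was stored.
  @Test
  public void testWriteAndReadPropertiesRoundTrip() {
    Properties written = new Properties();
    written.put("RoundTripKey", "RoundTripValue");
    CommandsUtils.writeProperties(written, Constants.PROPERTY_FILE);
    Properties read = CommandsUtils.readProperties(Constants.PROPERTY_FILE);
    assertNotNull(read);
    assertEquals(read.getProperty("RoundTripKey"), "RoundTripValue");
    // Clean up so later runs do not see the marker key.
    removeProperties(Constants.PROPERTY_FILE, "RoundTripKey");
  }
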
  @Test
  public void testDataFromFileWithUTF8() throws Exception {
    final String specFilePath = "src/test/resources/hadoop_cluster_cn.json";
    // The spec file contains Chinese node group names; if it is not read as
    // UTF-8, the name assertions below fail. Exceptions propagate and fail
    // the test instead of being swallowed.
    ClusterCreate clusterSpec =
        CommandsUtils.getObjectByJsonString(
            ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
    NodeGroupCreate[] nodeGroups = clusterSpec.getNodeGroups();
    assertEquals(nodeGroups.length, 3);
    assertEquals(nodeGroups[0].getName(), "主节点");
    assertEquals(nodeGroups[1].getName(), "协作节点");
    assertEquals(nodeGroups[2].getName(), "客户端");
  }
  @Test
  public void testGracefulRackTopologyOutput() throws Exception {
    String topologyFile = "src/test/resources/topology.data";
    Map<String, String> racksTopology = new HashMap<String, String>();
    racksTopology.put("192.168.0.1", "/rack1/host1");
    racksTopology.put("192.168.0.2", "/rack1/host2");
    racksTopology.put("192.168.0.3", "/rack1/host1");
    racksTopology.put("192.168.0.4", "/rack2/host3");
    CommandsUtils.gracefulRackTopologyOutput(racksTopology, topologyFile, ",");
    File f = new File(topologyFile);
    assertTrue(f.exists());
    try {
      String topologyInfo = CommandsUtils.dataFromFile(topologyFile);
      assertTrue(topologyInfo.contains("192.168.0.4"));
      assertTrue(topologyInfo.contains("/rack2/host3"));
      assertTrue(topologyInfo.contains("192.168.0.2 /rack1/host2"));
      assertEquals(topologyInfo.split(",").length, 4);
    } finally {
      // Remove the generated file even if an assertion fails.
      f.delete();
    }
  }
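
  // A hedged single-entry sketch: the test above implies each entry is
  // "<ip> <rack path>" and entries are joined by the supplied delimiter, so
  // a one-entry map should yield exactly one delimiter-free entry. The file
  // name topology_single.data is made up for this test.
  @Test
  public void testGracefulRackTopologyOutputSingleEntry() throws Exception {
    String topologyFile = "src/test/resources/topology_single.data";
    Map<String, String> rack = new HashMap<String, String>();
    rack.put("192.168.0.1", "/rack1/host1");
    CommandsUtils.gracefulRackTopologyOutput(rack, topologyFile, ",");
    File f = new File(topologyFile);
    assertTrue(f.exists());
    try {
      String topologyInfo = CommandsUtils.dataFromFile(topologyFile);
      assertTrue(topologyInfo.contains("192.168.0.1 /rack1/host1"));
      assertEquals(topologyInfo.split(",").length, 1);
    } finally {
      f.delete();
    }
  }
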
 @Test
 public void testPrettyJsonOutputWithUTF8() throws Exception {
   final String specFilePath = "src/test/resources/hadoop_cluster_cn.json";
   final String exportFilePath = "src/test/resources/hadoop_cluster_cn_export.json";
   ClusterCreate clusterSpec =
       CommandsUtils.getObjectByJsonString(
           ClusterCreate.class, CommandsUtils.dataFromFile(specFilePath));
   CommandsUtils.prettyJsonOutput(clusterSpec, exportFilePath);
   File exportFile = new File(exportFilePath);
   assertTrue(exportFile.exists());
   ClusterCreate exportClusterSpec =
       CommandsUtils.getObjectByJsonString(
           ClusterCreate.class, CommandsUtils.dataFromFile(exportFilePath));
   NodeGroupCreate[] nodeGroups = exportClusterSpec.getNodeGroups();
   assertEquals(nodeGroups.length, 3);
   assertEquals(nodeGroups[0].getName(), "主节点");
   assertEquals(nodeGroups[1].getName(), "协作节点");
   assertEquals(nodeGroups[2].getName(), "客户端");
   exportFile.delete();
 }
  @Test(enabled = false)
  public void testDataFromFile() throws Exception {
    // Disabled: depends on files that only exist on a developer's machine.
    String path01 = "C:/Users/weiw/aurora_bigdata/spec.txt";
    String path02 = "spec.txt";
    String path03 = "C:/Users/weiw/aurora_bigdata/spectt.txt";

    String test01 = CommandsUtils.dataFromFile(path01);
    System.out.println(test01);
    NodeGroupCreate[] nodeGroups01 =
        CommandsUtils.getObjectByJsonString(NodeGroupCreate[].class, test01);

    System.out.println("Test: " + nodeGroups01[0].getStorage().getSizeGB());
    assertEquals(nodeGroups01.length, 3);
    assertEquals(nodeGroups01[0].getName(), "master");
    assertEquals(nodeGroups01[0].getStorage().getDsNames().size(), 2);
    assertEquals(nodeGroups01[1].getName(), "worker");
    assertEquals(nodeGroups01[1].getStorage().getDsNames(), null);
    assertEquals(nodeGroups01[2].getName(), "client");

    // A bare relative path cannot be resolved, so dataFromFile must throw.
    try {
      CommandsUtils.dataFromFile(path02);
      throw new AssertionError("Expected an exception for nonexistent file " + path02);
    } catch (Exception e) {
      // expected
    }

    String test03 = CommandsUtils.dataFromFile(path03);
    System.out.println(test03);

    NodeGroupCreate[] nodeGroups03 =
        CommandsUtils.getObjectByJsonString(NodeGroupCreate[].class, test03);

    assertEquals(nodeGroups03.length, 3);
    assertEquals(nodeGroups03[0].getName(), "master");
    assertEquals(nodeGroups03[1].getName(), "worker");
    assertEquals(nodeGroups03[2].getName(), "client");
  }
  @Test
  public void testGetExceptionMessage() {
    try {
      int number = 5 / 0; // deliberately trigger an ArithmeticException
      System.out.println(number);
    } catch (ArithmeticException ae) {
      RuntimeException re = new RuntimeException(ae);
      // getExceptionMessage should strip the wrapped exception's class-name
      // prefix from the reported message.
      assertFalse(
          CommandsUtils.getExceptionMessage(re).contains("java.lang.ArithmeticException:"));
    }
  }
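
  // A hedged companion check: the test above verifies the class-name prefix
  // is stripped; this sketch additionally assumes the JVM's "/ by zero"
  // cause text survives in the reported message. If getExceptionMessage
  // rewrites the text entirely, drop this test.
  @Test
  public void testGetExceptionMessageKeepsCauseText() {
    try {
      int number = 5 / 0; // deliberately trigger an ArithmeticException
      System.out.println(number);
    } catch (ArithmeticException ae) {
      RuntimeException re = new RuntimeException(ae);
      assertTrue(CommandsUtils.getExceptionMessage(re).contains("/ by zero"));
    }
  }
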
 @Test
 @SuppressWarnings("unchecked")
 public void testGetObjectByJsonString()
     throws JsonParseException, JsonMappingException, IOException {
   StringBuilder jsonBuff = new StringBuilder();
   jsonBuff
       .append("{  ")
       .append(" \"nodeGroups\": [ ")
       .append("      {            ")
       .append("        \"name\": \"master\"  ,  ")
       .append("        \"roles\": [             ")
       .append("        \"hadoop_namenode\"   ,  ")
       .append("        \"hadoop_jobtracker\"   ")
       .append("         ],                      ")
       .append("        \"instanceNum\": 1,             ")
       .append("        \"cpuNum\": 2,                  ")
       .append("        \"memCapacityMB\":2048,         ")
       .append("        \"storage\": {                  ")
       .append("        \"type\": \"SHARED\",           ")
       .append("        \"sizeGB\": 10                  ")
       .append("         },                               ")
       .append("    \"configuration\": {            ")
       .append("       \"hadoop\": {                ")
       .append("           \"core-site.xml\" : {           ")
       .append("           \"fs.default.name\": \"hdfs://localhost:8020\" ")
       .append("        },                            ")
       .append("       \"hdfs-site.xml\" : {           ")
       .append("          \"dfs.replication\": 4          ")
       .append("       },                               ")
       .append("       \"mapred-site.xml\" : {         ")
       .append("          \"mapred.map.tasks\": 5          ")
       .append("      },                             ")
       .append("      \"hadoop-env.sh\" : {           ")
       .append("         \"JAVA_HOME\": \"/path/to/javahome\"              ")
       .append("      },                              ")
       .append("     \"log4j.properties\" : {        ")
       .append("       \"hadoop.root.logger\": \"DEBUG,console\" ")
       .append("      }                                          ")
       .append("    }                                          ")
       .append("  }                                          ")
       .append("}, ")
       .append("{")
       .append("      \"name\": \"worker\",  ")
       .append("      \"roles\": [           ")
       .append("          \"hadoop_datanode\",   ")
       .append("          \"hadoop_tasktracker\" ")
       .append("       ], ")
       .append("      \"instanceNum\": 3, ")
       .append("      \"cpuNum\": 2, ")
       .append("      \"memCapacityMB\":2048, ")
       .append("      \"storage\": {          ")
       .append("      \"type\": \"SHARED\",   ")
       .append("      \"sizeGB\": 10          ")
       .append("     }                        ")
       .append("   }                          ")
       .append("], ")
       .append(" \"configuration\": {   ")
       .append(" \"hadoop\": {          ")
       .append(" \"core-site.xml\" : {  ")
       .append(" \"fs.default.name\": \"hdfs://fqdn_or_ip:8020\",")
       .append(" \"dfs.data.dir\":\"/data/\", ")
       .append(" \"dfs.http.address\":\"localhost\" ")
       .append("}, ")
       .append(" \"hdfs-site.xml\" : {  ")
       .append(" \"dfs.repliation\": 2   ")
       .append("}, ")
       .append(" \"mapred-site.xml\" : { ")
       .append(" \"mapred.map.tasks\": 3 ")
       .append(" }, ")
       .append(" \"hadoop-env.sh\" : {   ")
       .append(" \"JAVA_HOME\": \"/path/to/javahome\" ")
       .append(" }, ")
       .append("\"log4j.properties\" : {              ")
       .append("\"hadoop.root.logger\": \"DEBUG,console\" ")
       .append("  } ")
       .append("}  ")
       .append("} ")
       .append("}");
   ClusterCreate clusterCreate =
       CommandsUtils.getObjectByJsonString(ClusterCreate.class, jsonBuff.toString());
   assertNotNull(clusterCreate);
   Map<String, Object> hadoopConfig =
       (Map<String, Object>) clusterCreate.getConfiguration().get("hadoop");
   Map<String, Object> coreSiteConfig = (Map<String, Object>) hadoopConfig.get("core-site.xml");
   assertEquals(coreSiteConfig.get("fs.default.name"), "hdfs://fqdn_or_ip:8020");
 }
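
  // A hedged negative-path sketch: getObjectByJsonString declares Jackson's
  // JsonParseException and JsonMappingException, so malformed JSON is
  // assumed to throw rather than return null; expectedExceptions is kept
  // broad in case a different IOException subclass is raised.
  @Test(expectedExceptions = Exception.class)
  public void testGetObjectByJsonStringWithMalformedJson() throws Exception {
    CommandsUtils.getObjectByJsonString(ClusterCreate.class, "{ \"nodeGroups\": [ ");
  }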