/**
 * Closes an SSH connection and removes it from the map
 *
 * @param id Managed system ID to which the SSHAgent is connected
 */
 public void removeSSH(String id) {
   SSHAgent agent = connections.get(id);
   if (agent == null) {
     return; // no connection registered under this ID — nothing to close
   }
   agent.logout();
   connections.remove(id);
 }
  /**
   * Creates a new SSHAgent, connects it, and registers it in the connection map.
   *
   * @param id Managed system ID to associate with the new connection
   * @param host host name to connect to
   * @param port SSH port
   * @param username login user name
   * @param f private key file used for authentication
   * @return the connected SSHAgent, or null if the connection cannot be formed
   */
  public SSHAgent addSSH(String id, String host, Integer port, String username, File f) {
    log.debug("Creating new SSH connection for ID:" + id);
    SSHAgent ssh = new SSHAgent(host, port, username, f);
    try {
      if (!ssh.connect()) {
        // connect() reported failure without throwing: do not cache a dead agent.
        // (Previously a null value was still put into the map under this ID.)
        return null;
      }
      // Only a successfully-connected agent is stored for later getSSH(id) lookups.
      connections.put(id, ssh);
      return ssh;
    } catch (SSHException e) {
      // Keep the full stack trace rather than only the message.
      log.error(e.getMessage(), e);
      return null;
    }
  }
  /**
   * Fetch an SSHAgent associated with the given ID
   *
   * <p>If the cached agent's authentication has lapsed, a reconnect is attempted before returning.
   *
   * @param id Managed system ID to which the SSHAgent is connected
   * @return Null if the agent does not exist or the connection cannot be formed
   */
  public SSHAgent getSSH(String id) {
    SSHAgent ssh = connections.get(id);
    if (ssh == null) {
      return null;
    }

    log.debug("Using previously-opened connection: " + id);

    if (!ssh.isAuthenticationComplete()) {
      try {
        if (!ssh.connect()) {
          ssh = null;
        }
      } catch (SSHException e) {
        // Keep the full stack trace rather than only the message.
        log.error(e.getMessage(), e);
        // Per the contract above, a failed reconnect must yield null; previously the
        // stale, unauthenticated agent was returned when connect() threw.
        ssh = null;
      }
    }

    return ssh;
  }
// Beispiel #4 (0) — snippet-separator artifact from the original source listing
  /**
   * Downloads and unpacks the FreeEed software on the job tracker node, then writes an
   * S3-flavored settings file locally for later transfer to the cluster.
   *
   * @throws Exception if remote command execution fails
   */
  private void installFreeEed() throws Exception {
    String downloadUrl = Settings.getSettings().getDownloadLink();
    logger.info("Installing FreeEed software from " + downloadUrl);

    // NOTE(review): the archive password is hard-coded below; consider moving it to configuration.
    StringBuilder install = new StringBuilder();
    install
        .append("rm FreeEed.zip; ")
        .append("wget ")
        .append(downloadUrl)
        .append(" -O FreeEed.zip --no-check-certificate; ")
        .append("rm -fr FreeEed; ")
        .append("unzip -P 4ushH7XZT1 FreeEed.zip");

    SSHAgent agent = new SSHAgent();
    agent.setUser(ParameterProcessing.CLUSTER_USER_NAME);
    agent.setKey(ParameterProcessing.PEM_CERTIFICATE_NAME);
    agent.setHost(cluster.getJobTracker().getDnsName());
    agent.executeCommand(install.toString());
    logger.info("Successfully installed FreeEed");

    // copy the settings to jobtracker
    Server jobTracker = cluster.getJobTracker();
    agent.setHost(jobTracker.getDnsName());

    Settings cloneForS3 = Settings.getSettings().cloneForS3();
    String settingsFileToUse = "settings.properties.s3";
    Util.writeTextFile(settingsFileToUse, cloneForS3.toString());

    logger.info("Copying settings file: {}", settingsFileToUse);
    // TODO change passing the settings to the cloud
    // sshAgent.scpTo(settingsFileToUse, "FreeEed/" + ParameterProcessing.DEFAULT_SETTINGS);
  }
// Beispiel #5 (0) — snippet-separator artifact from the original source listing
  /**
   * Runs a small distributed grep job on the cluster to confirm Hadoop is operational.
   * Sets {@code hadoopReady} (and marks the cluster ready to use) when the job output
   * directory listing contains a _SUCCESS marker.
   *
   * @throws Exception if remote command execution fails
   */
  private void verifyOperation() throws Exception {
    hadoopReady = false;

    SSHAgent agent = new SSHAgent();
    agent.setUser(ParameterProcessing.CLUSTER_USER_NAME);
    agent.setKey(ParameterProcessing.PEM_CERTIFICATE_NAME);
    agent.setHost(cluster.getJobTracker().getDnsName());
    logger.info("Cluster testing and verification started");

    // Stage some input files into HDFS for the sample job.
    agent.executeCommand("hadoop fs -mkdir /test");
    agent.executeCommand("hadoop fs -copyFromLocal *.xml /test/");

    // Run the stock grep example as an end-to-end smoke test.
    String[] grepOutput =
        agent.executeCommand(
            "hadoop jar /usr/lib/hadoop/hadoop-0.20.2-cdh*-examples.jar grep /test /test-output 'dfs[a-z.]+'");
    logger.info(Util.arrayToString(grepOutput));

    String[] listing = agent.executeCommand("hadoop fs -ls /test-output");
    logger.info(Util.arrayToString(listing));
    logger.info("Cluster testing and verification is complete");

    boolean jobSucceeded = false;
    for (String line : listing) {
      if (line.contains("_SUCCESS")) {
        jobSucceeded = true;
        cluster.setReadyToUse(true);
        break;
      }
    }
    hadoopReady = jobSucceeded;
  }
// Beispiel #6 (0) — snippet-separator artifact from the original source listing
  /**
   * Writes Hadoop config files locally, pushes them to every cluster node, wipes old state,
   * restarts HDFS/MapReduce services in order (namenode, datanodes, tasktrackers, jobtracker),
   * installs FreeEed, and verifies the cluster with a sample job.
   *
   * <p>NOTE(review): steps are strictly order-dependent (format before namenode start, namenode
   * before datanodes, etc.) — do not reorder.
   *
   * @throws Exception if any file write or remote command fails
   */
  private void setupAndStartCluster() throws Exception {
    // form config files
    // "masters" file: one line, the master's private DNS name.
    String masters = cluster.getMaster().getPrivateDnsName() + "\n";
    Files.write(masters.getBytes(), new File(mastersFile));

    // "slaves" file: private DNS names of all task-tracker nodes.
    List<String> slavesList = new ArrayList<String>();
    for (int i = 0; i < cluster.size(); ++i) {
      Server server = cluster.get(i);
      if (server.isTaskTracker()) {
        slavesList.add(server.getPrivateDnsName());
      }
    }
    String[] slaves = (String[]) slavesList.toArray(new String[0]);
    Files.write(Util.arrayToString(slaves).getBytes(), new File(slavesFile));

    // Template config files ship with "localhost"; point them at the real nodes.
    // NOTE(review): replaceFirst treats the argument as a regex — fine for the literal
    // "localhost", and only the first occurrence is rewritten.
    String coreSite = Util.readTextFile("config/" + coreSiteFile);
    coreSite = coreSite.replaceFirst("localhost", cluster.getMaster().getPrivateDnsName());
    Files.write(coreSite.getBytes(), new File(coreSiteFile));

    String mapredSite = Util.readTextFile("config/" + mapredSiteFile);
    mapredSite = mapredSite.replaceFirst("localhost", cluster.getJobTracker().getPrivateDnsName());
    Files.write(mapredSite.getBytes(), new File(mapredSiteFile));

    String cmd;

    String[] output;
    // push config files to the cluster
    logger.info("Configuring the Hadoop cluster");
    ClusterCommand clusterCommand = new ClusterCommand(cluster);
    clusterCommand.runScpWaitForAll(mastersFile, mastersFile);
    clusterCommand.runScpWaitForAll(slavesFile, slavesFile);
    clusterCommand.runScpWaitForAll("config/" + hdfsSiteFile, hdfsSiteFile);
    clusterCommand.runScpWaitForAll(coreSiteFile, coreSiteFile);
    clusterCommand.runScpWaitForAll(mapredSiteFile, mapredSiteFile);
    // copy from home on remote to the config area
    clusterCommand.runCommandWaitForAll("sudo cp " + mastersFile + " /etc/hadoop/conf/");
    clusterCommand.runCommandWaitForAll("sudo cp " + slavesFile + " /etc/hadoop/conf/");
    clusterCommand.runCommandWaitForAll("sudo cp " + hdfsSiteFile + " /etc/hadoop/conf/");
    clusterCommand.runCommandWaitForAll("sudo cp " + coreSiteFile + " /etc/hadoop/conf/");
    clusterCommand.runCommandWaitForAll("sudo cp " + mapredSiteFile + " /etc/hadoop/conf/");
    // create /mnt/tmp for everyone to use
    clusterCommand.runCommandWaitForAll("sudo rm -fr /mnt/tmp");
    clusterCommand.runCommandWaitForAll("sudo mkdir /mnt/tmp");
    clusterCommand.runCommandWaitForAll("sudo chmod 777 /mnt/tmp");
    // create /mnt/tmp for hadoop tmp dir
    clusterCommand.runCommandWaitForAll("sudo mkdir /mnt/tmp/hadoop");
    clusterCommand.runCommandWaitForAll("sudo chmod 777 /mnt/tmp/hadoop");

    logger.info("Hadoop cluster configured, starting the services");
    // shut down all services
    // clean up dfs on slaves
    hadoopReady = false;
    cmd = "for service in /etc/init.d/hadoop-0.20-*; do sudo $service stop; done";
    clusterCommand.runCommandWaitForAll(cmd);
    cmd = "sudo rm -fr /var/lib/hadoop-0.20/cache/*";
    clusterCommand.runCommandWaitForAll(cmd);

    // Per-node commands from here on: talk to the master directly over SSH.
    SSHAgent sshAgent = new SSHAgent();
    sshAgent.setUser(ParameterProcessing.CLUSTER_USER_NAME);
    sshAgent.setKey(ParameterProcessing.PEM_CERTIFICATE_NAME);
    sshAgent.setHost(cluster.getMaster().getDnsName());

    // Fresh HDFS: format the namenode before starting it.
    cmd = "sudo -u hdfs hadoop namenode -format";
    sshAgent.executeCommand(cmd);

    cmd = "sudo service hadoop-0.20-namenode start";
    output = sshAgent.executeCommand(cmd);
    logger.info(Util.arrayToString(output));

    // start all hdfs slaves
    clusterCommand = new ClusterCommand(cluster.getDataNodes());
    cmd = "sudo service hadoop-0.20-datanode start";
    clusterCommand.runCommandWaitForAll(cmd);
    // start all tasktrackers
    clusterCommand = new ClusterCommand(cluster.getTaskTrackers());
    cmd = "sudo service hadoop-0.20-tasktracker start";
    clusterCommand.runCommandWaitForAll(cmd);

    // Jobtracker may live on a different host than the master; retarget the agent.
    sshAgent.setHost(cluster.getJobTracker().getDnsName());
    cmd = "sudo service hadoop-0.20-jobtracker start";
    output = sshAgent.executeCommand(cmd);
    logger.info(Util.arrayToString(output));
    logger.info("Cluster configuration and startup is complete");

    // NOTE(review): jets3t jar is removed cluster-wide — presumably to avoid an S3 library
    // version conflict with FreeEed; confirm before changing.
    cmd = "sudo rm /usr/lib/hadoop/lib/jets3t*.jar";
    clusterCommand = new ClusterCommand(cluster);
    clusterCommand.runCommandWaitForAll(cmd);
    // install a fresh version of FreeEed
    installFreeEed();
    // run a distributed grep app
    verifyOperation();
    if (callingUI != null) {
      callingUI.refreshStatus();
    }
  }