Example #1
  @Override
  public CompoundStatus checkTargetTTsSuccess(
      String opType, String[] affectedTTs, int totalTargetEnabled, HadoopCluster cluster) {
    CompoundStatus status = new CompoundStatus("checkTargetTTsSuccess");

    String scriptFileName = CHECK_SCRIPT_FILE_NAME;
    String scriptRemoteFilePath = DEFAULT_SCRIPT_DEST_PATH + scriptFileName;
    String listRemoteFilePath = null;
    String opDesc = "checkTargetTTsSuccess";

    _log.log(Level.INFO, "AffectedTTs:");
    for (String tt : affectedTTs) {
      _log.log(Level.INFO, tt);
    }
    HadoopConnection connection = getConnectionForCluster(cluster);
    setErrorParamsForCommand(opDesc, scriptRemoteFilePath, listRemoteFilePath);

    int rc = -1;
    int iterations = 0;
    do {
      if (iterations > 0) {
        _log.log(Level.INFO, "Target TTs not yet achieved...checking again - " + iterations);
      }

      ByteArrayOutputStream out = new ByteArrayOutputStream();
      rc =
          executeScriptWithCopyRetryOnFailure(
              connection,
              scriptFileName,
              new String[] {"" + totalTargetEnabled, connection.getHadoopHome()},
              out);
      try {
        out.flush();
      } catch (IOException e) {
        String errorMsg = "Unexpected exception in SSH OutputStream ";
        _log.log(Level.WARNING, errorMsg, e);
        status.registerTaskFailed(false, errorMsg + e.getMessage());
      }

      // _log.log(Level.INFO, "Output from SSH script execution:\n"+out.toString());

      /* Convert to String array and "nullify" last element (which happens to be "@@@..." or empty line) */
      String[] allActiveTTs = out.toString().split("\n");
      allActiveTTs[allActiveTTs.length - 1] = null;

      if (checkOpSuccess(opType, affectedTTs, allActiveTTs)) {
        _log.log(Level.INFO, "All selected TTs correctly %sed", opType.toLowerCase());
        rc = SUCCESS;
        break;
      }
      // Note: ByteArrayOutputStream.close() is a no-op, so no explicit close is needed here.

    } while ((rc == ERROR_FEWER_TTS || rc == ERROR_EXCESS_TTS)
        && (++iterations <= MAX_CHECK_RETRY_ITERATIONS));

    status.addStatus(_errorCodes.interpretErrorCode(_log, rc, _errorParamValues));
    return status;
  }
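
The retry loop above delegates the actual comparison to a checkOpSuccess helper whose body is not shown in this example. A minimal sketch of what it might look like, assuming opType is either "Recommission" or "Decommission" and that java.util.Set and java.util.HashSet are imported (both are assumptions, not taken from the original source):

  /*
   * Hypothetical sketch of checkOpSuccess, not the project's actual code.
   * Assumes a recommission succeeds when every affected TT appears in the
   * active list, and a decommission succeeds when none of them do.
   */
  private boolean checkOpSuccess(String opType, String[] affectedTTs, String[] allActiveTTs) {
    Set<String> active = new HashSet<String>();
    for (String tt : allActiveTTs) {
      if (tt != null) {
        active.add(tt.trim()); // skip the nullified last element
      }
    }
    boolean shouldBeActive = "Recommission".equalsIgnoreCase(opType);
    for (String tt : affectedTTs) {
      if (active.contains(tt) != shouldBeActive) {
        return false;
      }
    }
    return true;
  }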
Example #2
  private CompoundStatus decomRecomTTs(
      String opDesc,
      String[] tts,
      HadoopCluster cluster,
      String scriptFileName,
      String listFileName) {
    CompoundStatus status = new CompoundStatus("decomRecomTTs");

    if (!isValidTTList(tts)) {
      String errorMsg = opDesc + " failed due to bad TT list";
      _log.log(Level.SEVERE, errorMsg);
      status.registerTaskFailed(false, errorMsg);
      return status;
    }

    String scriptRemoteFilePath = DEFAULT_SCRIPT_DEST_PATH + scriptFileName;
    String listRemoteFilePath = DEFAULT_SCRIPT_DEST_PATH + listFileName;

    HadoopConnection connection = getConnectionForCluster(cluster);
    setErrorParamsForCommand(opDesc.toLowerCase(), scriptRemoteFilePath, listRemoteFilePath);

    OutputStream out = new ByteArrayOutputStream();
    String operationList = createVMList(tts);
    int rc =
        connection.copyDataToJobTracker(
            operationList.getBytes(), DEFAULT_SCRIPT_DEST_PATH, listFileName, false);
    if (rc == 0) {
      rc =
          executeScriptWithCopyRetryOnFailure(
              connection,
              scriptFileName,
              new String[] {
                listRemoteFilePath, connection.getExcludeFilePath(), connection.getHadoopHome()
              },
              out);
    }
    status.addStatus(_errorCodes.interpretErrorCode(_log, rc, _errorParamValues));
    return status;
  }
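
The createVMList helper used above is also not shown. Since the resulting list file is copied to the JobTracker and consumed by a shell script, a plausible sketch is a newline-joined list of host names; the one-host-per-line format is an assumption, not the project's actual code:

  /*
   * Hypothetical sketch of createVMList: newline-join the TT names so the
   * remote script can read one host name per line.
   */
  private String createVMList(String[] tts) {
    StringBuilder sb = new StringBuilder();
    for (String tt : tts) {
      sb.append(tt).append('\n');
    }
    return sb.toString();
  }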
Example #3
  private int executeScriptWithCopyRetryOnFailure(
      HadoopConnection connection, String scriptFileName, String[] scriptArgs, OutputStream out) {
    int rc = -1;
    for (int i = 0; i < 2; i++) {
      rc = connection.executeScript(scriptFileName, DEFAULT_SCRIPT_DEST_PATH, scriptArgs, out);
      if (i == 0 && (rc == ERROR_COMMAND_NOT_FOUND || rc == ERROR_CATCHALL)) {
        _log.log(Level.INFO, scriptFileName + " not found...");
        // Changed this to accommodate using jar file...
        // String fullLocalPath =
        // HadoopAdaptor.class.getClassLoader().getResource(scriptFileName).getPath();
        // byte[] scriptData = loadLocalScript(DEFAULT_SCRIPT_SRC_PATH + scriptFileName);
        // byte[] scriptData = loadLocalScript(fullLocalPath);
        byte[] scriptData = loadLocalScript(scriptFileName);
        if ((scriptData != null)
            && (connection.copyDataToJobTracker(
                    scriptData, DEFAULT_SCRIPT_DEST_PATH, scriptFileName, true)
                == 0)) {
          continue;
        }
      }
      break;
    }
    return rc;
  }
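
The commented-out history above notes that script loading was changed to work from inside a jar. A sketch of a loadLocalScript that does this by reading from the classpath, assuming java.io.InputStream and java.io.IOException are imported; the real method body is not shown in this example:

  /*
   * Hypothetical sketch of loadLocalScript: reads the script as a classpath
   * resource, which works whether the class is on disk or inside a jar.
   */
  private byte[] loadLocalScript(String scriptFileName) {
    InputStream in = HadoopAdaptor.class.getClassLoader().getResourceAsStream(scriptFileName);
    if (in == null) {
      _log.log(Level.SEVERE, "Script " + scriptFileName + " not found on classpath");
      return null;
    }
    try {
      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
      byte[] chunk = new byte[4096];
      int n;
      while ((n = in.read(chunk)) != -1) {
        buffer.write(chunk, 0, n);
      }
      return buffer.toByteArray();
    } catch (IOException e) {
      _log.log(Level.SEVERE, "Failed to read script " + scriptFileName, e);
      return null;
    } finally {
      try {
        in.close();
      } catch (IOException ignored) {
      }
    }
  }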
Example #4
  private HadoopConnection getConnectionForCluster(HadoopCluster cluster) {
    HadoopConnection result = _connections.get(cluster.getClusterName());
    if (result == null) {
      /* TODO: SshUtils could be a single shared thread-safe object or non threadsafe object per connection */
      result = new HadoopConnection(cluster, _connectionProperties, new NonThreadSafeSshUtils());
      result.setHadoopCredentials(_credentials);
      result.setHadoopExcludeTTPath(_jtConfig.getExcludeTTPath());
      result.setHadoopHomePath(_jtConfig.getHadoopHomePath());
      _connections.put(cluster.getClusterName(), result);
    }
    setErrorParamValue(ParamTypes.HADOOP_HOME, result.getHadoopHome());
    setErrorParamValue(ParamTypes.JOBTRACKER, result.getJobTrackerName());
    setErrorParamValue(ParamTypes.EXCLUDE_FILE, result.getExcludeFilePath());
    return result;
  }
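
Note that the get-then-put on _connections above is not atomic: two threads asking for the same cluster concurrently could each build a HadoopConnection. If that matters, a variant using putIfAbsent closes the race; this sketch assumes _connections can be declared as a java.util.concurrent.ConcurrentHashMap, which is an assumption about the field, not its original declaration:

  /*
   * Hypothetical thread-safe variant: assumes _connections is a
   * ConcurrentHashMap<String, HadoopConnection>.
   */
  private HadoopConnection getConnectionForCluster(HadoopCluster cluster) {
    HadoopConnection result = _connections.get(cluster.getClusterName());
    if (result == null) {
      HadoopConnection fresh =
          new HadoopConnection(cluster, _connectionProperties, new NonThreadSafeSshUtils());
      fresh.setHadoopCredentials(_credentials);
      fresh.setHadoopExcludeTTPath(_jtConfig.getExcludeTTPath());
      fresh.setHadoopHomePath(_jtConfig.getHadoopHomePath());
      // putIfAbsent keeps the first connection if another thread raced us
      HadoopConnection existing = _connections.putIfAbsent(cluster.getClusterName(), fresh);
      result = (existing != null) ? existing : fresh;
    }
    setErrorParamValue(ParamTypes.HADOOP_HOME, result.getHadoopHome());
    setErrorParamValue(ParamTypes.JOBTRACKER, result.getJobTrackerName());
    setErrorParamValue(ParamTypes.EXCLUDE_FILE, result.getExcludeFilePath());
    return result;
  }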