Code example #1
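createMapredSiteXml materializes mapred-site.xml in $HADOOP_CONF_DIR by moving the shipped mapred-site.xml.template into place over the node's connection; a non-zero exit code is logged as a per-host error and the method returns false.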
  private boolean createMapredSiteXml(NodeConfig nodeConfig) {
    try {
      String mapredsite =
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.XML_MAPRED_SITE;

      String mapredsiteTemplate =
          HadoopUtils.getHadoopConfDir(this.compConfig) + "mapred-site.xml.template";

      AnkushTask moveFile = new Move(mapredsiteTemplate, mapredsite);
      Result res = nodeConfig.getConnection().exec(moveFile);
      if (res.rc != 0) {
        HadoopUtils.addAndLogError(
            LOG,
            clusterConfig,
            "Could not create Hadoop mapred-site.xml file",
            Constant.Component.Name.HADOOP,
            nodeConfig.getHost());
        return false;
      }
      return true;
    } catch (Exception e) {
      HadoopUtils.addAndLogError(
          LOG,
          clusterConfig,
          "Could not create Hadoop mapred-site.xml file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost(),
          e);
      return false;
    }
  }
Code example #2
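configureCoreSiteXml populates core-site.xml with the two properties the deployer relies on: fs.defaultFS (the HDFS URI) and hadoop.tmp.dir (read from the component's advanced configuration). Each property is appended with HadoopUtils.addPropertyToFile, and the method stops at the first failure.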
  /*
   * (non-Javadoc)
   *
   * @see com.impetus.ankush2.hadoop.deployer.configurator.HadoopConfigurator#
   * configureCoreSiteXml(com.impetus.ankush2.framework.config.NodeConfig)
   */
  @Override
  protected boolean configureCoreSiteXml(NodeConfig nodeConfig) {
    boolean status = true;
    try {
      // configuring core-site.xml file in $HADOOP_CONF_DIR
      String coreSiteXmlPath =
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.XML_CORE_SITE;

      LOG.info(
          "Configuring " + HadoopConstants.FileName.ConfigurationFile.XML_CORE_SITE + " file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost());

      String valFsDefaultFS = HadoopUtils.getHdfsUri(compConfig);
      Map<String, String> paramList = new HashMap<String, String>();
      paramList.put("fs.defaultFS", valFsDefaultFS);
      paramList.put(
          "hadoop.tmp.dir",
          (String)
              this.compConfig.getAdvanceConfProperty(
                  HadoopConstants.AdvanceConfKeys.TMP_DIR_HADOOP));
      // Constant.URI_FILE_PREFIX + conf.getHadoopTmpDir());

      for (String propertyName : paramList.keySet()) {
        status =
            HadoopUtils.addPropertyToFile(
                clusterConfig,
                nodeConfig,
                coreSiteXmlPath,
                propertyName,
                paramList.get(propertyName));

        if (!status) {
          HadoopUtils.addAndLogError(
              LOG,
              clusterConfig,
              "Could not add " + propertyName + " property to " + coreSiteXmlPath + " file",
              Constant.Component.Name.HADOOP,
              nodeConfig.getHost());
          return false;
        }
      }
      return true;
    } catch (Exception e) {
      HadoopUtils.addAndLogError(
          LOG,
          clusterConfig,
          "Could not update Hadoop core-site.xml file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost(),
          e);
      return false;
    }
  }
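For context, a Hadoop site file is a flat XML list of name/value properties, so a successful run leaves core-site.xml with entries of this shape (the host, port, and path below are illustrative placeholders, not values from this listing):

  <configuration>
    <property>
      <name>fs.defaultFS</name>
      <value>hdfs://namenode.example.com:9000</value>
    </property>
    <property>
      <name>hadoop.tmp.dir</name>
      <value>/var/ankush/hadoop-tmp</value>
    </property>
  </configuration>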
Code example #3
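setJavaInHadoop fixes up JAVA_HOME in hadoop-env.sh, yarn-env.sh, and mapred-env.sh. For each script it reads the target text to replace from the Ankush configuration reader and delegates the in-place edit to setJavaInHadoopEnvScript.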
  private boolean setJavaInHadoop(NodeConfig nodeConfig) {
    try {
      LOG.info(
          "Configuring Java in Hadoop environment scripts",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost());

      LinkedHashMap<String, String> scriptTargetTextMap = new LinkedHashMap<String, String>();
      scriptTargetTextMap.put(
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.ENV_HADOOP,
          AppStoreWrapper.getAnkushConfReader()
              .getStringValue("hadoop2.javahome.hadoopenv.targettext"));
      scriptTargetTextMap.put(
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.ENV_YARN,
          AppStoreWrapper.getAnkushConfReader()
              .getStringValue("hadoop2.javahome.yarnenv.targettext"));

      // NOTE: the mapred-env.sh entry below reuses the yarn-env target text
      // ("hadoop2.javahome.yarnenv.targettext"); no mapred-specific key is used here.
      scriptTargetTextMap.put(
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.ENV_MAPRED,
          AppStoreWrapper.getAnkushConfReader()
              .getStringValue("hadoop2.javahome.yarnenv.targettext"));

      for (String hadoopEnvScript : scriptTargetTextMap.keySet()) {
        if (!this.setJavaInHadoopEnvScript(
            nodeConfig, hadoopEnvScript, scriptTargetTextMap.get(hadoopEnvScript))) {
          return false;
        }
      }
      return true;
    } catch (Exception e) {
      String errMsg = "Could not update Java in Hadoop environment scripts";
      HadoopUtils.addAndLogError(
          LOG, clusterConfig, errMsg, Constant.Component.Name.HADOOP, nodeConfig.getHost(), e);
      return false;
    }
  }
Code example #4
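configureHadoopEnvScripts appends the exports returned by getBashrcContents to the same three environment scripts via AppendFileUsingEcho, presumably so the variables are visible to daemons that are not started from a login shell.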
  private boolean configureHadoopEnvScripts(NodeConfig nodeConfig) {
    Result res = null;
    try {
      // Set Environment Variables in Hadoop Environment Script Files
      String envVariables = this.getBashrcContents();
      List<String> envFileList = new ArrayList<String>();
      envFileList.add(
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.ENV_HADOOP);
      envFileList.add(
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.ENV_YARN);
      envFileList.add(
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.ENV_MAPRED);

      for (String fileName : envFileList) {
        LOG.info(
            "Configuring variables in " + fileName + " file",
            Constant.Component.Name.HADOOP,
            nodeConfig.getHost());
        AnkushTask updateEnvFile = new AppendFileUsingEcho(envVariables, fileName);
        res = nodeConfig.getConnection().exec(updateEnvFile);
        if (res.rc != 0) {
          String errMsg = "Could not add environment variables for Hadoop in " + fileName + " file";
          HadoopUtils.addAndLogError(
              LOG, clusterConfig, errMsg, Constant.Component.Name.HADOOP, nodeConfig.getHost());
          return false;
        }
      }
      return true;
    } catch (Exception e) {
      String errMsg = "Could not update Hadoop environment scripts";
      HadoopUtils.addAndLogError(
          LOG, clusterConfig, errMsg, Constant.Component.Name.HADOOP, nodeConfig.getHost(), e);
      return false;
    }
  }
Code example #5
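configureMapredSiteXml first creates mapred-site.xml from its template (see code example #1), then sets mapreduce.framework.name (defaulting to YARN), mapreduce.cluster.temp.dir, and, when the Job History Server is enabled, mapreduce.jobhistory.address.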
  /*
   * (non-Javadoc)
   *
   * @see com.impetus.ankush2.hadoop.deployer.configurator.HadoopConfigurator#
   * configureMapredSiteXml(com.impetus.ankush2.framework.config.NodeConfig)
   */
  @Override
  protected boolean configureMapredSiteXml(NodeConfig nodeConfig) {
    try {
      if (!createMapredSiteXml(nodeConfig)) {
        return false;
      }
      String mapredSiteXmlPath =
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.XML_MAPRED_SITE;

      LOG.info(
          "Configuring " + HadoopConstants.FileName.ConfigurationFile.XML_MAPRED_SITE + " file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost());

      Map<String, String> paramList = new HashMap<String, String>();
      String mapreduceFramework =
          (String)
              this.compConfig.getAdvanceConfProperty(
                  HadoopConstants.AdvanceConfKeys.MAPREDUCE_FRAMEWORK);
      if (mapreduceFramework == null) {
        mapreduceFramework = Hadoop2Configurator.DEFAULT_MAPREDUCE_FRAMEWORK_YARN;
      }

      paramList.put("mapreduce.framework.name", mapreduceFramework);

      paramList.put(
          "mapreduce.cluster.temp.dir",
          HadoopConstants.URI_FILE_PREFIX
              + (String)
                  this.compConfig.getAdvanceConfProperty(
                      HadoopConstants.AdvanceConfKeys.TMP_DIR_MAPRED));
      boolean jobHistoryServerEnabled =
          (Boolean)
              this.compConfig.getAdvanceConfProperty(
                  HadoopConstants.AdvanceConfKeys.JOBHISTORYSERVER_ENABLED);

      if (jobHistoryServerEnabled) {
        paramList.put(
            "mapreduce.jobhistory.address",
            HadoopUtils.getJobHistoryServerHost(this.compConfig)
                + ":"
                + Hadoop2Configurator.DEFAULT_PORT_REST_JOBHISTORYSERVER);
      }

      for (String propertyName : paramList.keySet()) {
        if (!HadoopUtils.addPropertyToFile(
            clusterConfig,
            nodeConfig,
            mapredSiteXmlPath,
            propertyName,
            paramList.get(propertyName))) {
          HadoopUtils.addAndLogError(
              LOG,
              clusterConfig,
              "Could not add " + propertyName + " property to " + mapredSiteXmlPath + " file",
              Constant.Component.Name.HADOOP,
              nodeConfig.getHost());
          return false;
        }
      }
      return true;
    } catch (Exception e) {
      HadoopUtils.addAndLogError(
          LOG,
          clusterConfig,
          "Could not update Hadoop mapred-site.xml  file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost(),
          e);
      return false;
    }
  }
Code example #6
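configureHdfsSiteXml is the most involved configurator. It always writes the replication factor, the permissions flag, and the NameNode/DataNode directories; with HA enabled it additionally writes the nameservice and NameNode IDs, per-NameNode RPC and HTTP addresses, the qjournal shared-edits URI, the JournalNode edits directory, the failover proxy provider, a no-op shell fencing method, and, when automatic failover is on, the ZooKeeper quorum. Without HA it points dfs.namenode.secondary.http-address at the Secondary NameNode, if one is configured.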
  /*
   * (non-Javadoc)
   *
   * @see com.impetus.ankush2.hadoop.deployer.configurator.HadoopConfigurator#
   * configureHdfsSiteXml(com.impetus.ankush2.framework.config.NodeConfig)
   */
  @Override
  protected boolean configureHdfsSiteXml(NodeConfig nodeConfig) {
    boolean status = true;
    try {
      String hdfsSiteXmlPath =
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.XML_HDFS_SITE;

      LOG.info(
          "Configuring " + HadoopConstants.FileName.ConfigurationFile.XML_HDFS_SITE + " file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost());

      Map<String, String> paramList = new HashMap<String, String>();
      paramList.put(
          "dfs.replication",
          (String)
              this.compConfig.getAdvanceConfProperty(
                  HadoopConstants.AdvanceConfKeys.DFS_REPLICATION_FACTOR));
      paramList.put("dfs.permissions.enabled", "true");
      paramList.put(
          "dfs.namenode.name.dir",
          HadoopConstants.URI_FILE_PREFIX
              + (String)
                  this.compConfig.getAdvanceConfProperty(
                      HadoopConstants.AdvanceConfKeys.DFS_NAME_DIR));
      paramList.put(
          "dfs.datanode.data.dir",
          HadoopConstants.URI_FILE_PREFIX
              + (String)
                  this.compConfig.getAdvanceConfProperty(
                      HadoopConstants.AdvanceConfKeys.DFS_DATA_DIR));

      boolean isHaEnabled =
          (Boolean)
              this.compConfig.getAdvanceConfProperty(HadoopConstants.AdvanceConfKeys.HA_ENABLED);

      if (isHaEnabled) {
        String nameserviceId =
            (String)
                this.compConfig.getAdvanceConfProperty(
                    HadoopConstants.AdvanceConfKeys.HA_NAMESERVICEID);
        String namenodeId1 =
            (String)
                this.compConfig.getAdvanceConfProperty(
                    HadoopConstants.AdvanceConfKeys.HA_NAMENODEID1);
        String namenodeId2 =
            (String)
                this.compConfig.getAdvanceConfProperty(
                    HadoopConstants.AdvanceConfKeys.HA_NAMENODEID2);

        String activeNameNodeHost = HadoopUtils.getActiveNameNodeHost(this.compConfig);
        String standByNameNodeHost = HadoopUtils.getStandByNameNodeHost(this.compConfig);

        paramList.put("dfs.nameservices", nameserviceId);
        paramList.put("dfs.ha.namenodes." + nameserviceId, namenodeId1 + "," + namenodeId2);
        paramList.put(
            "dfs.namenode.rpc-address." + nameserviceId + "." + namenodeId1,
            activeNameNodeHost + ":" + Hadoop2Configurator.DEFAULT_PORT_RPC_NAMENODE);

        paramList.put(
            "dfs.namenode.rpc-address." + nameserviceId + "." + namenodeId2,
            standByNameNodeHost + ":" + Hadoop2Configurator.DEFAULT_PORT_RPC_NAMENODE);

        paramList.put(
            "dfs.namenode.http-address." + nameserviceId + "." + namenodeId1,
            activeNameNodeHost + ":" + Hadoop2Configurator.DEFAULT_PORT_HTTP_NAMENODE);

        paramList.put(
            "dfs.namenode.http-address." + nameserviceId + "." + namenodeId2,
            standByNameNodeHost + ":" + Hadoop2Configurator.DEFAULT_PORT_HTTP_NAMENODE);

        StringBuilder journalNodeList = new StringBuilder("qjournal://");
        for (String journalNodeHost : HadoopUtils.getJournalNodeHosts(this.compConfig)) {
          journalNodeList.append(
              journalNodeHost + ":" + Hadoop2Configurator.DEFAULT_PORT_RPC_JOURNAL_NODE);
          journalNodeList.append(";");
        }
        String valJournalNodeProp =
            journalNodeList.toString().substring(0, journalNodeList.length() - 1)
                + "/"
                + nameserviceId;
        paramList.put("dfs.namenode.shared.edits.dir", valJournalNodeProp);
        paramList.put(
            "dfs.journalnode.edits.dir",
            (String)
                this.compConfig.getAdvanceConfProperty(
                    HadoopConstants.AdvanceConfKeys.HA_JOURNALNODE_EDITS_DIR));
        paramList.put(
            "dfs.client.failover.proxy.provider." + nameserviceId,
            "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        paramList.put("dfs.ha.fencing.methods", "shell(/bin/true)");

        boolean isAutomaticFailoverEnabled =
            (Boolean)
                this.compConfig.getAdvanceConfProperty(
                    HadoopConstants.AdvanceConfKeys.HA_AUTOMATIC_FAILOVER_ENABLED);

        if (isAutomaticFailoverEnabled) {
          paramList.put("dfs.ha.automatic-failover.enabled." + nameserviceId, "true");

          String zkEnsembleId =
              (String)
                  this.compConfig.getAdvanceConfProperty(
                      HadoopConstants.AdvanceConfKeys.HA_ZK_ENSEMBLEID);

          String zkQuorumValue =
              ZookeeperUtils.getZookeeperConnectionString(this.clusterConfig, zkEnsembleId);
          paramList.put("ha.zookeeper.quorum", zkQuorumValue);
        } else {
          paramList.put("dfs.ha.automatic-failover.enabled." + nameserviceId, "false");
        }
      } else if (HadoopUtils.getSecondaryNameNodeHost(this.compConfig) != null) {
        String valAddrSNNIp = HadoopUtils.getSecondaryNameNodeHost(this.compConfig);
        paramList.put(
            "dfs.namenode.secondary.http-address",
            valAddrSNNIp + ":" + Hadoop2Configurator.DEFAULT_PORT_HTTP_SECONDARYNAMENODE);
      }

      for (String propertyName : paramList.keySet()) {
        status =
            HadoopUtils.addPropertyToFile(
                clusterConfig,
                nodeConfig,
                hdfsSiteXmlPath,
                propertyName,
                paramList.get(propertyName));

        if (!status) {
          HadoopUtils.addAndLogError(
              LOG,
              clusterConfig,
              "Could not add " + propertyName + " property to " + hdfsSiteXmlPath + " file",
              Constant.Component.Name.HADOOP,
              nodeConfig.getHost());
          return false;
        }
      }
      return true;

    } catch (Exception e) {
      HadoopUtils.addAndLogError(
          LOG,
          clusterConfig,
          "Could not update Hadoop yarn-site.xml  file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost(),
          e);
      return false;
    }
  }
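The shared-edits value assembled in the HA branch follows the standard qjournal URI form. Below is a minimal, self-contained sketch of that loop with made-up hosts, assuming DEFAULT_PORT_RPC_JOURNAL_NODE matches the stock JournalNode RPC port 8485:

  import java.util.Arrays;
  import java.util.List;

  public class QjournalUriSketch {
    public static void main(String[] args) {
      // Illustrative hosts and nameservice; not values from this listing.
      List<String> journalNodeHosts = Arrays.asList("jn1", "jn2", "jn3");
      String nameserviceId = "mycluster";
      // Assumption: the deployer's constant equals the stock JournalNode RPC port.
      int journalNodeRpcPort = 8485;

      StringBuilder journalNodeList = new StringBuilder("qjournal://");
      for (String journalNodeHost : journalNodeHosts) {
        // qjournal URIs separate hosts with ';'
        journalNodeList.append(journalNodeHost + ":" + journalNodeRpcPort + ";");
      }
      // Drop the trailing ';' and append the nameservice, as the method above does.
      String valJournalNodeProp =
          journalNodeList.substring(0, journalNodeList.length() - 1) + "/" + nameserviceId;

      // Prints: qjournal://jn1:8485;jn2:8485;jn3:8485/mycluster
      System.out.println(valJournalNodeProp);
    }
  }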
Code example #7
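configureYarnSiteXml wires up the ResourceManager addresses, the CapacityScheduler, and the MapReduce shuffle auxiliary service. On CDH 4.4.0 the aux-service name and the shuffle-class property key differ, so both are looked up per component version from the Ankush configuration. The Web App Proxy address is added when that server is enabled.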
  /**
   * Configure yarn site xml.
   *
   * @param nodeConfig the node config
   * @return true, if successful
   */
  protected boolean configureYarnSiteXml(NodeConfig nodeConfig) {
    boolean status = true;
    try {
      String yarnSiteXmlPath =
          HadoopUtils.getHadoopConfDir(this.compConfig)
              + HadoopConstants.FileName.ConfigurationFile.XML_YARN_SITE;

      LOG.info(
          "Configuring " + HadoopConstants.FileName.ConfigurationFile.XML_YARN_SITE + " file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost());

      String valAuxServices = "mapreduce_shuffle";
      String keyMRShuffleClass = "yarn.nodemanager.aux-services.mapreduce_shuffle.class";

      if (this.isCdh4_4_0()) {
        valAuxServices =
            AppStoreWrapper.getAnkushConfReader()
                .getStringValue("hadoop2.propval.auxservices." + this.compConfig.getVersion());

        keyMRShuffleClass =
            AppStoreWrapper.getAnkushConfReader()
                .getStringValue("hadoop2.propname.mrshuffleclass." + this.compConfig.getVersion());
      }

      Map<String, String> paramList = new HashMap<String, String>();
      String resourceManagerNode = HadoopUtils.getResourceManagerHost(this.compConfig);

      paramList.put("yarn.nodemanager.aux-services", valAuxServices);
      paramList.put(keyMRShuffleClass, "org.apache.hadoop.mapred.ShuffleHandler");

      paramList.put("yarn.resourcemanager.hostname", resourceManagerNode);

      paramList.put(
          "yarn.resourcemanager.address",
          resourceManagerNode + ":" + Hadoop2Configurator.DEFAULT_PORT_RPC_RESOURCEMANAGER);
      paramList.put(
          "yarn.resourcemanager.resource-tracker.address",
          resourceManagerNode + ":" + Hadoop2Configurator.DEFAULT_PORT_RPC_RM_RESOURCETRACKER);
      paramList.put(
          "yarn.resourcemanager.scheduler.address",
          resourceManagerNode + ":" + Hadoop2Configurator.DEFAULT_PORT_RPC_RM_SCHEDULER);
      paramList.put(
          "yarn.resourcemanager.scheduler.class",
          "org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler");

      boolean webAppProxyServerEnabled =
          (Boolean)
              this.compConfig.getAdvanceConfProperty(
                  HadoopConstants.AdvanceConfKeys.WEBAPPPROXYSERVER_ENABLED);

      if (webAppProxyServerEnabled) {
        paramList.put(
            "yarn.web-proxy.address",
            HadoopUtils.getWebAppProxyServerHost(this.compConfig)
                + ":"
                + Hadoop2Configurator.DEFAULT_PORT_RPC_RM_WEBAPPPROXYSERVER);
      }

      for (String propertyName : paramList.keySet()) {
        status =
            HadoopUtils.addPropertyToFile(
                clusterConfig,
                nodeConfig,
                yarnSiteXmlPath,
                propertyName,
                paramList.get(propertyName));

        if (!status) {
          HadoopUtils.addAndLogError(
              LOG,
              clusterConfig,
              "Could not add " + propertyName + " property to " + yarnSiteXmlPath + " file",
              Constant.Component.Name.HADOOP,
              nodeConfig.getHost());
          return false;
        }
      }
      return true;

    } catch (Exception e) {
      HadoopUtils.addAndLogError(
          LOG,
          clusterConfig,
          "Could not update Hadoop yarn-site.xml  file",
          Constant.Component.Name.HADOOP,
          nodeConfig.getHost(),
          e);
      return false;
    }
  }
Code example #8
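configureLinuxEnvironmentFile persists HADOOP_HOME, HADOOP_PREFIX, and HADOOP_CONF_DIR in the user's ~/.bashrc through an AddEnvironmentVariables task, then sources the file so the variables take effect in the current session.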
  private boolean configureLinuxEnvironmentFile(NodeConfig nodeConfig) {
    Result res = null;
    LOG.info(
        "Configuring variables in " + Constant.LinuxEnvFiles.BASHRC + " file",
        Constant.Component.Name.HADOOP,
        nodeConfig.getHost());
    try {
      String componentHome = this.compConfig.getHomeDir();

      // set environment variables in /etc/environment file
      LinkedHashMap<String, String> envVariableMap = new LinkedHashMap<String, String>();
      envVariableMap.put(HadoopUtils.KEY_HADOOP_HOME, componentHome);
      envVariableMap.put(HadoopUtils.KEY_HADOOP_PREFIX, componentHome);
      envVariableMap.put(
          HadoopUtils.KEY_HADOOP_CONF_DIR, HadoopUtils.getHadoopConfDir(this.compConfig));

      // String pathVariableVal = HadoopUtils
      // .getPathVariableValue(nodeConfig)
      // + ":"
      // + HadoopUtils.getHadoopBinDir(this.compConfig)
      // + ":"
      // + HadoopUtils.getHadoopScriptDir(this.compConfig);
      //
      // envVariableMap.put(HadoopUtils.KEY_PATH_VARIABLE,
      // pathVariableVal);

      // envVariableMap.put(HadoopUtils.KEY_HADOOP_OPTS,
      // "-Djava.net.preferIPv4Stack=true");

      AnkushTask addEnvVariables =
          new AddEnvironmentVariables(
              envVariableMap, Constant.LinuxEnvFiles.BASHRC, Component.Name.HADOOP);
      res = nodeConfig.getConnection().exec(addEnvVariables);
      if (!res.isSuccess) {
        String errMsg =
            "Could not add environment variables for Hadoop in "
                + Constant.LinuxEnvFiles.BASHRC
                + " file";
        HadoopUtils.addAndLogError(
            LOG, clusterConfig, errMsg, Constant.Component.Name.HADOOP, nodeConfig.getHost());
        return false;
      }

      AnkushTask sourceFile = new SourceFile(Constant.LinuxEnvFiles.BASHRC);
      res = nodeConfig.getConnection().exec(sourceFile);

      if (res.rc != 0) {

        String errMsg = "Could not source " + Constant.LinuxEnvFiles.BASHRC + " file";

        HadoopUtils.addAndLogError(
            LOG, clusterConfig, errMsg, Constant.Component.Name.HADOOP, nodeConfig.getHost());
        return false;
      }
      return true;
    } catch (Exception e) {
      String errMsg =
          "Could not add environment variables for Hadoop in "
              + Constant.LinuxEnvFiles.BASHRC
              + " file";
      HadoopUtils.addAndLogError(
          LOG, clusterConfig, errMsg, Constant.Component.Name.HADOOP, nodeConfig.getHost(), e);
      return false;
    }
  }
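The caller that strings these steps together is not part of this listing. A hypothetical per-node driver inside the same class would chain them in dependency order, environment first, then the site XMLs, short-circuiting on the first failure:

  // Hypothetical orchestration; the real entry point is not shown in this listing.
  private boolean configureNode(NodeConfig nodeConfig) {
    return configureLinuxEnvironmentFile(nodeConfig)
        && configureHadoopEnvScripts(nodeConfig)
        && setJavaInHadoop(nodeConfig)
        && configureCoreSiteXml(nodeConfig)
        && configureHdfsSiteXml(nodeConfig)
        && configureMapredSiteXml(nodeConfig) // also creates the file from its template
        && configureYarnSiteXml(nodeConfig);
  }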