private boolean manageYarnServices(String action) { boolean status = true; try { HadoopServiceManager hadoopServiceManager = HadoopUtils.getServiceManagerInstance(this.clusterConfig, this.compConfig); // Manage ResourceManager Process if (!hadoopServiceManager.manageServiceOnNode( HadoopUtils.getResourceManagerHost(this.compConfig), HadoopConstants.Roles.RESOURCEMANAGER, action)) { status = false; } // Manage NodeManager Process on each Slave Node if (!hadoopServiceManager.manageServiceOnNodes( HadoopUtils.getSlaveHosts(this.compConfig), HadoopConstants.Roles.NODEMANAGER, action)) { status = false; } boolean webAppProxyServerEnabled = (Boolean) this.compConfig.getAdvanceConfProperty( HadoopConstants.AdvanceConfKeys.WEBAPPPROXYSERVER_ENABLED); if (webAppProxyServerEnabled) { if (!hadoopServiceManager.manageServiceOnNode( HadoopUtils.getWebAppProxyServerHost(this.compConfig), HadoopConstants.Roles.WEBAPPPROXYSERVER, action)) { status = false; } } boolean jobHistoryServerEnabled = (Boolean) this.compConfig.getAdvanceConfProperty( HadoopConstants.AdvanceConfKeys.JOBHISTORYSERVER_ENABLED); if (jobHistoryServerEnabled) { if (!hadoopServiceManager.manageServiceOnNode( HadoopUtils.getJobHistoryServerHost(this.compConfig), HadoopConstants.Roles.JOBHISTORYSERVER, action)) { status = false; } } } catch (Exception e) { HadoopUtils.addAndLogError( LOG, clusterConfig, "Could not " + action + " Hadoop YARN services", Constant.Component.Name.HADOOP, e); status = false; } return status; }
/*
 * (non-Javadoc)
 *
 * @see com.impetus.ankush2.hadoop.deployer.configurator.HadoopConfigurator#
 * configureMapredSiteXml(com.impetus.ankush2.framework.config.NodeConfig)
 */
/**
 * Creates and populates the mapred-site.xml configuration file on the given
 * node: sets the MapReduce framework name (defaulting to YARN when not
 * configured), the cluster temp directory, and — when the JobHistoryServer is
 * enabled — its address.
 *
 * @param nodeConfig the node on which to write the configuration file
 * @return true if the file was created and every property was added, false otherwise
 */
@Override
protected boolean configureMapredSiteXml(NodeConfig nodeConfig) {
	try {
		if (!createMapredSiteXml(nodeConfig)) {
			return false;
		}
		String mapredSiteXmlPath = HadoopUtils
				.getHadoopConfDir(this.compConfig)
				+ HadoopConstants.FileName.ConfigurationFile.XML_MAPRED_SITE;
		LOG.info("Configuring "
				+ HadoopConstants.FileName.ConfigurationFile.XML_MAPRED_SITE
				+ " file", Constant.Component.Name.HADOOP,
				nodeConfig.getHost());

		Map<String, String> paramList = new HashMap<String, String>();

		// Fall back to the YARN framework when no explicit framework is
		// configured in the advanced configuration.
		String mapreduceFramework = (String) this.compConfig
				.getAdvanceConfProperty(HadoopConstants.AdvanceConfKeys.MAPREDUCE_FRAMEWORK);
		if (mapreduceFramework == null) {
			mapreduceFramework = Hadoop2Configurator.DEFAULT_MAPREDUCE_FRAMEWORK_YARN;
		}
		paramList.put("mapreduce.framework.name", mapreduceFramework);

		paramList.put(
				"mapreduce.cluster.temp.dir",
				HadoopConstants.URI_FILE_PREFIX
						+ (String) this.compConfig
								.getAdvanceConfProperty(HadoopConstants.AdvanceConfKeys.TMP_DIR_MAPRED));

		// Boolean.TRUE.equals(...) treats an absent (null) flag as
		// "disabled" instead of throwing an NPE on auto-unboxing, which
		// previously failed the whole configuration via the catch-all.
		boolean jobHistoryServerEnabled = Boolean.TRUE
				.equals(this.compConfig
						.getAdvanceConfProperty(HadoopConstants.AdvanceConfKeys.JOBHISTORYSERVER_ENABLED));
		if (jobHistoryServerEnabled) {
			paramList.put(
					"mapreduce.jobhistory.address",
					HadoopUtils.getJobHistoryServerHost(this.compConfig)
							+ ":"
							+ Hadoop2Configurator.DEFAULT_PORT_REST_JOBHISTORYSERVER);
		}

		// Iterate entries directly instead of keySet() + get() to avoid a
		// second map lookup per property.
		for (Map.Entry<String, String> property : paramList.entrySet()) {
			if (!HadoopUtils.addPropertyToFile(clusterConfig, nodeConfig,
					mapredSiteXmlPath, property.getKey(),
					property.getValue())) {
				HadoopUtils.addAndLogError(LOG, clusterConfig,
						"Could not add " + property.getKey()
								+ " property to " + mapredSiteXmlPath
								+ " file", Constant.Component.Name.HADOOP,
						nodeConfig.getHost());
				return false;
			}
		}
		return true;
	} catch (Exception e) {
		HadoopUtils.addAndLogError(LOG, clusterConfig,
				"Could not update Hadoop mapred-site.xml file",
				Constant.Component.Name.HADOOP, nodeConfig.getHost(), e);
		return false;
	}
}