public class CDH550Distribution extends AbstractDistribution
    implements HDFSComponent,
        MRComponent,
        HBaseComponent,
        PigComponent,
        HiveComponent,
        ImpalaComponent,
        HCatalogComponent,
        SparkBatchComponent,
        SparkStreamingComponent,
        HiveOnSparkComponent,
        SqoopComponent {

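  /** Version key for Cloudera CDH 5.5, as registered in {@link EHadoopVersion4Drivers}. */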
  public static final String VERSION = EHadoopVersion4Drivers.CLOUDERA_CDH5_5.getVersionValue();

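  /**
   * Default value for the {@code yarn.application.classpath} property. The entries cover both the
   * flat layout ({@code $HADOOP_COMMON_HOME/*}) and the Apache-style {@code share/hadoop/...}
   * layout, so YARN containers can resolve Hadoop classes in either installation style.
   */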
  private static final String YARN_APPLICATION_CLASSPATH =
      "$HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/*,$HADOOP_COMMON_HOME/lib/*,$HADOOP_HDFS_HOME/*,$HADOOP_HDFS_HOME/lib/*,$HADOOP_MAPRED_HOME/*,$HADOOP_MAPRED_HOME/lib/*,$YARN_HOME/*,$YARN_HOME/lib/*,$HADOOP_YARN_HOME/*,$HADOOP_YARN_HOME/lib/*,$HADOOP_COMMON_HOME/share/hadoop/common/*,$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,$HADOOP_YARN_HOME/share/hadoop/yarn/*,$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*"; //$NON-NLS-1$

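  /** Module group imports shared by every component of a given type. */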
  private static Map<ComponentType, Set<DistributionModuleGroup>> moduleGroups;

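  /** Module group imports registered for specific (component type, component name) nodes. */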
  private static Map<NodeComponentTypeBean, Set<DistributionModuleGroup>> nodeModuleGroups;

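  /** Conditions controlling whether the distribution is displayed for a given component type. */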
  private static Map<ComponentType, ComponentCondition> displayConditions;

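  // Populate the lookup tables once, when the class is first loaded.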
  static {

    // Registers the module group imports for components that have a HADOOP_DISTRIBUTION
    // parameter, i.e. the components that expose the distribution list.
    moduleGroups = new HashMap<>();
    moduleGroups.put(ComponentType.HDFS, CDH550HDFSModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HBASE, CDH550HBaseModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HCATALOG, CDH550HCatalogModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.MAPREDUCE, CDH550MapReduceModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.PIG, CDH550PigModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.PIGOUTPUT, CDH550PigOutputModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.SQOOP, CDH550SqoopModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HIVE, CDH550HiveModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.IMPALA, CDH550ImpalaModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.SPARKBATCH, CDH550SparkBatchModuleGroup.getModuleGroups());
    moduleGroups.put(
        ComponentType.SPARKSTREAMING, CDH550SparkStreamingModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HIVEONSPARK, CDH550HiveOnSparkModuleGroup.getModuleGroups());

    // Registers module group imports for specific nodes. The given node must have a
    // HADOOP_LIBRARIES parameter.
    nodeModuleGroups = new HashMap<>();

    nodeModuleGroups.put(
        new NodeComponentTypeBean(ComponentType.MAPREDUCE, MRConstant.S3_INPUT_COMPONENT),
        CDH550MRS3NodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(ComponentType.MAPREDUCE, MRConstant.S3_OUTPUT_COMPONENT),
        CDH550MRS3NodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(ComponentType.PIG, PigOutputConstant.PIGSTORE_COMPONENT),
        CDH550PigOutputNodeModuleGroup.getModuleGroups());

    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKBATCH, SparkBatchConstant.PARQUET_INPUT_COMPONENT),
        CDH550SparkBatchParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKBATCH, SparkBatchConstant.PARQUET_OUTPUT_COMPONENT),
        CDH550SparkBatchParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT),
        CDH550SparkBatchS3NodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_INPUT_COMPONENT),
        CDH550SparkStreamingParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_OUTPUT_COMPONENT),
        CDH550SparkStreamingParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_STREAM_INPUT_COMPONENT),
        CDH550SparkStreamingParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.S3_CONFIGURATION_COMPONENT),
        CDH550SparkStreamingS3NodeModuleGroup.getModuleGroups());

    Set<DistributionModuleGroup> kinesisNodeModuleGroups =
        CDH550SparkStreamingKinesisNodeModuleGroup.getModuleGroups();
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_COMPONENT),
        kinesisNodeModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT),
        kinesisNodeModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT),
        kinesisNodeModuleGroups);

    Set<DistributionModuleGroup> kafkaAssemblyModuleGroups =
        CDH550SparkStreamingKafkaAssemblyModuleGroup.getModuleGroups();
    Set<DistributionModuleGroup> kafkaAvroModuleGroups =
        CDH550SparkStreamingKafkaAvroModuleGroup.getModuleGroups();
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_INPUT_COMPONENT),
        kafkaAssemblyModuleGroups);
    // tKafkaInputAvro presumably needs both the Kafka assembly and the Kafka Avro module
    // groups, so register their union explicitly rather than letting a second put() with the
    // same key silently overwrite the first registration.
    Set<DistributionModuleGroup> kafkaAvroInputModuleGroups =
        new HashSet<>(kafkaAssemblyModuleGroups);
    kafkaAvroInputModuleGroups.addAll(kafkaAvroModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_AVRO_INPUT_COMPONENT),
        kafkaAvroInputModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_OUTPUT_COMPONENT),
        CDH550SparkStreamingKafkaClientModuleGroup.getModuleGroups());

    // Conditions used to hide the distribution depending on other parameter values in the
    // component. No display conditions are registered for CDH 5.5.
    displayConditions = new HashMap<>();
  }

  @Override
  public String getDistribution() {
    return EHadoopDistributions.CLOUDERA.getName();
  }

  @Override
  public String getDistributionName() {
    return EHadoopDistributions.CLOUDERA.getDisplayName();
  }

  @Override
  public String getVersion() {
    return VERSION;
  }

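  /**
   * Returns the display name of the CDH 5.5 version. The component type is not used: every
   * component reports the same version label for this distribution.
   */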
  @Override
  public String getVersionName(ComponentType componentType) {
    return EHadoopVersion4Drivers.CLOUDERA_CDH5_5.getVersionDisplay();
  }

  @Override
  public EHadoopVersion getHadoopVersion() {
    return EHadoopVersion.HADOOP_2;
  }

  @Override
  public boolean doSupportKerberos() {
    return true;
  }

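  /**
   * Returns the module groups required by all components of the given type, or {@code null} if
   * none are registered.
   */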
  @Override
  public Set<DistributionModuleGroup> getModuleGroups(ComponentType componentType) {
    return moduleGroups.get(componentType);
  }

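  /**
   * Returns the module groups required by one specific component node, or {@code null} if none
   * are registered for that (type, name) pair. An illustrative sketch, assuming the default
   * constructor is usable here:
   *
   * <pre>
   * Set&lt;DistributionModuleGroup&gt; groups =
   *     new CDH550Distribution()
   *         .getModuleGroups(
   *             ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT);
   * </pre>
   */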
  @Override
  public Set<DistributionModuleGroup> getModuleGroups(
      ComponentType componentType, String componentName) {
    return nodeModuleGroups.get(new NodeComponentTypeBean(componentType, componentName));
  }

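  /**
   * Returns the condition under which the distribution is displayed for the given component type,
   * or {@code null} if no condition is registered.
   */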
  @Override
  public ComponentCondition getDisplayCondition(ComponentType componentType) {
    return displayConditions.get(componentType);
  }

  @Override
  public boolean doSupportCrossPlatformSubmission() {
    return true;
  }

  @Override
  public boolean doSupportUseDatanodeHostname() {
    return true;
  }

  @Override
  public boolean doSupportSequenceFileShortType() {
    return true;
  }

  @Override
  public String getYarnApplicationClasspath() {
    return YARN_APPLICATION_CLASSPATH;
  }

  @Override
  public boolean doSupportNewHBaseAPI() {
    return true;
  }

  @Override
  public boolean doSupportHCatalog() {
    return true;
  }

  @Override
  public boolean pigVersionPriorTo_0_12() {
    return false;
  }

  @Override
  public boolean doSupportHBase() {
    return true;
  }

  @Override
  public boolean doSupportImpersonation() {
    return true;
  }

  @Override
  public boolean doSupportEmbeddedMode() {
    return false;
  }

  @Override
  public boolean doSupportStandaloneMode() {
    return true;
  }

  @Override
  public boolean doSupportHive1() {
    return false;
  }

  @Override
  public boolean doSupportHive2() {
    return true;
  }

  @Override
  public boolean doSupportTezForHive() {
    return false;
  }

  @Override
  public boolean doSupportHBaseForHive() {
    return true;
  }

  @Override
  public boolean doSupportSSL() {
    return true;
  }

  @Override
  public boolean doSupportORCFormat() {
    return true;
  }

  @Override
  public boolean doSupportAvroFormat() {
    return true;
  }

  @Override
  public boolean doSupportParquetFormat() {
    return true;
  }

  @Override
  public boolean doSupportStoreAsParquet() {
    return true;
  }

  @Override
  public ESparkVersion getSparkVersion() {
    return ESparkVersion.SPARK_1_5;
  }

  @Override
  public boolean doSupportDynamicMemoryAllocation() {
    return true;
  }

  @Override
  public boolean isExecutedThroughSparkJobServer() {
    return false;
  }

  @Override
  public boolean doSupportCheckpointing() {
    return true;
  }

  @Override
  public boolean doSupportSparkStandaloneMode() {
    return true;
  }

  @Override
  public boolean doSupportSparkYarnClientMode() {
    return true;
  }

  @Override
  public boolean doSupportOldImportMode() {
    return false;
  }

  @Override
  public boolean doJavaAPISupportStorePasswordInFile() {
    return true;
  }

  @Override
  public boolean doJavaAPISqoopImportSupportDeleteTargetDir() {
    return true;
  }

  @Override
  public boolean doJavaAPISqoopImportAllTablesSupportExcludeTable() {
    return true;
  }

  @Override
  public boolean doSupportClouderaNavigator() {
    return true;
  }

  @Override
  public boolean doSupportBackpressure() {
    return true;
  }
}