@Test
  public void testModuleGroups() throws Exception {
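    // Expected module names mapped to the condition string each module group must declare.
    // A null value means the module group is expected to carry no condition.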
    Map<String, String> results = new HashMap<>();
    results.put(
        CDH550Constant.SPARK_KAFKA_ASSEMBLY_MRREQUIRED_MODULE_GROUP.getModuleName(),
        "((#LINK@NODE.STORAGE_CONFIGURATION.DISTRIBUTION=='CLOUDERA') AND (#LINK@NODE.STORAGE_CONFIGURATION.SPARK_VERSION=='Cloudera_CDH5_5')) AND (#LINK@NODE.STORAGE_CONFIGURATION.SPARK_LOCAL_MODE=='false')"); //$NON-NLS-1$
    results.put(
        CDH550Constant.SPARK_KAFKA_AVRO_MRREQUIRED_MODULE_GROUP.getModuleName(),
        "((#LINK@NODE.STORAGE_CONFIGURATION.DISTRIBUTION=='CLOUDERA') AND (#LINK@NODE.STORAGE_CONFIGURATION.SPARK_VERSION=='Cloudera_CDH5_5')) AND (#LINK@NODE.STORAGE_CONFIGURATION.SPARK_LOCAL_MODE=='false')"); //$NON-NLS-1$

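    // Retrieve the module groups registered for this distribution and version, then check
    // each one against the expected map.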
    Set<DistributionModuleGroup> moduleGroups =
        CDH550SparkStreamingKafkaAvroModuleGroup.getModuleGroups(
            CDH550Distribution.DISTRIBUTION_NAME, CDH550Distribution.VERSION);
    assertEquals(results.size(), moduleGroups.size());
    for (DistributionModuleGroup module : moduleGroups) {
      assertTrue(
          "Should contain module " + module.getModuleName(), //$NON-NLS-1$
          results.containsKey(module.getModuleName()));
      String expectedCondition = results.get(module.getModuleName());
      if (expectedCondition == null) {
        assertTrue(
            "The condition of the module " //$NON-NLS-1$
                + module.getModuleName()
                + " should be null.", //$NON-NLS-1$
            module.getRequiredIf() == null);
      } else {
        assertTrue(
            "The condition of the module " //$NON-NLS-1$
                + module.getModuleName()
                + " is null, but it should be " //$NON-NLS-1$
                + expectedCondition
                + ".", //$NON-NLS-1$
            module.getRequiredIf() != null);
        assertEquals(expectedCondition, module.getRequiredIf().getConditionString());
      }
    }
  }
  static {

    // Adds a module group import for the components that have a HADOOP_DISTRIBUTION
    // parameter, i.e. the components that display the distribution list.
    moduleGroups = new HashMap<>();
    moduleGroups.put(ComponentType.HDFS, CDH550HDFSModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HBASE, CDH550HBaseModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HCATALOG, CDH550HCatalogModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.MAPREDUCE, CDH550MapReduceModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.PIG, CDH550PigModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.PIGOUTPUT, CDH550PigOutputModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.SQOOP, CDH550SqoopModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HIVE, CDH550HiveModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.IMPALA, CDH550ImpalaModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.SPARKBATCH, CDH550SparkBatchModuleGroup.getModuleGroups());
    moduleGroups.put(
        ComponentType.SPARKSTREAMING, CDH550SparkStreamingModuleGroup.getModuleGroups());
    moduleGroups.put(ComponentType.HIVEONSPARK, CDH550HiveOnSparkModuleGroup.getModuleGroups());

    // Used to add a module group import for a specific node. The given node must have a
    // HADOOP_LIBRARIES parameter.
    nodeModuleGroups = new HashMap<>();

    nodeModuleGroups.put(
        new NodeComponentTypeBean(ComponentType.MAPREDUCE, MRConstant.S3_INPUT_COMPONENT),
        CDH550MRS3NodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(ComponentType.MAPREDUCE, MRConstant.S3_OUTPUT_COMPONENT),
        CDH550MRS3NodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(ComponentType.PIG, PigOutputConstant.PIGSTORE_COMPONENT),
        CDH550PigOutputNodeModuleGroup.getModuleGroups());

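    // Spark batch: both Parquet components share one node module group; the S3 configuration
    // component has its own.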
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKBATCH, SparkBatchConstant.PARQUET_INPUT_COMPONENT),
        CDH550SparkBatchParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKBATCH, SparkBatchConstant.PARQUET_OUTPUT_COMPONENT),
        CDH550SparkBatchParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKBATCH, SparkBatchConstant.S3_CONFIGURATION_COMPONENT),
        CDH550SparkBatchS3NodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_INPUT_COMPONENT),
        CDH550SparkStreamingParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_OUTPUT_COMPONENT),
        CDH550SparkStreamingParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.PARQUET_STREAM_INPUT_COMPONENT),
        CDH550SparkStreamingParquetNodeModuleGroup.getModuleGroups());
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.S3_CONFIGURATION_COMPONENT),
        CDH550SparkStreamingS3NodeModuleGroup.getModuleGroups());

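    // The three Kinesis components (input, Avro input, output) share the same module group set.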
    Set<DistributionModuleGroup> kinesisNodeModuleGroups =
        CDH550SparkStreamingKinesisNodeModuleGroup.getModuleGroups();
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_COMPONENT),
        kinesisNodeModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_INPUT_AVRO_COMPONENT),
        kinesisNodeModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KINESIS_OUTPUT_COMPONENT),
        kinesisNodeModuleGroups);

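    // Kafka components: the plain input uses the Spark Kafka assembly, the Avro input also
    // needs the Kafka Avro module group, and the output only needs the Kafka client.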
    Set<DistributionModuleGroup> kafkaAssemblyModuleGroups =
        CDH550SparkStreamingKafkaAssemblyModuleGroup.getModuleGroups();
    Set<DistributionModuleGroup> kafkaAvroModuleGroups =
        CDH550SparkStreamingKafkaAvroModuleGroup.getModuleGroups();
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_INPUT_COMPONENT),
        kafkaAssemblyModuleGroups);
    // The Avro input component needs the Kafka assembly and the Avro module groups. Putting
    // the same key twice keeps only the second set, so merge the two sets instead.
    Set<DistributionModuleGroup> kafkaAvroInputModuleGroups =
        new HashSet<>(kafkaAssemblyModuleGroups);
    kafkaAvroInputModuleGroups.addAll(kafkaAvroModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_AVRO_INPUT_COMPONENT),
        kafkaAvroInputModuleGroups);
    nodeModuleGroups.put(
        new NodeComponentTypeBean(
            ComponentType.SPARKSTREAMING, SparkStreamingConstant.KAFKA_OUTPUT_COMPONENT),
        CDH550SparkStreamingKafkaClientModuleGroup.getModuleGroups());

    // Used to hide the distribution according to other parameters in the component.
    displayConditions = new HashMap<>();
  }