  /**
   * Returns the compaction size configured for the given compaction type.
   * Currently only major compaction has a configured size.
   *
   * @param compactionType the type of compaction (major or minor)
   * @return the configured compaction size, or 0 if none is configured for the type
   */
  public static long getCompactionSize(CompactionType compactionType) {

    long compactionSize = 0;
    switch (compactionType) {
      case MAJOR_COMPACTION:
        compactionSize = CarbonProperties.getInstance().getMajorCompactionSize();
        break;
      default: // no other compaction type has a configured size; this case should not occur.
    }
    return compactionSize;
  }
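
  // Illustrative usage (a sketch, not part of the original class): the returned
  // threshold can be compared against an accumulated segment size when selecting
  // major compaction candidates; "segmentSize" is a hypothetical value in the same
  // unit as the configured major compaction size.
  //
  //   long threshold = getCompactionSize(CompactionType.MAJOR_COMPACTION);
  //   boolean fitsInMajorCompaction = segmentSize < threshold;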
  /**
   * Checks how many of the latest loads are to be preserved and returns the
   * remaining valid segments as merge candidates.
   *
   * @param segments the load metadata details of all segments
   * @return the valid segments remaining after the preserved loads are excluded
   */
  private static List<LoadMetadataDetails> checkPreserveSegmentsPropertyReturnRemaining(
      List<LoadMetadataDetails> segments) {

    // check whether preserving of segments from merging is enabled, and get the
    // number of latest loads to be preserved.
    int numberOfSegmentsToBePreserved =
        CarbonProperties.getInstance().getNumberOfSegmentsToBePreserved();
    // get the number of valid segments and retain the latest loads from merging.
    return CarbonDataMergerUtil.getValidLoadDetailsWithRetaining(
        segments, numberOfSegmentsToBePreserved);
  }
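
  // Worked example (illustrative): if a table has loads 0..9 and the preserve count
  // is configured as 2, the two latest loads (8 and 9) are kept out of merging and
  // only loads 0..7 are returned as valid merge candidates.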
  /**
   * Checks whether the auto load merge property is enabled.
   *
   * @return true if auto load merging is required, false otherwise
   */
  public static boolean checkIfAutoLoadMergingRequired() {
    // load merge is not supported in the new store format; this check is done early
    // to avoid an unnecessary load listing, which can cause an IOException.
    // checks whether CarbonData's segment merging operation is enabled (default: false).
    String isLoadMergeEnabled =
        CarbonProperties.getInstance()
            .getProperty(
                CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
                CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE);
    return !isLoadMergeEnabled.equalsIgnoreCase("false");
  }
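
  // Illustrative configuration (a sketch, assuming the standard CarbonProperties
  // API): auto load merge is switched on by setting the property checked above.
  //
  //   CarbonProperties.getInstance()
  //       .addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "true");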
  /**
   * Identifies the segments to be merged based on the configured segment counts
   * per compaction level.
   *
   * @param listOfSegmentsAfterPreserve the segments remaining after preserved loads are excluded
   * @return the segments to merge at level 1 or level 2, or an empty list if no
   *         level threshold is reached
   */
  private static List<LoadMetadataDetails> identifySegmentsToBeMergedBasedOnSegCount(
      List<LoadMetadataDetails> listOfSegmentsAfterPreserve) {

    List<LoadMetadataDetails> mergedSegments =
        new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    List<LoadMetadataDetails> unMergedSegments =
        new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);

    int[] noOfSegmentLevelsCount = CarbonProperties.getInstance().getCompactionSegmentLevelCount();

    int level1Size = 0;
    int level2Size = 0;
    boolean first = true;

    // the first entry in the level count array is the level 1 threshold and the
    // second is the level 2 threshold.
    for (int levelCount : noOfSegmentLevelsCount) {
      if (first) {
        level1Size = levelCount;
        first = false;
      } else {
        level2Size = levelCount;
        // break as only two compaction levels are supported.
        break;
      }
    }

    int unMergeCounter = 0;
    int mergeCounter = 0;

    // scan the segments, counting merged and unmerged ones separately
    for (LoadMetadataDetails segment : listOfSegmentsAfterPreserve) {

      String segName = segment.getLoadName();

      // if a segment has already been merged 2 levels, its name will end with .2;
      // such segments must be excluded from minor compaction.
      // a segment that has been major compacted should also not be considered for minor.
      if (segName.endsWith(CarbonCommonConstants.LEVEL2_COMPACTION_INDEX)
          || (segment.isMajorCompacted() != null
              && segment.isMajorCompacted().equalsIgnoreCase("true"))) {
        continue;
      }

      // check if the segment is merged or not

      if (!isMergedSegment(segName)) {
        // if it is an unmerged segment then increment counter
        unMergeCounter++;
        unMergedSegments.add(segment);
        if (unMergeCounter == level1Size) {
          return unMergedSegments;
        }
      } else {
        mergeCounter++;
        mergedSegments.add(segment);
        if (mergeCounter == level2Size) {
          return mergedSegments;
        }
      }
    }
    return new ArrayList<>(0);
  }
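
  // Worked example (illustrative): with a level threshold such as "4,3", level1Size
  // is 4 and level2Size is 3. If 4 unmerged segments are seen first, they are
  // returned for a level 1 compaction; if 3 once-merged segments are seen first,
  // they are returned for a level 2 compaction; otherwise nothing is merged.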
  /**
   * Returns the list of loads that were loaded within the same, configurable
   * time interval (the number of days allowed to compact).
   *
   * @param listOfSegmentsBelowThresholdSize segments whose size is below the compaction threshold
   * @return the loads falling within the allowed interval, or all input segments
   *         if the interval property is disabled (0 days)
   */
  private static List<LoadMetadataDetails> identifySegmentsToBeMergedBasedOnLoadedDate(
      List<LoadMetadataDetails> listOfSegmentsBelowThresholdSize) {

    List<LoadMetadataDetails> loadsOfSameDate =
        new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);

    long numberOfDaysAllowedToMerge = 0;
    try {
      numberOfDaysAllowedToMerge =
          Long.parseLong(
              CarbonProperties.getInstance()
                  .getProperty(
                      CarbonCommonConstants.DAYS_ALLOWED_TO_COMPACT,
                      CarbonCommonConstants.DEFAULT_DAYS_ALLOWED_TO_COMPACT));
      if (numberOfDaysAllowedToMerge < 0 || numberOfDaysAllowedToMerge > 100) {
        LOGGER.error(
            "The specified value for property "
                + CarbonCommonConstants.DAYS_ALLOWED_TO_COMPACT
                + " is incorrect."
                + " The correct value should be in the range of 0 - 100. Taking the default value.");
        numberOfDaysAllowedToMerge =
            Long.parseLong(CarbonCommonConstants.DEFAULT_DAYS_ALLOWED_TO_COMPACT);
      }

    } catch (NumberFormatException e) {
      // fall back to the default when the configured value is not a valid number.
      numberOfDaysAllowedToMerge =
          Long.parseLong(CarbonCommonConstants.DEFAULT_DAYS_ALLOWED_TO_COMPACT);
    }
    // if the number of days is greater than 0, process loads according to the load date.
    if (numberOfDaysAllowedToMerge > 0) {

      // filter loads based on the loaded date
      boolean first = true;
      Date segDate1 = null;
      SimpleDateFormat sdf = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP);
      for (LoadMetadataDetails segment : listOfSegmentsBelowThresholdSize) {

        if (first) {
          segDate1 = initializeFirstSegment(loadsOfSameDate, segment, sdf);
          first = false;
          continue;
        }
        String segmentDate = segment.getLoadStartTime();
        Date segDate2 = null;
        try {
          segDate2 = sdf.parse(segmentDate);
        } catch (ParseException e) {
          LOGGER.error("Error while parsing segment start time" + e.getMessage());
        }

        if (isTwoDatesPresentInRequiredRange(segDate1, segDate2, numberOfDaysAllowedToMerge)) {
          loadsOfSameDate.add(segment);
        }
        // if the load is beyond the merge date window and fewer than 2 loads have
        // been collected so far, reset everything and continue the search.
        else if (loadsOfSameDate.size() < 2) {
          loadsOfSameDate.clear();
          // treat this segment as the new first segment and continue checking.
          segDate1 = initializeFirstSegment(loadsOfSameDate, segment, sdf);
        } else { // a load is beyond the merge date and there are at least 2 loads to merge.
          break;
        }
      }
    } else {
      return listOfSegmentsBelowThresholdSize;
    }

    return loadsOfSameDate;
  }
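
  // Worked example (illustrative): with the days-allowed property set to 1, loads
  // started within one day of the first load (say 10:00 and 22:00 on the same day)
  // are grouped for merging; a load starting two days later either resets the group
  // (fewer than 2 loads collected) or ends the search with the loads gathered so far.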