  @Override
  protected LocalDateDoubleTimeSeries getReturnSeries(
     LocalDateDoubleTimeSeries ts, ValueRequirement desiredValue) {
   LocalDateDoubleTimeSeries differenceSeries = super.getReturnSeries(ts, desiredValue);
    // Decay factor (lambda) for the exponentially weighted volatility, read from the constraints.
    double lambda =
       Double.parseDouble(
           desiredValue.getConstraint(
               VolatilityWeightingFunctionUtils.VOLATILITY_WEIGHTING_LAMBDA_PROPERTY));
   TimeSeriesWeightedVolatilityOperator weightedVol =
       new TimeSeriesWeightedVolatilityOperator(lambda);
   LocalDateDoubleTimeSeries weightedVolSeries =
       (LocalDateDoubleTimeSeries) weightedVol.evaluate(ts);
   int n = weightedVolSeries.size();
   double endDateWeightedVol = weightedVolSeries.getLatestValueFast();
   double[] volWeightedDifferences = new double[n];
   for (int i = 0; i < n; i++) {
      // Debug output: date, raw difference and exponentially weighted volatility for that date.
      System.out.println(
         differenceSeries.getTimeAtIndex(i)
             + ","
             + differenceSeries.getValueAtIndexFast(i)
             + ","
             + weightedVolSeries.getValueAtIndexFast(i));
      // Rescale the raw difference to the end-date volatility level.
      volWeightedDifferences[i] =
         differenceSeries.getValueAtIndexFast(i)
             * endDateWeightedVol
             / weightedVolSeries.getValueAtIndexFast(i);
   }
   LocalDateDoubleTimeSeries volWeightedDifferenceSeries =
       ImmutableLocalDateDoubleTimeSeries.of(
           weightedVolSeries.timesArrayFast(), volWeightedDifferences);
   return volWeightedDifferenceSeries;
 }
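  // The loop above rescales each raw return by (latest weighted vol / weighted vol at that date),
  // so every historical move is expressed at the current volatility level. Below is a minimal,
  // self-contained sketch of that scaling using a hypothetical helper on plain arrays; the name
  // and inputs are illustrative only and not part of the class above.
  private static double[] scaleToLatestVolatility(double[] rawReturns, double[] weightedVols) {
    // Assumes both arrays are non-empty and aligned by date.
    double latestVol = weightedVols[weightedVols.length - 1];
    double[] scaled = new double[rawReturns.length];
    for (int i = 0; i < rawReturns.length; i++) {
      // A return realised when volatility was low is scaled up, and vice versa.
      scaled[i] = rawReturns[i] * latestVol / weightedVols[i];
    }
    return scaled;
  }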
  /**
    * Updates an existing time-series in the master. Where the provided time-series overlaps the
    * existing one, the intersecting points are corrected to the new values. Points later than the
    * latest point of the existing time-series are then appended.
   *
   * @param description a description of the time-series for display purposes, not null
   * @param dataSource the data source, not null
   * @param dataProvider the data provider, not null
   * @param dataField the data field, not null
   * @param observationTime the descriptive observation time key, e.g. LONDON_CLOSE, not null
    * @param oId the object identifier of the time-series to be updated, not null
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(
      String description,
      String dataSource,
      String dataProvider,
      String dataField,
      String observationTime,
      ObjectId oId,
      LocalDateDoubleTimeSeries timeSeries) {

    UniqueId uId = oId.atLatestVersion();

    ManageableHistoricalTimeSeries existingManageableTs = _htsMaster.getTimeSeries(uId);
    LocalDateDoubleTimeSeries existingTs = existingManageableTs.getTimeSeries();

    if (existingTs.isEmpty()) {
      uId = _htsMaster.updateTimeSeriesDataPoints(oId, timeSeries);
       s_logger.debug(
           "Updating time series " + oId + "[" + dataField + "] with all points, as it is currently empty");
     } else {
       // There is a non-empty matching time-series already in the master, so update it to reflect
       // the new time-series.
       // 1: 'correct' any differences in the subseries already present
      LocalDateDoubleTimeSeries tsIntersection =
          timeSeries.subSeries(
              existingTs.getEarliestTime(), true, existingTs.getLatestTime(), true);
      if (!tsIntersection.equals(existingTs)) {
        s_logger.debug(
            "Correcting time series "
                + oId
                + "["
                + dataField
                + "] from "
                + existingTs.getEarliestTime()
                + " to "
                + existingTs.getLatestTime());
        uId = _htsMaster.correctTimeSeriesDataPoints(oId, tsIntersection);
      }
      // 2: 'update' the time-series to add any new, later points
      if (existingTs.getLatestTime().isBefore(timeSeries.getLatestTime())) {
        LocalDateDoubleTimeSeries newSeries =
            timeSeries.subSeries(
                existingTs.getLatestTime(), false, timeSeries.getLatestTime(), true);
        if (newSeries.size() > 0) {
          s_logger.debug(
              "Updating time series "
                  + oId
                  + "["
                  + dataField
                  + "] from "
                  + newSeries.getEarliestTime()
                  + " to "
                  + newSeries.getLatestTime());
          uId = _htsMaster.updateTimeSeriesDataPoints(oId, newSeries);
        }
      }
    }
    return uId;
  }
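   // Hypothetical usage sketch of the method above (identifiers, dates and values are made up, and
   // it assumes the ImmutableLocalDateDoubleTimeSeries.of(LocalDate[], double[]) factory): a point
   // that overlaps the stored series is corrected in place, while a point after the stored series'
   // latest date is appended.
   public UniqueId writeExampleSeries() {
     LocalDateDoubleTimeSeries incoming =
         ImmutableLocalDateDoubleTimeSeries.of(
             new LocalDate[] {LocalDate.parse("2013-06-07"), LocalDate.parse("2013-06-09")},
             new double[] {17.5, 19.0});
     return writeTimeSeries(
         "Example close prices",
         "EXAMPLE_SOURCE",
         "EXAMPLE_PROVIDER",
         "PX_LAST",
         "LONDON_CLOSE",
         ObjectId.of("DbHts", "1000"),
         incoming);
   }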
  /**
   * @param x An array of DoubleTimeSeries. If the array has only one element, then this is assumed
   *     to be the price series and the result is the simple return. The dividend series is assumed
   *     to be the second element. It does not have to be the same length as the price series (in
   *     which case, dates without dividends are treated like the dividend was zero), and the
   *     dividend data points do not have to correspond to any of the dates in the price series (in
   *     which case, the result is the simple net return).
    * @throws IllegalArgumentException If the array is null or empty, or if the first time series
    *     is null
    * @throws TimeSeriesException If the price series has fewer than two data points, or if the
    *     calculation mode is strict and there are zeroes in the price series
    * @return A DoubleTimeSeries containing the return series. This will be at most one element
    *     shorter than the original price series.
   */
  @Override
  public LocalDateDoubleTimeSeries evaluate(final LocalDateDoubleTimeSeries... x) {
    ArgumentChecker.notEmpty(x, "x");
    ArgumentChecker.notNull(x[0], "first time series");
    final LocalDateDoubleTimeSeries ts = x[0];
    if (ts.size() < 2) {
      throw new TimeSeriesException("Need at least two data points to calculate return series");
    }
     // Use the second time series, if supplied, as the (possibly null) dividend series.
     final LocalDateDoubleTimeSeries d = x.length > 1 ? x[1] : null;

    final int[] resultDates = new int[ts.size() - 1];
    final double[] resultValues = new double[ts.size() - 1];
    int resultIndex = 0;

    final LocalDateDoubleEntryIterator it = ts.iterator();
    it.nextTimeFast();
    double previousValue = it.currentValue();

    double dividend;
    Double dividendTSData;
    while (it.hasNext()) {
      final int date = it.nextTimeFast();
      final double value = it.currentValue();

       // Skip points where either price is zero (in strict calculation mode, isValueNonZero
       // throws a TimeSeriesException instead).
       if (isValueNonZero(previousValue) && isValueNonZero(value)) {
        resultDates[resultIndex] = date;
        if (d == null) {
          dividend = 0;
        } else {
          dividendTSData = d.getValue(date);
          dividend = dividendTSData == null ? 0 : dividendTSData;
        }
        resultValues[resultIndex++] = (value + dividend) / previousValue;
      }
      previousValue = value;
    }
    return getSeries(resultDates, resultValues, resultIndex);
  }
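   // A worked sketch of the per-point calculation in the loop above, with made-up numbers: a
   // previous price of 100.0, a current price of 102.0 and a 1.0 dividend on the current date
   // give (102.0 + 1.0) / 100.0 = 1.03; a date with no dividend point contributes
   // price / previousPrice. The helper below is illustrative only, not part of the calculator.
   static double singlePointReturn(double previousPrice, double price, Double dividendOrNull) {
     // Missing dividend data is treated as a zero dividend, mirroring the loop above.
     double dividend = dividendOrNull == null ? 0 : dividendOrNull;
     return (price + dividend) / previousPrice;
   }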
  /**
    * Updates an existing time-series in the master. Overlapping points are corrected to the new
    * values, and any points later than the existing latest point are appended.
    *
   * @param uniqueId the unique identifier of the time-series to be updated, not null
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(UniqueId uniqueId, LocalDateDoubleTimeSeries timeSeries) {

    ManageableHistoricalTimeSeries existingManageableTs = _htsMaster.getTimeSeries(uniqueId);
    LocalDateDoubleTimeSeries existingTs = existingManageableTs.getTimeSeries();
    if (existingTs.isEmpty()) {
       uniqueId = _htsMaster.updateTimeSeriesDataPoints(uniqueId, timeSeries);
       s_logger.debug("Updating time series " + uniqueId + " with all points, as it is currently empty");
    } else {
       // There is a matching time-series already in the master, so update it to reflect the new
       // time-series.
       // 1: 'correct' any differences in the subseries already present
      LocalDateDoubleTimeSeries tsIntersection =
          timeSeries.subSeries(
              existingTs.getEarliestTime(), true, existingTs.getLatestTime(), true);
      if (!tsIntersection.equals(existingTs)) {
        s_logger.debug(
            "Correcting time series "
                + uniqueId
                + " from "
                + existingTs.getEarliestTime()
                + " to "
                + existingTs.getLatestTime());
        uniqueId = _htsMaster.correctTimeSeriesDataPoints(uniqueId.getObjectId(), tsIntersection);
      }
      // 2: 'update' the time-series to add any new, later points
      if (existingTs.getLatestTime().isBefore(timeSeries.getLatestTime())) {
        LocalDateDoubleTimeSeries newSeries =
            timeSeries.subSeries(
                existingTs.getLatestTime(), false, timeSeries.getLatestTime(), true);
        if (newSeries.size() > 0) {
          s_logger.debug(
              "Updating time series "
                  + uniqueId
                  + " from "
                  + newSeries.getEarliestTime()
                  + " to "
                  + newSeries.getLatestTime());
          uniqueId = _htsMaster.updateTimeSeriesDataPoints(uniqueId, newSeries);
        }
      }
    }
    return uniqueId;
  }
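   // Hypothetical usage sketch of the UniqueId overload above: append a single later point to a
   // series already stored in the master. The identifier, date and value are made up, and the
   // single-point series construction assumes the same array factory as the sketch above.
   public UniqueId appendExamplePoint() {
     LocalDateDoubleTimeSeries latestPoint =
         ImmutableLocalDateDoubleTimeSeries.of(
             new LocalDate[] {LocalDate.parse("2013-06-09")}, new double[] {19.0});
     return writeTimeSeries(UniqueId.of("DbHts", "1000"), latestPoint);
   }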
  public void basicOperation() {
    NonVersionedRedisHistoricalTimeSeriesSource source =
        new NonVersionedRedisHistoricalTimeSeriesSource(getJedisPool(), getRedisPrefix());

    UniqueId id1 = UniqueId.of("Test", "1");
    UniqueId id2 = UniqueId.of("Test", "2");
    UniqueId id3 = UniqueId.of("Test", "3");

     // Populate three five-point daily series.
     source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-04"), 14.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-05"), 15.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-06"), 16.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-07"), 17.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-08"), 18.0);

    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-04"), 24.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-05"), 25.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-06"), 26.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-07"), 27.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-08"), 28.0);

    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-04"), 34.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-05"), 35.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-06"), 36.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-07"), 37.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-08"), 38.0);

    Pair<LocalDate, Double> pair = null;
    HistoricalTimeSeries hts = null;
    LocalDateDoubleTimeSeries ts = null;

    pair = source.getLatestDataPoint(id3);
    assertNotNull(pair);
    assertEquals(LocalDate.parse("2013-06-08"), pair.getFirst());
    assertEquals(38.0, pair.getSecond(), 0.000001);

    assertNull(source.getHistoricalTimeSeries(UniqueId.of("Test", "5")));

    hts = source.getHistoricalTimeSeries(id2);
    assertNotNull(hts);
    assertEquals(id2, hts.getUniqueId());
    ts = hts.getTimeSeries();
    assertNotNull(ts);
    assertEquals(5, ts.size());
    assertEquals(24.0, ts.getValue(LocalDate.parse("2013-06-04")), 0.00001);
    assertEquals(25.0, ts.getValue(LocalDate.parse("2013-06-05")), 0.00001);
    assertEquals(26.0, ts.getValue(LocalDate.parse("2013-06-06")), 0.00001);
    assertEquals(27.0, ts.getValue(LocalDate.parse("2013-06-07")), 0.00001);
    assertEquals(28.0, ts.getValue(LocalDate.parse("2013-06-08")), 0.00001);

     // Lookup by external identifier bundle resolves to the first series.
     hts =
        source.getHistoricalTimeSeries(
            ExternalIdBundle.of(ExternalId.of("Test", "1")),
            LocalDate.now(),
            "Data Source",
            "Data Provider",
            "Data Field");
    assertNotNull(hts);
    assertEquals(id1, hts.getUniqueId());
    ts = hts.getTimeSeries();
    assertNotNull(ts);
    assertEquals(5, ts.size());
    assertEquals(14.0, ts.getValue(LocalDate.parse("2013-06-04")), 0.00001);
    assertEquals(15.0, ts.getValue(LocalDate.parse("2013-06-05")), 0.00001);
    assertEquals(16.0, ts.getValue(LocalDate.parse("2013-06-06")), 0.00001);
    assertEquals(17.0, ts.getValue(LocalDate.parse("2013-06-07")), 0.00001);
    assertEquals(18.0, ts.getValue(LocalDate.parse("2013-06-08")), 0.00001);
  }