@Override
 protected LocalDateDoubleTimeSeries getReturnSeries(
     LocalDateDoubleTimeSeries ts, ValueRequirement desiredValue) {
   LocalDateDoubleTimeSeries differenceSeries = super.getReturnSeries(ts, desiredValue);
   double lambda =
       Double.parseDouble(
           desiredValue.getConstraint(
               VolatilityWeightingFunctionUtils.VOLATILITY_WEIGHTING_LAMBDA_PROPERTY));
   TimeSeriesWeightedVolatilityOperator weightedVol =
       new TimeSeriesWeightedVolatilityOperator(lambda);
   LocalDateDoubleTimeSeries weightedVolSeries =
       (LocalDateDoubleTimeSeries) weightedVol.evaluate(ts);
   int n = weightedVolSeries.size();
   double endDateWeightedVol = weightedVolSeries.getLatestValueFast();
   double[] volWeightedDifferences = new double[n];
    for (int i = 0; i < n; i++) {
      // Scale each difference by the ratio of the end-date weighted volatility to the weighted
      // volatility prevailing at that point in the series.
      volWeightedDifferences[i] =
          differenceSeries.getValueAtIndexFast(i)
              * endDateWeightedVol
              / weightedVolSeries.getValueAtIndexFast(i);
    }
   LocalDateDoubleTimeSeries volWeightedDifferenceSeries =
       ImmutableLocalDateDoubleTimeSeries.of(
           weightedVolSeries.timesArrayFast(), volWeightedDifferences);
   return volWeightedDifferenceSeries;
 }
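 // A minimal, self-contained sketch (hypothetical helper, not part of the class above) of the
 // volatility weighting applied in getReturnSeries: each period's difference is rescaled by the
 // ratio of the latest weighted volatility to the weighted volatility at that point in time.
 static double[] volatilityWeight(double[] differences, double[] weightedVols) {
   double latestVol = weightedVols[weightedVols.length - 1];
   double[] weighted = new double[differences.length];
   for (int i = 0; i < differences.length; i++) {
     weighted[i] = differences[i] * latestVol / weightedVols[i];
   }
   return weighted;
 }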
 public void testCycle3() {
   LocalDateDoubleTimeSeries cycleObject = cycleObject(LocalDateDoubleTimeSeries.class, ts);
   assertEquals(ImmutableLocalDateDoubleTimeSeries.class, cycleObject.getClass());
   assertEquals(ts, cycleObject);
   cycleObject =
       cycleObject(
           LocalDateDoubleTimeSeries.class, ImmutableLocalDateDoubleTimeSeries.EMPTY_SERIES);
   assertEquals(ImmutableLocalDateDoubleTimeSeries.EMPTY_SERIES, cycleObject);
 }
  /**
   * @param x An array of DoubleTimeSeries. If the array has only one element, then this is assumed
   *     to be the price series and the result is the simple return. The dividend series is assumed
   *     to be the second element. It does not have to be the same length as the price series
   *     (dates without dividends are treated as if the dividend were zero), and the dividend data
   *     points do not have to correspond to any of the dates in the price series (in which case,
   *     the result is the simple net return).
   * @return A DoubleTimeSeries containing the return series. This will always be one element
   *     shorter than the original price series.
   * @throws IllegalArgumentException If the array is null or empty
   * @throws TimeSeriesException If the price series has fewer than two entries, or if the
   *     calculation mode is strict and there are zeroes in the price series
   */
  @Override
  public LocalDateDoubleTimeSeries evaluate(final LocalDateDoubleTimeSeries... x) {
    ArgumentChecker.notEmpty(x, "x");
    ArgumentChecker.notNull(x[0], "first time series");
    final LocalDateDoubleTimeSeries ts = x[0];
    if (ts.size() < 2) {
      throw new TimeSeriesException("Need at least two data points to calculate return series");
    }
    LocalDateDoubleTimeSeries d = null;
    if (x.length > 1 && x[1] != null) {
      d = x[1];
    }

    final int[] resultDates = new int[ts.size() - 1];
    final double[] resultValues = new double[ts.size() - 1];
    int resultIndex = 0;

    final LocalDateDoubleEntryIterator it = ts.iterator();
    it.nextTimeFast();
    double previousValue = it.currentValue();

    double dividend;
    Double dividendTSData;
    while (it.hasNext()) {
      final int date = it.nextTimeFast();
      final double value = it.currentValue();

      if (isValueNonZero(previousValue) && isValueNonZero(value)) {
        resultDates[resultIndex] = date;
        if (d == null) {
          dividend = 0;
        } else {
          dividendTSData = d.getValue(date);
          dividend = dividendTSData == null ? 0 : dividendTSData;
        }
        resultValues[resultIndex++] = (value + dividend) / previousValue;
      }
      previousValue = value;
    }
    return getSeries(resultDates, resultValues, resultIndex);
  }
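  // A minimal sketch of the per-date calculation performed by evaluate() for a three-point price
  // series with one dividend date (values invented for illustration): the value for each date t
  // is (price_t + dividend_t) / price_{t-1}, with a zero dividend on dates that have none.
  static double[] exampleReturnSeries() {
    double[] prices = {100.0, 102.0, 101.0};
    double[] dividends = {0.0, 1.0, 0.0}; // aligned with the price dates
    double[] returns = new double[prices.length - 1];
    for (int i = 1; i < prices.length; i++) {
      returns[i - 1] = (prices[i] + dividends[i]) / prices[i - 1];
    }
    return returns; // {1.03, ~0.9902}
  }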
  public void getHistoricalWithInclusiveExclusiveDates() throws Exception {
    LocalDate end = DateUtils.previousWeekDay();
    LocalDate start = end.minusDays(7);

    HistoricalTimeSeriesInfoSearchRequest request =
        new HistoricalTimeSeriesInfoSearchRequest(IDENTIFIERS);
    request.setValidityDate(LocalDate.now());
    request.setDataSource(BBG_DATA_SOURCE);
    request.setDataProvider(CMPL_DATA_PROVIDER);
    request.setDataField(CLOSE_DATA_FIELD);
    LocalDateDoubleTimeSeries timeSeries = randomTimeSeries();

    HistoricalTimeSeriesInfoSearchResult searchResult = new HistoricalTimeSeriesInfoSearchResult();
    HistoricalTimeSeriesInfoDocument doc = new HistoricalTimeSeriesInfoDocument();
    doc.setUniqueId(UID);
    doc.getInfo().setTimeSeriesObjectId(UID.getObjectId());
    searchResult.getDocuments().add(doc);

    when(_mockResolver.resolve(
            IDENTIFIERS,
            LocalDate.now(),
            BBG_DATA_SOURCE,
            CMPL_DATA_PROVIDER,
            CLOSE_DATA_FIELD,
            null))
        .thenReturn(new HistoricalTimeSeriesResolutionResult(doc.getInfo()));

    for (boolean includeStart : new boolean[] {true, false}) {
      for (boolean includeEnd : new boolean[] {true, false}) {
        // Also test max points limit for various values
        for (Integer maxPoints : new Integer[] {null, -10, -1, 1, 0, -2, 2, 10}) {
          LocalDate startInput = start;
          LocalDate endInput = end;
          if (!includeStart) {
            startInput = start.plusDays(1);
          }
          if (!includeEnd) {
            endInput = end.minusDays(1);
          }

          ManageableHistoricalTimeSeries hts = new ManageableHistoricalTimeSeries();
          // Expected series: the sub-series over [start, end] with the requested inclusivity,
          // truncated to maxPoints from the start (positive) or from the end (negative) when the
          // requested limit is smaller than the sub-series.
          LocalDateDoubleTimeSeries lddts =
              (maxPoints == null)
                      || (Math.abs(maxPoints)
                          >= timeSeries.subSeries(start, includeStart, end, includeEnd).size())
                  ? timeSeries.subSeries(start, includeStart, end, includeEnd)
                  : (maxPoints >= 0)
                      ? timeSeries.subSeries(start, includeStart, end, includeEnd).head(maxPoints)
                      : timeSeries.subSeries(start, includeStart, end, includeEnd).tail(-maxPoints);
          hts.setUniqueId(UID);
          hts.setTimeSeries(lddts);
          when(_mockMaster.getTimeSeries(
                  UID.getObjectId(),
                  VersionCorrection.LATEST,
                  HistoricalTimeSeriesGetFilter.ofRange(startInput, endInput, maxPoints)))
              .thenReturn(hts);
          when(_mockMaster.search(request)).thenReturn(searchResult);

          HistoricalTimeSeries test =
              (maxPoints == null)
                  ? _tsSource.getHistoricalTimeSeries(
                      IDENTIFIERS,
                      BBG_DATA_SOURCE,
                      CMPL_DATA_PROVIDER,
                      CLOSE_DATA_FIELD,
                      start,
                      includeStart,
                      end,
                      includeEnd)
                  : _tsSource.getHistoricalTimeSeries(
                      IDENTIFIERS,
                      BBG_DATA_SOURCE,
                      CMPL_DATA_PROVIDER,
                      CLOSE_DATA_FIELD,
                      start,
                      includeStart,
                      end,
                      includeEnd,
                      maxPoints);

          assertEquals(UID, test.getUniqueId());
          assertEquals(hts.getTimeSeries(), test.getTimeSeries());
        }
      }
    }
  }
  public void basicOperation() {
    NonVersionedRedisHistoricalTimeSeriesSource source =
        new NonVersionedRedisHistoricalTimeSeriesSource(getJedisPool(), getRedisPrefix());

    UniqueId id1 = UniqueId.of("Test", "1");
    UniqueId id2 = UniqueId.of("Test", "2");
    UniqueId id3 = UniqueId.of("Test", "3");

    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-04"), 14.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-05"), 15.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-06"), 16.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-07"), 17.0);
    source.setTimeSeriesPoint(id1, LocalDate.parse("2013-06-08"), 18.0);

    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-04"), 24.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-05"), 25.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-06"), 26.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-07"), 27.0);
    source.setTimeSeriesPoint(id2, LocalDate.parse("2013-06-08"), 28.0);

    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-04"), 34.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-05"), 35.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-06"), 36.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-07"), 37.0);
    source.setTimeSeriesPoint(id3, LocalDate.parse("2013-06-08"), 38.0);

    Pair<LocalDate, Double> pair = null;
    HistoricalTimeSeries hts = null;
    LocalDateDoubleTimeSeries ts = null;

    pair = source.getLatestDataPoint(id3);
    assertNotNull(pair);
    assertEquals(LocalDate.parse("2013-06-08"), pair.getFirst());
    assertEquals(38.0, pair.getSecond(), 0.000001);

    assertNull(source.getHistoricalTimeSeries(UniqueId.of("Test", "5")));

    hts = source.getHistoricalTimeSeries(id2);
    assertNotNull(hts);
    assertEquals(id2, hts.getUniqueId());
    ts = hts.getTimeSeries();
    assertNotNull(ts);
    assertEquals(5, ts.size());
    assertEquals(24.0, ts.getValue(LocalDate.parse("2013-06-04")), 0.00001);
    assertEquals(25.0, ts.getValue(LocalDate.parse("2013-06-05")), 0.00001);
    assertEquals(26.0, ts.getValue(LocalDate.parse("2013-06-06")), 0.00001);
    assertEquals(27.0, ts.getValue(LocalDate.parse("2013-06-07")), 0.00001);
    assertEquals(28.0, ts.getValue(LocalDate.parse("2013-06-08")), 0.00001);

    hts =
        source.getHistoricalTimeSeries(
            ExternalIdBundle.of(ExternalId.of("Test", "1")),
            LocalDate.now(),
            "Data Source",
            "Data Provider",
            "Data Field");
    assertNotNull(hts);
    assertEquals(id1, hts.getUniqueId());
    ts = hts.getTimeSeries();
    assertNotNull(ts);
    assertEquals(5, ts.size());
    assertEquals(14.0, ts.getValue(LocalDate.parse("2013-06-04")), 0.00001);
    assertEquals(15.0, ts.getValue(LocalDate.parse("2013-06-05")), 0.00001);
    assertEquals(16.0, ts.getValue(LocalDate.parse("2013-06-06")), 0.00001);
    assertEquals(17.0, ts.getValue(LocalDate.parse("2013-06-07")), 0.00001);
    assertEquals(18.0, ts.getValue(LocalDate.parse("2013-06-08")), 0.00001);
  }
 public void testEmptyCycle3() {
   LocalDateDoubleTimeSeries cycleObject = cycleObject(LocalDateDoubleTimeSeries.class, empty);
   assertEquals(ImmutableLocalDateDoubleTimeSeries.class, cycleObject.getClass());
   assertEquals(empty, cycleObject);
 }
    @Override
    public Set<ComputedValue> execute(
        final FunctionExecutionContext executionContext,
        final FunctionInputs inputs,
        final ComputationTarget target,
        final Set<ValueRequirement> desiredValues) {
      final Position position = target.getPosition();
      final ValueRequirement desiredValue = desiredValues.iterator().next();
      final ValueProperties constraints = desiredValue.getConstraints();
      final Set<String> resultCurrencies = constraints.getValues(CURRENCY);
      final FXForwardSecurity security = (FXForwardSecurity) position.getSecurity();
      final MultipleCurrencyAmount mca =
          (MultipleCurrencyAmount) inputs.getValue(ValueRequirementNames.FX_CURRENCY_EXPOSURE);
      final Currency payCurrency = security.getPayCurrency();
      final Currency receiveCurrency = security.getReceiveCurrency();
      final CurrencyPair currencyPair =
          _currencyPairs.getCurrencyPair(payCurrency, receiveCurrency);
      final Currency baseCurrency = currencyPair.getBase();
      final Currency currencyNonBase = currencyPair.getCounter(); // The non-base currency
      final double exposure = mca.getAmount(currencyNonBase);

      final ValueSpecification spec =
          new ValueSpecification(
              ValueRequirementNames.PNL_SERIES,
              target.toSpecification(),
              desiredValue.getConstraints());
      if (resultCurrencies == null || resultCurrencies.size() != 1) {
        s_logger.warn("No Currency property - returning result in base currency");
        final LocalDateDoubleTimeSeries fxSpotReturnSeries =
            (LocalDateDoubleTimeSeries) inputs.getValue(ValueRequirementNames.RETURN_SERIES);
        final LocalDateDoubleTimeSeries pnlSeries =
            fxSpotReturnSeries.multiply(
                position.getQuantity().doubleValue()
                    * exposure); // The P/L time series is in the base currency
        return Collections.singleton(new ComputedValue(spec, pnlSeries));
      }
      final Currency resultCurrency = Currency.of(Iterables.getOnlyElement(resultCurrencies));
      final LocalDateDoubleTimeSeries conversionTS =
          (LocalDateDoubleTimeSeries) inputs.getValue(HISTORICAL_FX_TIME_SERIES);
      if (conversionTS == null) {
        throw new OpenGammaRuntimeException(
            "Asked for result in "
                + resultCurrency
                + " but could not get "
                + baseCurrency
                + "/"
                + resultCurrency
                + " conversion series");
      }
      if (resultCurrency.equals(baseCurrency)) {
        final LocalDateDoubleTimeSeries fxSpotReturnSeries =
            (LocalDateDoubleTimeSeries) inputs.getValue(ValueRequirementNames.RETURN_SERIES);
        final LocalDateDoubleTimeSeries convertedSeries =
            conversionTS
                .reciprocal()
                .multiply(
                    position.getQuantity().doubleValue()
                        * exposure); // The P/L time series is in the base currency
        final LocalDateDoubleTimeSeries pnlSeries =
            fxSpotReturnSeries.multiply(
                convertedSeries); // The P/L time series is in the base currency
        return Collections.singleton(new ComputedValue(spec, pnlSeries));
      }
      final LocalDateDoubleTimeSeries fxSpotReturnSeries =
          (LocalDateDoubleTimeSeries) inputs.getValue(ValueRequirementNames.RETURN_SERIES);
      final LocalDateDoubleTimeSeries convertedSeries =
          conversionTS.multiply(position.getQuantity().doubleValue() * exposure);
      // The P/L time series is converted into the requested result currency
      final LocalDateDoubleTimeSeries pnlSeries = convertedSeries.multiply(fxSpotReturnSeries);
      return Collections.singleton(new ComputedValue(spec, pnlSeries));
    }
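    // A condensed sketch (hypothetical helper, not part of the function above) of the three P&L
    // cases handled in execute(): no usable result currency, a result currency equal to the
    // currency pair's base, and any other result currency.
    private static LocalDateDoubleTimeSeries scalePnl(
        LocalDateDoubleTimeSeries returnSeries,
        LocalDateDoubleTimeSeries conversionSeries, // base/result FX series; ignored if convert is false
        double quantityTimesExposure,
        boolean convert,
        boolean resultIsBase) {
      if (!convert) {
        return returnSeries.multiply(quantityTimesExposure);
      }
      LocalDateDoubleTimeSeries scaled =
          resultIsBase
              ? conversionSeries.reciprocal().multiply(quantityTimesExposure)
              : conversionSeries.multiply(quantityTimesExposure);
      return returnSeries.multiply(scaled);
    }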
  /**
   * Updates an existing time-series in the master. If the provided time series overlaps the
   * existing one, the intersecting points are corrected to the new values. After that, points
   * later than the latest point of the existing time series are appended.
   *
   * @param description a description of the time-series for display purposes, not null
   * @param dataSource the data source, not null
   * @param dataProvider the data provider, not null
   * @param dataField the data field, not null
   * @param observationTime the descriptive observation time key, e.g. LONDON_CLOSE, not null
   * @param oId the unique identifier of the time-series to be updated, not null
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(
      String description,
      String dataSource,
      String dataProvider,
      String dataField,
      String observationTime,
      ObjectId oId,
      LocalDateDoubleTimeSeries timeSeries) {

    UniqueId uId = oId.atLatestVersion();

    ManageableHistoricalTimeSeries existingManageableTs = _htsMaster.getTimeSeries(uId);
    LocalDateDoubleTimeSeries existingTs = existingManageableTs.getTimeSeries();

    if (existingTs.isEmpty()) {
      uId = _htsMaster.updateTimeSeriesDataPoints(oId, timeSeries);
      s_logger.debug(
          "Updating time series " + oId + "[" + dataField + "] with all points as it is currently empty");
    } else {
      // There is a non-empty matching time-series already in the master so update it to reflect the
      // new time-series
      // 1: 'correct' any differences in the subseries already present
      LocalDateDoubleTimeSeries tsIntersection =
          timeSeries.subSeries(
              existingTs.getEarliestTime(), true, existingTs.getLatestTime(), true);
      if (!tsIntersection.equals(existingTs)) {
        s_logger.debug(
            "Correcting time series "
                + oId
                + "["
                + dataField
                + "] from "
                + existingTs.getEarliestTime()
                + " to "
                + existingTs.getLatestTime());
        uId = _htsMaster.correctTimeSeriesDataPoints(oId, tsIntersection);
      }
      // 2: 'update' the time-series to add any new, later points
      if (existingTs.getLatestTime().isBefore(timeSeries.getLatestTime())) {
        LocalDateDoubleTimeSeries newSeries =
            timeSeries.subSeries(
                existingTs.getLatestTime(), false, timeSeries.getLatestTime(), true);
        if (newSeries.size() > 0) {
          s_logger.debug(
              "Updating time series "
                  + oId
                  + "["
                  + dataField
                  + "] from "
                  + newSeries.getEarliestTime()
                  + " to "
                  + newSeries.getLatestTime());
          uId = _htsMaster.updateTimeSeriesDataPoints(oId, newSeries);
        }
      }
    }
    return uId;
  }
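  // A self-contained sketch (hypothetical helper, not part of this class) of how the incoming
  // series is split above: the overlap with the existing series is used for a correction, and
  // only points strictly after the existing latest date are used for an update.
  static LocalDateDoubleTimeSeries[] splitForWrite(
      LocalDateDoubleTimeSeries existing, LocalDateDoubleTimeSeries incoming) {
    LocalDateDoubleTimeSeries overlap =
        incoming.subSeries(existing.getEarliestTime(), true, existing.getLatestTime(), true);
    LocalDateDoubleTimeSeries newerPoints =
        existing.getLatestTime().isBefore(incoming.getLatestTime())
            ? incoming.subSeries(existing.getLatestTime(), false, incoming.getLatestTime(), true)
            : ImmutableLocalDateDoubleTimeSeries.EMPTY_SERIES;
    return new LocalDateDoubleTimeSeries[] {overlap, newerPoints};
  }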
  /**
   * Adds or updates a time-series in the master. The provided series may be a sub-set of the data
   * points already present; points absent from it are not 'erased', the existing data is only
   * supplemented.
   *
   * @param description a description of the time-series for display purposes, not null
   * @param dataSource the data source, not null
   * @param dataProvider the data provider, not null
   * @param dataField the data field, not null
   * @param observationTime the descriptive observation time key, e.g. LONDON_CLOSE, not null
   * @param externalIdBundle the external identifiers with which the time-series is associated, not
   *     null
   * @param externalIdSearchType the external identifier search type for matching an existing
   *     time-series, null to use the default
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(
      String description,
      String dataSource,
      String dataProvider,
      String dataField,
      String observationTime,
      ExternalIdBundle externalIdBundle,
      ExternalIdSearchType externalIdSearchType,
      LocalDateDoubleTimeSeries timeSeries) {
    ArgumentChecker.notNull(description, "description");
    ArgumentChecker.notNull(dataSource, "dataSource");
    ArgumentChecker.notNull(dataProvider, "dataProvider");
    ArgumentChecker.notNull(dataField, "dataField");
    ArgumentChecker.notNull(observationTime, "observationTime");
    ArgumentChecker.notNull(externalIdBundle, "externalIdBundle");
    ArgumentChecker.notNull(timeSeries, "timeSeries");

    HistoricalTimeSeriesInfoSearchRequest htsSearchReq =
        new HistoricalTimeSeriesInfoSearchRequest();
    ExternalIdSearch idSearch = ExternalIdSearch.of(externalIdBundle);
    if (externalIdSearchType != null) {
      idSearch = idSearch.withSearchType(externalIdSearchType);
    }
    htsSearchReq.setExternalIdSearch(idSearch);
    htsSearchReq.setDataSource(dataSource);
    htsSearchReq.setDataProvider(dataProvider);
    htsSearchReq.setDataField(dataField);
    htsSearchReq.setObservationTime(observationTime);
    HistoricalTimeSeriesInfoSearchResult searchResult = _htsMaster.search(htsSearchReq);
    if (searchResult.getDocuments().size() > 0) {
      if (searchResult.getDocuments().size() > 1) {
        s_logger.warn(
            "Found multiple time-series matching search. Will only update the first. Search {} returned {}",
            htsSearchReq,
            searchResult.getInfoList());
      }
      // update existing time series
      HistoricalTimeSeriesInfoDocument existingTsDoc = searchResult.getFirstDocument();
      return writeTimeSeries(
          description,
          dataSource,
          dataProvider,
          dataField,
          observationTime,
          existingTsDoc.getObjectId(),
          timeSeries);
    } else {
      // add new time series
      ManageableHistoricalTimeSeriesInfo info = new ManageableHistoricalTimeSeriesInfo();
      info.setDataField(dataField);
      info.setDataSource(dataSource);
      info.setDataProvider(dataProvider);
      info.setObservationTime(observationTime);
      info.setExternalIdBundle(ExternalIdBundleWithDates.of(externalIdBundle));
      info.setName(description);
      HistoricalTimeSeriesInfoDocument htsInfoDoc = new HistoricalTimeSeriesInfoDocument();
      htsInfoDoc.setInfo(info);

      HistoricalTimeSeriesInfoDocument addedInfoDoc = _htsMaster.add(htsInfoDoc);
      s_logger.debug(
          "Adding time series "
              + externalIdBundle
              + " from "
              + timeSeries.getEarliestTime()
              + " to "
              + timeSeries.getLatestTime());
      return _htsMaster.updateTimeSeriesDataPoints(
          addedInfoDoc.getInfo().getTimeSeriesObjectId(), timeSeries);
    }
  }
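  // Illustrative call to the bundle-based overload above (a sketch; every identifier and value
  // below is invented, and "writer" stands for an instance of the enclosing class, which is not
  // named in this excerpt):
  //
  //   UniqueId written = writer.writeTimeSeries(
  //       "AAPL US Equity close",          // description
  //       "BLOOMBERG",                     // dataSource
  //       "CMPL",                          // dataProvider
  //       "PX_LAST",                       // dataField
  //       "LONDON_CLOSE",                  // observationTime
  //       ExternalIdBundle.of(ExternalId.of("TICKER", "AAPL US Equity")),
  //       null,                            // null uses the default external id search type
  //       ImmutableLocalDateDoubleTimeSeries.of(
  //           new LocalDate[] {LocalDate.of(2013, 6, 4), LocalDate.of(2013, 6, 5)},
  //           new double[] {100.0, 101.0}));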
  /**
   * Updates an existing time-series in the master.
   *
   * @param uniqueId the unique identifier of the time-series to be updated, not null
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(UniqueId uniqueId, LocalDateDoubleTimeSeries timeSeries) {

    ManageableHistoricalTimeSeries existingManageableTs = _htsMaster.getTimeSeries(uniqueId);
    LocalDateDoubleTimeSeries existingTs = existingManageableTs.getTimeSeries();
    if (existingTs.isEmpty()) {
      s_logger.debug("Updating time series " + uniqueId + " with all points as it is currently empty");
      uniqueId = _htsMaster.updateTimeSeriesDataPoints(uniqueId, timeSeries);
    } else {
      // There is a matching time-series already in the master so update it to reflect the new
      // time-series
      // 1: 'correct' any differences in the subseries already present
      LocalDateDoubleTimeSeries tsIntersection =
          timeSeries.subSeries(
              existingTs.getEarliestTime(), true, existingTs.getLatestTime(), true);
      if (!tsIntersection.equals(existingTs)) {
        s_logger.debug(
            "Correcting time series "
                + uniqueId
                + " from "
                + existingTs.getEarliestTime()
                + " to "
                + existingTs.getLatestTime());
        uniqueId = _htsMaster.correctTimeSeriesDataPoints(uniqueId.getObjectId(), tsIntersection);
      }
      // 2: 'update' the time-series to add any new, later points
      if (existingTs.getLatestTime().isBefore(timeSeries.getLatestTime())) {
        LocalDateDoubleTimeSeries newSeries =
            timeSeries.subSeries(
                existingTs.getLatestTime(), false, timeSeries.getLatestTime(), true);
        if (newSeries.size() > 0) {
          s_logger.debug(
              "Updating time series "
                  + uniqueId
                  + " from "
                  + newSeries.getEarliestTime()
                  + " to "
                  + newSeries.getLatestTime());
          uniqueId = _htsMaster.updateTimeSeriesDataPoints(uniqueId, newSeries);
        }
      }
    }
    return uniqueId;
  }
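  // Similarly, an illustrative call to the UniqueId-based overload above (a sketch; the
  // identifier and values are invented):
  //
  //   UniqueId written = writer.writeTimeSeries(
  //       UniqueId.of("DbHts", "1000"),
  //       ImmutableLocalDateDoubleTimeSeries.of(
  //           new LocalDate[] {LocalDate.of(2013, 6, 9)}, new double[] {19.0}));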