Example #1
  public void testUpdateTimeSeriesAddNewLaterPointsOnly() {
    // Add the test series
    testAddTimeSeries();

    List<LocalDate> dates = ImmutableList.of(_today.plusDays(1));
    List<Double> values = ImmutableList.of(4d);
    ArrayLocalDateDoubleTimeSeries newTs = new ArrayLocalDateDoubleTimeSeries(dates, values);
    UniqueId id =
        _htsWriter.writeTimeSeries(
            DESCRIPTION,
            DATA_SOURCE,
            DATA_PROVIDER,
            DATA_FIELD,
            OBSERVATION_TIME,
            ExternalIdBundle.of(ID),
            newTs);

    ManageableHistoricalTimeSeries manageableTs = _htsMaster.getTimeSeries(id);
    LocalDateDoubleTimeSeries readTs = manageableTs.getTimeSeries();
    List<LocalDate> expectedDates =
        ImmutableList.of(_today.minusDays(2), _today.minusDays(1), _today, _today.plusDays(1));
    List<Double> expectedValues = ImmutableList.of(1d, 2d, 3d, 4d);
    ArrayLocalDateDoubleTimeSeries expectedTs =
        new ArrayLocalDateDoubleTimeSeries(expectedDates, expectedValues);
    assertEquals(expectedTs, readTs);
  }
Example #2
  public void testAddUpdateTimeSeriesSingleExistingPoint() {
    List<LocalDate> dates = ImmutableList.of(_today);
    List<Double> origValues = ImmutableList.of(1d);
    ArrayLocalDateDoubleTimeSeries origTs = new ArrayLocalDateDoubleTimeSeries(dates, origValues);
    UniqueId id =
        _htsWriter.writeTimeSeries(
            DESCRIPTION,
            DATA_SOURCE,
            DATA_PROVIDER,
            DATA_FIELD,
            OBSERVATION_TIME,
            ExternalIdBundle.of(ID),
            origTs);

    ManageableHistoricalTimeSeries manageableTs = _htsMaster.getTimeSeries(id);
    LocalDateDoubleTimeSeries readTs = manageableTs.getTimeSeries();
    assertEquals(origTs, readTs);

    List<Double> updatedValues = ImmutableList.of(2d);
    ArrayLocalDateDoubleTimeSeries updatedTs =
        new ArrayLocalDateDoubleTimeSeries(dates, updatedValues);
    id =
        _htsWriter.writeTimeSeries(
            DESCRIPTION,
            DATA_SOURCE,
            DATA_PROVIDER,
            DATA_FIELD,
            OBSERVATION_TIME,
            ExternalIdBundle.of(ID),
            updatedTs);

    manageableTs = _htsMaster.getTimeSeries(id);
    readTs = manageableTs.getTimeSeries();
    assertEquals(updatedTs, readTs);
  }
  /**
   * Updates an existing time-series in the master. Where the supplied time-series overlaps the
   * existing one, the overlapping points are corrected to the new values; points later than the
   * latest existing point are then appended.
   *
   * @param description a description of the time-series for display purposes, not null
   * @param dataSource the data source, not null
   * @param dataProvider the data provider, not null
   * @param dataField the data field, not null
   * @param observationTime the descriptive observation time key, e.g. LONDON_CLOSE, not null
   * @param oId the object identifier of the time-series to be updated, not null
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(
      String description,
      String dataSource,
      String dataProvider,
      String dataField,
      String observationTime,
      ObjectId oId,
      LocalDateDoubleTimeSeries timeSeries) {

    UniqueId uId = oId.atLatestVersion();

    ManageableHistoricalTimeSeries existingManageableTs = _htsMaster.getTimeSeries(uId);
    LocalDateDoubleTimeSeries existingTs = existingManageableTs.getTimeSeries();

    if (existingTs.isEmpty()) {
      uId = _htsMaster.updateTimeSeriesDataPoints(oId, timeSeries);
      s_logger.debug(
          "Updating time series " + oId + "[" + dataField
              + "] with all points as the series is currently empty");
    } else {
      // There is a non-empty matching time-series already in the master so update it to reflect the
      // new time-series
      // 1: 'correct' any differences in the subseries already present
      LocalDateDoubleTimeSeries tsIntersection =
          timeSeries.subSeries(
              existingTs.getEarliestTime(), true, existingTs.getLatestTime(), true);
      if (!tsIntersection.equals(existingTs)) {
        s_logger.debug(
            "Correcting time series "
                + oId
                + "["
                + dataField
                + "] from "
                + existingTs.getEarliestTime()
                + " to "
                + existingTs.getLatestTime());
        uId = _htsMaster.correctTimeSeriesDataPoints(oId, tsIntersection);
      }
      // 2: 'update' the time-series to add any new, later points
      if (existingTs.getLatestTime().isBefore(timeSeries.getLatestTime())) {
        LocalDateDoubleTimeSeries newSeries =
            timeSeries.subSeries(
                existingTs.getLatestTime(), false, timeSeries.getLatestTime(), true);
        if (newSeries.size() > 0) {
          s_logger.debug(
              "Updating time series "
                  + oId
                  + "["
                  + dataField
                  + "] from "
                  + newSeries.getEarliestTime()
                  + " to "
                  + newSeries.getLatestTime());
          uId = _htsMaster.updateTimeSeriesDataPoints(oId, newSeries);
        }
      }
    }
    return uId;
  }
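A minimal usage sketch of the ObjectId-based overload above. This is not from the source: `seriesObjectId` is an illustrative identifier, `_htsWriter` echoes the test fixture, and the dates/values assume a series like the one created in testAddTimeSeries.

    // Sketch only: assumes an existing series for `seriesObjectId` holding
    // (today-2, today-1, today) -> (1, 2, 3), as written by testAddTimeSeries.
    List<LocalDate> dates = ImmutableList.of(_today.minusDays(1), _today, _today.plusDays(1));
    List<Double> values = ImmutableList.of(2.5d, 3d, 4d);
    ArrayLocalDateDoubleTimeSeries update = new ArrayLocalDateDoubleTimeSeries(dates, values);

    // The overlapping points (today-1, today) are corrected to 2.5 and 3.0;
    // the later point (today+1, 4.0) is appended.
    UniqueId latestId =
        _htsWriter.writeTimeSeries(
            DESCRIPTION,
            DATA_SOURCE,
            DATA_PROVIDER,
            DATA_FIELD,
            OBSERVATION_TIME,
            seriesObjectId,
            update);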
  public void getHistoricalTimeSeriesByUID() throws Exception {
    ManageableHistoricalTimeSeries hts = new ManageableHistoricalTimeSeries();
    hts.setUniqueId(UID);
    hts.setTimeSeries(randomTimeSeries());
    when(_mockMaster.getTimeSeries(UID, HistoricalTimeSeriesGetFilter.ofRange(null, null)))
        .thenReturn(hts);

    HistoricalTimeSeries test = _tsSource.getHistoricalTimeSeries(UID);
    verify(_mockMaster, times(1))
        .getTimeSeries(UID, HistoricalTimeSeriesGetFilter.ofRange(null, null));

    assertEquals(UID, test.getUniqueId());
    assertEquals(hts.getTimeSeries().times(), test.getTimeSeries().times());
    assertEquals(hts.getTimeSeries().values(), test.getTimeSeries().values());
  }
  public void getHistoricalTimeSeriesByExternalIdWithMetaData() throws Exception {
    HistoricalTimeSeriesInfoSearchRequest request =
        new HistoricalTimeSeriesInfoSearchRequest(IDENTIFIERS);
    request.setValidityDate(LocalDate.now());
    request.setDataSource(BBG_DATA_SOURCE);
    request.setDataProvider(CMPL_DATA_PROVIDER);
    request.setDataField(CLOSE_DATA_FIELD);

    HistoricalTimeSeriesInfoSearchResult searchResult = new HistoricalTimeSeriesInfoSearchResult();
    HistoricalTimeSeriesInfoDocument doc = new HistoricalTimeSeriesInfoDocument();
    doc.setUniqueId(UID);

    when(_mockResolver.resolve(
            IDENTIFIERS,
            LocalDate.now(),
            BBG_DATA_SOURCE,
            CMPL_DATA_PROVIDER,
            CLOSE_DATA_FIELD,
            null))
        .thenReturn(new HistoricalTimeSeriesResolutionResult(doc.getInfo()));

    doc.getInfo().setTimeSeriesObjectId(UID.getObjectId());
    searchResult.getDocuments().add(doc);
    when(_mockMaster.search(request)).thenReturn(searchResult);

    ManageableHistoricalTimeSeries hts = new ManageableHistoricalTimeSeries();
    hts.setUniqueId(UID);
    hts.setTimeSeries(randomTimeSeries());
    when(_mockMaster.getTimeSeries(
            UID.getObjectId(),
            VersionCorrection.LATEST,
            HistoricalTimeSeriesGetFilter.ofRange(null, null)))
        .thenReturn(hts);

    HistoricalTimeSeries test =
        _tsSource.getHistoricalTimeSeries(
            IDENTIFIERS, BBG_DATA_SOURCE, CMPL_DATA_PROVIDER, CLOSE_DATA_FIELD);
    verify(_mockMaster, times(1))
        .getTimeSeries(
            UID.getObjectId(),
            VersionCorrection.LATEST,
            HistoricalTimeSeriesGetFilter.ofRange(null, null));

    assertEquals(UID, test.getUniqueId());
  }
Example #6
  public void testAddTimeSeries() {
    List<LocalDate> dates = ImmutableList.of(_today.minusDays(2), _today.minusDays(1), _today);
    List<Double> values = ImmutableList.of(1d, 2d, 3d);
    ArrayLocalDateDoubleTimeSeries origTs = new ArrayLocalDateDoubleTimeSeries(dates, values);
    UniqueId id =
        _htsWriter.writeTimeSeries(
            DESCRIPTION,
            DATA_SOURCE,
            DATA_PROVIDER,
            DATA_FIELD,
            OBSERVATION_TIME,
            ExternalIdBundle.of(ID),
            origTs);

    ManageableHistoricalTimeSeries manageableTs = _htsMaster.getTimeSeries(id);
    LocalDateDoubleTimeSeries readTs = manageableTs.getTimeSeries();
    assertEquals(origTs, readTs);
  }
  /**
   * Updates an existing time-series in the master.
   *
   * @param uniqueId the unique identifier of the time-series to be updated, not null
   * @param timeSeries the time-series, not null
   * @return the unique identifier of the time-series
   */
  public UniqueId writeTimeSeries(UniqueId uniqueId, LocalDateDoubleTimeSeries timeSeries) {

    ManageableHistoricalTimeSeries existingManageableTs = _htsMaster.getTimeSeries(uniqueId);
    LocalDateDoubleTimeSeries existingTs = existingManageableTs.getTimeSeries();
    if (existingTs.isEmpty()) {
      uniqueId = _htsMaster.updateTimeSeriesDataPoints(uniqueId, timeSeries);
      s_logger.debug(
          "Updating time series " + uniqueId
              + " with all points as the series is currently empty");
    } else {
      // There is a matching time-series already in the master so update it to reflect the new
      // time-series
      // 1: 'correct' any differences in the subseries already present
      LocalDateDoubleTimeSeries tsIntersection =
          timeSeries.subSeries(
              existingTs.getEarliestTime(), true, existingTs.getLatestTime(), true);
      if (!tsIntersection.equals(existingTs)) {
        s_logger.debug(
            "Correcting time series "
                + uniqueId
                + " from "
                + existingTs.getEarliestTime()
                + " to "
                + existingTs.getLatestTime());
        uniqueId = _htsMaster.correctTimeSeriesDataPoints(uniqueId.getObjectId(), tsIntersection);
      }
      // 2: 'update' the time-series to add any new, later points
      if (existingTs.getLatestTime().isBefore(timeSeries.getLatestTime())) {
        LocalDateDoubleTimeSeries newSeries =
            timeSeries.subSeries(
                existingTs.getLatestTime(), false, timeSeries.getLatestTime(), true);
        if (newSeries.size() > 0) {
          s_logger.debug(
              "Updating time series "
                  + uniqueId
                  + " from "
                  + newSeries.getEarliestTime()
                  + " to "
                  + newSeries.getLatestTime());
          uniqueId = _htsMaster.updateTimeSeriesDataPoints(uniqueId, newSeries);
        }
      }
    }
    return uniqueId;
  }
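By contrast, a sketch of the UniqueId overload above (illustrative, assuming both overloads live on the same writer used in the tests; `existingUid` and `update` are placeholder names). The ObjectId overload always resolves the latest version via atLatestVersion(), whereas this one reads the exact version it is given before correcting overlaps and appending later points.

    // Sketch only: `existingUid` identifies a specific version of the series and
    // `update` is a LocalDateDoubleTimeSeries that overlaps and extends it.
    UniqueId resultId = _htsWriter.writeTimeSeries(existingUid, update);
    // resultId is the identifier returned by the last correction/update applied.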
  public void getHistoricalTimeSeriesByExternalIdWithoutMetaData() throws Exception {
    ManageableHistoricalTimeSeries hts = new ManageableHistoricalTimeSeries();
    hts.setUniqueId(UID);
    hts.setTimeSeries(randomTimeSeries());
    when(_mockMaster.getTimeSeries(UID, HistoricalTimeSeriesGetFilter.ofRange(null, null)))
        .thenReturn(hts);
    ManageableHistoricalTimeSeriesInfo tsInfo = new ManageableHistoricalTimeSeriesInfo();
    tsInfo.setUniqueId(UID);
    when(_mockResolver.resolve(
            IDENTIFIERS, LocalDate.now(), null, null, CLOSE_DATA_FIELD, TEST_CONFIG))
        .thenReturn(new HistoricalTimeSeriesResolutionResult(tsInfo));

    HistoricalTimeSeries test =
        _tsSource.getHistoricalTimeSeries(CLOSE_DATA_FIELD, IDENTIFIERS, TEST_CONFIG);
    verify(_mockMaster, times(1))
        .getTimeSeries(UID, HistoricalTimeSeriesGetFilter.ofRange(null, null));

    assertEquals(UID, test.getUniqueId());
    assertEquals(hts.getTimeSeries().times(), test.getTimeSeries().times());
    assertEquals(hts.getTimeSeries().values(), test.getTimeSeries().values());
  }
Example #9
  @Test(enabled = false) // Current implementation does not support removing points
  public void testUpdateTimeSeriesRemoveExistingPoints() {
    // Add the test series
    testAddTimeSeries();

    List<LocalDate> dates = ImmutableList.of(_today.minusDays(2), _today);
    List<Double> values = ImmutableList.of(6d, 7d);
    ArrayLocalDateDoubleTimeSeries updatedTs = new ArrayLocalDateDoubleTimeSeries(dates, values);
    UniqueId id =
        _htsWriter.writeTimeSeries(
            DESCRIPTION,
            DATA_SOURCE,
            DATA_PROVIDER,
            DATA_FIELD,
            OBSERVATION_TIME,
            ExternalIdBundle.of(ID),
            updatedTs);

    ManageableHistoricalTimeSeries manageableTs = _htsMaster.getTimeSeries(id);
    LocalDateDoubleTimeSeries readTs = manageableTs.getTimeSeries();
    assertEquals(updatedTs, readTs);
  }
  public void getHistoricalWithInclusiveExclusiveDates() throws Exception {
    LocalDate end = DateUtils.previousWeekDay();
    LocalDate start = end.minusDays(7);

    HistoricalTimeSeriesInfoSearchRequest request =
        new HistoricalTimeSeriesInfoSearchRequest(IDENTIFIERS);
    request.setValidityDate(LocalDate.now());
    request.setDataSource(BBG_DATA_SOURCE);
    request.setDataProvider(CMPL_DATA_PROVIDER);
    request.setDataField(CLOSE_DATA_FIELD);
    LocalDateDoubleTimeSeries timeSeries = randomTimeSeries();

    HistoricalTimeSeriesInfoSearchResult searchResult = new HistoricalTimeSeriesInfoSearchResult();
    HistoricalTimeSeriesInfoDocument doc = new HistoricalTimeSeriesInfoDocument();
    doc.setUniqueId(UID);
    doc.getInfo().setTimeSeriesObjectId(UID.getObjectId());
    searchResult.getDocuments().add(doc);

    when(_mockResolver.resolve(
            IDENTIFIERS,
            LocalDate.now(),
            BBG_DATA_SOURCE,
            CMPL_DATA_PROVIDER,
            CLOSE_DATA_FIELD,
            null))
        .thenReturn(new HistoricalTimeSeriesResolutionResult(doc.getInfo()));

    for (boolean includeStart : new boolean[] {true, false}) {
      for (boolean includeEnd : new boolean[] {true, false}) {
        // Also test max points limit for various values
        for (Integer maxPoints : new Integer[] {null, -10, -1, 1, 0, -2, 2, 10}) {
          LocalDate startInput = start;
          LocalDate endInput = end;
          if (!includeStart) {
            startInput = start.plusDays(1);
          }
          if (!includeEnd) {
            endInput = end.minusDays(1);
          }

          ManageableHistoricalTimeSeries hts = new ManageableHistoricalTimeSeries();
          // Expected result: the sub-series over [start, end], truncated to maxPoints
          // (earliest points for a positive limit, latest for a negative one).
          LocalDateDoubleTimeSeries window =
              timeSeries.subSeries(start, includeStart, end, includeEnd);
          LocalDateDoubleTimeSeries lddts;
          if (maxPoints == null || Math.abs(maxPoints) >= window.size()) {
            lddts = window;
          } else if (maxPoints >= 0) {
            lddts = window.head(maxPoints);
          } else {
            lddts = window.tail(-maxPoints);
          }
          hts.setUniqueId(UID);
          hts.setTimeSeries(lddts);
          when(_mockMaster.getTimeSeries(
                  UID.getObjectId(),
                  VersionCorrection.LATEST,
                  HistoricalTimeSeriesGetFilter.ofRange(startInput, endInput, maxPoints)))
              .thenReturn(hts);
          when(_mockMaster.search(request)).thenReturn(searchResult);

          HistoricalTimeSeries test =
              (maxPoints == null)
                  ? _tsSource.getHistoricalTimeSeries(
                      IDENTIFIERS,
                      BBG_DATA_SOURCE,
                      CMPL_DATA_PROVIDER,
                      CLOSE_DATA_FIELD,
                      start,
                      includeStart,
                      end,
                      includeEnd)
                  : _tsSource.getHistoricalTimeSeries(
                      IDENTIFIERS,
                      BBG_DATA_SOURCE,
                      CMPL_DATA_PROVIDER,
                      CLOSE_DATA_FIELD,
                      start,
                      includeStart,
                      end,
                      includeEnd,
                      maxPoints);

          assertEquals(UID, test.getUniqueId());
          assertEquals(hts.getTimeSeries(), test.getTimeSeries());
        }
      }
    }
  }
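For reference, the maxPoints convention the loop above encodes, shown in isolation (a sketch reusing the test's timeSeries/start/end; the variable names are illustrative): a positive limit keeps the earliest points of the requested window, a negative limit keeps the latest.

    // Sketch of the expectation exercised above: head() for a positive limit,
    // tail() for a negative one, and the whole window when the limit covers it.
    LocalDateDoubleTimeSeries window = timeSeries.subSeries(start, true, end, true);
    LocalDateDoubleTimeSeries earliestTwo = window.head(2); // maxPoints = 2
    LocalDateDoubleTimeSeries latestTwo = window.tail(2);   // maxPoints = -2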
 private DoubleMatrix1D getSensitivities(
     final SecuritySource secSource,
     final FunctionInputs inputs,
     final RawSecurity rawSecurity,
     final InterpolatedYieldCurveSpecificationWithSecurities curveSpec,
     final YieldAndDiscountCurve curve) {
   final Collection<FactorExposureData> decodedSensitivities =
       RawSecurityUtils.decodeFactorExposureData(secSource, rawSecurity);
   final double[] entries = new double[curveSpec.getStrips().size()];
   int i = 0;
   for (final FixedIncomeStripWithSecurity strip : curveSpec.getStrips()) {
     final FactorExposureData swapExternalSensitivitiesData =
         searchForSwapTenorMatch(decodedSensitivities, strip);
     if (swapExternalSensitivitiesData != null) {
       final ComputedValue computedValue =
           inputs.getComputedValue(
               getSensitivityRequirement(swapExternalSensitivitiesData.getExposureExternalId()));
       if (computedValue != null) {
         final ManageableHistoricalTimeSeries mhts =
             (ManageableHistoricalTimeSeries) computedValue.getValue();
         final Double value = mhts.getTimeSeries().getLatestValue();
          // Invert the sign because OpenGamma uses a -1bp shift rather than +1bp;
          // the DV01 function will invert it back.
          entries[i] = -value;
       } else {
         s_logger.warn(
             "Value was null when getting required input data "
                 + swapExternalSensitivitiesData.getExposureExternalId());
         entries[i] = 0d;
       }
     } else {
       entries[i] = 0d;
     }
     i++;
   }
   // Quick hack to map in bond data.
   i = 0;
   for (final FixedIncomeStripWithSecurity strip : curveSpec.getStrips()) {
     final FactorExposureData bondExternalSensitivitiesData =
         searchForBondTenorMatch(decodedSensitivities, strip);
     if (bondExternalSensitivitiesData != null) {
       final ComputedValue computedValue =
           inputs.getComputedValue(
               getSensitivityRequirement(bondExternalSensitivitiesData.getExposureExternalId()));
       if (computedValue != null) {
         final ManageableHistoricalTimeSeries mhts =
             (ManageableHistoricalTimeSeries) computedValue.getValue();
         final Double value = mhts.getTimeSeries().getLatestValue();
          // Subtract because OpenGamma uses a -1bp shift rather than +1bp;
          // the DV01 function will invert it back.
          entries[i] -= value;
       } else {
         s_logger.warn(
             "Value was null when getting required input data "
                 + bondExternalSensitivitiesData.getExposureExternalId());
       }
     }
     i++;
   }
   return new DoubleMatrix1D(entries);
 }