@Test
public void test() {
  final int n = 12;
  final GaussianQuadratureData f1 = GAUSS_LEGENDRE.generate(n);
  final GaussianQuadratureData f2 = GAUSS_JACOBI_GL_EQUIV.generate(n);
  final GaussianQuadratureData f3 = GAUSS_JACOBI_CHEBYSHEV_EQUIV.generate(n);
  final double[] w1 = f1.getWeights();
  final double[] w2 = f2.getWeights();
  final double[] x1 = f1.getAbscissas();
  final double[] x2 = f2.getAbscissas();
  assertTrue(w1.length == w2.length);
  assertTrue(x1.length == x2.length);
  for (int i = 0; i < n; i++) {
    assertEquals(w1[i], w2[i], EPS);
    assertEquals(x1[i], -x2[i], EPS);
  }
  final double[] w3 = f3.getWeights();
  final double[] x3 = f3.getAbscissas();
  final double chebyshevWeight = Math.PI / n;
  final Function1D<Integer, Double> chebyshevAbscissa =
      new Function1D<Integer, Double>() {
        @Override
        public Double evaluate(final Integer x) {
          return -Math.cos(Math.PI * (x + 0.5) / n);
        }
      };
  for (int i = 0; i < n; i++) {
    assertEquals(chebyshevWeight, w3[i], EPS);
    assertEquals(chebyshevAbscissa.evaluate(i), -x3[i], EPS);
  }
}
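// A standalone sketch (illustrative, not part of the test class above) of why the closed-form
// Gauss-Chebyshev rule asserted above holds: with weights w_i = pi / n and abscissas
// x_i = -cos(pi * (i + 0.5) / n), the rule integrates f(x) / sqrt(1 - x^2) over [-1, 1]. For
// f(x) = x^2 the exact value is pi / 2, and the n-point sum reproduces it to machine precision.
// The class and variable names below are hypothetical.
public class GaussChebyshevSketch {
  public static void main(String[] args) {
    int n = 12;
    double sum = 0.0;
    for (int i = 0; i < n; i++) {
      double x = -Math.cos(Math.PI * (i + 0.5) / n); // same abscissas the test expects
      sum += x * x; // f(x) = x^2
    }
    double estimate = Math.PI / n * sum; // weight pi / n on every node
    System.out.println(estimate + " should be close to " + Math.PI / 2);
  }
}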
/**
 * Tests the interpolation part, which is consistent with the superclass (where extrapolation is
 * absent) if and only if local fitting is applied. If a global fit is used, the resulting
 * interpolation is tested with a larger tolerance.
 */
@Test
public void interpolationTest() {
  double eps = 1.0e-14;
  double expiry = 1.2;
  double forward = 1.7;
  int nStrikes = 11;
  double[] strikes = new double[nStrikes];
  double[] impliedVols =
      new double[] {2.17, 1.92, 1.702, 1.545, 1.281, 0.912, 0.9934, 1.0878, 1.1499, 1.2032, 1.242};
  for (int i = 0; i < nStrikes; ++i) {
    strikes[i] = forward * (0.85 + i * 0.05);
  }
  WeightingFunction weight = LinearWeightingFunction.getInstance();
  int seed = 4729;
  double beta = 0.95;
  ShiftedLogNormalExtrapolationFunctionProvider extapQuiet =
      new ShiftedLogNormalExtrapolationFunctionProvider("Quiet");
  SmileInterpolatorSABRWithExtrapolation interpQuiet =
      new SmileInterpolatorSABRWithExtrapolation(
          seed, new SABRHaganVolatilityFunction(), beta, weight, extapQuiet);
  InterpolatedSmileFunction funcQuiet =
      new InterpolatedSmileFunction(interpQuiet, forward, strikes, expiry, impliedVols);
  SmileInterpolatorSABR sabr =
      new SmileInterpolatorSABR(seed, new SABRHaganVolatilityFunction(), beta, weight);
  Function1D<Double, Double> volFunc =
      sabr.getVolatilityFunction(forward, strikes, expiry, impliedVols);
  int nKeys = 20;
  for (int i = 0; i < nKeys + 1; ++i) {
    Double key = strikes[0] + (strikes[nStrikes - 1] - strikes[0]) * i / nKeys;
    assertEquals(volFunc.evaluate(key), funcQuiet.getVolatility(key), eps);
  }
  SmileInterpolatorSABRWithExtrapolation interpGlobal1 =
      new SmileInterpolatorSABRWithExtrapolation(new SABRPaulotVolatilityFunction(), extapQuiet);
  SmileInterpolatorSABRWithExtrapolation interpGlobal2 =
      new SmileInterpolatorSABRWithExtrapolation(
          new SABRBerestyckiVolatilityFunction(), extapQuiet);
  SmileInterpolatorSABRWithExtrapolation interpGlobal3 =
      new SmileInterpolatorSABRWithExtrapolation(
          new SABRHaganAlternativeVolatilityFunction(), extapQuiet);
  InterpolatedSmileFunction funcGlobal1 =
      new InterpolatedSmileFunction(interpGlobal1, forward, strikes, expiry, impliedVols);
  InterpolatedSmileFunction funcGlobal2 =
      new InterpolatedSmileFunction(interpGlobal2, forward, strikes, expiry, impliedVols);
  InterpolatedSmileFunction funcGlobal3 =
      new InterpolatedSmileFunction(interpGlobal3, forward, strikes, expiry, impliedVols);
  for (int i = 0; i < nKeys + 1; ++i) {
    Double key = strikes[0] + (strikes[nStrikes - 1] - strikes[0]) * i / nKeys;
    double ref = funcQuiet.getVolatility(key);
    assertEquals(ref, funcGlobal1.getVolatility(key), 1.5 * ref * 1.0e-1);
    assertEquals(ref, funcGlobal2.getVolatility(key), ref * 1.0e-1);
    assertEquals(ref, funcGlobal3.getVolatility(key), ref * 1.0e-1);
  }
}
/**
 * Tests the 'In-Out Parity' condition: a knock-in pays the rebate at maturity if the barrier is
 * never hit, while a knock-out pays at the moment the barrier is hit. The discounting issue is
 * sidestepped by setting rates to 0.
 */
@Test
public void inOutParityWithRebate() {
  // Vanilla
  final Function1D<BlackFunctionData, Double> fcnVanillaCall =
      BLACK_FUNCTION.getPriceFunction(VANILLA_CALL_K100);
  final BlackFunctionData zeroRatesMarket = new BlackFunctionData(SPOT, 1.0, VOLATILITY);
  final double pxVanillaCall = fcnVanillaCall.evaluate(zeroRatesMarket);
  // Barriers with rebate
  final double priceDownInRebate =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_DOWN_IN, REBATE, SPOT, 0.0, 0.0, VOLATILITY);
  final double priceDownOutRebate =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_DOWN_OUT, REBATE, SPOT, 0.0, 0.0, VOLATILITY);
  assertEquals(
      "Knock In-Out Parity fails",
      1.0,
      (pxVanillaCall + REBATE) / (priceDownInRebate + priceDownOutRebate),
      1.e-6);
}
/**
 * Tests the 'In-Out Parity' condition: without rebates, the price of a knock-in plus a knock-out
 * of arbitrary barrier level must equal that of the underlying vanilla option.
 */
@Test
public void inOutParityWithoutRebate() {
  // Vanilla
  final Function1D<BlackFunctionData, Double> fcnVanillaCall =
      BLACK_FUNCTION.getPriceFunction(VANILLA_CALL_K100);
  final double pxVanillaCall = fcnVanillaCall.evaluate(DATA_BLACK);
  // Barriers without rebate
  final double noRebate = 0.0;
  final double priceDownIn =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_DOWN_IN, noRebate, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY);
  final double priceDownOut =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_DOWN_OUT, noRebate, SPOT, COST_OF_CARRY, RATE_DOM,
          VOLATILITY);
  assertEquals(
      "Knock In-Out Parity fails", 1.0, pxVanillaCall / (priceDownIn + priceDownOut), 1.e-6);
}
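// A minimal Monte Carlo sketch of the no-rebate parity tested above, under the assumption of a
// driftless lognormal spot (zero rates): on every path the barrier is either hit (only the
// knock-in pays) or never hit (only the knock-out pays), so the KI and KO payoffs sum to the
// vanilla payoff path by path. All names and parameter values below are hypothetical.
import java.util.Random;

public class InOutParitySketch {
  public static void main(String[] args) {
    double spot = 100, strike = 100, barrier = 90, vol = 0.2, t = 1.0;
    int nPaths = 100_000, nSteps = 252;
    double dt = t / nSteps, sqrtDt = Math.sqrt(dt);
    Random rng = new Random(42);
    double vanilla = 0, knockIn = 0, knockOut = 0;
    for (int p = 0; p < nPaths; p++) {
      double s = spot;
      boolean hit = false;
      for (int k = 0; k < nSteps; k++) {
        s *= Math.exp(-0.5 * vol * vol * dt + vol * sqrtDt * rng.nextGaussian());
        if (s <= barrier) {
          hit = true; // continuously-observed down barrier, approximated on the grid
        }
      }
      double payoff = Math.max(s - strike, 0.0); // vanilla call payoff, undiscounted
      vanilla += payoff;
      if (hit) {
        knockIn += payoff;
      } else {
        knockOut += payoff;
      }
    }
    // The two averages agree up to floating-point rounding only, with no Monte Carlo error in
    // the difference, because the identity holds path by path.
    System.out.printf("vanilla = %.4f, KI + KO = %.4f%n",
        vanilla / nPaths, (knockIn + knockOut) / nPaths);
  }
}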
protected double getResidual(
    final double fwd, final double expiry, final double[] ks, final double[] vols) {
  // Check for the trivial case where the cutoff is so low that there's no effective value in the
  // option
  final double cutoffPrice =
      BlackFormulaRepository.price(fwd, ks[0], expiry, vols[0], ks[0] > fwd);
  if (CompareUtils.closeEquals(cutoffPrice, 0)) {
    return 0.0; // i.e. the tail function is never used
  }
  // The typical case - fit a ShiftedLognormal to the two strike-vol pairs
  final ShiftedLognormalVolModel leftExtrapolator =
      new ShiftedLognormalVolModel(fwd, expiry, ks[0], vols[0], ks[1], vols[1]);
  // Now handle behaviour near zero strike. ShiftedLognormalVolModel has a non-zero put price for
  // zero strike. Find the strike, k_min, at which f(k) = p(k)/k^2 begins to blow up, by locating
  // the minimum of this function, then set f(k) = f(k_min) for k < k_min. This ensures the
  // implied volatility and the integrand are well behaved in the limit k -> 0.
  final Function1D<Double, Double> shiftedLnIntegrand =
      new Function1D<Double, Double>() {
        @Override
        public Double evaluate(final Double strike) {
          return leftExtrapolator.priceFromFixedStrike(strike) / (strike * strike);
        }
      };
  final double kMin = new BrentMinimizer1D().minimize(shiftedLnIntegrand, EPS, EPS, ks[0]);
  final double fMin = shiftedLnIntegrand.evaluate(kMin);
  double res = fMin * kMin; // the (hopefully) very small rectangular bit between zero and kMin
  res += _integrator.integrate(shiftedLnIntegrand, kMin, ks[0]);
  return res;
}
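// For reference, the quantity assembled above, in notation of my own choosing: with the left-tail
// integrand f(k) = p(k) / k^2, where p is the fitted shifted-lognormal put price, the residual is
//
//   res = kMin * f(kMin) + integral from kMin to ks[0] of f(k) dk
//
// i.e. a flat rectangle on [0, kMin], where f is frozen at its minimum so the k -> 0 limit stays
// finite, plus the exact integral from kMin up to the cutoff strike ks[0].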
/** Tests the present value. */
@Test
public void presentValueNoNotional() {
  final MultipleCurrencyAmount pv = METHOD.presentValue(ZERO_COUPON_CAP, BLACK_INFLATION);
  final double timeToMaturity =
      ZERO_COUPON_CAP.getReferenceEndTime() - ZERO_COUPON_CAP.getLastKnownFixingTime();
  final double df =
      MARKET
          .getCurve(ZERO_COUPON_CAP.getCurrency())
          .getDiscountFactor(ZERO_COUPON_CAP.getPaymentTime());
  final double finalIndex =
      MARKET.getCurve(PRICE_INDEX_EUR).getPriceIndex(ZERO_COUPON_CAP.getReferenceEndTime());
  final double forward = finalIndex / INDEX_1MAY_2008;
  final EuropeanVanillaOption option =
      new EuropeanVanillaOption(
          Math.pow(1 + ZERO_COUPON_CAP.getStrike(), ZERO_COUPON_CAP.getMaturity()),
          timeToMaturity,
          ZERO_COUPON_CAP.isCap());
  final double volatility =
      BLACK_INFLATION
          .getBlackParameters()
          .getVolatility(ZERO_COUPON_CAP.getReferenceEndTime(), ZERO_COUPON_CAP.getStrike());
  final BlackFunctionData dataBlack = new BlackFunctionData(forward, 1.0, volatility);
  final Function1D<BlackFunctionData, Double> func = BLACK_FUNCTION.getPriceFunction(option);
  final double pvExpected =
      df
          * func.evaluate(dataBlack)
          * ZERO_COUPON_CAP.getNotional()
          * ZERO_COUPON_CAP.getPaymentYearFraction();
  assertEquals(
      "Zero-coupon inflation DiscountingMethod: Present value",
      pvExpected,
      pv.getAmount(ZERO_COUPON_CAP.getCurrency()),
      TOLERANCE_PV);
}
/**
 * @param x The array of data, not null. Must contain at least two data points
 * @return The Pearson first skewness coefficient
 */
@Override
public Double evaluate(final double[] x) {
  Validate.notNull(x);
  Validate.isTrue(
      x.length > 1,
      "Need at least two data points to calculate Pearson first skewness coefficient");
  return 3 * (MEAN.evaluate(x) - MODE.evaluate(x)) / STD_DEV.evaluate(x);
}
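// In symbols, the coefficient returned above is
//
//   skew = 3 * (mean - mode) / standardDeviation
//
// where MEAN, MODE and STD_DEV are the sample estimators referenced in the return statement.
// (Note that some references define Pearson's first, or mode, skewness without the factor of 3.)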
static {
  final TreeMap<Double, Double> data = new TreeMap<>();
  final TreeMap<Double, Double> transformedData = new TreeMap<>();
  double x;
  for (int i = 0; i < 10; i++) {
    x = Double.valueOf(i);
    data.put(x, FUNCTION.evaluate(x));
    transformedData.put(x, Math.log(FUNCTION.evaluate(x)));
  }
  MODEL = LINEAR.getDataBundle(data);
  TRANSFORMED_MODEL = INTERPOLATOR.getDataBundle(transformedData);
}
/**
 * Computes the expected variance from a log-moneyness parametrised surface to within a certain
 * tolerance.
 *
 * @param surface log-moneyness parametrised volatility surface
 * @return expected variance
 */
@SuppressWarnings("synthetic-access")
@Override
public Double visitLogMoneyness(final BlackVolatilitySurfaceLogMoneyness surface) {
  final double atmVol = surface.getVolatilityForLogMoneyness(_t, 0.0);
  if (_t < 1e-4) {
    return atmVol * atmVol;
  }
  final double rootT = Math.sqrt(_t);
  final double invNorTol = NORMAL.getInverseCDF(_tol);
  final Function1D<Double, Double> integrand = getLogMoneynessIntegrand(surface);
  double putPart;
  if (_addResidual) {
    putPart = _integrator.integrate(integrand, Math.log(_lowStrikeCutoff / _f), 0.0);
    putPart += _residual;
  } else {
    final double l = invNorTol * atmVol * rootT; // initial estimate of lower limit
    putPart = _integrator.integrate(integrand, l, 0.0);
    double rem = integrand.evaluate(l);
    double error = rem / putPart;
    int step = 1;
    while (error > _tol) {
      putPart += _integrator.integrate(integrand, (step + 1) * l, step * l);
      step++;
      rem = integrand.evaluate((step + 1) * l);
      error = rem / putPart;
    }
    // add on the (very small) remainder estimate, otherwise we'll always underestimate variance
    putPart += rem;
  }
  // initial estimate of upper limit, in log-moneyness, mirroring the lower limit
  final double u = -invNorTol * atmVol * rootT;
  double callPart = _integrator.integrate(integrand, 0.0, u);
  double rem = integrand.evaluate(u);
  double error = rem / callPart;
  int step = 1;
  while (error > _tol) {
    callPart += _integrator.integrate(integrand, step * u, (1 + step) * u);
    step++;
    rem = integrand.evaluate((1 + step) * u);
    error = rem / callPart;
  }
  // don't add on the remainder estimate as it is very conservative, and likely too large
  // callPart += rem;
  return 2 * (putPart + callPart) / _t;
}
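// The identity behind this visitor (the standard static-replication result; the exact integrand
// lives in getLogMoneynessIntegrand, which is not shown here): the expected annualized variance is
//
//   E[sigma^2] = (2 / T) * integral from 0 to infinity of OTM(K) / K^2 dK
//
// where OTM(K) is the undiscounted out-of-the-money option price at forward F, here evaluated in
// the log-moneyness variable x = ln(K / F), with the change of variables absorbed into the
// integrand. The while-loops simply push the truncated integration limits outwards until the
// estimated remainder falls below _tol.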
private boolean getNextPosition(
    final Function1D<DoubleMatrix1D, Double> function,
    final Function1D<DoubleMatrix1D, DoubleMatrix1D> grad,
    final DataBundle data) {
  final DoubleMatrix1D p = getDirection(data);
  if (data.getLambda0() < 1.0) {
    data.setLambda0(1.0);
  } else {
    data.setLambda0(data.getLambda0() * BETA);
  }
  updatePosition(p, function, data);
  final double g1 = data.getG1();
  // the function is invalid at the new position, try to recover
  if (Double.isInfinite(g1) || Double.isNaN(g1)) {
    bisectBacktrack(p, function, data);
  }
  if (data.getG1() > data.getG0() / (1 + ALPHA * data.getLambda0())) {
    quadraticBacktrack(p, function, data);
    int count = 0;
    while (data.getG1() > data.getG0() / (1 + ALPHA * data.getLambda0())) {
      if (count > 5) {
        return false;
      }
      cubicBacktrack(p, function, data);
      count++;
    }
  }
  final DoubleMatrix1D deltaX = data.getDeltaX();
  data.setX((DoubleMatrix1D) MA.add(data.getX(), deltaX));
  data.setG0(data.getG1());
  final DoubleMatrix1D gradNew = grad.evaluate(data.getX());
  data.setDeltaGrad((DoubleMatrix1D) MA.subtract(gradNew, data.getGrad()));
  data.setGrad(gradNew);
  return true;
}
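// The sufficient-decrease test used above, in symbols: a step of size lambda along the direction
// p is accepted when
//
//   g(x + lambda * p) <= g(x) / (1 + ALPHA * lambda)
//
// where g is the squared objective tracked in the data bundle (g0 at the current point, g1 at the
// trial point); otherwise the step is shrunk by one quadratic backtrack and then a bounded number
// of cubic backtracks before giving up.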
@Override
public DoubleMatrix1D minimize(
    final Function1D<DoubleMatrix1D, Double> function,
    final Function1D<DoubleMatrix1D, DoubleMatrix1D> grad,
    final DoubleMatrix1D startPosition) {
  final DataBundle data = new DataBundle();
  final double y = function.evaluate(startPosition);
  data.setX(startPosition);
  data.setG0(y * y);
  data.setGrad(grad.evaluate(startPosition));
  data.setInverseHessianEsimate(getInitializedMatrix(startPosition));
  if (!getNextPosition(function, grad, data)) {
    throw new MathException(
        "Cannot work with this starting position. Please choose another point");
  }
  int count = 0;
  int resetCount = 1;
  while (!isConverged(data)) {
    if ((resetCount) % RESET_FREQ == 0) {
      data.setInverseHessianEsimate(getInitializedMatrix(startPosition));
      resetCount = 1;
    } else {
      _hessainUpdater.update(data);
    }
    if (!getNextPosition(function, grad, data)) {
      data.setInverseHessianEsimate(getInitializedMatrix(startPosition));
      resetCount = 1;
      if (!getNextPosition(function, grad, data)) {
        throw new MathException("Failed to converge in backtracking");
      }
    }
    count++;
    resetCount++;
    if (count > _maxSteps) {
      throw new MathException(
          "Failed to converge after "
              + _maxSteps
              + " iterations. Final point reached: "
              + data.getX().toString());
    }
  }
  return data.getX();
}
@Test
public void presentValue() {
  final MultipleCurrencyAmount pvMethod =
      METHOD_BLACK.presentValue(SWAPTION_LONG_REC, BLACK_MULTICURVES);
  final double forward = SWAPTION_LONG_REC.getUnderlyingSwap().accept(PRDC, MULTICURVES);
  final double pvbp =
      METHOD_SWAP.presentValueBasisPoint(SWAPTION_LONG_REC.getUnderlyingSwap(), MULTICURVES);
  final double volatility =
      BLACK.getVolatility(
          SWAPTION_LONG_REC.getTimeToExpiry(), SWAPTION_LONG_REC.getMaturityTime());
  final BlackPriceFunction blackFunction = new BlackPriceFunction();
  final BlackFunctionData dataBlack = new BlackFunctionData(forward, pvbp, volatility);
  final Function1D<BlackFunctionData, Double> func =
      blackFunction.getPriceFunction(SWAPTION_LONG_REC);
  final double pvExpected = func.evaluate(dataBlack);
  assertEquals(
      "Swaption Black method: present value", pvExpected, pvMethod.getAmount(EUR), TOLERANCE_PV);
}
protected void updatePosition(
    final DoubleMatrix1D p,
    final Function1D<DoubleMatrix1D, Double> function,
    final DataBundle data) {
  final double lambda0 = data.getLambda0();
  final DoubleMatrix1D deltaX = (DoubleMatrix1D) MA.scale(p, lambda0);
  final DoubleMatrix1D xNew = (DoubleMatrix1D) MA.add(data.getX(), deltaX);
  data.setDeltaX(deltaX);
  data.setG2(data.getG1());
  final double y = function.evaluate(xNew);
  data.setG1(y * y);
}
/** Check extrapolation is recovered for shifted lognormal model extrapolation */
@Test
public void functionRecoverySLNTest() {
  final double forward = 1.0;
  final double expiry = 3.0;
  int nSamples = 11;
  double[] strikes = new double[nSamples];
  double[] vols = new double[nSamples];
  final double muLeft = 0.4;
  final double thetaLeft = 0.5;
  // Expected left extrapolation
  Function1D<Double, Double> left =
      new Function1D<Double, Double>() {
        @Override
        public Double evaluate(Double strike) {
          return ShiftedLogNormalTailExtrapolation.impliedVolatility(
              forward, strike, expiry, muLeft, thetaLeft);
        }
      };
  final double muRight = -0.3;
  final double thetaRight = 0.5;
  // Expected right extrapolation
  Function1D<Double, Double> right =
      new Function1D<Double, Double>() {
        @Override
        public Double evaluate(Double strike) {
          return ShiftedLogNormalTailExtrapolation.impliedVolatility(
              forward, strike, expiry, muRight, thetaRight);
        }
      };
  for (int i = 0; i < 5; ++i) {
    double strike = forward * (0.75 + 0.05 * i);
    vols[i] = left.evaluate(strike);
    strikes[i] = strike;
  }
  for (int i = 6; i < nSamples; ++i) {
    double strike = forward * (0.75 + 0.05 * i);
    vols[i] = right.evaluate(strike);
    strikes[i] = strike;
  }
  strikes[5] = forward;
  vols[5] = 0.5 * (vols[4] + vols[6]);
  ShiftedLogNormalExtrapolationFunctionProvider extapSLN =
      new ShiftedLogNormalExtrapolationFunctionProvider();
  SmileInterpolatorSABRWithExtrapolation interpSLN =
      new SmileInterpolatorSABRWithExtrapolation(extapSLN);
  InterpolatedSmileFunction funcSLN =
      new InterpolatedSmileFunction(interpSLN, forward, strikes, expiry, vols);
  double[] keys = new double[] {forward * 0.1, forward * 0.5, forward * 0.66};
  for (int i = 0; i < keys.length; ++i) {
    assertEquals(left.evaluate(keys[i]), funcSLN.getVolatility(keys[i]), 1.e-2);
  }
  keys = new double[] {forward * 1.31, forward * 1.5, forward * 2.61, forward * 15.0};
  for (int i = 0; i < keys.length; ++i) {
    assertEquals(right.evaluate(keys[i]), funcSLN.getVolatility(keys[i]), 1.e-2);
  }
}
/**
 * Computes the present value of the physical delivery swaption through approximation.
 *
 * @param swaption The swaption.
 * @param cfe The swaption cash flow equivalent.
 * @param g2Data The G2++ parameters and the curves.
 * @return The present value.
 */
public CurrencyAmount presentValue(
    final SwaptionPhysicalFixedIbor swaption,
    final AnnuityPaymentFixed cfe,
    final G2ppPiecewiseConstantDataBundle g2Data) {
  YieldAndDiscountCurve dsc =
      g2Data.getCurve(swaption.getUnderlyingSwap().getFixedLeg().getDiscountCurve());
  int nbCf = cfe.getNumberOfPayments();
  double[] cfa = new double[nbCf];
  double[] t = new double[nbCf];
  for (int loopcf = 0; loopcf < nbCf; loopcf++) {
    cfa[loopcf] =
        -Math.signum(cfe.getNthPayment(0).getAmount()) * cfe.getNthPayment(loopcf).getAmount();
    t[loopcf] = cfe.getNthPayment(loopcf).getPaymentTime();
  }
  double rhog2pp = g2Data.getG2ppParameter().getCorrelation();
  double[][] ht0 = MODEL_G2PP.volatilityMaturityPart(g2Data.getG2ppParameter(), t[0], t);
  double[] dfswap = new double[nbCf];
  double[] p0 = new double[nbCf];
  double[] cP = new double[nbCf];
  for (int loopcf = 0; loopcf < nbCf; loopcf++) {
    dfswap[loopcf] = dsc.getDiscountFactor(t[loopcf]);
    p0[loopcf] = dfswap[loopcf] / dfswap[0];
    cP[loopcf] = cfa[loopcf] * p0[loopcf];
  }
  double k = -cfa[0];
  double b0 = 0.0;
  for (int loopcf = 1; loopcf < nbCf; loopcf++) {
    b0 += cP[loopcf];
  }
  double[] alpha0 = new double[nbCf - 1];
  double[] beta0 = new double[2];
  for (int loopcf = 0; loopcf < nbCf - 1; loopcf++) {
    alpha0[loopcf] = cfa[loopcf + 1] * p0[loopcf + 1] / b0;
    beta0[0] += alpha0[loopcf] * ht0[0][loopcf + 1];
    beta0[1] += alpha0[loopcf] * ht0[1][loopcf + 1];
  }
  double[][] gamma = MODEL_G2PP.gamma(g2Data.getG2ppParameter(), 0, swaption.getTimeToExpiry());
  double[] tau = new double[nbCf];
  for (int loopcf = 0; loopcf < nbCf; loopcf++) {
    tau[loopcf] =
        gamma[0][0] * ht0[0][loopcf] * ht0[0][loopcf]
            + gamma[1][1] * ht0[1][loopcf] * ht0[1][loopcf]
            + 2 * rhog2pp * gamma[0][1] * ht0[0][loopcf] * ht0[1][loopcf];
  }
  double xbarnum = 0.0;
  double xbarde = 0.0;
  for (int loopcf = 0; loopcf < nbCf; loopcf++) {
    xbarnum += cP[loopcf] - cP[loopcf] * tau[loopcf] * tau[loopcf] / 2.0;
    xbarde += cP[loopcf] * tau[loopcf];
  }
  double xbar = xbarnum / xbarde;
  double[] pK = new double[nbCf];
  for (int loopcf = 0; loopcf < nbCf; loopcf++) {
    pK[loopcf] = p0[loopcf] * (1.0 - tau[loopcf] * xbar - tau[loopcf] * tau[loopcf] / 2.0);
  }
  double[] alphaK = new double[nbCf - 1];
  double[] betaK = new double[2];
  for (int loopcf = 0; loopcf < nbCf - 1; loopcf++) {
    alphaK[loopcf] = cfa[loopcf + 1] * pK[loopcf + 1] / k;
    betaK[0] += alphaK[loopcf] * ht0[0][loopcf + 1];
    betaK[1] += alphaK[loopcf] * ht0[1][loopcf + 1];
  }
  double[] betaBar = new double[] {(beta0[0] + betaK[0]) / 2.0, (beta0[1] + betaK[1]) / 2.0};
  double sigmaBar2 =
      gamma[0][0] * betaBar[0] * betaBar[0]
          + gamma[1][1] * betaBar[1] * betaBar[1]
          + 2 * rhog2pp * gamma[0][1] * betaBar[0] * betaBar[1];
  double sigmaBar = Math.sqrt(sigmaBar2);
  EuropeanVanillaOption option = new EuropeanVanillaOption(k, 1, !swaption.isCall());
  final BlackPriceFunction blackFunction = new BlackPriceFunction();
  final BlackFunctionData dataBlack = new BlackFunctionData(b0, dfswap[0], sigmaBar);
  final Function1D<BlackFunctionData, Double> func = blackFunction.getPriceFunction(option);
  final double price = func.evaluate(dataBlack) * (swaption.isLong() ? 1.0 : -1.0);
  return CurrencyAmount.of(swaption.getCurrency(), price);
}
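// Shape of the approximation above, in brief (my reading of the code, not a statement of the
// published derivation): the exercise region is approximated around xbar, a second-order
// expansion of the coupon-bond value in the Gaussian state variables, so the swaption collapses
// to a Black formula on the coupon bond with forward b0 (the sum of weighted discounted cash
// flows), strike k = -cfa[0], numeraire dfswap[0], and an effective volatility sigmaBar built
// from the averaged alpha/beta weights and the G2++ gamma and correlation terms.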
/** {@inheritDoc} */
@Override
public IsdaCompliantCreditCurve calibrateCreditCurve(
    CdsAnalytic[] cds,
    double[] premiums,
    IsdaCompliantYieldCurve yieldCurve,
    double[] pointsUpfront) {
  ArgChecker.noNulls(cds, "null CDSs");
  ArgChecker.notEmpty(premiums, "empty premiums");
  ArgChecker.notEmpty(pointsUpfront, "empty pointsUpfront");
  ArgChecker.notNull(yieldCurve, "null yieldCurve");
  int n = cds.length;
  ArgChecker.isTrue(n == premiums.length, "Number of CDSs does not match number of spreads");
  ArgChecker.isTrue(
      n == pointsUpfront.length, "Number of CDSs does not match number of pointsUpfront");
  double proStart = cds[0].getEffectiveProtectionStart();
  for (int i = 1; i < n; i++) {
    ArgChecker.isTrue(
        proStart == cds[i].getEffectiveProtectionStart(),
        "all CDSs must have the same protection start");
    ArgChecker.isTrue(
        cds[i].getProtectionEnd() > cds[i - 1].getProtectionEnd(),
        "protection end must be ascending");
  }
  // use continuous premiums as initial guess
  double[] guess = new double[n];
  double[] t = new double[n];
  for (int i = 0; i < n; i++) {
    t[i] = cds[i].getProtectionEnd();
    guess[i] = (premiums[i] + pointsUpfront[i] / t[i]) / cds[i].getLGD();
  }
  IsdaCompliantCreditCurve creditCurve = new IsdaCompliantCreditCurve(t, guess);
  for (int i = 0; i < n; i++) {
    Pricer pricer = new Pricer(cds[i], yieldCurve, t, premiums[i], pointsUpfront[i]);
    Function1D<Double, Double> func = pricer.getPointFunction(i, creditCurve);
    switch (getArbHanding()) {
      case Ignore: {
        try {
          double[] bracket =
              BRACKER.getBracketedPoints(
                  func,
                  0.8 * guess[i],
                  1.25 * guess[i],
                  Double.NEGATIVE_INFINITY,
                  Double.POSITIVE_INFINITY);
          double zeroRate =
              bracket[0] > bracket[1]
                  ? ROOTFINDER.getRoot(func, bracket[1], bracket[0])
                  : ROOTFINDER.getRoot(func, bracket[0], bracket[1]); // Negative guess handled
          creditCurve = creditCurve.withRate(zeroRate, i);
        } catch (MathException e) {
          // handle bracketing failure due to small survival probability
          if (Math.abs(func.evaluate(creditCurve.getZeroRateAtIndex(i - 1))) < 1.e-12) {
            creditCurve = creditCurve.withRate(creditCurve.getZeroRateAtIndex(i - 1), i);
          } else {
            throw new MathException(e);
          }
        }
        break;
      }
      case Fail: {
        double minValue =
            i == 0 ? 0.0 : creditCurve.getRTAtIndex(i - 1) / creditCurve.getTimeAtIndex(i);
        if (i > 0 && func.evaluate(minValue) > 0.0) { // can never fail on the first spread
          StringBuilder msg = new StringBuilder();
          if (pointsUpfront[i] == 0.0) {
            msg.append("The par spread of " + premiums[i] + " at index " + i);
          } else {
            msg.append(
                "The premium of "
                    + premiums[i]
                    + " and points up-front of "
                    + pointsUpfront[i]
                    + " at index "
                    + i);
          }
          msg.append(" is an arbitrage; cannot fit a curve with positive forward hazard rate. ");
          throw new IllegalArgumentException(msg.toString());
        }
        guess[i] = Math.max(minValue, guess[i]);
        double[] bracket =
            BRACKER.getBracketedPoints(
                func, guess[i], 1.2 * guess[i], minValue, Double.POSITIVE_INFINITY);
        double zeroRate = ROOTFINDER.getRoot(func, bracket[0], bracket[1]);
        creditCurve = creditCurve.withRate(zeroRate, i);
        break;
      }
      case ZeroHazardRate: {
        double minValue =
            i == 0 ? 0.0 : creditCurve.getRTAtIndex(i - 1) / creditCurve.getTimeAtIndex(i);
        if (i > 0 && func.evaluate(minValue) > 0.0) { // can never fail on the first spread
          creditCurve = creditCurve.withRate(minValue, i);
        } else {
          guess[i] = Math.max(minValue, guess[i]);
          double[] bracket =
              BRACKER.getBracketedPoints(
                  func, guess[i], 1.2 * guess[i], minValue, Double.POSITIVE_INFINITY);
          double zeroRate = ROOTFINDER.getRoot(func, bracket[0], bracket[1]);
          creditCurve = creditCurve.withRate(zeroRate, i);
        }
        break;
      }
      default:
        throw new IllegalArgumentException("unknown case " + getArbHanding());
    }
  }
  return creditCurve;
}
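// The initial guess used by the calibration above is the "credit triangle" approximation (my
// wording): for the i-th CDS with protection end t_i,
//
//   guess_i = (premium_i + pointsUpfront_i / t_i) / LGD_i
//
// i.e. a flat hazard rate of roughly spread / LGD, adjusted for any points up-front; each node of
// the piecewise-constant hazard curve is then refined by one-dimensional root-finding, bracketed
// around this guess.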
@Override
public Set<ComputedValue> execute(
    final FunctionExecutionContext executionContext,
    final FunctionInputs inputs,
    final ComputationTarget target,
    final Set<ValueRequirement> desiredValues)
    throws AsynchronousExecution {
  final Object originalCurveObject = inputs.getValue(YIELD_CURVE);
  if (originalCurveObject == null) {
    throw new OpenGammaRuntimeException("Could not get original curve");
  }
  ValueProperties resultCurveProperties = null;
  String absoluteToleranceName = null;
  String relativeToleranceName = null;
  String iterationsName = null;
  String decompositionName = null;
  String useFiniteDifferenceName = null;
  for (final ValueRequirement desiredValue : desiredValues) {
    if (desiredValue.getValueName().equals(YIELD_CURVE)) {
      absoluteToleranceName =
          desiredValue.getConstraint(
              MultiYieldCurvePropertiesAndDefaults.PROPERTY_ROOT_FINDER_ABSOLUTE_TOLERANCE);
      relativeToleranceName =
          desiredValue.getConstraint(
              MultiYieldCurvePropertiesAndDefaults.PROPERTY_ROOT_FINDER_RELATIVE_TOLERANCE);
      iterationsName =
          desiredValue.getConstraint(
              MultiYieldCurvePropertiesAndDefaults.PROPERTY_ROOT_FINDER_MAX_ITERATIONS);
      decompositionName =
          desiredValue.getConstraint(MultiYieldCurvePropertiesAndDefaults.PROPERTY_DECOMPOSITION);
      useFiniteDifferenceName =
          desiredValue.getConstraint(
              MultiYieldCurvePropertiesAndDefaults.PROPERTY_USE_FINITE_DIFFERENCE);
      resultCurveProperties = desiredValue.getConstraints().copy().get();
      break;
    }
  }
  if (resultCurveProperties == null) {
    throw new OpenGammaRuntimeException("Could not get result curve properties");
  }
  final ValueProperties resultJacobianProperties = resultCurveProperties.withoutAny(CURVE);
  ZonedDateTime valuationDateTime =
      executionContext.getValuationTime().atZone(executionContext.getValuationClock().getZone());
  final HolidaySource holidaySource =
      OpenGammaExecutionContext.getHolidaySource(executionContext);
  final ConventionSource conventionSource =
      OpenGammaExecutionContext.getConventionSource(executionContext);
  final Calendar calendar = CalendarUtils.getCalendar(holidaySource, _currency);
  final DepositConvention convention =
      conventionSource.getSingle(
          ExternalId.of(SCHEME_NAME, getConventionName(_currency, DEPOSIT)),
          DepositConvention.class);
  final int spotLag = convention.getSettlementDays();
  final ExternalId conventionSettlementRegion = convention.getRegionCalendar();
  ZonedDateTime spotDate;
  if (spotLag == 0 && conventionSettlementRegion == null) {
    spotDate = valuationDateTime;
  } else {
    spotDate = ScheduleCalculator.getAdjustedDate(valuationDateTime, spotLag, calendar);
  }
  final YieldCurveBundle curves = new YieldCurveBundle();
  final String fullYieldCurveName = _originalCurveName + "_" + _currency;
  curves.setCurve(fullYieldCurveName, (YieldAndDiscountCurve) originalCurveObject);
  final int n = _impliedDefinition.getStrips().size();
  final double[] t = new double[n];
  final double[] r = new double[n];
  int i = 0;
  final DayCount dayCount =
      DayCountFactory.INSTANCE.getDayCount("Act/360"); // TODO: Get the convention from the curve.
  final String impliedDepositCurveName = _curveCalculationConfig + "_" + _currency.getCode();
  final List<InstrumentDerivative> derivatives = new ArrayList<>();
  for (final FixedIncomeStrip strip : _impliedDefinition.getStrips()) {
    final Tenor tenor = strip.getCurveNodePointTime();
    final ZonedDateTime paymentDate =
        ScheduleCalculator.getAdjustedDate(spotDate, tenor.getPeriod(), MOD_FOL, calendar, true);
    final double startTime = TimeCalculator.getTimeBetween(valuationDateTime, spotDate);
    final double endTime = TimeCalculator.getTimeBetween(valuationDateTime, paymentDate);
    final double accrualFactor = dayCount.getDayCountFraction(spotDate, paymentDate, calendar);
    final Cash cashFXCurve =
        new Cash(_currency, startTime, endTime, 1, 0, accrualFactor, fullYieldCurveName);
    final double parRate = METHOD_CASH.parRate(cashFXCurve, curves);
    final Cash cashDepositCurve =
        new Cash(_currency, startTime, endTime, 1, 0, accrualFactor, impliedDepositCurveName);
    derivatives.add(cashDepositCurve);
    t[i] = endTime;
    r[i++] = parRate;
  }
  final CombinedInterpolatorExtrapolator interpolator =
      CombinedInterpolatorExtrapolatorFactory.getInterpolator(
          _interpolatorName, _leftExtrapolatorName, _rightExtrapolatorName);
  final double absoluteTolerance = Double.parseDouble(absoluteToleranceName);
  final double relativeTolerance = Double.parseDouble(relativeToleranceName);
  final int iterations = Integer.parseInt(iterationsName);
  final Decomposition<?> decomposition =
      DecompositionFactory.getDecomposition(decompositionName);
  final boolean useFiniteDifference = Boolean.parseBoolean(useFiniteDifferenceName);
  final LinkedHashMap<String, double[]> curveNodes = new LinkedHashMap<>();
  final LinkedHashMap<String, Interpolator1D> interpolators = new LinkedHashMap<>();
  curveNodes.put(impliedDepositCurveName, t);
  interpolators.put(impliedDepositCurveName, interpolator);
  final FXMatrix fxMatrix = new FXMatrix();
  final YieldCurveBundle knownCurve = new YieldCurveBundle();
  final MultipleYieldCurveFinderDataBundle data =
      new MultipleYieldCurveFinderDataBundle(
          derivatives, r, knownCurve, curveNodes, interpolators, useFiniteDifference, fxMatrix);
  final NewtonVectorRootFinder rootFinder =
      new BroydenVectorRootFinder(
          absoluteTolerance, relativeTolerance, iterations, decomposition);
  final Function1D<DoubleMatrix1D, DoubleMatrix1D> curveCalculator =
      new MultipleYieldCurveFinderFunction(data, PAR_RATE_CALCULATOR);
  final Function1D<DoubleMatrix1D, DoubleMatrix2D> jacobianCalculator =
      new MultipleYieldCurveFinderJacobian(data, PAR_RATE_SENSITIVITY_CALCULATOR);
  final double[] fittedYields =
      rootFinder.getRoot(curveCalculator, jacobianCalculator, new DoubleMatrix1D(r)).getData();
  final DoubleMatrix2D jacobianMatrix =
      jacobianCalculator.evaluate(new DoubleMatrix1D(fittedYields));
  final YieldCurve impliedDepositCurve =
      new YieldCurve(
          impliedDepositCurveName, InterpolatedDoublesCurve.from(t, fittedYields, interpolator));
  final ValueSpecification curveSpec =
      new ValueSpecification(YIELD_CURVE, target.toSpecification(), resultCurveProperties);
  final ValueSpecification jacobianSpec =
      new ValueSpecification(
          YIELD_CURVE_JACOBIAN, target.toSpecification(), resultJacobianProperties);
  return Sets.newHashSet(
      new ComputedValue(curveSpec, impliedDepositCurve),
      new ComputedValue(jacobianSpec, jacobianMatrix));
}
/**
 * Tests the 'In-Out Parity' condition: the price of a knock-in plus a knock-out of arbitrary
 * barrier level must equal that of the underlying vanilla option plus the value of the rebate.
 */
@Test
public void inOutParityMorePathsWithRebate() {
  // Market with zero rates, domestic and foreign
  final BlackFunctionData zeroRatesMarket = new BlackFunctionData(SPOT, 1.0, VOLATILITY);
  final double rateDomestic = 0.0;
  final double rateForeign = 0.0;
  final double costOfCarry = rateDomestic - rateForeign;
  // Rebate
  final double pxRebate = REBATE;
  // 2 - Vanillas - Call and Put
  final Function1D<BlackFunctionData, Double> fcnVanillaCall =
      BLACK_FUNCTION.getPriceFunction(VANILLA_CALL_K100);
  final double pxVanillaCall = fcnVanillaCall.evaluate(zeroRatesMarket);
  final Function1D<BlackFunctionData, Double> fcnVanillaPut =
      BLACK_FUNCTION.getPriceFunction(VANILLA_PUT_K100);
  final double pxVanillaPut = fcnVanillaPut.evaluate(zeroRatesMarket);
  // Barriers: Up and Down, Call and Put, In and Out
  final double pxDownInCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_DOWN_IN, REBATE, SPOT, costOfCarry, rateDomestic,
          VOLATILITY);
  final double pxDownOutCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_DOWN_OUT, REBATE, SPOT, costOfCarry, rateDomestic,
          VOLATILITY);
  assertEquals(
      "Knock In-Out Parity fails",
      1.0,
      (pxVanillaCall + pxRebate) / (pxDownInCall + pxDownOutCall),
      1.e-6);
  // assertTrue("Knock In-Out Parity fails",
  //     Math.abs((pxVanillaCall + pxRebate) / (pxDownInCall + pxDownOutCall) - 1) < 1.e-6);
  final double pxDownInPut =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_K100, BARRIER_DOWN_IN, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  final double pxDownOutPut =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_K100, BARRIER_DOWN_OUT, REBATE, SPOT, costOfCarry, rateDomestic,
          VOLATILITY);
  assertTrue(
      "Knock In-Out Parity fails",
      Math.abs((pxVanillaPut + pxRebate) / (pxDownInPut + pxDownOutPut) - 1) < 1.e-6);
  final double pxUpInCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_UP_IN, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  final double pxUpOutCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, BARRIER_UP_OUT, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  assertTrue(
      "Knock In-Out Parity fails",
      Math.abs((pxVanillaCall + pxRebate) / (pxUpInCall + pxUpOutCall) - 1) < 1.e-6);
  final double pxUpInPut =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_K100, BARRIER_UP_IN, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  final double pxUpOutPut =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_K100, BARRIER_UP_OUT, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  assertTrue(
      "Knock In-Out Parity fails",
      Math.abs((pxVanillaPut + pxRebate) / (pxUpInPut + pxUpOutPut) - 1) < 1.e-6);
  // Try the Up case with Barrier < Strike: this needs a new vanilla with K120 (> Barrier110)
  final Function1D<BlackFunctionData, Double> fcnVanillaPutHiK =
      BLACK_FUNCTION.getPriceFunction(VANILLA_PUT_KHI);
  final double pxVanillaPutHiK = fcnVanillaPutHiK.evaluate(zeroRatesMarket);
  final double pxUpInPutHiK =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_KHI, BARRIER_UP_IN, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  final double pxUpOutPutHiK =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_KHI, BARRIER_UP_OUT, REBATE, SPOT, costOfCarry, rateDomestic, VOLATILITY);
  assertTrue(
      "Knock In-Out Parity fails",
      Math.abs((pxVanillaPutHiK + pxRebate) / (pxUpInPutHiK + pxUpOutPutHiK) - 1) < 1.e-6);
}
/**
 * Tests that a barrier which is impossible to hit reduces the price to that of the underlying
 * vanilla option (knock-out) or to the value of the rebate (knock-in).
 */
@Test
public void impossibleToHitBarrierIsVanilla() {
  final Barrier veryLowKnockIn =
      new Barrier(KnockType.IN, BarrierType.DOWN, ObservationType.CONTINUOUS, 1e-6);
  final Barrier veryLowKnockOut =
      new Barrier(KnockType.OUT, BarrierType.DOWN, ObservationType.CONTINUOUS, 1e-6);
  final Barrier veryHighKnockIn =
      new Barrier(KnockType.IN, BarrierType.UP, ObservationType.CONTINUOUS, 1e6);
  final Barrier veryHighKnockOut =
      new Barrier(KnockType.OUT, BarrierType.UP, ObservationType.CONTINUOUS, 1e6);
  final double pxRebate = DF_DOM * REBATE;
  final Function1D<BlackFunctionData, Double> fcnVanillaCall =
      BLACK_FUNCTION.getPriceFunction(VANILLA_CALL_K100);
  final double pxVanillaCall = fcnVanillaCall.evaluate(DATA_BLACK);
  // Knock-ins with barriers that are impossible to reach are guaranteed to pay the rebate at
  // maturity
  final double pxDownInPut =
      BARRIER_FUNCTION.getPrice(
          VANILLA_PUT_K100, veryLowKnockIn, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY);
  assertTrue(
      "VeryLowKnockInBarrier doesn't match rebate", Math.abs(pxDownInPut / pxRebate - 1) < 1e-6);
  final double pxDownInCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, veryLowKnockIn, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY);
  assertTrue(
      "VeryLowKnockInBarrier doesn't match rebate", Math.abs(pxDownInCall / pxRebate - 1) < 1e-6);
  final double pxUpInCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, veryHighKnockIn, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY);
  assertTrue(
      "VeryHighKnockInBarrier doesn't match rebate", Math.abs(pxUpInCall / pxRebate - 1) < 1e-6);
  // Knock-outs with barriers that are impossible to reach are guaranteed to pay the value of the
  // underlying vanilla
  final double pxDownOutCall =
      BARRIER_FUNCTION.getPrice(
          VANILLA_CALL_K100, veryLowKnockOut, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY);
  assertTrue(
      "VeryLowKnockOutBarrier doesn't match vanilla",
      Math.abs(pxDownOutCall / pxVanillaCall - 1) < 1e-6);
  // Derivatives
  final double[] derivs = new double[5];
  BARRIER_FUNCTION.getPriceAdjoint(
      VANILLA_CALL_K100, veryLowKnockIn, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY,
      derivs);
  assertTrue(
      "Impossible KnockIn: rate sens is incorrect",
      Math.abs(derivs[2] / (-1 * EXPIRY_TIME * DF_DOM * REBATE) - 1) < 1e-6);
  assertEquals(
      "Impossible KnockIn: Encountered derivative, other than d/dr, != 0",
      0.0,
      derivs[0] + derivs[1] + derivs[3] + derivs[4],
      1.0e-6);
  BARRIER_FUNCTION.getPriceAdjoint(
      VANILLA_CALL_K100, veryHighKnockIn, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY,
      derivs);
  assertTrue(
      "Impossible KnockIn: rate sens is incorrect",
      Math.abs(derivs[2] / (-1 * EXPIRY_TIME * DF_DOM * REBATE) - 1) < 1e-6);
  assertEquals(
      "Impossible KnockIn: Encountered derivative, other than d/dr, != 0",
      0.0,
      derivs[0] + derivs[1] + derivs[3] + derivs[4],
      1.0e-6);
  // Barrier adjoint: [0] spot, [1] strike, [2] rate, [3] cost-of-carry, [4] volatility.
  BARRIER_FUNCTION.getPriceAdjoint(
      VANILLA_CALL_K100, veryLowKnockOut, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY,
      derivs);
  // Vanilla adjoint: [0] the price, [1] the derivative with respect to the forward, [2] the
  // derivative with respect to the volatility, [3] the derivative with respect to the strike.
  final double[] vanillaDerivs = BLACK_FUNCTION.getPriceAdjoint(VANILLA_CALL_K100, DATA_BLACK);
  assertEquals(
      "Impossible KnockOut: Vega doesn't match vanilla", vanillaDerivs[2], derivs[4], 1e-6);
  assertEquals(
      "Impossible KnockOut: Dual Delta (d/dK) doesn't match vanilla",
      vanillaDerivs[3],
      derivs[1],
      1e-6);
  assertEquals(
      "Impossible KnockOut: Delta doesn't match vanilla",
      vanillaDerivs[1] * DF_FOR / DF_DOM,
      derivs[0],
      1e-6);
  BARRIER_FUNCTION.getPriceAdjoint(
      VANILLA_CALL_K100, veryHighKnockOut, REBATE, SPOT, COST_OF_CARRY, RATE_DOM, VOLATILITY,
      derivs);
  assertEquals(
      "Impossible KnockOut: Vega doesn't match vanilla", vanillaDerivs[2], derivs[4], 1e-6);
  assertEquals(
      "Impossible KnockOut: Dual Delta (d/dK) doesn't match vanilla",
      vanillaDerivs[3],
      derivs[1],
      1e-6);
  assertEquals(
      "Impossible KnockOut: Delta doesn't match vanilla",
      vanillaDerivs[1] * DF_FOR / DF_DOM,
      derivs[0],
      1e-6);
}
@SuppressWarnings("synthetic-access") @Override public Double visitStrike(final BlackVolatilitySurfaceStrike surface) { final double atmVol = surface.getVolatility(_t, _f); if (_t < 1e-4) { return atmVol * atmVol; } final double rootT = Math.sqrt(_t); final double invNorTol = NORMAL.getInverseCDF(_tol); final Function1D<Double, Double> integrand = getStrikeIntegrand(surface); final Function1D<Double, Double> remainder = new Function1D<Double, Double>() { @Override public Double evaluate(final Double strike) { if (strike == 0) { return 0.0; } final boolean isCall = strike >= _f; final double vol = surface.getVolatility(_t, strike); final double otmPrice = BlackFormulaRepository.price(_f, strike, _t, vol, isCall); final double res = (isCall ? otmPrice / strike : otmPrice / 2 / strike); return res; } }; double putPart; if (_addResidual) { putPart = _integrator.integrate(integrand, _lowStrikeCutoff, _f); putPart += _residual; } else { double l = _f * Math.exp(invNorTol * atmVol * rootT); // initial estimate of lower limit putPart = _integrator.integrate(integrand, l, _f); double rem = remainder.evaluate(l); double error = rem / putPart; while (error > _tol) { l /= 2.0; putPart += _integrator.integrate(integrand, l, 2 * l); rem = remainder.evaluate(l); error = rem / putPart; } putPart += rem; // add on the (very small) remainder estimate otherwise we'll always underestimate // variance } double u = _f * Math.exp(-invNorTol * atmVol * rootT); // initial estimate of upper limit double callPart = _integrator.integrate(integrand, _f, u); double rem = remainder.evaluate(u); double error = rem / callPart; while (error > _tol) { callPart += _integrator.integrate(integrand, u, 2 * u); u *= 2.0; rem = remainder.evaluate(u); error = rem / putPart; } // callPart += rem/2.0; // don't add on the remainder estimate as it is very conservative, and likely too large return 2 * (putPart + callPart) / _t; }
/** Check trivial extrapolation is recovered for Benaim-Dodgson-Kainth extrapolation */
@Test
public void functionRecoveryBDKExtrapolationTest() {
  double forward = 1.0;
  double expiry = 3.0;
  int nSamples = 4;
  double[] strikes = new double[nSamples];
  double[] vols = new double[nSamples];
  final double mu = 1.0;
  final double a = -1.0;
  final double b = 0.0;
  final double c = 0.0;
  // Expected left extrapolation
  Function1D<Double, Double> left =
      new Function1D<Double, Double>() {
        @Override
        public Double evaluate(Double strike) {
          return Math.pow(strike, mu) * Math.exp(a + b * strike + c * strike * strike);
        }
      };
  // Expected right extrapolation
  Function1D<Double, Double> right =
      new Function1D<Double, Double>() {
        @Override
        public Double evaluate(Double strike) {
          return Math.pow(strike, -mu) * Math.exp(a + b / strike + c / strike / strike);
        }
      };
  for (int i = 0; i < nSamples; ++i) {
    double strike = forward * (0.75 + 0.05 * i);
    double price = left.evaluate(strike);
    double vol = BlackFormulaRepository.impliedVolatility(price, forward, strike, expiry, false);
    strikes[i] = strike;
    vols[i] = vol;
  }
  SmileExtrapolationFunctionSABRProvider extrapBDK =
      new BenaimDodgsonKainthExtrapolationFunctionProvider(mu, mu);
  SmileInterpolatorSABRWithExtrapolation interpBDK =
      new SmileInterpolatorSABRWithExtrapolation(
          new SABRBerestyckiVolatilityFunction(), extrapBDK);
  InterpolatedSmileFunction funcBDK =
      new InterpolatedSmileFunction(interpBDK, forward, strikes, expiry, vols);
  double[] keys = new double[] {forward * 0.1, forward * 0.5, forward * 0.66};
  for (int i = 0; i < keys.length; ++i) {
    double vol = funcBDK.getVolatility(keys[i]);
    double price = BlackFormulaRepository.price(forward, keys[i], expiry, vol, false);
    assertEquals(left.evaluate(keys[i]), price, 1.e-2);
  }
  for (int i = 0; i < nSamples; ++i) {
    double strike = forward * (1.1 + 0.05 * i);
    double price = right.evaluate(strike);
    double vol = BlackFormulaRepository.impliedVolatility(price, forward, strike, expiry, true);
    strikes[i] = strike;
    vols[i] = vol;
  }
  extrapBDK = new BenaimDodgsonKainthExtrapolationFunctionProvider(mu, mu);
  interpBDK = new SmileInterpolatorSABRWithExtrapolation(extrapBDK);
  funcBDK = new InterpolatedSmileFunction(interpBDK, forward, strikes, expiry, vols);
  keys = new double[] {forward * 1.31, forward * 1.5, forward * 2.61, forward * 15.0};
  for (int i = 0; i < keys.length; ++i) {
    double vol = funcBDK.getVolatility(keys[i]);
    double price = BlackFormulaRepository.price(forward, keys[i], expiry, vol, true);
    assertEquals(right.evaluate(keys[i]), price, 1.e-2);
  }
}
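// The tail shapes this test expects to recover, as implemented by the local `left` and `right`
// functions above (the Benaim-Dodgson-Kainth form):
//
//   left tail  (puts,  K -> 0):   p(K) = K^mu * exp(a + b * K + c * K^2)
//   right tail (calls, K -> inf): p(K) = K^(-mu) * exp(a + b / K + c / K^2)
//
// With a = -1 and b = c = 0, as chosen here, both reduce to exp(-1) * K^(+/-mu), which is why the
// recovered prices can be checked against the closed form to 1e-2.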
@Override
public Double evaluate(final Double lambda) {
  final DoubleMatrix1D x = (DoubleMatrix1D) OG_ALGEBRA.add(_x0, OG_ALGEBRA.scale(_p, lambda));
  return _f.evaluate(x);
}