Example #1
  @Override
  public double empiricalLoss(
      final org.drip.function.definition.R1ToR1 funcLearnerR1ToR1,
      final org.drip.spaces.instance.GeneralizedValidatedVector gvviX,
      final org.drip.spaces.instance.GeneralizedValidatedVector gvviY)
      throws java.lang.Exception {
    if (null == funcLearnerR1ToR1
        || null == gvviX
        || !(gvviX instanceof org.drip.spaces.instance.ValidatedR1)
        || null == gvviY
        || !(gvviY instanceof org.drip.spaces.instance.ValidatedR1))
      throw new java.lang.Exception("LpLossLearner::empiricalLoss => Invalid Inputs");

    double[] adblX = ((org.drip.spaces.instance.ValidatedR1) gvviX).instance();

    double[] adblY = ((org.drip.spaces.instance.ValidatedR1) gvviY).instance();

    double dblEmpiricalLoss = 0.;
    int iNumSample = adblX.length;

    if (iNumSample != adblY.length)
      throw new java.lang.Exception("LpLossLearner::empiricalLoss => Invalid Inputs");

    // Accumulate the per-sample loss |f(x_i) - y_i|^p at the configured exponent
    for (int i = 0; i < iNumSample; ++i)
      dblEmpiricalLoss +=
          java.lang.Math.pow(
              java.lang.Math.abs(funcLearnerR1ToR1.evaluate(adblX[i]) - adblY[i]),
              _dblLossExponent);

    // The L_p loss is |.|^p / p, hence the final scaling by the loss exponent
    return dblEmpiricalLoss / _dblLossExponent;
  }
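
The method accumulates the per-sample loss |f(x_i) - y_i|^p and scales the total by 1 / p (the L_p loss exponent). Below is a minimal standalone sketch of the same computation on plain arrays, with a java.util.function.DoubleUnaryOperator standing in for the DRIP R1ToR1 learner; the class and method names are illustrative, not part of the DRIP API.

import java.util.function.DoubleUnaryOperator;

public class LpEmpiricalLossSketch {

  // Plain-array version of the L_p empirical loss: sum_i |f(x_i) - y_i|^p / p
  public static double empiricalLoss(
      final DoubleUnaryOperator learner,
      final double[] x,
      final double[] y,
      final double p)
      throws Exception {
    if (null == learner || null == x || null == y || x.length != y.length)
      throw new Exception("LpEmpiricalLossSketch::empiricalLoss => Invalid Inputs");

    double loss = 0.;

    for (int i = 0; i < x.length; ++i)
      loss += Math.pow(Math.abs(learner.applyAsDouble(x[i]) - y[i]), p);

    return loss / p;
  }

  public static void main(final String[] args) throws Exception {
    double[] x = {0., 1., 2.};
    double[] y = {0., 2., 4.};

    // Learner f(x) = x against targets y = 2x with p = 2: (0 + 1 + 4) / 2 = 2.5
    System.out.println(empiricalLoss(z -> z, x, y, 2.));
  }
}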
Example #2
  @Override
  public double empiricalRisk(
      final org.drip.measure.continuous.R1R1 distR1R1,
      final org.drip.function.definition.R1ToR1 funcLearnerR1ToR1,
      final org.drip.spaces.instance.GeneralizedValidatedVector gvviX,
      final org.drip.spaces.instance.GeneralizedValidatedVector gvviY)
      throws java.lang.Exception {
    if (null == distR1R1
        || null == funcLearnerR1ToR1
        || null == gvviX
        || !(gvviX instanceof org.drip.spaces.instance.ValidatedR1)
        || null == gvviY
        || !(gvviY instanceof org.drip.spaces.instance.ValidatedR1))
      throw new java.lang.Exception("LpLossLearner::empiricalRisk => Invalid Inputs");

    double[] adblX = ((org.drip.spaces.instance.ValidatedR1) gvviX).instance();

    double[] adblY = ((org.drip.spaces.instance.ValidatedR1) gvviY).instance();

    double dblNormalizer = 0.;
    double dblEmpiricalLoss = 0.;
    int iNumSample = adblX.length;

    if (iNumSample != adblY.length)
      throw new java.lang.Exception("LpLossLearner::empiricalRisk => Invalid Inputs");

    for (int i = 0; i < iNumSample; ++i) {
      // Weight each sample's loss by the joint density at (x_i, y_i)
      double dblDensity = distR1R1.density(adblX[i], adblY[i]);

      dblNormalizer += dblDensity;

      dblEmpiricalLoss +=
          dblDensity
              * java.lang.Math.pow(
                  java.lang.Math.abs(funcLearnerR1ToR1.evaluate(adblX[i]) - adblY[i]),
                  _dblLossExponent);
    }

    // Density-weighted average of the per-sample L_p loss, scaled by 1 / p
    return dblEmpiricalLoss / _dblLossExponent / dblNormalizer;
  }
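
empiricalRisk is the density-weighted counterpart of empiricalLoss: each per-sample loss |f(x_i) - y_i|^p is weighted by the joint density at (x_i, y_i), and the weighted sum is divided by both the loss exponent and the accumulated density, giving a weighted average. A minimal standalone sketch under the same assumptions as above, with a java.util.function.DoubleBinaryOperator standing in for the DRIP R1R1 density (illustrative names only):

import java.util.function.DoubleBinaryOperator;
import java.util.function.DoubleUnaryOperator;

public class LpEmpiricalRiskSketch {

  // Density-weighted L_p empirical risk:
  // (sum_i w_i * |f(x_i) - y_i|^p) / (p * sum_i w_i), where w_i = density(x_i, y_i)
  public static double empiricalRisk(
      final DoubleBinaryOperator density,
      final DoubleUnaryOperator learner,
      final double[] x,
      final double[] y,
      final double p)
      throws Exception {
    if (null == density || null == learner || null == x || null == y || x.length != y.length)
      throw new Exception("LpEmpiricalRiskSketch::empiricalRisk => Invalid Inputs");

    double normalizer = 0.;
    double weightedLoss = 0.;

    for (int i = 0; i < x.length; ++i) {
      double w = density.applyAsDouble(x[i], y[i]);

      normalizer += w;
      weightedLoss += w * Math.pow(Math.abs(learner.applyAsDouble(x[i]) - y[i]), p);
    }

    return weightedLoss / p / normalizer;
  }

  public static void main(final String[] args) throws Exception {
    double[] x = {0., 1., 2.};
    double[] y = {0., 2., 4.};

    // A uniform density reduces the risk to the plain empirical loss divided by
    // the sample count: 2.5 / 3 ≈ 0.8333
    System.out.println(empiricalRisk((a, b) -> 1., z -> z, x, y, 2.));
  }
}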