Code example #1
File: Decorate.java  Project: paolopavan/cfr
  /**
   * Compute and store statistics required for generating artificial data.
   *
   * @param data training instances
   * @exception Exception if statistics could not be calculated successfully
   */
  protected void computeStats(Instances data) throws Exception {
    int numAttributes = data.numAttributes();
    m_AttributeStats = new Vector(numAttributes); // used to map attributes to their stats

    for (int j = 0; j < numAttributes; j++) {
      if (data.attribute(j).isNominal()) {
        // Compute the probability of occurrence of each distinct value
        int[] nomCounts = (data.attributeStats(j)).nominalCounts;
        double[] counts = new double[nomCounts.length];
        if (counts.length < 2)
          throw new Exception("Nominal attribute has less than two distinct values!");
        // Perform Laplace smoothing
        for (int i = 0; i < counts.length; i++) counts[i] = nomCounts[i] + 1;
        Utils.normalize(counts);
        double[] stats = new double[counts.length - 1];
        stats[0] = counts[0];
        // Calculate cumulative probabilities
        for (int i = 1; i < stats.length; i++) stats[i] = stats[i - 1] + counts[i];
        m_AttributeStats.add(j, stats);
      } else if (data.attribute(j).isNumeric()) {
        // Get mean and standard deviation from the training data
        double[] stats = new double[2];
        stats[0] = data.meanOrMode(j);
        stats[1] = Math.sqrt(data.variance(j));
        m_AttributeStats.add(j, stats);
      } else System.err.println("Decorate can only handle numeric and nominal values.");
    }
  }
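
The cumulative probabilities stored for nominal attributes are what later let Decorate draw artificial nominal values when it generates artificial training examples. A minimal sketch of how such stats could drive inverse-transform sampling (the helper name and the Random argument are illustrative assumptions, not the project's actual code):

  // Hypothetical helper, for illustration only: draw a nominal value index
  // from the cumulative probabilities produced by computeStats(). The last
  // value's cumulative probability (1.0) is implicit, so falling through the
  // loop returns the final index. Assumes java.util.Random.
  protected int sampleNominalValue(double[] cumulativeStats, Random random) {
    double r = random.nextDouble();
    for (int i = 0; i < cumulativeStats.length; i++) {
      if (r <= cumulativeStats[i]) return i;
    }
    return cumulativeStats.length;
  }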
Code example #2
  /**
   * Prints out the classifier.
   *
   * @return a description of the classifier as a string
   */
  public String toString() {
    StringBuffer text = new StringBuffer();
    text.append("SMOreg\n\n");
    if (m_weights != null) {
      text.append("weights (not support vectors):\n");
      // it's a linear machine
      for (int i = 0; i < m_data.numAttributes(); i++) {
        if (i != m_classIndex) {
          text.append(
              (m_weights[i] >= 0 ? " + " : " - ")
                  + Utils.doubleToString(Math.abs(m_weights[i]), 12, 4)
                  + " * ");
          if (m_SVM.getFilterType().getSelectedTag().getID() == SMOreg.FILTER_STANDARDIZE) {
            text.append("(standardized) ");
          } else if (m_SVM.getFilterType().getSelectedTag().getID() == SMOreg.FILTER_NORMALIZE) {
            text.append("(normalized) ");
          }
          text.append(m_data.attribute(i).name() + "\n");
        }
      }
    } else {
      // non-linear, print out all support vectors
      text.append("Support vectors:\n");
      for (int i = 0; i < m_nInstances; i++) {
        if (m_alpha[i] > 0) {
          text.append("+" + m_alpha[i] + " * k[" + i + "]\n");
        }
        if (m_alphaStar[i] > 0) {
          text.append("-" + m_alphaStar[i] + " * k[" + i + "]\n");
        }
      }
    }

    text.append((m_b <= 0 ? " + " : " - ") + Utils.doubleToString(Math.abs(m_b), 12, 4) + "\n\n");

    text.append("\n\nNumber of kernel evaluations: " + m_nEvals);
    if (m_nCacheHits >= 0 && m_nEvals > 0) {
      double hitRatio = 1 - m_nEvals * 1.0 / (m_nCacheHits + m_nEvals);
      text.append(" (" + Utils.doubleToString(hitRatio * 100, 7, 3).trim() + "% cached)");
    }

    return text.toString();
  }
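
The "% cached" figure at the end simplifies to m_nCacheHits / (m_nCacheHits + m_nEvals). A quick check with made-up counts (not taken from any actual run):

  // Illustrative arithmetic only: 750 cache hits and 250 kernel evaluations
  // give 1 - 250 / (750 + 250) = 0.75, which is printed as "75% cached".
  double hitRatio = 1 - 250 * 1.0 / (750 + 250); // 0.75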
Code example #3
File: FarthestFirst.java  Project: bigbigbug/wekax
  /**
   * Calculates the distance between two instances
   *
   * @param first the first instance
   * @param second the second instance
   * @return the distance between the two given instances, between 0 and 1
   */
  protected double distance(Instance first, Instance second) {

    double distance = 0;
    int firstI, secondI;

    for (int p1 = 0, p2 = 0; p1 < first.numValues() || p2 < second.numValues(); ) {
      if (p1 >= first.numValues()) {
        firstI = m_instances.numAttributes();
      } else {
        firstI = first.index(p1);
      }
      if (p2 >= second.numValues()) {
        secondI = m_instances.numAttributes();
      } else {
        secondI = second.index(p2);
      }
      if (firstI == m_instances.classIndex()) {
        p1++;
        continue;
      }
      if (secondI == m_instances.classIndex()) {
        p2++;
        continue;
      }
      double diff;
      if (firstI == secondI) {
        diff = difference(firstI, first.valueSparse(p1), second.valueSparse(p2));
        p1++;
        p2++;
      } else if (firstI > secondI) {
        diff = difference(secondI, 0, second.valueSparse(p2));
        p2++;
      } else {
        diff = difference(firstI, first.valueSparse(p1), 0);
        p1++;
      }
      distance += diff * diff;
    }

    return Math.sqrt(distance / m_instances.numAttributes());
  }
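
The loop above delegates per-attribute comparisons to a difference(index, val1, val2) helper that is not shown here. A plausible minimal version, assuming numeric attributes are already normalized to [0, 1] and nominal attributes contribute 0 on a match and 1 on a mismatch (an illustrative sketch, not necessarily the project's implementation):

  // Hypothetical sketch of the per-attribute difference used by distance().
  protected double difference(int index, double val1, double val2) {
    if (m_instances.attribute(index).isNominal()) {
      // nominal: 0 if the values agree, 1 otherwise
      return ((int) val1 == (int) val2) ? 0 : 1;
    }
    // numeric: signed difference of (assumed normalized) values;
    // distance() squares it, so the sign does not matter
    return val1 - val2;
  }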
Code example #4
  /**
   * evaluates a subset of attributes
   *
   * @param subset a bitset representing the attribute subset to be evaluated
   * @param decAttr the index of the decision attribute
   * @return the merit
   * @throws Exception if the subset could not be evaluated
   */
  public double evaluateSubset(BitSet subset, int decAttr) throws Exception {
    // need to set c_divisor appropriately
    double eval = 0.0;

    // This needs to be performed by the search method itself
    /*if (computeCore) {
        if (core==null) {
            core = computeCore();
        }
        subset.or(core);
    }*/

    if (subset.cardinality() > 0) {
      try {
        eval = m_FuzzyMeasure.calculate(subset, decAttr);
      } catch (Exception e) {
        System.err.println(e);
      }

      eval = Math.min(1, eval);
    }

    return eval;
  }
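
A minimal usage sketch (the evaluator variable, the attribute indices, and the decision-attribute index are made up for illustration):

    // Hypothetical usage: evaluate attributes {0, 2, 5} against decision attribute 7.
    // Assumes java.util.BitSet and an already-configured evaluator instance.
    BitSet subset = new BitSet();
    subset.set(0);
    subset.set(2);
    subset.set(5);
    double merit = evaluator.evaluateSubset(subset, 7);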
Code example #5
File: RankSearch.java  Project: alishakiba/jDenetX
  /**
   * returns a description of the search as a String
   *
   * @return a description of the search
   */
  public String toString() {
    StringBuffer text = new StringBuffer();
    text.append("\tRankSearch :\n");
    text.append("\tAttribute evaluator : " + getAttributeEvaluator().getClass().getName() + " ");
    if (m_ASEval instanceof OptionHandler) {
      String[] evaluatorOptions = new String[0];
      evaluatorOptions = ((OptionHandler) m_ASEval).getOptions();
      for (int i = 0; i < evaluatorOptions.length; i++) {
        text.append(evaluatorOptions[i] + ' ');
      }
    }
    text.append("\n");
    text.append("\tAttribute ranking : \n");
    int rlength = (int) (Math.log(m_Ranking.length) / Math.log(10) + 1);
    for (int i = 0; i < m_Ranking.length; i++) {
      text.append(
          "\t "
              + Utils.doubleToString((double) (m_Ranking[i] + 1), rlength, 0)
              + " "
              + m_Instances.attribute(m_Ranking[i]).name()
              + '\n');
    }
    text.append("\tMerit of best subset found : ");
    int fieldwidth = 3;
    double precision = (m_bestMerit - (int) m_bestMerit);
    if (Math.abs(m_bestMerit) > 0) {
      fieldwidth = (int) Math.abs((Math.log(Math.abs(m_bestMerit)) / Math.log(10))) + 2;
    }
    if (Math.abs(precision) > 0) {
      precision = Math.abs((Math.log(Math.abs(precision)) / Math.log(10))) + 3;
    } else {
      precision = 2;
    }

    text.append(
        Utils.doubleToString(Math.abs(m_bestMerit), fieldwidth + (int) precision, (int) precision)
            + "\n");
    return text.toString();
  }
Code example #6
File: Decorate.java  Project: paolopavan/cfr
  /**
   * Build Decorate classifier
   *
   * @param data the training data to be used for generating the classifier
   * @exception Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {
    if (m_Classifier == null) {
      throw new Exception("A base classifier has not been specified!");
    }
    if (data.checkForStringAttributes()) {
      throw new UnsupportedAttributeTypeException("Cannot handle string attributes!");
    }
    if (data.classAttribute().isNumeric()) {
      throw new UnsupportedClassTypeException("Decorate can't handle a numeric class!");
    }
    if (m_NumIterations < m_DesiredSize)
      throw new Exception("Max number of iterations must be >= desired ensemble size!");

    // initialize random number generator
    if (m_Seed == -1) m_Random = new Random();
    else m_Random = new Random(m_Seed);

    int i = 1; // current committee size
    int numTrials = 1; // number of Decorate iterations
    Instances divData = new Instances(data); // local copy of data - diversity data
    divData.deleteWithMissingClass();
    Instances artData = null; // artificial data

    // compute number of artificial instances to add at each iteration
    int artSize = (int) (Math.abs(m_ArtSize) * divData.numInstances());
    if (artSize == 0) artSize = 1; // add at least one random example
    computeStats(data); // Compute training data stats for creating artificial examples

    // initialize new committee
    m_Committee = new Vector();
    Classifier newClassifier = m_Classifier;
    newClassifier.buildClassifier(divData);
    m_Committee.add(newClassifier);
    double eComm = computeError(divData); // compute ensemble error
    if (m_Debug)
      System.out.println(
          "Initialize:\tClassifier " + i + " added to ensemble. Ensemble error = " + eComm);

    // repeat till desired committee size is reached OR the max number of iterations is exceeded
    while (i < m_DesiredSize && numTrials < m_NumIterations) {
      // Generate artificial training examples
      artData = generateArtificialData(artSize, data);

      // Label artificial examples
      labelData(artData);
      addInstances(divData, artData); // Add new artificial data

      // Build new classifier
      Classifier tmp[] = Classifier.makeCopies(m_Classifier, 1);
      newClassifier = tmp[0];
      newClassifier.buildClassifier(divData);
      // Remove all the artificial data
      removeInstances(divData, artSize);

      // Test if the new classifier should be added to the ensemble
      m_Committee.add(newClassifier); // add new classifier to current committee
      double currError = computeError(divData);
      if (currError <= eComm) { // adding the new member did not increase the error
        i++;
        eComm = currError;
        if (m_Debug)
          System.out.println(
              "Iteration: "
                  + (1 + numTrials)
                  + "\tClassifier "
                  + i
                  + " added to ensemble. Ensemble error = "
                  + eComm);
      } else { // reject the current classifier because it increased the ensemble error
        m_Committee.removeElementAt(m_Committee.size() - 1); // pop the last member
      }
      numTrials++;
    }
  }
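
The acceptance test depends on computeError(divData), which is not reproduced here. A hedged sketch of what such an ensemble-error computation might look like, assuming simple majority voting over the current committee (an assumption for illustration; Decorate's actual computeError may differ):

  // Hypothetical sketch: fraction of instances misclassified by the committee
  // under plain majority voting.
  protected double computeError(Instances data) throws Exception {
    int errors = 0;
    for (int i = 0; i < data.numInstances(); i++) {
      double[] votes = new double[data.numClasses()];
      for (int m = 0; m < m_Committee.size(); m++) {
        Classifier member = (Classifier) m_Committee.get(m);
        votes[(int) member.classifyInstance(data.instance(i))]++;
      }
      if (Utils.maxIndex(votes) != (int) data.instance(i).classValue()) errors++;
    }
    return (double) errors / data.numInstances();
  }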
Code example #7
  /**
   * Returns the best cut of a graph w.r.t. the degree of dissimilarity between points of different
   * partitions and the degree of similarity between points of the same partition.
   *
   * @param W the weight matrix of the graph
   * @return an array of two elements, each of these contains the points of a partition
   */
  protected static int[][] bestCut(DoubleMatrix2D W) {
    int n = W.columns();
    // Builds the diagonal matrices D and D^(-1/2) (represented as their diagonals)
    DoubleMatrix1D d = DoubleFactory1D.dense.make(n);
    DoubleMatrix1D d_minus_1_2 = DoubleFactory1D.dense.make(n);
    for (int i = 0; i < n; i++) {
      double d_i = W.viewRow(i).zSum();
      d.set(i, d_i);
      d_minus_1_2.set(i, 1 / Math.sqrt(d_i));
    }
    DoubleMatrix2D D = DoubleFactory2D.sparse.diagonal(d);

    // System.out.println("DoubleMatrix2D :\n"+D.toString());

    DoubleMatrix2D X = D.copy();

    // System.out.println("DoubleMatrix2D copy :\n"+X.toString());

    // X = D^(-1/2) * (D - W) * D^(-1/2)
    X.assign(W, Functions.minus);
    // System.out.println("DoubleMatrix2D X: (D-W) :\n"+X.toString());
    for (int i = 0; i < n; i++)
      for (int j = 0; j < n; j++)
        X.set(i, j, X.get(i, j) * d_minus_1_2.get(i) * d_minus_1_2.get(j));

    // Computes the eigenvalues and the eigenvectors of X
    EigenvalueDecomposition e = new EigenvalueDecomposition(X);
    DoubleMatrix1D lambda = e.getRealEigenvalues();

    // Selects the eigenvector z_2 associated with the second smallest eigenvalue
    // Creates a map that contains the pairs <index, eigenvalue>
    AbstractIntDoubleMap map = new OpenIntDoubleHashMap(n);
    for (int i = 0; i < n; i++) map.put(i, Math.abs(lambda.get(i)));
    IntArrayList list = new IntArrayList();
    // Sorts the map on the value
    map.keysSortedByValue(list);
    // Gets the index of the second smallest element
    int i_2 = list.get(1);

    // y_2 = D^(-1/2) * z_2
    DoubleMatrix1D y_2 = e.getV().viewColumn(i_2).copy();
    y_2.assign(d_minus_1_2, Functions.mult);

    // Creates a map that contains the pairs <i, y_2[i]>
    map.clear();
    for (int i = 0; i < n; i++) map.put(i, y_2.get(i));
    // Sorts the map on the value
    map.keysSortedByValue(list);
    // Search the element in the previously ordered map that minimizes the cut
    // of the partition
    double best_cut = Double.POSITIVE_INFINITY;
    int[][] partition = new int[2][];

    // The array v contains all the elements of the graph ordered by their
    // projection on vector y_2
    int[] v = list.elements();
    // For each admissible splitting point i
    for (int i = 1; i < n; i++) {
      // The array a contains all the elements that have a projection on vector
      // y_2 less or equal to the one of i-th element
      // The array b contains the remaining elements
      int[] a = new int[i];
      int[] b = new int[n - i];
      System.arraycopy(v, 0, a, 0, i);
      System.arraycopy(v, i, b, 0, n - i);
      double cut = Ncut(W, a, b, v);
      if (cut < best_cut) {
        best_cut = cut;
        partition[0] = a;
        partition[1] = b;
      }
    }

    // System.out.println("Partition:");
    // UtilsJS.printMatrix(partition);

    return partition;
  }
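
The splitting loop calls Ncut(W, a, b, v), which is not reproduced here. For reference, a sketch following the standard normalized-cut definition cut(A,B)/assoc(A,V) + cut(A,B)/assoc(B,V) (assumed here; the project's Ncut may be computed differently):

  // Hypothetical sketch of the normalized-cut value for partition (a, b) of
  // the vertex set v, following the standard Shi-Malik definition.
  protected static double Ncut(DoubleMatrix2D W, int[] a, int[] b, int[] v) {
    double cutAB = 0, assocAV = 0, assocBV = 0;
    for (int i = 0; i < a.length; i++) {
      for (int j = 0; j < b.length; j++) cutAB += W.get(a[i], b[j]);
      for (int j = 0; j < v.length; j++) assocAV += W.get(a[i], v[j]);
    }
    for (int i = 0; i < b.length; i++)
      for (int j = 0; j < v.length; j++) assocBV += W.get(b[i], v[j]);
    return cutAB / assocAV + cutAB / assocBV;
  }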
Code example #8
File: LWL.java  Project: alishakiba/jDenetX
  /**
   * Calculates the class membership probabilities for the given test instance.
   *
   * @param instance the instance to be classified
   * @return the predicted class probability distribution
   * @throws Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    // default model?
    if (m_ZeroR != null) {
      return m_ZeroR.distributionForInstance(instance);
    }

    if (m_Train.numInstances() == 0) {
      throw new Exception("No training instances!");
    }

    m_NNSearch.addInstanceInfo(instance);

    int k = m_Train.numInstances();
    if ((!m_UseAllK && (m_kNN < k)) /*&&
       !(m_WeightKernel==INVERSE ||
         m_WeightKernel==GAUSS)*/) {
      k = m_kNN;
    }

    Instances neighbours = m_NNSearch.kNearestNeighbours(instance, k);
    double distances[] = m_NNSearch.getDistances();

    if (m_Debug) {
      System.out.println("Test Instance: " + instance);
      System.out.println(
          "For "
              + k
              + " kept "
              + neighbours.numInstances()
              + " out of "
              + m_Train.numInstances()
              + " instances.");
    }

    // If LinearNN has skipped so many instances that fewer than k neighbours remain.
    if (k > distances.length) k = distances.length;

    if (m_Debug) {
      System.out.println("Instance Distances");
      for (int i = 0; i < distances.length; i++) {
        System.out.println("" + distances[i]);
      }
    }

    // Determine the bandwidth
    double bandwidth = distances[k - 1];

    // Check for bandwidth zero
    if (bandwidth <= 0) {
      // if the kth distance is zero then give all instances the same weight
      for (int i = 0; i < distances.length; i++) distances[i] = 1;
    } else {
      // Rescale the distances by the bandwidth
      for (int i = 0; i < distances.length; i++) distances[i] = distances[i] / bandwidth;
    }

    // Pass the distances through a weighting kernel
    for (int i = 0; i < distances.length; i++) {
      switch (m_WeightKernel) {
        case LINEAR:
          distances[i] = 1.0001 - distances[i];
          break;
        case EPANECHNIKOV:
          distances[i] = 3 / 4D * (1.0001 - distances[i] * distances[i]);
          break;
        case TRICUBE:
          distances[i] = Math.pow((1.0001 - Math.pow(distances[i], 3)), 3);
          break;
        case CONSTANT:
          // System.err.println("using constant kernel");
          distances[i] = 1;
          break;
        case INVERSE:
          distances[i] = 1.0 / (1.0 + distances[i]);
          break;
        case GAUSS:
          distances[i] = Math.exp(-distances[i] * distances[i]);
          break;
      }
    }

    if (m_Debug) {
      System.out.println("Instance Weights");
      for (int i = 0; i < distances.length; i++) {
        System.out.println("" + distances[i]);
      }
    }

    // Set the weights on the training data
    double sumOfWeights = 0, newSumOfWeights = 0;
    for (int i = 0; i < distances.length; i++) {
      double weight = distances[i];
      Instance inst = (Instance) neighbours.instance(i);
      sumOfWeights += inst.weight();
      newSumOfWeights += inst.weight() * weight;
      inst.setWeight(inst.weight() * weight);
      // weightedTrain.add(newInst);
    }

    // Rescale weights
    for (int i = 0; i < neighbours.numInstances(); i++) {
      Instance inst = neighbours.instance(i);
      inst.setWeight(inst.weight() * sumOfWeights / newSumOfWeights);
    }

    // Create a weighted classifier
    m_Classifier.buildClassifier(neighbours);

    if (m_Debug) {
      System.out.println("Classifying test instance: " + instance);
      System.out.println("Built base classifier:\n" + m_Classifier.toString());
    }

    // Return the classifier's predictions
    return m_Classifier.distributionForInstance(instance);
  }
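
As a small numeric illustration of the bandwidth step and the LINEAR kernel above (made-up distances, not from an actual run): with k = 3 and raw distances {0.2, 0.5, 1.0}, the bandwidth is the kth distance, 1.0, the rescaled distances are unchanged, and the linear weights become {0.8001, 0.5001, 0.0001}, so the farthest kept neighbour receives an almost-zero weight.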
Code example #9
  /** if clusterIdx is -1, all instances are used (a single metric for all clusters is used) */
  public boolean trainMetric(int clusterIdx) throws Exception {
    Init(clusterIdx);

    double[] weights = new double[m_numAttributes];
    int violatedConstraints = 0;
    int numInstances = 0;

    for (int instIdx = 0; instIdx < m_instances.numInstances(); instIdx++) {
      int assignment = m_clusterAssignments[instIdx];

      // only instances assigned to this cluster are of importance
      if (assignment == clusterIdx || clusterIdx == -1) {
        numInstances++;
        if (clusterIdx < 0) {
          m_centroid = m_kmeans.getClusterCentroids().instance(assignment);
        }

        // accumulate variance
        Instance instance = m_instances.instance(instIdx);
        Instance diffInstance = m_metric.createDiffInstance(instance, m_centroid);
        for (int attr = 0; attr < m_numAttributes; attr++) {
          weights[attr] += diffInstance.value(attr);
        }

        // check all constraints for this instance
        Object list = m_instanceConstraintMap.get(new Integer(instIdx));
        if (list != null) { // there are constraints associated with this instance
          ArrayList constraintList = (ArrayList) list;
          for (int i = 0; i < constraintList.size(); i++) {
            InstancePair pair = (InstancePair) constraintList.get(i);
            int linkType = pair.linkType;
            int firstIdx = pair.first;
            int secondIdx = pair.second;
            Instance instance1 = m_instances.instance(firstIdx);
            Instance instance2 = m_instances.instance(secondIdx);
            int otherIdx =
                (firstIdx == instIdx)
                    ? m_clusterAssignments[secondIdx]
                    : m_clusterAssignments[firstIdx];

            if (otherIdx != -1) { // check whether the constraint is violated
              if (otherIdx != assignment && linkType == InstancePair.MUST_LINK) {
                diffInstance = m_metric.createDiffInstance(instance1, instance2);
                for (int attr = 0; attr < m_numAttributes; attr++) {
                  weights[attr] += 0.5 * m_MLweight * diffInstance.value(attr);
                }
              } else if (otherIdx == assignment && linkType == InstancePair.CANNOT_LINK) {
                diffInstance = m_metric.createDiffInstance(instance1, instance2);
                for (int attr = 0; attr < m_numAttributes; attr++) {
                  // this constraint will be counted twice, hence 0.5
                  weights[attr] += 0.5 * m_CLweight * m_maxCLDiffInstance.value(attr);
                  weights[attr] -= 0.5 * m_CLweight * diffInstance.value(attr);
                }
              }
            }
          }
        }
      }
    }
    //      System.out.println("Updating cluster " + clusterIdx
    //  		       + " containing " + numInstances);

    // check the weights
    double[] newWeights = new double[m_numAttributes];
    double[] currentWeights = m_metric.getWeights();

    boolean needNewtonRaphson = false;
    for (int attr = 0; attr < m_numAttributes; attr++) {
      if (weights[attr] <= 0) { // check to avoid divide by 0 - TODO!
        System.out.println(
            "Negative weight "
                + weights[attr]
                + " for clusterIdx="
                + clusterIdx
                + "; using prev value="
                + currentWeights[attr]);
        newWeights[attr] = currentWeights[attr];
        //  	needNewtonRaphson = true;
        //	break;
      } else {
        if (m_regularize) { // solution of quadratic equation - TODO!
          int n = m_instances.numInstances();
          double ratio = (m_logTermWeight * n) / (2 * weights[attr]);
          newWeights[attr] =
              ratio + Math.sqrt(ratio * ratio + (m_regularizerTermWeight * n) / weights[attr]);
        } else {
          newWeights[attr] = m_logTermWeight * numInstances / weights[attr];
        }
      }
    }

    // do NR if needed
    if (needNewtonRaphson) {
      System.out.println("GOING TO NEWTON-RAPHSON!!!\n");
      newWeights = updateWeightsUsingNewtonRaphson(currentWeights, weights);
    }

    // PRINT routine
    //      System.out.println("Total constraints violated: " + violatedConstraints/2 + "; weights
    // are:");
    //      for (int attr=0; attr<numAttributes; attr++) {
    //        System.out.print(newWeights[attr] + "\t");
    //      }
    //      System.out.println();
    // end PRINT routine

    m_metric.setWeights(newWeights);
    return true;
  }
Code example #10
File: RegSMO.java  Project: alishakiba/jDenetX
  /**
   * finds alpha and alpha* parameters that optimize the SVM target function
   *
   * @throws Exception
   */
  public void optimize() throws Exception {

    //	main routine:
    //		initialize threshold to zero
    //		numChanged = 0
    //		examineAll = 1
    //		SigFig = -100
    //		LoopCounter = 0
    int numChanged = 0;
    int examineAll = 1;
    int sigFig = -100;
    int loopCounter = 0;
    //		while ((numChanged > 0 | examineAll) | (SigFig < 3))
    while ((numChanged > 0 || (examineAll > 0)) | (sigFig < 3)) {
      //			LoopCounter++
      //			numChanged = 0;
      loopCounter++;
      numChanged = 0;
      //			if (examineAll)
      //				loop I over all training examples
      //				numChanged += examineExample(I)
      //			else
      //				loop I over examples where alpha is not 0 & not C
      //				numChanged += examineExample(I)
      //			endif
      int numSamples = 0;
      if (examineAll > 0) {
        for (int i = 0; i < m_nInstances; i++) {
          numChanged += examineExample(i);
        }
      } else {
        for (int i = 0; i < m_target.length; i++) {
          if ((m_alpha[i] > 0 && m_alpha[i] < m_C * m_data.instance(i).weight())
              || (m_alphaStar[i] > 0 && m_alphaStar[i] < m_C * m_data.instance(i).weight())) {
            numSamples++;
            numChanged += examineExample(i);
          }
        }
      }
      //
      //		if (mod(LoopCounter, 2) == 0)
      //				MinimumNumChanged = max(1, 0.1*NumSamples)
      //			else
      //				MinimumNumChanged = 1
      //			endif
      int minimumNumChanged = 1;
      if (loopCounter % 2 == 0) {
        minimumNumChanged = (int) Math.max(1, 0.1 * numSamples);
      }

      //			if (examineAll == 1)
      //				examineAll = 0
      //			elseif (numChanged < MinimumNumChanged)
      //				examineAll = 1
      //			endif
      if (examineAll == 1) {
        examineAll = 0;
      } else if (numChanged < minimumNumChanged) {
        examineAll = 1;
      }

      //		endwhile
      if (loopCounter == 2500) {
        break;
      }
    }
    //	endmain
  }
Code example #11
File: RegSMO.java  Project: alishakiba/jDenetX
  /**
   * Finds optimal point on line constrained by first (i1) and second (i2) candidate. Parameters
   * correspond to the pseudocode (see technical information)
   *
   * @param i1
   * @param alpha1
   * @param alpha1Star
   * @param C1
   * @param i2
   * @param alpha2
   * @param alpha2Star
   * @param C2
   * @param gamma
   * @param eta
   * @param deltaPhi
   * @return true if the alpha values were updated, false otherwise
   */
  protected boolean findOptimalPointOnLine(
      int i1,
      double alpha1,
      double alpha1Star,
      double C1,
      int i2,
      double alpha2,
      double alpha2Star,
      double C2,
      double gamma,
      double eta,
      double deltaPhi) {
    if (eta <= 0) {
      // this may happen due to numeric instability
      // due to Mercer's condition, this should not happen, hence we give up
      return false;
    }

    boolean case1 = false;
    boolean case2 = false;
    boolean case3 = false;
    boolean case4 = false;
    boolean finished = false;

    //		while !finished
    //		% this loop is passed at most three times
    //		% case variables needed to avoid attempting small changes twice
    while (!finished) {
      //			if (case1 == 0) &&
      //				(alpha1 > 0 || (alpha1* == 0 && deltaPhi > 0)) &&
      //				(alpha2 > 0 || (alpha2* == 0 && deltaPhi < 0))
      //				compute L, H (wrt. alpha1, alpha2)
      //				if L < H
      //					a2 = alpha2 - deltaPhi/eta
      //					a2 = min(a2, H)
      //					a2 = max(L, a2)
      //					a1 = alpha1 - (a2 - alpha2)
      //					update alpha1, alpha2 if change is larger than some eps
      //				else
      //					finished = 1
      //				endif
      //				case1 = 1;

      if ((case1 == false)
          && (alpha1 > 0 || (alpha1Star == 0 && deltaPhi > 0))
          && (alpha2 > 0 || (alpha2Star == 0 && deltaPhi < 0))) {
        // compute L, H (wrt. alpha1, alpha2)
        double L = Math.max(0, gamma - C1);
        double H = Math.min(C2, gamma);
        if (L < H) {
          double a2 = alpha2 - deltaPhi / eta;
          a2 = Math.min(a2, H);
          a2 = Math.max(L, a2);
          // To prevent precision problems
          if (a2 > C2 - m_Del * C2) {
            a2 = C2;
          } else if (a2 <= m_Del * C2) {
            a2 = 0;
          }
          double a1 = alpha1 - (a2 - alpha2);
          if (a1 > C1 - m_Del * C1) {
            a1 = C1;
          } else if (a1 <= m_Del * C1) {
            a1 = 0;
          }
          // update alpha1, alpha2 if change is larger than some eps
          if (Math.abs(alpha1 - a1) > m_eps) {
            deltaPhi += eta * (a2 - alpha2);
            alpha1 = a1;
            alpha2 = a2;
          }
        } else {
          finished = true;
        }
        case1 = true;
      }

      //			elseif (case2 == 0) &&
      //				(alpha1 > 0 || (alpha1* == 0 && deltaPhi > 2 epsilon)) &&
      //				(alpha2* > 0 || (alpha2 == 0 && deltaPhi > 2 epsilon))
      //				compute L, H (wrt. alpha1, alpha2*)
      //				if L < H
      //					a2 = alpha2* + (deltaPhi - 2 epsilon)/eta
      //					a2 = min(a2, H)
      //					a2 = max(L, a2)
      //					a1 = alpha1 + (a2 - alpha2*)
      //					update alpha1, alpha2* if change is larger than some eps
      //				else
      //					finished = 1
      //				endif
      //				case2 = 1;

      else if ((case2 == false)
          && (alpha1 > 0 || (alpha1Star == 0 && deltaPhi > 2 * m_epsilon))
          && (alpha2Star > 0 || (alpha2 == 0 && deltaPhi > 2 * m_epsilon))) {
        // compute L, H (wrt. alpha1, alpha2*)
        double L = Math.max(0, -gamma);
        double H = Math.min(C2, -gamma + C1);
        if (L < H) {
          double a2 = alpha2Star + (deltaPhi - 2 * m_epsilon) / eta;
          a2 = Math.min(a2, H);
          a2 = Math.max(L, a2);
          // To prevent precision problems
          if (a2 > C2 - m_Del * C2) {
            a2 = C2;
          } else if (a2 <= m_Del * C2) {
            a2 = 0;
          }
          double a1 = alpha1 + (a2 - alpha2Star);
          if (a1 > C1 - m_Del * C1) {
            a1 = C1;
          } else if (a1 <= m_Del * C1) {
            a1 = 0;
          }
          // update alpha1, alpha2* if change is larger than some eps
          if (Math.abs(alpha1 - a1) > m_eps) {
            deltaPhi += eta * (-a2 + alpha2Star);
            alpha1 = a1;
            alpha2Star = a2;
          }
        } else {
          finished = true;
        }
        case2 = true;
      }

      //			elseif (case3 == 0) &&
      //				(alpha1* > 0 || (alpha1 == 0 && deltaPhi < -2 epsilon)) &&
      //				(alpha2 > 0 || (alpha2* == 0 && deltaPhi < -2 epsilon))
      //				compute L, H (wrt. alpha1*, alpha2)
      //				if L < H
      //					a2 = alpha2 - (deltaPhi + 2 epsilon)/eta
      //					a2 = min(a2, H)
      //					a2 = max(L, a2)
      //					a1 = alpha1* + (a2 - alpha2)
      //					update alpha1*, alpha2 if change is larger than some eps
      //				else
      //					finished = 1
      //				endif
      //				case3 = 1;

      else if ((case3 == false)
          && (alpha1Star > 0 || (alpha1 == 0 && deltaPhi < -2 * m_epsilon))
          && (alpha2 > 0 || (alpha2Star == 0 && deltaPhi < -2 * m_epsilon))) {
        // compute L, H (wrt. alpha1*, alpha2)
        double L = Math.max(0, gamma);
        double H = Math.min(C2, C1 + gamma);
        if (L < H) {
          // note: Smola's pseudocode has a minus where there should be a plus in the following
          // line; Keerthi's version is correct
          double a2 = alpha2 - (deltaPhi + 2 * m_epsilon) / eta;
          a2 = Math.min(a2, H);
          a2 = Math.max(L, a2);
          // To prevent precision problems
          if (a2 > C2 - m_Del * C2) {
            a2 = C2;
          } else if (a2 <= m_Del * C2) {
            a2 = 0;
          }
          double a1 = alpha1Star + (a2 - alpha2);
          if (a1 > C1 - m_Del * C1) {
            a1 = C1;
          } else if (a1 <= m_Del * C1) {
            a1 = 0;
          }
          // update alpha1*, alpha2 if change is larger than some eps
          if (Math.abs(alpha1Star - a1) > m_eps) {
            deltaPhi += eta * (a2 - alpha2);
            alpha1Star = a1;
            alpha2 = a2;
          }
        } else {
          finished = true;
        }
        case3 = true;
      }

      //			elseif (case4 == 0) &&
      //				(alpha1* > 0 || (alpha1 == 0 && deltaPhi < 0)) &&
      //				(alpha2* > 0 || (alpha2 == 0 && deltaPhi > 0))
      //				compute L, H (wrt. alpha1*, alpha2*)
      //				if L < H
      //					a2 = alpha2* + deltaPhi/eta
      //					a2 = min(a2, H)
      //					a2 = max(L, a2)
      //					a1 = alpha1* - (a2 - alpha2*)
      //					update alpha1*, alpha2* if change is larger than some eps
      //				else
      //					finished = 1
      //				endif
      //				case4 = 1;
      //			else
      //				finished = 1
      //			endif

      else if ((case4 == false)
          && (alpha1Star > 0 || (alpha1 == 0 && deltaPhi < 0))
          && (alpha2Star > 0 || (alpha2 == 0 && deltaPhi > 0))) {
        // compute L, H (wrt. alpha1*, alpha2*)
        double L = Math.max(0, -gamma - C1);
        double H = Math.min(C2, -gamma);
        if (L < H) {
          double a2 = alpha2Star + deltaPhi / eta;
          a2 = Math.min(a2, H);
          a2 = Math.max(L, a2);
          // To prevent precision problems
          if (a2 > C2 - m_Del * C2) {
            a2 = C2;
          } else if (a2 <= m_Del * C2) {
            a2 = 0;
          }
          double a1 = alpha1Star - (a2 - alpha2Star);
          if (a1 > C1 - m_Del * C1) {
            a1 = C1;
          } else if (a1 <= m_Del * C1) {
            a1 = 0;
          }
          // update alpha1*, alpha2* if change is larger than some eps
          if (Math.abs(alpha1Star - a1) > m_eps) {
            deltaPhi += eta * (-a2 + alpha2Star);

            alpha1Star = a1;
            alpha2Star = a2;
          }
        } else {
          finished = true;
        }
        case4 = true;
      } else {
        finished = true;
      }

      //			update deltaPhi
      // using 4.36 from Smola's thesis:
      // deltaPhi = deltaPhi - eta * ((alpha1New-alpha1StarNew)-(alpha1-alpha1Star));
      // the update is done inside the loop, saving us from having to remember old values of alpha1(*)
      // deltaPhi += eta * ((alpha2 - alpha2Star) - dAlpha2Old);
      // dAlpha2Old = (alpha2 - alpha2Star);

      //		endwhile

    }

    if (Math.abs(alpha1 - m_alpha[i1]) > m_eps
        || Math.abs(alpha1Star - m_alphaStar[i1]) > m_eps
        || Math.abs(alpha2 - m_alpha[i2]) > m_eps
        || Math.abs(alpha2Star - m_alphaStar[i2]) > m_eps) {

      if (alpha1 > C1 - m_Del * C1) {
        alpha1 = C1;
      } else if (alpha1 <= m_Del * C1) {
        alpha1 = 0;
      }
      if (alpha1Star > C1 - m_Del * C1) {
        alpha1Star = C1;
      } else if (alpha1Star <= m_Del * C1) {
        alpha1Star = 0;
      }
      if (alpha2 > C2 - m_Del * C2) {
        alpha2 = C2;
      } else if (alpha2 <= m_Del * C2) {
        alpha2 = 0;
      }
      if (alpha2Star > C2 - m_Del * C2) {
        alpha2Star = C2;
      } else if (alpha2Star <= m_Del * C2) {
        alpha2Star = 0;
      }

      // store new alpha's
      m_alpha[i1] = alpha1;
      m_alphaStar[i1] = alpha1Star;
      m_alpha[i2] = alpha2;
      m_alphaStar[i2] = alpha2Star;

      // update support vector set
      if (alpha1 != 0 || alpha1Star != 0) {
        if (!m_supportVectors.contains(i1)) {
          m_supportVectors.insert(i1);
        }
      } else {
        m_supportVectors.delete(i1);
      }
      if (alpha2 != 0 || alpha2Star != 0) {
        if (!m_supportVectors.contains(i2)) {
          m_supportVectors.insert(i2);
        }
      } else {
        m_supportVectors.delete(i2);
      }
      return true;
    }

    return false;
  }