/**
   * Computes the density function <i>f</i>(<i>x</i>), with
   * λ<sub><i>i</i></sub> = <tt>lambda[<i>i</i> - 1]</tt>, <i>i</i> = 1, ..., <i>k</i>.
   *
   * @param lambda rates of the hypoexponential distribution
   * @param x value at which the density is evaluated
   * @return density at <i>x</i>
   */
  public static double density(double[] lambda, double x) {
    testLambda(lambda);
    if (x < 0) return 0;
    DoubleMatrix2D Ax = buildMatrix(lambda, x);
    DoubleMatrix2D M = DMatrix.expBidiagonal(Ax);

    int k = lambda.length;
    return lambda[k - 1] * M.getQuick(0, k - 1);
  }
 // Builds the upper bidiagonal matrix A scaled by x (i.e. A*x) from the rates lambda
 private static DoubleMatrix2D buildMatrix(double[] lambda, double x) {
   int k = lambda.length;
   DoubleFactory2D F2 = DoubleFactory2D.dense;
   DoubleMatrix2D A = F2.make(k, k);
   for (int j = 0; j < k - 1; j++) {
     A.setQuick(j, j, -lambda[j] * x);
     A.setQuick(j, j + 1, lambda[j] * x);
   }
   A.setQuick(k - 1, k - 1, -lambda[k - 1] * x);
   return A;
 }
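 // Illustration (not part of the original class): for k = 3 rates the matrix built above is the
 // scaled upper-bidiagonal generator A*x,
 //
 //   [ -l1*x   l1*x     0   ]
 //   [   0    -l2*x   l2*x  ]
 //   [   0      0    -l3*x  ]
 //
 // density() reads the (0, k-1) entry of exp(A*x) and barF() sums its first row. A minimal
 // sketch that prints this matrix, assuming it sits in the same class so buildMatrix is visible:
 private static void printScaledGenerator() {
   double[] lambda = {0.5, 1.0, 2.0}; // hypothetical rates, for illustration only
   DoubleMatrix2D Ax = buildMatrix(lambda, 1.5);
   System.out.println(Ax);
 }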
 /**
  * Classifies an instance with respect to the partitions found, using a naive minimum-distance
  * rule.
  *
  * @param instance the instance to classify
  * @return the cluster that contains the nearest point to the instance
  */
 public int clusterInstance(Instance instance) throws java.lang.Exception {
   DoubleMatrix1D u = DoubleFactory1D.dense.make(instance.toDoubleArray());
   double min_dist = Double.POSITIVE_INFINITY;
   int c = -1;
   for (int i = 0; i < v.rows(); i++) {
     double dist = distnorm2(u, v.viewRow(i));
     if (dist < min_dist) {
       c = cluster[i];
       min_dist = dist;
     }
   }
   return c;
 }
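  // distnorm2 is referenced above but not defined in this excerpt. A plausible sketch, assuming
  // it returns the squared Euclidean distance between two vectors (the usual convention for this
  // kind of minimum-distance classification; the actual helper may differ) and that
  // cern.jet.math.Functions is imported as elsewhere in this class:
  protected static double distnorm2Sketch(DoubleMatrix1D u, DoubleMatrix1D w) {
    DoubleMatrix1D diff = u.copy().assign(w, Functions.minus); // element-wise u - w
    return diff.zDotProduct(diff); // squared Euclidean norm of the difference
  }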
  /**
   * Computes the complementary distribution function 1 - <i>F</i>(<i>x</i>), with
   * λ<sub><i>i</i></sub> = <tt>lambda[<i>i</i> - 1]</tt>, <i>i</i> = 1, ..., <i>k</i>.
   *
   * @param lambda rates of the hypoexponential distribution
   * @param x value at which the complementary distribution is evaluated
   * @return complementary distribution at <i>x</i>
   */
  public static double barF(double[] lambda, double x) {
    testLambda(lambda);
    if (x <= 0.0) return 1.0;
    if (x >= Double.MAX_VALUE) return 0.0;
    DoubleMatrix2D M = buildMatrix(lambda, x);
    M = DMatrix.expBidiagonal(M);

     // barF(x) is the sum of the first row of exp(Ax)
    int k = lambda.length;
    double sum = 0;
    for (int j = 0; j < k; j++) sum += M.getQuick(0, j);
    return sum;
  }
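   // A minimal usage sketch (assumes it sits in the same class so density() and barF() are
   // visible; the rates below are made up for illustration):
   private static void demoHypoexponential() {
     double[] lambda = {0.5, 1.0, 2.0}; // rates lambda_1, ..., lambda_k
     for (double x : new double[] {0.5, 1.0, 2.0, 4.0}) {
       System.out.println(
           "x = " + x + "  f(x) = " + density(lambda, x) + "  barF(x) = " + barF(lambda, x));
     }
   }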
  /**
   * Recursively splits the points of the graph while the value of the best cut found is less than
   * a specified limit (the alpha star factor).
   *
   * @param W the weight matrix of the graph
   * @param alpha_star the alpha star factor
   * @return an array of sets of points (partitions)
   */
  protected int[][] partition(DoubleMatrix2D W, double alpha_star) {
    numPartitions++;

    // System.out.println("!");

    // If the graph contains only one point
    if (W.columns() == 1) {
      int[][] p = new int[1][1];
      p[0][0] = 0;
      return p;
      // Otherwise
    } else {
      // Computes the best cut
      int[][] cut = bestCut(W);
      // Computes the value of the found cut
      double cutVal = Ncut(W, cut[0], cut[1], null);

      // System.out.println("cutVal = "+cutVal +"\tnumPartitions = "+numPartitions);

      // If the value is less than alpha star
      if (cutVal < alpha_star && numPartitions < 2) {

        // Recursively partitions the first one found ...
        DoubleMatrix2D W0 = W.viewSelection(cut[0], cut[0]);
        int[][] p0 = partition(W0, alpha_star);
        // ... and the second one
        DoubleMatrix2D W1 = W.viewSelection(cut[1], cut[1]);
        int[][] p1 = partition(W1, alpha_star);

        // Merges the partitions found in the previous recursive steps
        int[][] p = new int[p0.length + p1.length][];
        for (int i = 0; i < p0.length; i++) {
          p[i] = new int[p0[i].length];
          for (int j = 0; j < p0[i].length; j++) p[i][j] = cut[0][p0[i][j]];
        }

        for (int i = 0; i < p1.length; i++) {
          p[i + p0.length] = new int[p1[i].length];
          for (int j = 0; j < p1[i].length; j++) p[i + p0.length][j] = cut[1][p1[i][j]];
        }

        return p;
      } else {
        // Otherwise returns the partitions found in current step
        // w/o recursive invocation
        int[][] p = new int[1][W.columns()];
        for (int i = 0; i < p[0].length; i++) p[0][i] = i;
        return p;
      }
    }
  }
  /**
   * Generates a clusterer by means of the spectral clustering algorithm.
   *
   * @param data set of instances serving as training data
   * @exception Exception if the clusterer has not been generated successfully
   */
  public void buildClusterer(Instances data) throws java.lang.Exception {
    m_Sequences = new Instances(data);
    int n = data.numInstances();
    int k = data.numAttributes();
    DoubleMatrix2D w;
    if (useSparseMatrix) w = DoubleFactory2D.sparse.make(n, n);
    else w = DoubleFactory2D.dense.make(n, n);
    double[][] v1 = new double[n][];
    for (int i = 0; i < n; i++) v1[i] = data.instance(i).toDoubleArray();
    v = DoubleFactory2D.dense.make(v1);
    double sigma_sq = sigma * sigma;
    // Sets up similarity matrix
    for (int i = 0; i < n; i++)
      for (int j = i; j < n; j++) {
        /*double dist = distnorm2(v.viewRow(i), v.viewRow(j));
        if((r == -1) || (dist < r)) {
          double sim = Math.exp(- (dist * dist) / (2 * sigma_sq));
          w.set(i, j, sim);
          w.set(j, i, sim);
        }*/
        /* String [] key = {data.instance(i).stringValue(0), data.instance(j).stringValue(0)};
        System.out.println(key[0]);
        System.out.println(key[1]);
        System.out.println(simScoreMap.containsKey(key));
        Double simValue = simScoreMap.get(key);*/

        double sim = sim_matrix[i][j];
        w.set(i, j, sim);
        w.set(j, i, sim);
      }

    // Partitions points
    int[][] p = partition(w, alpha_star);

    // Deploys results
    numOfClusters = p.length;
    cluster = new int[n];
    for (int i = 0; i < p.length; i++) for (int j = 0; j < p[i].length; j++) cluster[p[i][j]] = i;

    // System.out.println("Final partition:");
    // UtilsJS.printMatrix(p);
    // System.out.println("Cluster:\n");
    // UtilsJS.printArray(cluster);
    this.numOfClusters = cluster[Utils.maxIndex(cluster)] + 1;
    //  System.out.println("Num clusters:\t"+this.numOfClusters);
  }
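  // A hedged convenience sketch (not in the original source): builds the clusterer on a data set
  // and labels every instance, using only buildClusterer() and clusterInstance() defined above.
  // It assumes sim_matrix has already been filled for the training data.
  public int[] clusterAllSketch(Instances data) throws Exception {
    buildClusterer(data); // partition the similarity graph
    int[] labels = new int[data.numInstances()];
    for (int i = 0; i < data.numInstances(); i++) {
      labels[i] = clusterInstance(data.instance(i)); // nearest-point cluster lookup
    }
    return labels;
  }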
Example #7
  static boolean computeLogMi(
      FeatureGenerator featureGen,
      double lambda[],
      DoubleMatrix2D Mi_YY,
      DoubleMatrix1D Ri_Y,
      boolean takeExp,
      boolean reuseM,
      boolean initMDone) {

    if (reuseM && initMDone) {
      Mi_YY = null;
    } else initMDone = false;
    if (Mi_YY != null) Mi_YY.assign(0);
    Ri_Y.assign(0);
    while (featureGen.hasNext()) {
      Feature feature = featureGen.next();
      int f = feature.index();
      int yp = feature.y();
      int yprev = feature.yprev();
      float val = feature.value();
      //	    System.out.println(feature.toString());

      if (yprev < 0) {
        // this is a single state feature.
        double oldVal = Ri_Y.getQuick(yp);
        Ri_Y.setQuick(yp, oldVal + lambda[f] * val);
      } else if (Mi_YY != null) {
        Mi_YY.setQuick(yprev, yp, Mi_YY.getQuick(yprev, yp) + lambda[f] * val);
        initMDone = true;
      }
    }
    if (takeExp) {
      for (int r = Ri_Y.size() - 1; r >= 0; r--) {
        Ri_Y.setQuick(r, expE(Ri_Y.getQuick(r)));
        if (Mi_YY != null)
          for (int c = Mi_YY.columns() - 1; c >= 0; c--) {
            Mi_YY.setQuick(r, c, expE(Mi_YY.getQuick(r, c)));
          }
      }
    }
    return initMDone;
  }
  /**
   * A native implementation of Colt's original multiplication method.
   *
   * <p>NOTE: this method will use native calls only when:
   *
   * <ul>
   *   <li>all input matrices are {@link DenseDoubleMatrix2D} or its subclasses (e.g. {@link
   *       NNIDenseDoubleMatrix2D})
   *   <li>none of the input matrices is a view
   *   <li>the dynamic libraries required by the NNI are available
   * </ul>
   */
  public DoubleMatrix2D zMult(
      DoubleMatrix2D B,
      DoubleMatrix2D C,
      double alpha,
      double beta,
      boolean transposeA,
      boolean transposeB) {
    // A workaround for a bug in DenseDoubleMatrix2D.
    // If B is a SelectedDenseDoubleMatrix the implementation of this method
    // throws a ClassCastException. The workaround is to swap and transpose
    // the arguments and then transpose the result. As SelectedDenseDoubleMatrix2D is
    // package-private, if it was loaded with a different class loader than
    // the one used for this class it would give a VerificationError if we referred
    // to it directly here. Hence the hacky string comparison here.
    //
    if (B.getClass().getName().endsWith("SelectedDenseDoubleMatrix2D")) {
      return B.zMult(this, C, alpha, beta, !transposeB, !transposeA).viewDice();
    }

    // Check the sizes
    int rowsB = (transposeB ? B.columns() : B.rows());
    int columnsB = (transposeB ? B.rows() : B.columns());
    int rowsA = (transposeA ? columns() : rows());
    int columnsA = (transposeA ? rows() : columns());

    if (C == null) {
      C = new NNIDenseDoubleMatrix2D(rowsA, columnsB);
    }

    if (this == C || B == C) {
      throw new IllegalArgumentException("Matrices must not be identical");
    }

    final int rowsC = C.rows();
    final int columnsC = C.columns();

    if (rowsB != columnsA) {
      throw new IllegalArgumentException(
          "Matrix2D inner dimensions must agree:" + toStringShort() + ", " + B.toStringShort());
    }

    if (rowsC != rowsA || columnsC != columnsB) {
      throw new IllegalArgumentException(
          "Incompatibile result matrix: "
              + toStringShort()
              + ", "
              + B.toStringShort()
              + ", "
              + C.toStringShort());
    }

    // Need native BLAS, dense matrices and no views to operate
    // Default to Colt's implementation otherwise
    if (!NNIInterface.isNativeBlasAvailable()
        || (!(B instanceof NNIDenseDoubleMatrix2D))
        || (!(C instanceof NNIDenseDoubleMatrix2D))
        || isView()
        || ((NNIDenseDoubleMatrix2D) B).isView()
        || ((NNIDenseDoubleMatrix2D) C).isView()) {
      return super.zMult(B, C, alpha, beta, transposeA, transposeB);
    }

    NNIInterface.getBlas()
        .gemm(
            this,
            (NNIDenseDoubleMatrix2D) B,
            (NNIDenseDoubleMatrix2D) C,
            transposeA,
            transposeB,
            columnsA,
            alpha,
            columns,
            beta);

    return C;
  }
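  // A hedged sketch of calling zMult: C = alpha*A*B + beta*C with plain (non-view)
  // NNIDenseDoubleMatrix2D operands, the case in which the native BLAS path above can be taken
  // (otherwise the call falls back to Colt's zMult). Sizes and fill values are arbitrary.
  private static DoubleMatrix2D zMultSketch() {
    NNIDenseDoubleMatrix2D A = new NNIDenseDoubleMatrix2D(2, 3);
    NNIDenseDoubleMatrix2D B = new NNIDenseDoubleMatrix2D(3, 2);
    A.assign(1.0); // fill A with ones
    B.assign(2.0); // fill B with twos
    // C is allocated by zMult when null; alpha = 1, beta = 0, no transposition.
    return A.zMult(B, null, 1.0, 0.0, false, false);
  }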
  /**
   * Returns the best cut of a graph w.r.t. the degree of dissimilarity between points of different
   * partitions and the degree of similarity between points of the same partition.
   *
   * @param W the weight matrix of the graph
   * @return an array of two elements, each of these contains the points of a partition
   */
  protected static int[][] bestCut(DoubleMatrix2D W) {
    int n = W.columns();
    // Builds the diagonal matrices D and D^(-1/2) (represented as their diagonals)
    DoubleMatrix1D d = DoubleFactory1D.dense.make(n);
    DoubleMatrix1D d_minus_1_2 = DoubleFactory1D.dense.make(n);
    for (int i = 0; i < n; i++) {
      double d_i = W.viewRow(i).zSum();
      d.set(i, d_i);
      d_minus_1_2.set(i, 1 / Math.sqrt(d_i));
    }
    DoubleMatrix2D D = DoubleFactory2D.sparse.diagonal(d);

    // System.out.println("DoubleMatrix2D :\n"+D.toString());

    DoubleMatrix2D X = D.copy();

    // System.out.println("DoubleMatrix2D copy :\n"+X.toString());

    // X = D^(-1/2) * (D - W) * D^(-1/2)
    X.assign(W, Functions.minus);
    // System.out.println("DoubleMatrix2D X: (D-W) :\n"+X.toString());
    for (int i = 0; i < n; i++)
      for (int j = 0; j < n; j++)
        X.set(i, j, X.get(i, j) * d_minus_1_2.get(i) * d_minus_1_2.get(j));

    // Computes the eigenvalues and the eigenvectors of X
    EigenvalueDecomposition e = new EigenvalueDecomposition(X);
    DoubleMatrix1D lambda = e.getRealEigenvalues();

    // Selects the eigenvector z_2 associated with the second smallest eigenvalue
    // Creates a map that contains the pairs <index, eigenvalue>
    AbstractIntDoubleMap map = new OpenIntDoubleHashMap(n);
    for (int i = 0; i < n; i++) map.put(i, Math.abs(lambda.get(i)));
    IntArrayList list = new IntArrayList();
    // Sorts the map on the value
    map.keysSortedByValue(list);
    // Gets the index of the second smallest element
    int i_2 = list.get(1);

    // y_2 = D^(-1/2) * z_2
    DoubleMatrix1D y_2 = e.getV().viewColumn(i_2).copy();
    y_2.assign(d_minus_1_2, Functions.mult);

    // Creates a map that contains the pairs <i, y_2[i]>
    map.clear();
    for (int i = 0; i < n; i++) map.put(i, y_2.get(i));
    // Sorts the map on the value
    map.keysSortedByValue(list);
    // Searches the previously sorted map for the element that minimizes the cut
    // of the partition
    double best_cut = Double.POSITIVE_INFINITY;
    int[][] partition = new int[2][];

    // The array v contains all the elements of the graph ordered by their
    // projection on vector y_2
    int[] v = list.elements();
    // For each admissible splitting point i
    for (int i = 1; i < n; i++) {
      // The array a contains all the elements that have a projection on vector
      // y_2 less or equal to the one of i-th element
      // The array b contains the remaining elements
      int[] a = new int[i];
      int[] b = new int[n - i];
      System.arraycopy(v, 0, a, 0, i);
      System.arraycopy(v, i, b, 0, n - i);
      double cut = Ncut(W, a, b, v);
      if (cut < best_cut) {
        best_cut = cut;
        partition[0] = a;
        partition[1] = b;
      }
    }

    // System.out.println("Partition:");
    // UtilsJS.printMatrix(partition);

    return partition;
  }
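  // A hedged illustration of bestCut on a toy graph with two obvious clusters {0, 1} and {2, 3}:
  // strong within-cluster weights and a weak bridge between the clusters. It assumes placement in
  // the same class, so bestCut (and the Ncut it calls) are visible.
  private static void bestCutDemo() {
    double[][] wRaw = {
      {0.0, 1.0, 0.1, 0.0},
      {1.0, 0.0, 0.0, 0.1},
      {0.1, 0.0, 0.0, 1.0},
      {0.0, 0.1, 1.0, 0.0},
    };
    DoubleMatrix2D W = DoubleFactory2D.dense.make(wRaw);
    int[][] cut = bestCut(W);
    System.out.println("partition 0: " + java.util.Arrays.toString(cut[0]));
    System.out.println("partition 1: " + java.util.Arrays.toString(cut[1]));
  }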
 /**
  * Computes the association degree between two partitions of a graph.<br>
  * The association degree is defined as the sum of the weights of all the edges between points of
  * the two partitions.
  *
  * @param W the weight matrix of the graph
  * @param a the points of the first partition
  * @param b the points of the second partition
  * @return the association degree
  */
 protected static double asso(DoubleMatrix2D W, int[] a, int[] b) {
   return W.viewSelection(a, b).zSum();
 }
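 // Ncut is called by bestCut and partition above but is not shown in this excerpt. A sketch of
 // the standard normalized-cut value built on asso(), assuming the implementation follows the
 // Shi-Malik definition (the real method also takes a fourth argument, which is ignored here):
 //
 //   Ncut(a, b) = asso(a, b) / asso(a, V) + asso(a, b) / asso(b, V)
 //
 protected static double ncutSketch(DoubleMatrix2D W, int[] a, int[] b, int[] v) {
   int n = W.columns();
   int[] all = new int[n];
   for (int i = 0; i < n; i++) all[i] = i; // V = all points of the graph
   double cut = asso(W, a, b);
   return cut / asso(W, a, all) + cut / asso(W, b, all);
 }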
  /**
   * Creates a new solver for the linear system given by <tt>p_MatrixgleichNull</tt>: each row
   * holds the coefficients of one equation whose left-hand side equals zero, with the constant
   * term in the last column.
   */
  public GLSsolver(double[][] p_MatrixgleichNull) throws IllegalArgumentException {

    // -------------------------------------------
    // Check that the input matrix is rectangular
    // -------------------------------------------

    int nplus1 = p_MatrixgleichNull[0].length;
    for (int i = 1; i < p_MatrixgleichNull.length; i++) { // rows i
      if (p_MatrixgleichNull[i].length != nplus1) {
        System.err.println(
            "Programming error: matrix of the linear system is not rectangular! (detected in solver)");
        throw new IllegalArgumentException();
      }
    }
    if (nplus1 <= 1) throw new IllegalArgumentException("no unknowns"); // no unknowns!!!

    // Works around a bug in the colt library // TODO remove the workaround once fixed
    // ------
    int anzGl = p_MatrixgleichNull.length;
    if (anzGl < nplus1 - 1) { // number of equations < number of unknowns
      if (debug) System.out.println("Workaround for colt bug: appending 0 = 0 equations");
      anzGl = nplus1 - 1; // = number of unknowns; 0 0 0 ... 0 = 0 rows are appended
    }

    // --------------------------
    // Read the data into A and b
    // --------------------------

    // so that A*x = b
    A = new DenseDoubleMatrix2D(anzGl, (nplus1 - 1));
    DenseDoubleMatrix2D b = new DenseDoubleMatrix2D(anzGl, 1);

    for (int i = 0; i < p_MatrixgleichNull.length; i++) { // rows i
      for (int j = 0; j < nplus1 - 1; j++) { // columns j
        A.set(i, j, p_MatrixgleichNull[i][j]);
      }
      b.set(i, 0, -p_MatrixgleichNull[i][nplus1 - 1]);
    }

    if (debug) {
      System.out.println(" A = " + A.toString());
      System.out.println(" b = " + b.toString());
      System.out.println("");
    }

    // ----------------
    // LU decomposition
    // ----------------

    LUDecomposition ALU = new LUDecomposition(A);
    if (debug) System.out.println(ALU.toString());

    DoubleMatrix2D L = ALU.getL();
    R = ALU.getU();
    int[] piv = ALU.getPivot();

    Algebra alg = new Algebra();
    //        if (debug) System.out.println("L = " + L.toString());
    //        if (debug) System.out.println("Kontrolle L*R = " + alg.mult(L,R).toString());
    //        if (debug) System.out.println("Kontrolle P*b = " + alg.permute(b, piv, null) );
    //
    //        if (debug) System.out.println("Rx = c: R = " + R.toString());
    //        if (debug) System.out.println("alg.permute(b, piv, null) = " + alg.permute(b, piv,
    // null).toString());

    c = alg.solve(L, alg.permute(b, piv, null)); // TODO: can cause problems
    // when there are fewer equations than unknowns --> see workaround above

    if (debug) System.out.println("Lc = Pb:  c = " + c.toString());

    if (debug) {
      System.out.println("Rang A: " + alg.rank(A));
      System.out.println("Rang R: " + alg.rank(R));
    }

    assert (alg.rank(A) == alg.rank(R)) : "rank of A differs from rank of R --> programming error";
    anzUnbestParam = A.columns() - alg.rank(A);
    if (debug) System.out.println("Number of undetermined parameters: " + anzUnbestParam);
  }
  /**
   * Returns the solution x of the system of equations: only unique (i.e. parameter-independent)
   * values x_i are returned. Index 0: value 0 means x_i is undetermined, value 1 means x_i is
   * determined. Index 1: the actual value (only meaningful if x_i is determined, i.e. index 0 = 1;
   * otherwise 0).
   */
  public final double[][] solve() throws ArithmeticException {

    // --------------------------------------------------------------------------------
    // THE ACTUAL SOLVER for determined solution variables in underdetermined systems
    // --------------------------------------------------------------------------------

    int gebrauchteUnbestParam = 0;
    x =
        new double[A.columns()]
            [2 + anzUnbestParam]; // status 1 (determined), kN, alpha, beta (parameters)

    int z = A.rows() - 1; // row index, starts at the bottom

    // Equations consisting only of zeros
    while (R.viewRow(z).cardinality() == 0 // the tests below are decisive, but this one is fast
        || (Fkt.max(R.viewRow(z).toArray()) < TOL && Fkt.min(R.viewRow(z).toArray()) > -TOL)) {
      double cwert;
      if (z < c.rows()) cwert = c.get(z, 0);
      else cwert = 0;
      if (Math.abs(cwert) > TOL) {
        System.out.println("contradictory equations in the system! row " + z);
        throw new ArithmeticException("Contradiction in the system of equations!");
      }
      z--;
      if (z <= 0) {
        System.out.println("only zero rows in the linear system");
        break;
      }
    }

    // Process the equations (from the bottom up)
    for (z = z; z >= 0; z--) {
      // find the first non-zero entry in the row (pivot)
      int p = -1; // pivot: the first number that is not zero
      pivotfinden:
      for (int i = 0; i < R.columns(); i++) {
        if (Math.abs(R.get(z, i))
            > TOL) { // attempt to avoid numerical problems (overdetermination)
          p = i;
          break pivotfinden;
        }
      }

      // Case: no pivot found (i.e. the left-hand side of the equation consists only of zeros)
      if (p < 0) {
        if (debug) System.out.println("Warning: no pivot found in row " + z);
        // Check whether the right-hand side (c) is also zero --> ok, otherwise the system is contradictory
        if (Math.abs(c.get(z, 0)) > TOL) {
          System.out.println("contradictory equations in the system! row " + z);
          throw new ArithmeticException("Contradiction in the system of equations!");
        } else {
          if (debug)
            System.out.println("All clear: row " + z + " consists only of zeros (ok)");
          continue;
        }
      }

      // Check whether the equation (row) contains a new undetermined variable (usually the
      // pivot).
      boolean alleVarBestimmt = true;
      int effPivot = p; // effective pivot (first undetermined variable of the row), usually the pivot
      for (int i = p; i < R.columns(); i++) {
        if (x[i][0] == 0 && Math.abs(R.viewRow(z).get(i)) > TOL) {
          alleVarBestimmt = false;
          effPivot = i; // usually effPivot = p, but not always.
          break;
        }
      }

      if (alleVarBestimmt) { // all variables (incl. the pivot) are already determined!
        // CHECK that this equation (row z) is not contradictory
        double[] kontrolle = new double[1 + anzUnbestParam];
        for (int j = 0; j < kontrolle.length; j++) kontrolle[j] = 0;
        for (int i = p; i < R.columns(); i++) {
          for (int j = 0; j < kontrolle.length; j++) {
            kontrolle[j] += R.viewRow(z).get(i) * x[i][j + 1];
          }
        }
        kontrolle[0] -= c.get(z, 0);

        // TODO TEST!
        boolean alleParamNull = true;
        int bekParam = -1; // parameter that can be determined from this equation.
        for (int j = kontrolle.length - 1; j > 0; j--) {
          if (Math.abs(kontrolle[j]) > TOL) {
            alleParamNull = false;
            if (bekParam < 0) bekParam = j;
          }
        }
        // Check whether the equation is contradictory
        if (alleParamNull) { // TODO possibly re-check whether all entries are 0 with a smaller
                             // tolerance (the issue almostZero*param != 0 could mean param = 0),
                             // at least while parameters are still available.
          double obnull = Math.abs(kontrolle[0]);
          if (obnull > TOL) {
            System.out.println("");
            System.out.println(
                "Contradiction in the system of equations! (row "
                    + z
                    + ") "
                    + obnull
                    + " not equal to 0"); // TODO: report the ORIGINAL row (piv)!
            System.out.println("possibly a numerical problem");
            throw new ArithmeticException("Contradiction in the system of equations!");
          } else continue; // next equation
        }
        // else:
        // an already assigned parameter can be computed from this equation

        // Loop over the solution found so far
        assert bekParam > 0;
        for (int xi = 0; xi < x.length; xi++) {
          double faktor = x[xi][1 + bekParam];
          if (Math.abs(faktor) < TOL) continue;
          // substitute into the existing solution
          assert x[xi][0] > 0; // determined
          for (int j = 0; j < kontrolle.length; j++) {
            if (j != bekParam) {
              x[xi][j + 1] += -kontrolle[j] * faktor / kontrolle[bekParam];
            }
          }
        }
        for (int xi = 0; xi < x.length; xi++) {
          // shift the remaining parameters down
          if (bekParam < anzUnbestParam) { // i.e. not the last parameter to be assigned.
            for (int j = bekParam; j < anzUnbestParam; j++) {
              x[xi][j + 1] = x[xi][j + 2];
              x[xi][j + 2] = 0;
            }
          } else x[xi][bekParam + 1] = 0;
        }
        if (debug)
          System.err.println(
              "CAUTION, a barely TESTED part of the solver is in use."); // TODO remove warning,
                                                                         // probably OK
        gebrauchteUnbestParam--;
      }

      // Normal case: an undetermined (effective) pivot exists
      else {

        // unknown
        x[effPivot][1] = c.get(z, 0) / R.viewRow(z).get(effPivot);
        for (int i = R.columns() - 1; i >= p; i--) { // R columns, since this equals the number of unknowns x
          if (i == effPivot) continue;
          if (x[i][0] == 0) { // undetermined, but not the pivot
            if (Math.abs(R.viewRow(z).get(i)) > TOL) { // TODO test!!!
              if (gebrauchteUnbestParam >= anzUnbestParam) {
                System.err.println(
                    "Programming error in solver: gebrauchteUnbestParam >= anzUnbestParam");
                throw new AssertionError(
                    "Programming error in solver: gebrauchteUnbestParam >= anzUnbestParam");
              }
              x[i][gebrauchteUnbestParam + 2] = 1; // set a new parameter (alpha, beta)
              x[i][0] = 1; // determined (even though it depends on a parameter).

              gebrauchteUnbestParam++;
            }
          }

          x[effPivot][1] += -R.viewRow(z).get(i) * x[i][1] / R.viewRow(z).get(effPivot);
          for (int j = 0; j < gebrauchteUnbestParam; j++) {
            x[effPivot][2 + j] += -R.viewRow(z).get(i) * x[i][2 + j] / R.viewRow(z).get(effPivot);
          }
        }
        x[effPivot][0] = 1;
      }
    }

    if (debug) {
      System.out.println("");
      for (int i = 0; i < x.length; i++) {
        System.out.print("x" + i + " = " + Fkt.nf(x[i][1], 3));
        for (int j = 2; j < x[i].length; j++) {
          System.out.print(", P" + (j - 1) + " = " + Fkt.nf(x[i][j], 3));
        }
        System.out.println("");
      }
    }

    // -------------------
    // Return the solution
    // -------------------

    // Solution x: only unique (i.e. parameter-independent) xi are returned
    // index 0: value 0 means xi is undetermined, value 1 means xi is determined
    // index 1: the actual value (only if xi is determined, i.e. index 0 = 1; otherwise 0)
    xLsg = new double[R.columns()][2];

    for (int i = 0; i < x.length; i++) {
      boolean bestimmt;
      if (x[i][0] > 0) {
        bestimmt = true;
        // check whether solution variable xi is determined, i.e. independent of the excess parameters
        for (int j = 2; j < x[i].length; j++) {
          if (Math.abs(x[i][j]) > TOL) bestimmt = false;
        }
      } else bestimmt = false;

      if (bestimmt) {
        xLsg[i][0] = 1;
        xLsg[i][1] = x[i][1];
      } else xLsg[i][0] = 0;
    }

    solved = true;
    return xLsg;
  }
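  // A hedged usage sketch: solve x0 + x1 - 3 = 0 and x0 - x1 - 1 = 0 (so x0 = 2, x1 = 1). Each
  // input row lists the coefficients followed by the constant term, matching the constructor
  // above; solve() returns one [determined?, value] pair per variable.
  public static void demoSolver() {
    double[][] eqs = {
      {1.0, 1.0, -3.0}, // x0 + x1 - 3 = 0
      {1.0, -1.0, -1.0} // x0 - x1 - 1 = 0
    };
    GLSsolver solver = new GLSsolver(eqs);
    double[][] sol = solver.solve();
    for (int i = 0; i < sol.length; i++) {
      System.out.println("x" + i + (sol[i][0] > 0 ? " = " + sol[i][1] : " is undetermined"));
    }
  }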
Example #13
  protected double computeFunctionGradientLL(double lambda[], double grad[]) {
    double logli = 0;
    try {
      for (int f = 0; f < lambda.length; f++) {
        grad[f] = -1 * lambda[f] * params.invSigmaSquare;
        logli -= ((lambda[f] * lambda[f]) * params.invSigmaSquare) / 2;
      }
      diter.startScan();
      if (featureGenCache != null) featureGenCache.startDataScan();
      for (int numRecord = 0; diter.hasNext(); numRecord++) {
        DataSequence dataSeq = (DataSequence) diter.next();
        if (featureGenCache != null) featureGenCache.nextDataIndex();
        if (params.debugLvl > 1) {
          Util.printDbg("Read next seq: " + numRecord + " logli " + logli);
        }
        alpha_Y.assign(0);
        for (int f = 0; f < lambda.length; f++) ExpF[f] = RobustMath.LOG0;

        if ((beta_Y == null) || (beta_Y.length < dataSeq.length())) {
          beta_Y = new DenseDoubleMatrix1D[2 * dataSeq.length()];
          for (int i = 0; i < beta_Y.length; i++) beta_Y[i] = new DenseDoubleMatrix1D(numY);
        }
        // compute beta values in a backward scan.
        // also scale beta-values to 1 to avoid numerical problems.
        beta_Y[dataSeq.length() - 1].assign(0);
        for (int i = dataSeq.length() - 1; i > 0; i--) {
          if (params.debugLvl > 2) {
            /*  Util.printDbg("Features fired");
            featureGenerator.startScanFeaturesAt(dataSeq, i);
            while (featureGenerator.hasNext()) {
            Feature feature = featureGenerator.next();
            Util.printDbg(feature.toString());
            }
            */
          }

          // compute the Mi matrix
          initMDone =
              computeLogMi(
                  featureGenerator, lambda, dataSeq, i, Mi_YY, Ri_Y, false, reuseM, initMDone);
          tmp_Y.assign(beta_Y[i]);
          tmp_Y.assign(Ri_Y, sumFunc);
          RobustMath.logMult(Mi_YY, tmp_Y, beta_Y[i - 1], 1, 0, false, edgeGen);
        }

        double thisSeqLogli = 0;
        for (int i = 0; i < dataSeq.length(); i++) {
          // compute the Mi matrix
          initMDone =
              computeLogMi(
                  featureGenerator, lambda, dataSeq, i, Mi_YY, Ri_Y, false, reuseM, initMDone);
          // find features that fire at this position..
          featureGenerator.startScanFeaturesAt(dataSeq, i);

          if (i > 0) {
            tmp_Y.assign(alpha_Y);
            RobustMath.logMult(Mi_YY, tmp_Y, newAlpha_Y, 1, 0, true, edgeGen);
            newAlpha_Y.assign(Ri_Y, sumFunc);
          } else {
            newAlpha_Y.assign(Ri_Y);
          }

          while (featureGenerator.hasNext()) {
            Feature feature = featureGenerator.next();
            int f = feature.index();

            int yp = feature.y();
            int yprev = feature.yprev();
            float val = feature.value();

            if ((dataSeq.y(i) == yp)
                && (((i - 1 >= 0) && (yprev == dataSeq.y(i - 1))) || (yprev < 0))) {
              grad[f] += val;
              thisSeqLogli += val * lambda[f];
              if (params.debugLvl > 2) {
                System.out.println("Feature fired " + f + " " + feature);
              }
            }

            if (yprev < 0) {
              ExpF[f] =
                  RobustMath.logSumExp(
                      ExpF[f], newAlpha_Y.get(yp) + RobustMath.log(val) + beta_Y[i].get(yp));
            } else {
              ExpF[f] =
                  RobustMath.logSumExp(
                      ExpF[f],
                      alpha_Y.get(yprev)
                          + Ri_Y.get(yp)
                          + Mi_YY.get(yprev, yp)
                          + RobustMath.log(val)
                          + beta_Y[i].get(yp));
            }
          }
          alpha_Y.assign(newAlpha_Y);

          if (params.debugLvl > 2) {
            System.out.println("Alpha-i " + alpha_Y.toString());
            System.out.println("Ri " + Ri_Y.toString());
            System.out.println("Mi " + Mi_YY.toString());
            System.out.println("Beta-i " + beta_Y[i].toString());
          }
        }
        double lZx = RobustMath.logSumExp(alpha_Y);
        thisSeqLogli -= lZx;
        logli += thisSeqLogli;
        // update grad.
        for (int f = 0; f < grad.length; f++) {
          grad[f] -= RobustMath.exp(ExpF[f] - lZx);
        }
        if (params.debugLvl > 1) {
          System.out.println(
              "Sequence "
                  + thisSeqLogli
                  + " logli "
                  + logli
                  + " log(Zx) "
                  + lZx
                  + " Zx "
                  + Math.exp(lZx));
        }
      }
      if (params.debugLvl > 2) {
        for (int f = 0; f < lambda.length; f++) System.out.print(lambda[f] + " ");
        System.out.println(" :x");
        for (int f = 0; f < lambda.length; f++) System.out.print(grad[f] + " ");
        System.out.println(" :g");
      }

      if (params.debugLvl > 0)
        Util.printDbg(
            "Iteration "
                + icall
                + " log-likelihood "
                + logli
                + " norm(grad logli) "
                + norm(grad)
                + " norm(x) "
                + norm(lambda));

    } catch (Exception e) {
      System.out.println("Alpha-i " + alpha_Y.toString());
      System.out.println("Ri " + Ri_Y.toString());
      System.out.println("Mi " + Mi_YY.toString());

      e.printStackTrace();
      System.exit(0);
    }
    return logli;
  }
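  // The log-domain recursion above relies on RobustMath.logSumExp to add probabilities that are
  // stored as logarithms. A minimal sketch of that trick for two values, assuming the usual
  // definition log(exp(a) + exp(b)) computed without overflow (the library version may differ in
  // detail):
  static double logSumExpSketch(double a, double b) {
    double m = Math.max(a, b);
    if (m == Double.NEGATIVE_INFINITY) return m; // both terms are log(0)
    return m + Math.log(Math.exp(a - m) + Math.exp(b - m));
  }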
Example #14
  protected double computeFunctionGradient(double lambda[], double grad[]) {
    initMDone = false;

    if (params.trainerType.equals("ll")) return computeFunctionGradientLL(lambda, grad);
    double logli = 0;
    try {
      for (int f = 0; f < lambda.length; f++) {
        grad[f] = -1 * lambda[f] * params.invSigmaSquare;
        logli -= ((lambda[f] * lambda[f]) * params.invSigmaSquare) / 2;
      }
      boolean doScaling = params.doScaling;

      diter.startScan();
      if (featureGenCache != null) featureGenCache.startDataScan();
      int numRecord = 0;
      for (numRecord = 0; diter.hasNext(); numRecord++) {
        DataSequence dataSeq = (DataSequence) diter.next();
        if (featureGenCache != null) featureGenCache.nextDataIndex();
        if (params.debugLvl > 1) {
          Util.printDbg("Read next seq: " + numRecord + " logli " + logli);
        }
        alpha_Y.assign(1);
        for (int f = 0; f < lambda.length; f++) ExpF[f] = 0;

        if ((beta_Y == null) || (beta_Y.length < dataSeq.length())) {
          beta_Y = new DenseDoubleMatrix1D[2 * dataSeq.length()];
          for (int i = 0; i < beta_Y.length; i++) beta_Y[i] = new DenseDoubleMatrix1D(numY);

          scale = new double[2 * dataSeq.length()];
        }
        // compute beta values in a backward scan.
        // also scale beta-values to 1 to avoid numerical problems.
        scale[dataSeq.length() - 1] = (doScaling) ? numY : 1;
        beta_Y[dataSeq.length() - 1].assign(1.0 / scale[dataSeq.length() - 1]);
        for (int i = dataSeq.length() - 1; i > 0; i--) {
          if (params.debugLvl > 2) {
            Util.printDbg("Features fired");
            // featureGenerator.startScanFeaturesAt(dataSeq, i);
            // while (featureGenerator.hasNext()) {
            // Feature feature = featureGenerator.next();
            // Util.printDbg(feature.toString());
            // }
          }

          // compute the Mi matrix
          initMDone =
              computeLogMi(
                  featureGenerator, lambda, dataSeq, i, Mi_YY, Ri_Y, true, reuseM, initMDone);
          tmp_Y.assign(beta_Y[i]);
          tmp_Y.assign(Ri_Y, multFunc);
          RobustMath.Mult(Mi_YY, tmp_Y, beta_Y[i - 1], 1, 0, false, edgeGen);
          //		Mi_YY.zMult(tmp_Y, beta_Y[i-1]);

          // need to scale the beta-s to avoid overflow
          scale[i - 1] = doScaling ? beta_Y[i - 1].zSum() : 1;
          if ((scale[i - 1] < 1) && (scale[i - 1] > -1)) scale[i - 1] = 1;
          constMultiplier.multiplicator = 1.0 / scale[i - 1];
          beta_Y[i - 1].assign(constMultiplier);
        }

        double thisSeqLogli = 0;
        for (int i = 0; i < dataSeq.length(); i++) {
          // compute the Mi matrix
          initMDone =
              computeLogMi(
                  featureGenerator, lambda, dataSeq, i, Mi_YY, Ri_Y, true, reuseM, initMDone);
          // find features that fire at this position..
          featureGenerator.startScanFeaturesAt(dataSeq, i);

          if (i > 0) {
            tmp_Y.assign(alpha_Y);
            RobustMath.Mult(Mi_YY, tmp_Y, newAlpha_Y, 1, 0, true, edgeGen);
            //		Mi_YY.zMult(tmp_Y, newAlpha_Y,1,0,true);
            newAlpha_Y.assign(Ri_Y, multFunc);
          } else {
            newAlpha_Y.assign(Ri_Y);
          }
          while (featureGenerator.hasNext()) {
            Feature feature = featureGenerator.next();
            int f = feature.index();

            int yp = feature.y();
            int yprev = feature.yprev();
            float val = feature.value();
            if ((dataSeq.y(i) == yp)
                && (((i - 1 >= 0) && (yprev == dataSeq.y(i - 1))) || (yprev < 0))) {
              grad[f] += val;
              thisSeqLogli += val * lambda[f];
            }
            if (yprev < 0) {
              ExpF[f] += newAlpha_Y.get(yp) * val * beta_Y[i].get(yp);
            } else {
              ExpF[f] +=
                  alpha_Y.get(yprev)
                      * Ri_Y.get(yp)
                      * Mi_YY.get(yprev, yp)
                      * val
                      * beta_Y[i].get(yp);
            }
          }

          alpha_Y.assign(newAlpha_Y);
          // now scale the alpha-s to avoid overflow problems.
          constMultiplier.multiplicator = 1.0 / scale[i];
          alpha_Y.assign(constMultiplier);

          if (params.debugLvl > 2) {
            System.out.println("Alpha-i " + alpha_Y.toString());
            System.out.println("Ri " + Ri_Y.toString());
            System.out.println("Mi " + Mi_YY.toString());
            System.out.println("Beta-i " + beta_Y[i].toString());
          }
        }
        double Zx = alpha_Y.zSum();
        thisSeqLogli -= log(Zx);
        // correct for the fact that alpha-s were scaled.
        for (int i = 0; i < dataSeq.length(); i++) {
          thisSeqLogli -= log(scale[i]);
        }

        logli += thisSeqLogli;
        // update grad.
        for (int f = 0; f < grad.length; f++) grad[f] -= ExpF[f] / Zx;

        if (params.debugLvl > 1) {
          System.out.println(
              "Sequence "
                  + thisSeqLogli
                  + " logli "
                  + logli
                  + " log(Zx) "
                  + Math.log(Zx)
                  + " Zx "
                  + Zx);
        }
      }
      if (params.debugLvl > 2) {
        for (int f = 0; f < lambda.length; f++) System.out.print(lambda[f] + " ");
        System.out.println(" :x");
        for (int f = 0; f < lambda.length; f++)
          System.out.println(featureGenerator.featureName(f) + " " + grad[f] + " ");
        System.out.println(" :g");
      }

      if (params.debugLvl > 0)
        Util.printDbg(
            "Iter "
                + icall
                + " log likelihood "
                + logli
                + " norm(grad logli) "
                + norm(grad)
                + " norm(x) "
                + norm(lambda));
      if (icall == 0) {
        System.out.println("Number of training records" + numRecord);
      }
    } catch (Exception e) {
      System.out.println("Alpha-i " + alpha_Y.toString());
      System.out.println("Ri " + Ri_Y.toString());
      System.out.println("Mi " + Mi_YY.toString());

      e.printStackTrace();
      System.exit(0);
    }
    return logli;
  }
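  // The linear-domain version above avoids underflow by rescaling the alpha/beta vectors and
  // then subtracting log(scale[i]) from the sequence log-likelihood. A stripped-down sketch of
  // that bookkeeping on a plain array (illustration only, not tied to the fields of this class):
  static double normalizeAndReturnLogScale(double[] vec) {
    double scale = 0;
    for (double v : vec) scale += v; // analogous to beta_Y[i - 1].zSum() / alpha_Y.zSum()
    if (scale < 1 && scale > -1) scale = 1; // same guard as in the code above
    for (int i = 0; i < vec.length; i++) vec[i] /= scale;
    return Math.log(scale); // accumulate this to correct the log-likelihood
  }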