Example 1
 // After cluster `from` is merged into cluster `to`, mark every cluster that was cached
 // as an acronym of `from` as an acronym of `to` as well.
 public void mergeAcronymCache(CorefCluster to, CorefCluster from) {
   TwoDimensionalSet<Integer, Integer> replacements = TwoDimensionalSet.hashSet();
   for (Integer first : acronymCache.firstKeySet()) {
     for (Integer second : acronymCache.get(first).keySet()) {
       if (acronymCache.get(first, second)) {
         Integer other = null;
         if (first == from.clusterID) {
           other = second;
         } else if (second == from.clusterID) {
           other = first;
         }
         if (other != null && other != to.clusterID) {
           int cid1 = Math.min(other, to.clusterID);
           int cid2 = Math.max(other, to.clusterID);
           replacements.add(cid1, cid2);
         }
       }
     }
   }
   for (Integer first : replacements.firstKeySet()) {
     for (Integer second : replacements.secondKeySet(first)) {
       acronymCache.put(first, second, true);
     }
   }
 }
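The replacement loop above stores each pair with the smaller cluster id first, so a symmetric relation occupies a single canonical slot in the cache. Below is a minimal, self-contained sketch of that canonicalization idea using plain java.util collections; it is not the CoreNLP TwoDimensionalSet/TwoDimensionalMap API, and SymmetricPairCache and its methods are made-up names for illustration.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Sketch only: a symmetric pair cache keyed by (min(id1, id2), max(id1, id2)),
// so the pairs (3, 7) and (7, 3) share one entry.
public class SymmetricPairCache {
  private final Map<Integer, Set<Integer>> pairs = new HashMap<>();

  /** Record that id1 and id2 are acronyms of each other. */
  public void put(int id1, int id2) {
    int first = Math.min(id1, id2);
    int second = Math.max(id1, id2);
    pairs.computeIfAbsent(first, k -> new HashSet<>()).add(second);
  }

  /** True if the pair was recorded, in either order. */
  public boolean contains(int id1, int id2) {
    Set<Integer> seconds = pairs.get(Math.min(id1, id2));
    return seconds != null && seconds.contains(Math.max(id1, id2));
  }

  public static void main(String[] args) {
    SymmetricPairCache cache = new SymmetricPairCache();
    cache.put(7, 3);                          // stored as (3, 7)
    System.out.println(cache.contains(3, 7)); // true
    System.out.println(cache.contains(7, 3)); // true
  }
}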
Example 2
 // When combineClassification is set, every category shares a single classification
 // matrix, stored under the empty-string key of unaryClassification.
 public SimpleMatrix getBinaryClassification(String left, String right) {
   if (op.combineClassification) {
     return unaryClassification.get("");
   } else {
     left = basicCategory(left);
     right = basicCategory(right);
     return binaryClassification.get(left, right);
   }
 }
Example 3
 // TODO: combine this and getClassWForNode?
 public SimpleMatrix getWForNode(Tree node) {
   if (node.children().length == 2) {
     String leftLabel = node.children()[0].value();
     String leftBasic = basicCategory(leftLabel);
     String rightLabel = node.children()[1].value();
     String rightBasic = basicCategory(rightLabel);
     return binaryTransform.get(leftBasic, rightBasic);
   } else if (node.children().length == 1) {
     throw new AssertionError("No unary transform matrices, only unary classification");
   } else {
     throw new AssertionError("Unexpected tree children size of " + node.children().length);
   }
 }
Example 4
 public SimpleMatrix getClassWForNode(Tree node) {
   if (op.combineClassification) {
     return unaryClassification.get("");
   } else if (node.children().length == 2) {
     String leftLabel = node.children()[0].value();
     String leftBasic = basicCategory(leftLabel);
     String rightLabel = node.children()[1].value();
     String rightBasic = basicCategory(rightLabel);
     return binaryClassification.get(leftBasic, rightBasic);
   } else if (node.children().length == 1) {
     String unaryLabel = node.children()[0].value();
     String unaryBasic = basicCategory(unaryLabel);
     return unaryClassification.get(unaryBasic);
   } else {
     throw new AssertionError("Unexpected tree children size of " + node.children().length);
   }
 }
Example 5
  // Recursively accumulates gradients: transform matrices (binaryW_dfs, unaryW_dfs),
  // score vectors (binary/unaryScoreDerivatives) and, optionally, word vectors,
  // while passing the incoming delta (deltaUp) down the tree.
  public void backpropDerivative(
      Tree tree,
      List<String> words,
      IdentityHashMap<Tree, SimpleMatrix> nodeVectors,
      TwoDimensionalMap<String, String, SimpleMatrix> binaryW_dfs,
      Map<String, SimpleMatrix> unaryW_dfs,
      TwoDimensionalMap<String, String, SimpleMatrix> binaryScoreDerivatives,
      Map<String, SimpleMatrix> unaryScoreDerivatives,
      Map<String, SimpleMatrix> wordVectorDerivatives,
      SimpleMatrix deltaUp) {
    if (tree.isLeaf()) {
      return;
    }
    if (tree.isPreTerminal()) {
      if (op.trainOptions.trainWordVectors) {
        String word = tree.children()[0].label().value();
        word = dvModel.getVocabWord(word);
        //        SimpleMatrix currentVector = nodeVectors.get(tree);
        //        SimpleMatrix currentVectorDerivative =
        // nonlinearityVectorToDerivative(currentVector);
        //        SimpleMatrix derivative = deltaUp.elementMult(currentVectorDerivative);
        SimpleMatrix derivative = deltaUp;
        wordVectorDerivatives.put(word, wordVectorDerivatives.get(word).plus(derivative));
      }
      return;
    }
    SimpleMatrix currentVector = nodeVectors.get(tree);
    SimpleMatrix currentVectorDerivative =
        NeuralUtils.elementwiseApplyTanhDerivative(currentVector);

    SimpleMatrix scoreW = dvModel.getScoreWForNode(tree);
    currentVectorDerivative = currentVectorDerivative.elementMult(scoreW.transpose());

    // the delta that is used at the current nodes
    SimpleMatrix deltaCurrent = deltaUp.plus(currentVectorDerivative);
    SimpleMatrix W = dvModel.getWForNode(tree);
    SimpleMatrix WTdelta = W.transpose().mult(deltaCurrent);

    if (tree.children().length == 2) {
      // TODO: RS: Change to the nice "getWForNode" setup?
      String leftLabel = dvModel.basicCategory(tree.children()[0].label().value());
      String rightLabel = dvModel.basicCategory(tree.children()[1].label().value());

      binaryScoreDerivatives.put(
          leftLabel,
          rightLabel,
          binaryScoreDerivatives.get(leftLabel, rightLabel).plus(currentVector.transpose()));

      SimpleMatrix leftVector = nodeVectors.get(tree.children()[0]);
      SimpleMatrix rightVector = nodeVectors.get(tree.children()[1]);
      SimpleMatrix childrenVector = NeuralUtils.concatenateWithBias(leftVector, rightVector);
      if (op.trainOptions.useContextWords) {
        childrenVector = concatenateContextWords(childrenVector, tree.getSpan(), words);
      }
      SimpleMatrix W_df = deltaCurrent.mult(childrenVector.transpose());
      binaryW_dfs.put(leftLabel, rightLabel, binaryW_dfs.get(leftLabel, rightLabel).plus(W_df));

      // and then recurse
      SimpleMatrix leftDerivative = NeuralUtils.elementwiseApplyTanhDerivative(leftVector);
      SimpleMatrix rightDerivative = NeuralUtils.elementwiseApplyTanhDerivative(rightVector);
      SimpleMatrix leftWTDelta = WTdelta.extractMatrix(0, deltaCurrent.numRows(), 0, 1);
      SimpleMatrix rightWTDelta =
          WTdelta.extractMatrix(deltaCurrent.numRows(), deltaCurrent.numRows() * 2, 0, 1);
      backpropDerivative(
          tree.children()[0],
          words,
          nodeVectors,
          binaryW_dfs,
          unaryW_dfs,
          binaryScoreDerivatives,
          unaryScoreDerivatives,
          wordVectorDerivatives,
          leftDerivative.elementMult(leftWTDelta));
      backpropDerivative(
          tree.children()[1],
          words,
          nodeVectors,
          binaryW_dfs,
          unaryW_dfs,
          binaryScoreDerivatives,
          unaryScoreDerivatives,
          wordVectorDerivatives,
          rightDerivative.elementMult(rightWTDelta));
    } else if (tree.children().length == 1) {
      String childLabel = dvModel.basicCategory(tree.children()[0].label().value());

      unaryScoreDerivatives.put(
          childLabel, unaryScoreDerivatives.get(childLabel).plus(currentVector.transpose()));

      SimpleMatrix childVector = nodeVectors.get(tree.children()[0]);
      SimpleMatrix childVectorWithBias = NeuralUtils.concatenateWithBias(childVector);
      if (op.trainOptions.useContextWords) {
        childVectorWithBias = concatenateContextWords(childVectorWithBias, tree.getSpan(), words);
      }
      SimpleMatrix W_df = deltaCurrent.mult(childVectorWithBias.transpose());

      // System.out.println("unary backprop derivative for " + childLabel);
      // System.out.println("Old transform:");
      // System.out.println(unaryW_dfs.get(childLabel));
      // System.out.println(" Delta:");
      // System.out.println(W_df.scale(scale));
      unaryW_dfs.put(childLabel, unaryW_dfs.get(childLabel).plus(W_df));

      // and then recurse
      SimpleMatrix childDerivative = NeuralUtils.elementwiseApplyTanhDerivative(childVector);
      // SimpleMatrix childDerivative = childVector;
      SimpleMatrix childWTDelta = WTdelta.extractMatrix(0, deltaCurrent.numRows(), 0, 1);
      backpropDerivative(
          tree.children()[0],
          words,
          nodeVectors,
          binaryW_dfs,
          unaryW_dfs,
          binaryScoreDerivatives,
          unaryScoreDerivatives,
          wordVectorDerivatives,
          childDerivative.elementMult(childWTDelta));
    }
  }
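In the binary branch above, the parent's delta is pushed through the transform by computing W.transpose().mult(deltaCurrent) and then slicing that vector into a top half for the left child and a second block for the right child; the final row, which corresponds to the bias column, is dropped. Below is a minimal sketch of just that split, assuming EJML's SimpleMatrix (the type used throughout these examples), toy dimensions, and no context words; the values and the class name are made up for illustration.

import org.ejml.simple.SimpleMatrix;

// Sketch only: hidden size n = 2, so the binary transform W is n x (2n + 1),
// laid out as [W_left | W_right | bias].
public class DeltaSplitSketch {
  public static void main(String[] args) {
    int n = 2;
    SimpleMatrix W = new SimpleMatrix(n, 2 * n + 1);     // binary transform
    SimpleMatrix deltaCurrent = new SimpleMatrix(n, 1);  // delta at the parent node
    for (int i = 0; i < W.getNumElements(); ++i) {
      W.set(i, 0.1 * (i + 1));
    }
    deltaCurrent.set(0, 0, 0.5);
    deltaCurrent.set(1, 0, -0.25);

    // (2n + 1) x 1 vector; the last entry belongs to the bias column and is not used.
    SimpleMatrix WTdelta = W.transpose().mult(deltaCurrent);

    // Rows [0, n) go to the left child, rows [n, 2n) to the right child,
    // mirroring the extractMatrix calls in backpropDerivative above.
    SimpleMatrix leftWTDelta = WTdelta.extractMatrix(0, n, 0, 1);
    SimpleMatrix rightWTDelta = WTdelta.extractMatrix(n, 2 * n, 0, 1);

    System.out.println("left delta:\n" + leftWTDelta);
    System.out.println("right delta:\n" + rightWTDelta);
  }
}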
Example 6
 public SimpleTensor getBinaryTensor(String left, String right) {
   left = basicCategory(left);
   right = basicCategory(right);
   return binaryTensors.get(left, right);
 }
Example 7
 public SimpleMatrix getBinaryTransform(String left, String right) {
   left = basicCategory(left);
   right = basicCategory(right);
   return binaryTransform.get(left, right);
 }
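The binary getters in these examples all follow the same pattern: normalize both labels with basicCategory, then perform a two-key lookup. The sketch below mimics that pattern with a plain nested map and a crude stand-in for basicCategory that strips a function tag such as "-SBJ"; it is not DVModel's actual storage, the real basicCategory delegates to the treebank language pack, and every name in the sketch is illustrative.

import java.util.HashMap;
import java.util.Map;

// Sketch only: a (left, right) keyed lookup where both keys are reduced to their
// basic categories before storage and retrieval.
public class BinaryLookupSketch {
  private final Map<String, Map<String, String>> binaryTransform = new HashMap<>();

  // Crude stand-in for the real basicCategory: drop anything after the first '-'.
  private static String basicCategory(String label) {
    int dash = label.indexOf('-');
    return dash < 0 ? label : label.substring(0, dash);
  }

  public void put(String left, String right, String value) {
    binaryTransform
        .computeIfAbsent(basicCategory(left), k -> new HashMap<>())
        .put(basicCategory(right), value);
  }

  public String getBinaryTransform(String left, String right) {
    Map<String, String> byRight = binaryTransform.get(basicCategory(left));
    return byRight == null ? null : byRight.get(basicCategory(right));
  }

  public static void main(String[] args) {
    BinaryLookupSketch sketch = new BinaryLookupSketch();
    sketch.put("NP", "VP", "W_NP_VP");
    // Function tags are stripped, so "NP-SBJ" finds the entry stored under "NP".
    System.out.println(sketch.getBinaryTransform("NP-SBJ", "VP")); // W_NP_VP
  }
}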