  @Override
  public double getUnnormalizedLogProbability(Assignment assignment) {
    Preconditions.checkArgument(assignment.containsAll(getVars().getVariableNumsArray()));
    Tensor inputFeatureVector =
        (Tensor) assignment.getValue(getInputVariable().getOnlyVariableNum());

    if (conditionalVars.size() == 0) {
      // No normalization for any conditioned-on variables. This case
      // allows a more efficient implementation than the default
      // in ClassifierFactor.
      VariableNumMap outputVars = getOutputVariables();
      Tensor outputTensor =
          SparseTensor.singleElement(
              outputVars.getVariableNumsArray(),
              outputVars.getVariableSizes(),
              outputVars.assignmentToIntArray(assignment),
              1.0);

      Tensor featureIndicator = outputTensor.outerProduct(inputFeatureVector);
      return logWeights.innerProduct(featureIndicator).getByDimKey();
    } else {
      // Default to looking up the answer in the output log probabilities
      int[] outputIndexes = getOutputVariables().assignmentToIntArray(assignment);
      Tensor logProbs = getOutputLogProbTensor(inputFeatureVector);
      return logProbs.getByDimKey(outputIndexes);
    }
  }
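A minimal usage sketch (not from the source): assuming `factor` is a ConditionalLogLinearFactor whose input variable `x` carries a length-4 feature vector and whose output variable `y` takes the values "T"/"F" (names borrowed from the test below), its unnormalized log probability for a labeled input could be queried like this.

    // Hypothetical names: `factor`, `x`, and `y` are assumed to be set up as described above.
    Tensor features = SparseTensor.vector(0, 4, new double[] {1.0, -1.0, 1.0, 1.0});
    Assignment labeled = x.outcomeArrayToAssignment(features)
        .union(y.outcomeArrayToAssignment("T"));
    double score = factor.getUnnormalizedLogProbability(labeled);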
Example #2
  @SuppressWarnings("unchecked")
  public void setUp() {
    ParametricFactorGraphBuilder builder = new ParametricFactorGraphBuilder();

    // Create a plate for each input/output pair.
    DiscreteVariable outputVar = new DiscreteVariable("tf", Arrays.asList("T", "F"));
    ObjectVariable tensorVar = new ObjectVariable(Tensor.class);
    builder.addPlate(
        "plateVar",
        new VariableNumMap(
            Ints.asList(0, 1), Arrays.asList("x", "y"), Arrays.asList(tensorVar, outputVar)),
        10);

    // Factor connecting each x to the corresponding y.
    all =
        new VariableNumMap(
            Ints.asList(0, 1),
            Arrays.asList("plateVar/?(0)/x", "plateVar/?(0)/y"),
            Arrays.asList(tensorVar, outputVar));
    x = all.getVariablesByName("plateVar/?(0)/x");
    y = all.getVariablesByName("plateVar/?(0)/y");
    ConditionalLogLinearFactor f =
        new ConditionalLogLinearFactor(
            x, y, VariableNumMap.EMPTY, DiscreteVariable.sequence("foo", 4));
    builder.addFactor(
        "classifier",
        f,
        VariableNumPattern.fromTemplateVariables(
            all, VariableNumMap.EMPTY, builder.getDynamicVariableSet()));

    // Factor connecting adjacent y's
    VariableNumMap adjacentVars =
        new VariableNumMap(
            Ints.asList(0, 1),
            Arrays.asList("plateVar/?(0)/y", "plateVar/?(1)/y"),
            Arrays.asList(outputVar, outputVar));
    builder.addFactor(
        "adjacent",
        DiscreteLogLinearFactor.createIndicatorFactor(adjacentVars),
        VariableNumPattern.fromTemplateVariables(
            adjacentVars, VariableNumMap.EMPTY, builder.getDynamicVariableSet()));
    sequenceModel = builder.build();

    // Construct some training data.
    List<Assignment> inputAssignments = Lists.newArrayList();
    for (int i = 0; i < 8; i++) {
      double[] values = new double[4];
      values[0] = (i % 2) * 2 - 1;
      values[1] = ((i / 2) % 2) * 2 - 1;
      values[2] = ((i / 4) % 2) * 2 - 1;
      values[3] = 1;
      inputAssignments.add(x.outcomeArrayToAssignment(SparseTensor.vector(0, 4, values)));
    }

    Assignment yf = y.outcomeArrayToAssignment("F");
    Assignment yt = y.outcomeArrayToAssignment("T");

    trainingData = Lists.newArrayList();
    trainingData.add(
        getListVarAssignment(
            Arrays.asList(inputAssignments.get(3), inputAssignments.get(5)),
            Arrays.asList(yt, yt)));
    trainingData.add(
        getListVarAssignment(
            Arrays.asList(inputAssignments.get(0), inputAssignments.get(2)),
            Arrays.asList(yf, yf)));
  }
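For reference, the loop above yields the following feature vectors (a worked expansion derived from the arithmetic, not additional code from the source):

    // i=3 -> x = [ 1,  1, -1, 1], paired with label "T"
    // i=5 -> x = [ 1, -1,  1, 1], paired with label "T"
    // i=0 -> x = [-1, -1, -1, 1], paired with label "F"
    // i=2 -> x = [-1,  1, -1, 1], paired with label "F"
    // Note that the first feature dimension alone separates the "T" and "F" training examples.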
Example #3
  /** Computes the expected *unnormalized* probability of every binary rule. */
  public Factor getBinaryRuleExpectations() {
    Tensor binaryRuleWeights = binaryRuleDistribution.coerceToDiscrete().getWeights();
    SparseTensor tensor = SparseTensor.copyRemovingZeros(binaryRuleWeights, binaryRuleExpectations);

    return new TableFactor(binaryRuleDistribution.getVars(), tensor);
  }
Example #4
public class TableAssignment {

  private final VariableNumMap vars;
  private final Tensor indicators;

  public static final TableAssignment SATISFIABLE =
      new TableAssignment(VariableNumMap.EMPTY, SparseTensor.getScalarConstant(1.0));
  public static final TableAssignment UNSATISFIABLE =
      new TableAssignment(VariableNumMap.EMPTY, SparseTensor.getScalarConstant(0.0));

  public TableAssignment(VariableNumMap vars, Tensor indicators) {
    this.vars = Preconditions.checkNotNull(vars);
    this.indicators = Preconditions.checkNotNull(indicators);

    Preconditions.checkArgument(vars.getDiscreteVariables().size() == vars.size());
    Preconditions.checkArgument(
        Arrays.equals(vars.getVariableNumsArray(), indicators.getDimensionNumbers()));
  }

  public static TableAssignment fromDelimitedLines(VariableNumMap vars, Iterable<String> lines) {
    Preconditions.checkArgument(vars.getDiscreteVariables().size() == vars.size());
    CsvParser parser = CsvParser.defaultParser();
    SparseTensorBuilder builder =
        new SparseTensorBuilder(vars.getVariableNumsArray(), vars.getVariableSizes());
    for (String line : lines) {
      String[] parts = parser.parseLine(line);
      Assignment assignment = vars.outcomeToAssignment(parts);
      builder.put(vars.assignmentToIntArray(assignment), 1.0);
    }
    return new TableAssignment(vars, builder.build());
  }

  public VariableNumMap getVariables() {
    return vars;
  }

  public Tensor getIndicators() {
    return indicators;
  }

  /**
   * Gets the tuples (variable value lists) that are included in this assignment,
   * i.e., those whose indicator weight is nonzero.
   *
   * @return the list of included tuples
   */
  public List<List<Object>> getTuples() {
    Iterator<KeyValue> keyValueIter = indicators.keyValueIterator();
    List<List<Object>> tuples = Lists.newArrayList();
    while (keyValueIter.hasNext()) {
      KeyValue keyValue = keyValueIter.next();
      if (keyValue.getValue() != 0.0) {
        tuples.add(vars.intArrayToAssignment(keyValue.getKey()).getValues());
      }
    }
    return tuples;
  }

  public TableAssignment relabelVariables(int[] relabeling) {
    VariableNumMap relabeledVars = vars.relabelVariableNums(relabeling);
    Tensor relabeledTensor = indicators.relabelDimensions(relabeling);

    return new TableAssignment(relabeledVars, relabeledTensor);
  }

  @Override
  public String toString() {
    Iterator<KeyValue> keyValueIter = indicators.keyValueIterator();
    StringBuilder sb = new StringBuilder();
    while (keyValueIter.hasNext()) {
      KeyValue keyValue = keyValueIter.next();
      if (keyValue.getValue() != 0.0) {
        sb.append(vars.intArrayToAssignment(keyValue.getKey()).getValues());
      }
    }
    return sb.toString();
  }
}
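A minimal usage sketch (hypothetical data; it relies only on the methods defined above and assumes CsvParser.defaultParser() splits lines on commas): build a TableAssignment from delimited lines and read back its satisfying tuples.

    // Two discrete "T"/"F" variables, mirroring the style of the test example earlier.
    DiscreteVariable tf = new DiscreteVariable("tf", Arrays.asList("T", "F"));
    VariableNumMap pairVars = new VariableNumMap(
        Ints.asList(0, 1), Arrays.asList("left", "right"), Arrays.asList(tf, tf));

    TableAssignment table = TableAssignment.fromDelimitedLines(
        pairVars, Arrays.asList("T,T", "T,F"));
    List<List<Object>> tuples = table.getTuples();  // contains the tuples [T, T] and [T, F]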