Example 1
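  /**
   * Dumps this hypothesis and its parent (class, identity hash, target position,
   * expiration flag, pending-phrase count) to stderr, then fails fast if isDone()
   * disagrees with featurizable.done.
   */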
  @Override
  public void debug() {
    System.err.println("###################");
    System.err.printf(
        "hypothesis [class=%s,id=%d,pos=%d,expired=%s,pending=%d]: %s\n",
        getClass(),
        System.identityHashCode(this),
        featurizable.targetPosition,
        hasExpired,
        pendingPhrases.size(),
        this);
    System.err.printf(
        "parent hypothesis [class=%s,id=%d,pos=%d,expired=%s]: %s\n",
        preceedingDerivation.getClass(),
        System.identityHashCode(preceedingDerivation),
        preceedingDerivation.featurizable.targetPosition,
        preceedingDerivation.hasExpired(),
        preceedingDerivation);
    System.err.println("pendingPhrasesCost: " + pendingPhrasesCost);

    DTUHypothesis<TK, FV> hyp = this;
    if (hyp.isDone() != hyp.featurizable.done) {
      System.err.println("Error in AbstractBeamInferer with: " + hyp);
      System.err.println("isDone(): " + hyp.isDone());
      System.err.println("pending phrases: " + hyp.pendingPhrases.size());
      System.err.println("f.done: " + hyp.featurizable.done);
      Derivation<TK, FV> curHyp = hyp;
      while (curHyp != null) {
        System.err.println("  " + curHyp.toString());
        curHyp = curHyp.preceedingDerivation;
      }
      throw new RuntimeException();
    }
  }
Example 2
  /** Constructor used for 1st segment of a discontinuous phrase. */
  public DTUHypothesis(
      int sourceInputId,
      ConcreteRule<TK, FV> translationOpt,
      int insertionPosition,
      Derivation<TK, FV> baseHyp,
      CombinedFeaturizer<TK, FV> featurizer,
      Scorer<FV> scorer,
      SearchHeuristic<TK, FV> heuristic) {

    super(
        sourceInputId,
        translationOpt,
        translationOpt.abstractRule,
        insertionPosition,
        baseHyp,
        featurizer,
        scorer,
        heuristic,
        /* targetPhrase= */ getSegment(translationOpt.abstractRule, 0),
        /* hasPendingPhrases= */ hasPendingPhrases(translationOpt, baseHyp, true, false),
        /* segmentIdx= */ 0);

    // Copy old pending phrases from parent hypothesis:
    this.pendingPhrases = new TreeSet<PendingPhrase<TK, FV>>();
    if (baseHyp instanceof DTUHypothesis) {
      Set<PendingPhrase<TK, FV>> oldPhrases = ((DTUHypothesis<TK, FV>) baseHyp).pendingPhrases;
      for (PendingPhrase<TK, FV> oldPhrase : oldPhrases) {
        this.pendingPhrases.add(new PendingPhrase<TK, FV>(oldPhrase));
        int lastPosition = oldPhrase.lastPosition;
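        // If the copied phrase's allowed span ends before the current hypothesis length,
        // it can no longer be completed, so this hypothesis expires.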
        if (lastPosition < this.length) this.hasExpired = true;
      }
    }

    // First segment of a discontinuous phrase has both source and target:
    this.segmentIdx = 0;

    // If the parent hypothesis has expired, so does the current one:
    if (baseHyp.hasExpired()) this.hasExpired = true;

    // Add new pending phrases:
    // assert (MAX_TARGET_PHRASE_SPAN >= 0);
    if (translationOpt.abstractRule instanceof DTURule) {
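      // The new pending phrase may only be placed between this.length + MIN_GAP_SIZE
      // and this.length + MAX_TARGET_PHRASE_SPAN on the target side.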
      PendingPhrase<TK, FV> newPhrase =
          new PendingPhrase<TK, FV>(
              translationOpt,
              sourceInputId,
              this,
              featurizer,
              scorer,
              0,
              this.length + MIN_GAP_SIZE,
              this.length + MAX_TARGET_PHRASE_SPAN);
      pendingPhrases.add(newPhrase);
    }

    // Too many pending phrases?:
    if (pendingPhrases.size() > MAX_PENDING_PHRASES) this.hasExpired = true;

    // Estimate future cost for pending phrases:
    pendingPhrasesCost = costPendingPhrases();
    checkExpiration();
  }