Example no. 1
  /**
   * Load the directed graph from the given input stream.
   *
   * @param inStream the stream to read the directed graph from
   * @return the directed graph, or a CART if the stream actually contains a CART file
   * @throws IOException if a problem occurs while reading the stream
   * @throws MaryConfigurationException if the data in the stream is inconsistent
   */
  public DirectedGraph load(InputStream inStream) throws IOException, MaryConfigurationException {
    BufferedInputStream buffInStream = new BufferedInputStream(inStream);
    assert buffInStream.markSupported();
    buffInStream.mark(10000);
    // open the CART-File and read the header
    DataInput raf = new DataInputStream(buffInStream);

    MaryHeader maryHeader = new MaryHeader(raf);
    if (!maryHeader.hasCurrentVersion()) {
      throw new IOException("Wrong version of database file");
    }
    if (maryHeader.getType() != MaryHeader.DIRECTED_GRAPH) {
      if (maryHeader.getType() == MaryHeader.CARTS) {
        buffInStream.reset();
        return new MaryCARTReader().loadFromStream(buffInStream);
      } else {
        throw new IOException("Not a directed graph file");
      }
    }

    // Read properties
    short propDataLength = raf.readShort();
    Properties props;
    if (propDataLength == 0) {
      props = null;
    } else {
      byte[] propsData = new byte[propDataLength];
      raf.readFully(propsData);
      ByteArrayInputStream bais = new ByteArrayInputStream(propsData);
      props = new Properties();
      props.load(bais);
      bais.close();
    }

    // Read the feature definition
    FeatureDefinition featureDefinition = new FeatureDefinition(raf);

    // read the decision nodes
    int numDecNodes = raf.readInt(); // number of decision nodes

    // First we need to read all nodes into memory, then we can link them properly
    // in terms of parent/child.
    DecisionNode[] dns = new DecisionNode[numDecNodes];
    int[][] childIndexes = new int[numDecNodes][];
    for (int i = 0; i < numDecNodes; i++) {
      // read one decision node
      int featureNameIndex = raf.readInt();
      int nodeTypeNr = raf.readInt();
      DecisionNode.Type nodeType = DecisionNode.Type.values()[nodeTypeNr];
      int numChildren = 2; // for binary nodes
      switch (nodeType) {
        case BinaryByteDecisionNode:
          int criterion = raf.readInt();
          dns[i] =
              new DecisionNode.BinaryByteDecisionNode(
                  featureNameIndex, (byte) criterion, featureDefinition);
          break;
        case BinaryShortDecisionNode:
          criterion = raf.readInt();
          dns[i] =
              new DecisionNode.BinaryShortDecisionNode(
                  featureNameIndex, (short) criterion, featureDefinition);
          break;
        case BinaryFloatDecisionNode:
          float floatCriterion = raf.readFloat();
          dns[i] =
              new DecisionNode.BinaryFloatDecisionNode(
                  featureNameIndex, floatCriterion, featureDefinition);
          break;
        case ByteDecisionNode:
          numChildren = raf.readInt();
          if (featureDefinition.getNumberOfValues(featureNameIndex) != numChildren) {
            throw new IOException(
                "Inconsistent cart file: feature "
                    + featureDefinition.getFeatureName(featureNameIndex)
                    + " should have "
                    + featureDefinition.getNumberOfValues(featureNameIndex)
                    + " values, but decision node "
                    + i
                    + " has only "
                    + numChildren
                    + " child nodes");
          }
          dns[i] =
              new DecisionNode.ByteDecisionNode(featureNameIndex, numChildren, featureDefinition);
          break;
        case ShortDecisionNode:
          numChildren = raf.readInt();
          if (featureDefinition.getNumberOfValues(featureNameIndex) != numChildren) {
            throw new IOException(
                "Inconsistent cart file: feature "
                    + featureDefinition.getFeatureName(featureNameIndex)
                    + " should have "
                    + featureDefinition.getNumberOfValues(featureNameIndex)
                    + " values, but decision node "
                    + i
                    + " has only "
                    + numChildren
                    + " child nodes");
          }
          dns[i] =
              new DecisionNode.ShortDecisionNode(featureNameIndex, numChildren, featureDefinition);
          break;
      }
      dns[i].setUniqueDecisionNodeId(i + 1);
      // now read the children, indexes only:
      childIndexes[i] = new int[numChildren];
      for (int k = 0; k < numChildren; k++) {
        childIndexes[i][k] = raf.readInt();
      }
    }

    // read the leaves
    int numLeafNodes = raf.readInt(); // number of leaves; this does not include empty leaves
    LeafNode[] lns = new LeafNode[numLeafNodes];

    for (int j = 0; j < numLeafNodes; j++) {
      // read one leaf node
      int leafTypeNr = raf.readInt();
      LeafNode.LeafType leafNodeType = LeafNode.LeafType.values()[leafTypeNr];
      switch (leafNodeType) {
        case IntArrayLeafNode:
          int numData = raf.readInt();
          int[] data = new int[numData];
          for (int d = 0; d < numData; d++) {
            data[d] = raf.readInt();
          }
          lns[j] = new LeafNode.IntArrayLeafNode(data);
          break;
        case FloatLeafNode:
          float stddev = raf.readFloat();
          float mean = raf.readFloat();
          lns[j] = new LeafNode.FloatLeafNode(new float[] {stddev, mean});
          break;
        case IntAndFloatArrayLeafNode:
        case StringAndFloatLeafNode:
          int numPairs = raf.readInt();
          int[] ints = new int[numPairs];
          float[] floats = new float[numPairs];
          for (int d = 0; d < numPairs; d++) {
            ints[d] = raf.readInt();
            floats[d] = raf.readFloat();
          }
          if (leafNodeType == LeafNode.LeafType.IntAndFloatArrayLeafNode)
            lns[j] = new LeafNode.IntAndFloatArrayLeafNode(ints, floats);
          else lns[j] = new LeafNode.StringAndFloatLeafNode(ints, floats);
          break;
        case FeatureVectorLeafNode:
          throw new IllegalArgumentException(
              "Reading feature vector leaf nodes is not yet implemented");
        case PdfLeafNode:
          throw new IllegalArgumentException("Reading pdf leaf nodes is not yet implemented");
      }
      lns[j].setUniqueLeafId(j + 1);
    }

    // Graph nodes
    int numDirectedGraphNodes = raf.readInt();
    DirectedGraphNode[] graphNodes = new DirectedGraphNode[numDirectedGraphNodes];
    int[] dgnLeafIndices = new int[numDirectedGraphNodes];
    int[] dgnDecIndices = new int[numDirectedGraphNodes];
    for (int g = 0; g < numDirectedGraphNodes; g++) {
      graphNodes[g] = new DirectedGraphNode(null, null);
      graphNodes[g].setUniqueGraphNodeID(g + 1);
      dgnLeafIndices[g] = raf.readInt();
      dgnDecIndices[g] = raf.readInt();
    }

    // Now, link up the decision nodes with their daughters
    for (int i = 0; i < numDecNodes; i++) {
      // System.out.print(dns[i]+" "+dns[i].getFeatureName()+" ");
      for (int k = 0; k < childIndexes[i].length; k++) {
        Node child = childIndexToNode(childIndexes[i][k], dns, lns, graphNodes);
        dns[i].addDaughter(child);
        // System.out.print(" "+dns[i].getDaughter(k));
      }
      // System.out.println();
    }
    // And link up directed graph nodes
    for (int g = 0; g < numDirectedGraphNodes; g++) {
      Node leaf = childIndexToNode(dgnLeafIndices[g], dns, lns, graphNodes);
      graphNodes[g].setLeafNode(leaf);
      Node dec = childIndexToNode(dgnDecIndices[g], dns, lns, graphNodes);
      if (dec != null && !dec.isDecisionNode())
        throw new IllegalArgumentException("Only decision nodes allowed, read " + dec.getClass());
      graphNodes[g].setDecisionNode((DecisionNode) dec);
      // System.out.println("Graph node "+(g+1)+", leaf: "+Integer.toHexString(dgnLeafIndices[g])+",
      // "+leaf+" -- dec: "+Integer.toHexString(dgnDecIndices[g])+", "+dec);
    }

    Node rootNode;
    if (graphNodes.length > 0) {
      rootNode = graphNodes[0];
    } else if (dns.length > 0) {
      rootNode = dns[0];
      // CART behaviour, not sure if this is needed:
      // Now count all data once, so that getNumberOfData()
      // will return the correct figure.
      ((DecisionNode) rootNode).countData();
    } else if (lns.length > 0) {
      rootNode = lns[0]; // single-leaf tree...
    } else {
      rootNode = null;
    }

    // set the rootNode as the rootNode of cart
    return new DirectedGraph(rootNode, featureDefinition, props);
  }
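
A minimal call-site sketch for the loader above. The reader class name (DirectedGraphReader) and the file name are assumptions for illustration; the loader wraps the stream in its own BufferedInputStream, so any InputStream will do.

  // Hypothetical usage; class and file names are assumptions, not part of the listing.
  try (InputStream in = new FileInputStream("voice.graph.mry")) {
    DirectedGraph graph = new DirectedGraphReader().load(in);
    // graph now holds the decision nodes, leaves and directed graph nodes read above;
    // if the file actually contained a CART, a CART instance is returned instead.
  }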
  @Override
  protected void loadFromByteBuffer(String fileName)
      throws IOException, MaryConfigurationException {
    /* Open the file */
    FileInputStream fis = new FileInputStream(fileName);
    FileChannel fc = fis.getChannel();
    ByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
    fis.close();

    /* Load the Mary header */
    hdr = new MaryHeader(bb);
    if (hdr.getType() != MaryHeader.HALFPHONE_UNITFEATS) {
      throw new MaryConfigurationException(
          "File [" + fileName + "] is not a valid Mary Halfphone Features file.");
    }
    leftWeights = new FeatureDefinition(bb);
    rightWeights = new FeatureDefinition(bb);
    assert leftWeights.featureEquals(rightWeights)
        : "Halfphone unit feature file contains incompatible feature definitions for left and right units -- this should not happen!";
    featureDefinition = leftWeights; // one of them, for super class
    int numberOfUnits = bb.getInt();
    featureVectors = new FeatureVector[numberOfUnits];
    for (int i = 0; i < numberOfUnits; i++) {
      featureVectors[i] = featureDefinition.readFeatureVector(i, bb);
    }
  }
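
The loader above memory-maps the whole file before parsing it. The same NIO idiom in isolation, as a small self-contained sketch using only the JDK (file name taken from the command line):

import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class MapFileDemo {
  public static void main(String[] args) throws IOException {
    // Map the file read-only; the mapping remains usable after the channel is closed.
    try (FileChannel fc = FileChannel.open(Paths.get(args[0]), StandardOpenOption.READ)) {
      MappedByteBuffer bb = fc.map(FileChannel.MapMode.READ_ONLY, 0, fc.size());
      int firstInt = bb.getInt(); // reads 4 bytes, big-endian by default
      System.out.println("first int: " + firstInt);
    }
  }
}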
Example no. 3
 /**
  * Get the theoretical number of leaves, given a feature sequence.
  *
  * @param feaSeq the feature sequence, as an array of feature indices
  * @return the number of leaves, or -1 if the product overflows a long integer
  */
 public long getTheoreticalNumberOfLeaves(int[] feaSeq) {
   long ret = 1;
   for (int i = 0; i < feaSeq.length; i++) {
     //          System.out.println( "Feature [" + i + "] has [" +
     // featureDefinition.getNumberOfValues( featureSequence[i] ) + "] values."
     //          + "(Number of leaves = [" + ret + "].)" );
     ret *= featureDefinition.getNumberOfValues(feaSeq[i]);
     if (ret < 0) return (-1);
   }
   return (ret);
 }
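
The overflow guard relies on the product turning negative; a self-contained sketch with invented value counts, plus a note on the stricter JDK alternative:

  // Hypothetical value counts for three features in the sequence.
  int[] numValues = {5, 42, 20};
  long leaves = 1;
  for (int n : numValues) {
    leaves *= n;
    if (leaves < 0) { // sign flip as a (heuristic) overflow indicator, as in the method above
      leaves = -1;
      break;
    }
  }
  // leaves == 4200 here; Math.multiplyExact(leaves, n) would throw on overflow instead,
  // which also catches cases where the overflowed product happens to stay positive.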
Example no. 4
 public TrainedLTS(AllophoneSet aPhonSet, CART predictionTree) {
   this.allophoneSet = aPhonSet;
   this.tree = predictionTree;
   this.featureDefinition = tree.getFeatureDefinition();
   this.indexPredictedFeature = featureDefinition.getFeatureIndex(PREDICTED_STRING_FEATURENAME);
   Properties props = tree.getProperties();
   if (props == null)
     throw new IllegalArgumentException("Prediction tree does not contain properties");
   convertToLowercase = Boolean.parseBoolean(props.getProperty("lowercase"));
   context = Integer.parseInt(props.getProperty("context"));
 }
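
The constructor requires the tree's properties to provide at least the "lowercase" and "context" keys read above; a small sketch of such a property set (the values are invented for illustration):

  Properties props = new Properties();
  props.setProperty("lowercase", "true"); // fold input graphemes to lower case before prediction
  props.setProperty("context", "2");      // assumed meaning: letters of context used by the tree
  boolean convertToLowercase = Boolean.parseBoolean(props.getProperty("lowercase"));
  int context = Integer.parseInt(props.getProperty("context"));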
Example no. 5
 /**
  * Convenience method to load the prediction tree from an input stream.
  *
  * @param treeStream the stream to read the tree from
  * @throws IOException if the stream cannot be read
  * @throws MaryConfigurationException if the tree data is inconsistent
  */
 public void loadTree(InputStream treeStream) throws IOException, MaryConfigurationException {
   MaryCARTReader cartReader = new MaryCARTReader();
   this.tree = cartReader.loadFromStream(treeStream);
   this.featureDefinition = tree.getFeatureDefinition();
   this.indexPredictedFeature = featureDefinition.getFeatureIndex(PREDICTED_STRING_FEATURENAME);
   this.convertToLowercase = false;
   Properties props = tree.getProperties();
   if (props == null)
     throw new IllegalArgumentException("Prediction tree does not contain properties");
   convertToLowercase = Boolean.parseBoolean(props.getProperty("lowercase"));
   context = Integer.parseInt(props.getProperty("context"));
 }
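
A possible call site, assuming an already constructed TrainedLTS instance named lts and a tree file on disk (both names are assumptions for illustration):

  try (InputStream treeStream =
      new BufferedInputStream(new FileInputStream("lts.tree.mry"))) {
    lts.loadTree(treeStream);
  }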
Example no. 6
 private void dumpFeatureVectors() throws IOException {
   // open file
   PrintWriter out = new PrintWriter(new BufferedOutputStream(new FileOutputStream(featFile)));
   for (int i = 0; i < fv.length; i++) {
     // Print the feature string
     out.print(i + " " + featureDefinition.toFeatureString(fv[i]));
     // print a newline if this is not the last vector
     if (i + 1 != fv.length) {
       out.print("\n");
     }
   }
   // dump and close
   out.flush();
   out.close();
 }
Example no. 7
  /**
   * Load SoP data.
   *
   * @throws IOException if the data cannot be read.
   */
  @Override
  protected void loadData() throws IOException {
    sopModels = new HashMap<String, SoP>();
    String nextLine, nextType;
    String strContext = "";
    Scanner s = null;
    try {
      s = new Scanner(new BufferedReader(new InputStreamReader(dataStream, "UTF-8")));

      // The first part contains the feature definition
      while (s.hasNext()) {
        nextLine = s.nextLine();
        if (nextLine.trim().equals("")) break;
        else strContext += nextLine + "\n";
      }
      // the featureDefinition is the same for vowel, consonant and Pause
      FeatureDefinition sopFeatureDefinition =
          new FeatureDefinition(new BufferedReader(new StringReader(strContext)), false);
      predictionFeatureNames = sopFeatureDefinition.getFeatureNames();

      while (s.hasNext()) {
        nextType = s.nextLine();
        nextLine = s.nextLine();

        if (nextType.startsWith("f0")) {
          sopModels.put("f0", new SoP(nextLine, sopFeatureDefinition));
        } else {
          sopModels.put(nextType, new SoP(nextLine, sopFeatureDefinition));
        }
      }
    } catch (Exception e) {
      throw new IOException("Error reading SoP data", e);
    } finally {
      // make sure the scanner (and the underlying stream) is closed even if parsing fails
      if (s != null) {
        s.close();
      }
    }
  }
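
From the parser above, the stream is expected to begin with a feature definition block terminated by a blank line, followed by pairs of lines: a model name line (anything starting with "f0" is stored under the key "f0"; any other name is used as the map key verbatim) and the SoP equation line passed to the SoP constructor. Schematically (placeholders only, not real file content):

  <feature definition, one or more lines>
  <blank line>
  f0...             <- stored under the key "f0"
  <SoP line for f0>
  duration          <- any other name becomes the map key as-is (name invented)
  <SoP line for duration>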
Example no. 8
  /**
   * A local sort at a particular node along the deep sorting operation. This is a recursive
   * function.
   *
   * @param currentFeatureIdx The currently tested feature.
   * @param currentNode The current node, holding the currently processed zone in the array of
   *     feature vectors.
   */
  private void sortNode(int currentFeatureIdx, MaryNode currentNode) {
    /* If we have reached a leaf, do a final sort according to the unit index and return: */
    if (currentFeatureIdx == featureSequence.length) {
      Arrays.sort(featureVectors, currentNode.from, currentNode.to, cui);
      numberOfLeaves++;
      /*System.out.print( "LEAF ! (" + (currentNode.to-currentNode.from) + " units)" );
      for ( int i = currentNode.from; i < currentNode.to; i++ ) {
          System.out.print( " (" + featureVectors[i].getUnitIndex() + " 0)" );
      }
      System.out.println( "" );*/
      return;
    }
    /* Else: */
    int currentFeature = featureSequence[currentFeatureIdx];
    FeatureVector.FeatureType featureType = featureVectors[0].getFeatureType(currentFeature);
    /* Register the feature currently used for the splitting */
    currentNode.setFeatureIndex(currentFeature);
    /* Perform the sorting according to the currently considered feature: */
    /* 1) position the comparator onto the right feature */
    c.setFeatureIdx(currentFeature, featureType);
    /* 2) do the sorting */
    Arrays.sort(featureVectors, currentNode.from, currentNode.to, c);

    /* Then, seek for the zones where the feature value is the same,
     * and launch the next sort level on these. */
    int nVal = featureDefinition.getNumberOfValues(currentFeature);
    currentNode.split(nVal);
    int nextFrom = currentNode.from;
    int nextTo = currentNode.from;
    for (int i = 0; i < nVal; i++) {
      nextFrom = nextTo;
      // System.out.print( "Next node begins at " + nextFrom );
      while ((nextTo < currentNode.to)
          && (featureVectors[nextTo].getFeatureAsInt(currentFeature) == i)) {
        // System.out.print( " " + featureVectors[nextTo].getFeatureAsInt( currentFeature ) );
        nextTo++;
      }
      // System.out.println( " and ends at " + nextTo + " for a total of " + (nextTo-nextFrom) + "
      // units." );
      if ((nextTo - nextFrom) != 0) {
        MaryNode nod = new MaryNode(nextFrom, nextTo);
        currentNode.setChild(i, nod);
        // System.out.print("(" + i + " isByteOf " + currentFeature + ")" );
        sortNode(currentFeatureIdx + 1, nod);
      } else currentNode.setChild(i, null);
    }
  }
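
The recursion leans on sorting a sub-range of the vector array with a comparator positioned on the current feature; the range-sorting idiom itself, as a self-contained JDK example with plain integers standing in for feature vectors:

import java.util.Arrays;
import java.util.Comparator;

public class RangeSortDemo {
  public static void main(String[] args) {
    Integer[] values = {7, 3, 9, 1, 8, 2};
    // Sort only the zone [1, 5): elements 3, 9, 1, 8 -- the rest stays untouched.
    Arrays.sort(values, 1, 5, Comparator.naturalOrder());
    System.out.println(Arrays.toString(values)); // [7, 1, 3, 8, 9, 2]
  }
}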
 @Override
 protected void loadFromStream(String fileName) throws IOException, MaryConfigurationException {
   /* Open the file */
    DataInputStream dis =
        new DataInputStream(new BufferedInputStream(new FileInputStream(fileName)));
   /* Load the Mary header */
   hdr = new MaryHeader(dis);
   if (hdr.getType() != MaryHeader.HALFPHONE_UNITFEATS) {
     throw new IOException("File [" + fileName + "] is not a valid Mary Halfphone Features file.");
   }
   leftWeights = new FeatureDefinition(dis);
   rightWeights = new FeatureDefinition(dis);
   assert leftWeights.featureEquals(rightWeights)
       : "Halfphone unit feature file contains incompatible feature definitions for left and right units -- this should not happen!";
   featureDefinition = leftWeights; // one of them, for super class
   int numberOfUnits = dis.readInt();
   featureVectors = new FeatureVector[numberOfUnits];
   for (int i = 0; i < numberOfUnits; i++) {
     featureVectors[i] = featureDefinition.readFeatureVector(i, dis);
   }
 }
Example no. 10
  /**
   * Export this feature definition in the "all.desc" format which can be read by wagon. The
   * features "unit_logf0" and "unit_duration" are marked for wagon to ignore.
   *
   * @throws IOException if the description file cannot be written
   */
  private void createDescFile() throws IOException {
    PrintWriter out = new PrintWriter(new FileOutputStream(descFile));
    Set<String> featuresToIgnore = new HashSet<String>();
    featuresToIgnore.add("unit_logf0");
    featuresToIgnore.add("unit_duration");

    int numDiscreteFeatures =
        featureDefinition.getNumberOfByteFeatures() + featureDefinition.getNumberOfShortFeatures();
    out.println("(");
    out.println("(occurid cluster)");
    for (int i = 0, n = featureDefinition.getNumberOfFeatures(); i < n; i++) {
      out.print("( ");
      String featureName = featureDefinition.getFeatureName(i);
      out.print(featureName);
      if (featuresToIgnore != null && featuresToIgnore.contains(featureName)) {
        out.print(" ignore");
      }
      if (i < numDiscreteFeatures) { // list values
        for (int v = 0, vmax = featureDefinition.getNumberOfValues(i); v < vmax; v++) {
          out.print("  ");
          // Print values surrounded by double quotes, and make sure any
          // double quotes in the value are preceded by a backslash --
          // otherwise, we get problems e.g. for sentence_punc
          String val = featureDefinition.getFeatureValueAsString(i, v);
          if (val.indexOf('"') != -1) {
            StringBuilder buf = new StringBuilder();
            for (int c = 0; c < val.length(); c++) {
              char ch = val.charAt(c);
              if (ch == '"') buf.append("\\\"");
              else buf.append(ch);
            }
            val = buf.toString();
          }
          out.print("\"" + val + "\"");
        }
        out.println(" )");
      } else { // float feature
        out.println(" float )");
      }
    }
    out.println(")");
    out.close();
  }
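
The generated description file then looks roughly like the following, with one parenthesized line per feature: discrete features list their quoted values, float features are declared as float, and the two ignored features carry the ignore marker (feature names and values shortened and invented for illustration):

  (
  (occurid cluster)
  ( phone  "0"  "a"  "e"  "i" )
  ( stressed  "0"  "1" )
  ( unit_duration ignore float )
  ( unit_logf0 ignore float )
  )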
Example no. 11
 /**
  * Launches a deep sort on the array of feature vectors. This is public because it can be used to
  * re-index the previously read feature file.
  *
  * @param setFeatureSequence an array of feature names, indicating the sequence of features
  *     according to which the sorting should be performed
  */
 public void deepSort(String[] setFeatureSequence) {
   featureSequence = featureDefinition.getFeatureIndexArray(setFeatureSequence);
   numberOfLeaves = 0;
   tree = new MaryNode(0, featureVectors.length);
   sortNode(0, tree);
 }
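
A possible call site, assuming indexer is the object holding this method and featureDefinition is accessible alongside it (the feature names are invented):

  String[] featureNames = {"phone", "stressed", "pos_in_syl"}; // invented feature names
  indexer.deepSort(featureNames);
  // Theoretical upper bound on the number of leaves for the same sequence:
  long bound =
      indexer.getTheoreticalNumberOfLeaves(featureDefinition.getFeatureIndexArray(featureNames));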
Example no. 12
  /**
   * Create an utterance model from the feature vectors predicted from the given elements.
   *
   * @param predictFromElements MaryXML elements from which to get feature vectors.
   * @return an utterance model containing state durations and the pdfs looked up in the trees for
   *     generating F0.
   * @throws MaryConfigurationException if an error occurs while searching the HMM trees.
   */
  private HTSUttModel createUttModel(List<Element> predictFromElements)
      throws MaryConfigurationException {
    int i, k, s, t, mstate, frame, durInFrames, durStateInFrames, numVoicedInModel;
    HTSModel m;
    List<Element> predictorElements = predictFromElements;
    List<Target> predictorTargets = getTargets(predictorElements);
    FeatureVector fv;
    HTSUttModel um = new HTSUttModel();
    FeatureDefinition feaDef = htsData.getFeatureDefinition();
    float duration;
    double diffdurOld = 0.0;
    double diffdurNew = 0.0;
    float f0s[] = null;
    try {
      // (1) Predict the values
      for (i = 0; i < predictorTargets.size(); i++) {
        fv = predictorTargets.get(i).getFeatureVector();
        Element e = predictFromElements.get(i);
        um.addUttModel(new HTSModel(cart.getNumStates()));
        m = um.getUttModel(i);
        /* this function also sets the phone name, the phone between - and + */
        m.setPhoneName(fv.getFeatureAsString(feaDef.getFeatureIndex("phone"), feaDef));
        /* Check if context-dependent gv (gv without sil) */
        if (htsData.getUseContextDependentGV()) {
          if (m.getPhoneName().contentEquals("_")) m.setGvSwitch(false);
        }
        /* increment number of models in utterance model */
        um.setNumModel(um.getNumModel() + 1);
        /* update number of states */
        um.setNumState(um.getNumState() + cart.getNumStates());
        // get the duration from the element
        duration = Integer.parseInt(e.getAttribute("d")) * 0.001f; // in sec.
        // distribute the duration (in frames) among the five states, here it is done the same
        // amount for each state
        durInFrames = (int) (duration / fperiodsec);
        durStateInFrames = (int) (durInFrames / cart.getNumStates());
        m.setTotalDur(0); // reset to set new value according to duration
        for (s = 0; s < cart.getNumStates(); s++) {
          m.setDur(s, durStateInFrames);
          m.setTotalDur(m.getTotalDur() + m.getDur(s));
        }
        um.setTotalFrame(um.getTotalFrame() + m.getTotalDur());
        System.out.format(
            "createUttModel: duration=%.3f sec. durInFrames=%d  durStateInFrames=%d  m.getTotalDur()=%d\n",
            duration, durInFrames, durStateInFrames, m.getTotalDur());

        /* Find pdf for LF0, this function sets the pdf for each state.
         * and determines, according to the HMM models, whether the states are voiced or unvoiced, (it can be possible that some states are voiced
         * and some unvoiced).*/
        cart.searchLf0InCartTree(m, fv, feaDef, htsData.getUV());
        for (mstate = 0; mstate < cart.getNumStates(); mstate++) {
          for (frame = 0; frame < m.getDur(mstate); frame++)
            if (m.getVoiced(mstate)) um.setLf0Frame(um.getLf0Frame() + 1);
        }
      }
      return um;
    } catch (Exception e) {
      throw new MaryConfigurationException(
          "Error searching in tree when creating utterance model. ", e);
    }
  }
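
The even distribution of the element duration over the HMM states is plain integer arithmetic; a self-contained sketch with invented numbers (d="250" milliseconds, a 5 ms frame period, 5 states):

  float fperiodsec = 0.005f;                          // assumed frame period in seconds
  float duration = Integer.parseInt("250") * 0.001f;  // element attribute d="250" -> 0.25 s
  int numStates = 5;
  int durInFrames = (int) (duration / fperiodsec);    // 50 frames
  int durStateInFrames = durInFrames / numStates;     // 10 frames per state
  int totalDur = durStateInFrames * numStates;        // 50 frames; integer division may drop frames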
Example no. 13
  /**
   * Predict durations and state durations from predictFromElements and apply the durations to
   * applyToElements. An utterance model is created that contains the predicted state durations.
   *
   * @param predictFromElements elements to predict from
   * @param applyToElements elements to apply the predicted durations to
   * @return an HTSUttModel utterance model
   * @throws MaryConfigurationException if an error occurs while searching the HMM trees.
   */
  private HTSUttModel predictAndSetDuration(
      List<Element> predictFromElements, List<Element> applyToElements)
      throws MaryConfigurationException {
    List<Element> predictorElements = predictFromElements;
    List<Target> predictorTargets = getTargets(predictorElements);
    FeatureVector fv = null;
    HTSUttModel um = new HTSUttModel();
    FeatureDefinition feaDef = htsData.getFeatureDefinition();
    double diffdurOld = 0.0;
    double diffdurNew = 0.0;
    String durAttributeName = "d";
    try {
      // (1) Predict the values
      for (int i = 0; i < predictorTargets.size(); i++) {
        fv = predictorTargets.get(i).getFeatureVector();
        um.addUttModel(new HTSModel(cart.getNumStates()));
        HTSModel m = um.getUttModel(i);
        /* this function also sets the phone name, the phone between - and + */
        m.setPhoneName(fv.getFeatureAsString(feaDef.getFeatureIndex("phone"), feaDef));

        /* Check if context-dependent gv (gv without sil) */
        if (htsData.getUseContextDependentGV()) {
          if (m.getPhoneName().contentEquals("_")) m.setGvSwitch(false);
        }
        /* increment number of models in utterance model */
        um.setNumModel(um.getNumModel() + 1);
        /* update number of states */
        um.setNumState(um.getNumState() + cart.getNumStates());

        // Estimate state duration from state duration model (Gaussian)
        diffdurNew = cart.searchDurInCartTree(m, fv, htsData, diffdurOld);
        diffdurOld = diffdurNew;
        double duration = m.getTotalDur() * fperiodsec; // in seconds

        um.setTotalFrame(um.getTotalFrame() + m.getTotalDur());
        // System.out.format("HMMModel: phone=%s  duration=%.3f sec. m.getTotalDur()=%d\n",
        // m.getPhoneName(), duration, m.getTotalDur());

        /* Find pdf for LF0, this function sets the pdf for each state.
         * and determines, according to the HMM models, whether the states are voiced or unvoiced, (it can be possible that some states are voiced
         * and some unvoiced).*/
        cart.searchLf0InCartTree(m, fv, feaDef, htsData.getUV());
        for (int mstate = 0; mstate < cart.getNumStates(); mstate++) {
          for (int frame = 0; frame < m.getDur(mstate); frame++)
            if (m.getVoiced(mstate)) um.setLf0Frame(um.getLf0Frame() + 1);
        }

        // set the value in elements
        Element element = applyToElements.get(i);
        // "evaluate" pseudo XPath syntax:
        // TODO this needs to be extended to take into account targetAttributeNames like "foo/@bar",
        // which would add the
        // bar attribute to the foo child of this element, creating the child if not already
        // present...
        if (durAttributeName.startsWith("@")) {
          durAttributeName = durAttributeName.replaceFirst("@", "");
        }
        String formattedTargetValue = String.format(targetAttributeFormat, duration);

        // System.out.println("HMMModel: formattedTargetValue = " + formattedTargetValue);

        // if the attribute already exists for this element, append targetValue:
        if (element.hasAttribute(durAttributeName)) {
          formattedTargetValue =
              element.getAttribute(durAttributeName) + " " + formattedTargetValue;
        }

        // set the new attribute value:
        element.setAttribute(durAttributeName, formattedTargetValue);
      }
      return um;
    } catch (Exception e) {
      throw new MaryConfigurationException("Error searching in tree when predicting duration. ", e);
    }
  }
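
The attribute handling at the end (stripping a leading "@" from the pseudo-XPath name, appending to an existing value, then setting the attribute) uses only the standard DOM API; a self-contained sketch with an invented element name and format string:

import java.util.Locale;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;

public class DurationAttributeDemo {
  public static void main(String[] args) throws Exception {
    Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
    Element ph = doc.createElement("ph"); // hypothetical element name
    String durAttributeName = "@d".replaceFirst("@", ""); // strip the pseudo-XPath "@" prefix
    String formatted = String.format(Locale.US, "%.3f", 0.25); // assumed attribute format
    // Append to an existing value rather than overwrite it, as in the method above.
    if (ph.hasAttribute(durAttributeName)) {
      formatted = ph.getAttribute(durAttributeName) + " " + formatted;
    }
    ph.setAttribute(durAttributeName, formatted);
    System.out.println(durAttributeName + "=" + ph.getAttribute(durAttributeName)); // d=0.250
  }
}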