  @Override
  public AugmentedStrongs augment(
      final String version, final String reference, final String[] keys) {
    if (StringUtils.isBlank(version) || StringUtils.isBlank(reference)) {
      // we can't resolve anything without a version and a reference, so just return the keys as-is
      return new AugmentedStrongs(keys, new EntityDoc[0]);
    }
    final Map<String, String> resolvedStrongs = new HashMap<>((keys.length + 4) * 2);

    // for each key, see whether there is an augmented strong number in the index
    final StringBuilder query = new StringBuilder(keys.length * 10 + 16);
    query.append("(");
    for (int i = 0; i < keys.length; i++) {
      // if the key is Hebrew and not yet augmented
      if (isNonAugmentedHebrew(keys[i])) {
        // we're looking at Hebrew, so look it up in the augmented strongs index;
        // the trailing '?' is a single-character Lucene wildcard, matching any
        // augment suffix of the padded strong number
        query.append(StringConversionUtils.getStrongPaddedKey(keys[i]));
        query.append("? ");
      } else {
        // not a candidate for augmentation, so add it to the results directly,
        // keyed on the padded lower-case form so it isn't added a second time below
        resolvedStrongs.put(
            StringConversionUtils.getStrongPaddedKey(keys[i]).toLowerCase(), keys[i]);
      }
    }

    final EntityDoc[] docs;
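    // only query the index if at least one non-augmented Hebrew strong was appended above;
    // otherwise the query still only contains its opening bracket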
    if (query.length() > 1) {

      // add the reference to the query. We may have several verses due to versification
      // mapping, so we look for documents where at least one of the verses matches
      query.append(") AND (");
      String[] individualVerses =
          StringUtils.split(
              this.versificationService
                  .convertReference(reference, version, JSwordPassageService.OT_BOOK)
                  .getKey()
                  .getOsisID());
      // a whole-book or whole-chapter reference can be optimized with a prefix wildcard;
      // JSword returns such a key as e.g. Gen.1 rather than expanding every verse
      boolean queryAppended = false;
      if (individualVerses.length == 1) {
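        // count the '.' separators in the OSIS id: fewer than two means a book- or
        // chapter-level key (e.g. Gen or Gen.1), so a prefix wildcard covers all its verses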
        int countSeparators = 0;
        for (int ii = 0; ii < individualVerses[0].length() && countSeparators < 2; ii++) {
          if (individualVerses[0].charAt(ii) == '.') {
            countSeparators++;
          }
        }
        if (countSeparators < 2) {
          query.append("references:");
          query.append(individualVerses[0]);
          query.append(".*");
          query.append(' ');
          queryAppended = true;
        }
      }

      if (!queryAppended) {
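        // list every individual verse from the mapped reference explicitly in the query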
        for (String v : individualVerses) {
          query.append("references:");
          query.append(v);
          query.append(' ');
        }
      }
      query.append(")");

      // run the query for the Hebrew words and add the results to the map
      docs = this.augmentedStrongs.search("augmentedStrong", query.toString());
      for (EntityDoc d : docs) {
        final String augmentedStrong = d.get("augmentedStrong");
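        // the stored value is the padded strong number plus a one-character augment
        // suffix (e.g. H0001a); strip the suffix and lower-case it to match the
        // keying used elsewhere in the map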
        resolvedStrongs.put(
            augmentedStrong.substring(0, augmentedStrong.length() - 1).toLowerCase(),
            augmentedStrong);
      }

      // now work out which strongs did not get augmented and add them as-is
      for (String k : keys) {
        final String keyingStrong = StringConversionUtils.getStrongPaddedKey(k).toLowerCase();
        if (!resolvedStrongs.containsKey(keyingStrong)) {
          resolvedStrongs.put(keyingStrong, k);
        }
      }
    } else {
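      // no non-augmented Hebrew strongs were found among the keys, so there is nothing to look up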
      docs = new EntityDoc[0];
    }
    final String[] augmented = new String[resolvedStrongs.size()];
    return new AugmentedStrongs(resolvedStrongs.values().toArray(augmented), docs);
  }
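
  // convenience overload: splits a whitespace-separated list of strong numbers and
  // delegates to the array-based variant above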
  @Override
  public AugmentedStrongs augment(
      final String version, final String verseRef, final String unAugmentedStrongNumbers) {
    return augment(version, verseRef, StringUtils.split(unAugmentedStrongNumbers));
  }