private String getToolName(byte[] data) {
   // Scans tokenized macro code for the first MACRO token followed by a
   // string constant (the macro's declared name). If that name contains
   // "Tool", returns it truncated after "Tool", with spaces replaced by
   // underscores and an ".ijm" extension appended; otherwise returns null.
   //
   // NOTE(review): decodes with the platform default charset, preserving the
   // original behavior — consider an explicit charset if macro text may be
   // non-ASCII.
   String text = new String(data);
   String name = null;
   Tokenizer tok = new Tokenizer();
   Program pgm = tok.tokenize(text);
   int[] code = pgm.getCode();
   Symbol[] symbolTable = pgm.getSymbolTable();
   // Loop condition is i + 1 < code.length (not i < code.length) because each
   // MACRO match inspects the following token; the original bound could read
   // code[i + 1] past the end of the array when MACRO was the last token.
   for (int i = 0; i + 1 < code.length; i++) {
     int token = code[i] & MacroConstants.TOK_MASK;
     if (token == MacroConstants.MACRO) {
       int nextToken = code[i + 1] & MacroConstants.TOK_MASK;
       if (nextToken == MacroConstants.STRING_CONSTANT) {
         // The upper bits of a string-constant token index into the symbol table.
         int address = code[i + 1] >> MacroConstants.TOK_SHIFT;
         Symbol symbol = symbolTable[address];
         name = symbol.str;
         break;
       }
     }
   }
   if (name == null) return null;
   int index = name.indexOf("Tool");
   if (index == -1) return null;
   // Keep everything up to and including "Tool" (4 characters).
   name = name.substring(0, index + 4);
   // Literal replacement — replace() avoids the needless regex of replaceAll().
   name = name.replace(" ", "_");
   name = name + ".ijm";
   return name;
 }
Example #2
0
 /**
  * For a string of 1 or more surface words, return all of the lexical entries for each word as a
  * list of sign hashes. Tokenization is performed using the configured tokenizer.
  *
  * @param s the words in string format
  * @return a list of sign hashes, one per tokenized word, in token order
  * @throws LexException if any tokenized word is not found in the lexicon
  */
 public List<SignHash> getEntriesFromWords(String s) throws LexException {
   List<SignHash> entries = new ArrayList<SignHash>();
   // Enhanced for loop replaces the explicit Iterator of the original;
   // iteration order and behavior are unchanged.
   for (Word w : tokenizer.tokenize(s)) {
     SignHash signs = getSignsFromWord(w);
     // An empty sign hash means the word has no lexical entry at all.
     if (signs.size() == 0) {
       throw new LexException("Word not in lexicon: \"" + w + "\"");
     }
     entries.add(signs);
   }
   return entries;
 }