  public static void main(String[] args) throws Exception {
    if (args.length != 1) {
      System.out.println("USAGE: StanfordParser <inputSentencesFile>");
      System.out.println("Output stored in: <inputSentencesFile>.parses");
      System.exit(1); // incorrect usage: exit with a non-zero status
    }

    // load the parsing model once, before any sentence is processed
    StanfordParser.initialize();

    List<String> sentences = new ArrayList<String>();
    BufferedReader in = new BufferedReader(new FileReader(args[0]));
    BufferedWriter out = new BufferedWriter(new FileWriter(args[0] + ".parses"));

    // read the input file, one sentence per line
    String sentence;
    while ((sentence = in.readLine()) != null) {
      sentences.add(sentence);
    }

    // parse each sentence and write its parse on a line of its own
    for (String s : sentences) {
      out.append(StanfordParser.parse(s) + "\n");
    }

    out.close();
    in.close();
  }
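This driver relies on the static StanfordParser.initialize() and StanfordParser.parse(String) helpers called above. Given an input file with one sentence per line, it writes the corresponding parses, one per line and in the same order, to <inputSentencesFile>.parses.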
Example #2
  /**
   * Creates a new instance of Ephyra and initializes the system.
   *
   * <p>For use as an API.
   *
   * @param dir directory of Ephyra
   */
  public OpenEphyra(String dir) {
    this.dir = dir;

    MsgPrinter.printInitializing();

    // create tokenizer
    MsgPrinter.printStatusMsg("Creating tokenizer...");
    if (!OpenNLP.createTokenizer(dir + "res/nlp/tokenizer/opennlp/EnglishTok.bin.gz"))
      MsgPrinter.printErrorMsg("Could not create tokenizer.");
    //		LingPipe.createTokenizer();

    // create sentence detector
    MsgPrinter.printStatusMsg("Creating sentence detector...");
    if (!OpenNLP.createSentenceDetector(dir + "res/nlp/sentencedetector/opennlp/EnglishSD.bin.gz"))
      MsgPrinter.printErrorMsg("Could not create sentence detector.");
    LingPipe.createSentenceDetector();

    // create stemmer
    MsgPrinter.printStatusMsg("Creating stemmer...");
    SnowballStemmer.create();

    // create part of speech tagger
    MsgPrinter.printStatusMsg("Creating POS tagger...");
    if (!OpenNLP.createPosTagger(
        dir + "res/nlp/postagger/opennlp/tag.bin.gz", dir + "res/nlp/postagger/opennlp/tagdict"))
      MsgPrinter.printErrorMsg("Could not create OpenNLP POS tagger.");
    //		if (!StanfordPosTagger.init(dir + "res/nlp/postagger/stanford/" +
    //				"wsj3t0-18-bidirectional/train-wsj-0-18.holder"))
    //			MsgPrinter.printErrorMsg("Could not create Stanford POS tagger.");

    // create chunker
    MsgPrinter.printStatusMsg("Creating chunker...");
    if (!OpenNLP.createChunker(dir + "res/nlp/phrasechunker/opennlp/EnglishChunk.bin.gz"))
      MsgPrinter.printErrorMsg("Could not create chunker.");

    // create syntactic parser
    MsgPrinter.printStatusMsg("Creating syntactic parser...");
    //		if (!OpenNLP.createParser(dir + "res/nlp/syntacticparser/opennlp/"))
    //			MsgPrinter.printErrorMsg("Could not create OpenNLP parser.");
    try {
      StanfordParser.initialize();
    } catch (Exception e) {
      MsgPrinter.printErrorMsg("Could not create Stanford parser.");
    }

    // create named entity taggers
    MsgPrinter.printStatusMsg("Creating NE taggers...");
    NETagger.loadListTaggers(dir + "res/nlp/netagger/lists/");
    NETagger.loadRegExTaggers(dir + "res/nlp/netagger/patterns.lst");
    MsgPrinter.printStatusMsg("  ...loading models");
    //		if (!NETagger.loadNameFinders(dir + "res/nlp/netagger/opennlp/"))
    //			MsgPrinter.printErrorMsg("Could not create OpenNLP NE tagger.");
    if (!StanfordNeTagger.isInitialized() && !StanfordNeTagger.init())
      MsgPrinter.printErrorMsg("Could not create Stanford NE tagger.");
    MsgPrinter.printStatusMsg("  ...done");

    // create linker
    //		MsgPrinter.printStatusMsg("Creating linker...");
    //		if (!OpenNLP.createLinker(dir + "res/nlp/corefresolver/opennlp/"))
    //			MsgPrinter.printErrorMsg("Could not create linker.");

    // create WordNet dictionary
    MsgPrinter.printStatusMsg("Creating WordNet dictionary...");
    if (!WordNet.initialize(dir + "res/ontologies/wordnet/file_properties.xml"))
      MsgPrinter.printErrorMsg("Could not create WordNet dictionary.");

    // load function words (numbers are excluded)
    MsgPrinter.printStatusMsg("Loading function verbs...");
    if (!FunctionWords.loadIndex(dir + "res/indices/functionwords_nonumbers"))
      MsgPrinter.printErrorMsg("Could not load function words.");

    // load prepositions
    MsgPrinter.printStatusMsg("Loading prepositions...");
    if (!Prepositions.loadIndex(dir + "res/indices/prepositions"))
      MsgPrinter.printErrorMsg("Could not load prepositions.");

    // load irregular verbs
    MsgPrinter.printStatusMsg("Loading irregular verbs...");
    if (!IrregularVerbs.loadVerbs(dir + "res/indices/irregularverbs"))
      MsgPrinter.printErrorMsg("Could not load irregular verbs.");

    // load word frequencies
    MsgPrinter.printStatusMsg("Loading word frequencies...");
    if (!WordFrequencies.loadIndex(dir + "res/indices/wordfrequencies"))
      MsgPrinter.printErrorMsg("Could not load word frequencies.");

    // load query reformulators
    MsgPrinter.printStatusMsg("Loading query reformulators...");
    if (!QuestionReformulationG.loadReformulators(dir + "res/reformulations/"))
      MsgPrinter.printErrorMsg("Could not load query reformulators.");

    // load answer types
    //		MsgPrinter.printStatusMsg("Loading answer types...");
    //		if (!AnswerTypeTester.loadAnswerTypes(dir +
    //				"res/answertypes/patterns/answertypepatterns"))
    //			MsgPrinter.printErrorMsg("Could not load answer types.");

    // load question patterns
    MsgPrinter.printStatusMsg("Loading question patterns...");
    if (!QuestionInterpreter.loadPatterns(dir + "res/patternlearning/questionpatterns/"))
      MsgPrinter.printErrorMsg("Could not load question patterns.");

    // load answer patterns
    MsgPrinter.printStatusMsg("Loading answer patterns...");
    if (!AnswerPatternFilter.loadPatterns(dir + "res/patternlearning/answerpatterns/"))
      MsgPrinter.printErrorMsg("Could not load answer patterns.");
  }
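The constructor above only wires up the NLP components; asking a question happens through separate calls on the initialized instance. Below is a minimal usage sketch. It assumes OpenEphyra's askFactoid(String) method and the Result.getAnswer() accessor, neither of which appears in this excerpt, so treat those names as assumptions; imports are omitted, as in the excerpts above.

  public static void main(String[] args) {
    // The constructor prepends dir directly to resource paths such as "res/nlp/...",
    // so pass "" (resources resolved relative to the working directory) or a
    // directory path that ends with a file separator.
    OpenEphyra ephyra = new OpenEphyra("");

    // Assumed API: ask a factoid question and print the top answer, if any was found.
    Result result = ephyra.askFactoid("What is the capital of France?");
    if (result != null) {
      System.out.println(result.getAnswer());
    } else {
      System.out.println("No answer found.");
    }
  }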