Code Example #1
  public void testSigSnameFragment() throws Exception {
    Map<String, String> mapFqn = new HashMap<String, String>();
    mapFqn.put("extractSig", "1");
    mapFqn.put("shortNamesOnly", "1");

    fqnFactory.init(mapFqn);

    // note: there is no whitespace in this input
    is =
        new WhitespaceTokenizer(
            new StringReader(
                "org.wonderly.ham.speech.DB23Announcements.connected(_UNRESOLVED_.Parameters<?java.util.List<java.lang.String>>,java.lang.String)"));

    FqnFilter fqnf = fqnFactory.create(is);
    RemoveSigOrderFilterFactory rsoff = new RemoveSigOrderFilterFactory();
    DelimiterFilter rsof = rsoff.create(fqnf);
    FragmentFilterFactory fff = new FragmentFilterFactory();
    FragmentFilter ff = fff.create(rsof);

    final Token reusableToken = new Token();
    Token nextToken = ff.next(reusableToken);
    assertEquals("Parameters", nextToken.term());
    nextToken = ff.next(reusableToken);
    assertEquals("String", nextToken.term());
    assertNull(ff.next(reusableToken));
  }
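
This example, and each of the ones that follow, builds the same two-entry argument map before calling fqnFactory.init. A small helper along the following lines could remove that duplication; it is only a sketch, and the method name fqnArgs is hypothetical, not part of the original test class:

  // Hypothetical convenience method for this test class: builds the argument
  // map that fqnFactory.init(...) receives in each example. extractSig and
  // shortNamesOnly are the only two settings these tests exercise.
  private static Map<String, String> fqnArgs(String extractSig, String shortNamesOnly) {
    Map<String, String> args = new HashMap<String, String>();
    args.put("extractSig", extractSig);
    args.put("shortNamesOnly", shortNamesOnly);
    return args;
  }

With it, the setup in example #1 would read fqnFactory.init(fqnArgs("1", "1")), and the remaining examples would use fqnArgs("0", "1").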
Code Example #2
  public void testFqnShortName3() throws Exception {
    Map<String, String> mapFqn = new HashMap<String, String>();
    mapFqn.put("extractSig", "0");
    mapFqn.put("shortNamesOnly", "1");

    fqnFactory.init(mapFqn);

    // note: there is no whitespace in this input
    is = new WhitespaceTokenizer(new StringReader("connected"));

    FqnFilter tokenizer = fqnFactory.create(is);
    final Token reusableToken = new Token();
    Token nextToken = tokenizer.next(reusableToken);
    assertEquals("connected", nextToken.term());
    assertNull(tokenizer.next(reusableToken));
  }
Code Example #3
  public void testFqnShortName() throws Exception {
    Map<String, String> mapFqn = new HashMap<String, String>();
    mapFqn.put("extractSig", "0");
    mapFqn.put("shortNamesOnly", "1");

    fqnFactory.init(mapFqn);

    // note: there is no whitespace in this input
    is =
        new WhitespaceTokenizer(
            new StringReader(
                "org.wonderly.ham.speech.DB23Announcements.connected(_UNRESOLVED_.Parameters<?java.util.List<java.lang.String>>,java.lang.String)"));

    FqnFilter tokenizer = fqnFactory.create(is);
    final Token reusableToken = new Token();
    Token nextToken = tokenizer.next(reusableToken);
    assertEquals("connected", nextToken.term());
    assertNull(tokenizer.next(reusableToken));
  }
Code Example #4
  public void testMethodSname() throws Exception {
    Map<String, String> mapFqn = new HashMap<String, String>();
    mapFqn.put("extractSig", "0");
    mapFqn.put("shortNamesOnly", "1");

    fqnFactory.init(mapFqn);

    // note: there is no whitespace in this input
    is =
        new WhitespaceTokenizer(
            new StringReader("org.wonderly.ham.speech.DB23Announcements.connected()"));

    FqnFilter ff = fqnFactory.create(is);
    //		RemoveSigOrderFilterFactory rsoff = new RemoveSigOrderFilterFactory();
    //		DelimiterFilter rsof = rsoff.create(fqnf);
    //		FragmentFilterFactory fff = new FragmentFilterFactory();
    //		FragmentFilter ff = fff.create(rsof);

    final Token reusableToken = new Token();
    Token nextToken = ff.next(reusableToken);
    assertEquals("connected", nextToken.term());
    assertNull(ff.next(reusableToken));
  }
Code Example #5
  public void testAngBrackets() throws Exception {
    /*
    <tokenizer class="solr.WhitespaceTokenizerFactory"/>
    <filter class="edu.uci.ics.sourcerer.search.analysis.FqnFilterFactory"
            extractSig="0" shortNamesOnly="1" />
    <filter class="edu.uci.ics.sourcerer.search.analysis.NonAlphaNumTokenizerFilterFactory"/>
    <filter class="edu.uci.ics.sourcerer.search.analysis.CamelCaseSplitFilterFactory"/>
    <filter class="edu.uci.ics.sourcerer.search.analysis.LetterDigitSplitFilterFactory" preserveOriginal="1"/>
    <filter class="solr.LowerCaseFilterFactory"/>
    */
    String fqns =
        "org.ayutabeans.util.BindingTypes.getBindingTypes()"
            + " com.sun.xml.bind.v2.model.impl.ClassInfoImpl$ConflictException.<init>(java.util.List<java.lang.annotation.Annotation>)"
            + " com.redlenses.net.transport.pickling.reflection.PropertyAccessor.setAnnotations(java.util.List<java.lang.annotation.Annotation>)"
            + " org.cbf.impl.ModuleInfoImpl.getBeanConfigurations(java.lang.String)"
            + " org.cbf.impl.BeanDescriptionImpl.getBeanConfigurations()"
            + " org.cbf.BeanDescription.getBeanConfigurations()"
            + " org.ayutabeans.util.BindingTypes.match(java.util.List<java.lang.annotation.Annotation>)"
            +
            // this fqn was causing an empty stack exception
            " org.ayutabeans.component.impl.JavaBeanComponentImpl.<init>"
            + "(org.ayutabeans.container.AyutaContainer,org.ayutabeans.util.BindingTypes<javax.webbeans.BindingType>,"
            + "java.lang.Class<<?+java.lang.annotation.Annotation>>,"
            + "java.lang.Class<<?+java.lang.annotation.Annotation>>,"
            + "java.lang.String,java.lang.Class<<T>>,"
            + "org.ayutabeans.component.ComponentConstructor<<T>>,"
            + "java.util.List<org.ayutabeans.component.impl.InjectFieldImpl>,"
            + "java.util.List<org.ayutabeans.component.impl.InjectMethodImpl>,"
            + "org.ayutabeans.component.impl.LifecycleMethodImpl,"
            + "org.ayutabeans.component.impl.LifecycleMethodImpl)"
            + " com.sun.jersey.core.spi.factory.InjectableProviderFactory.getInjectable(java.lang.Class<<?+java.lang.annotation.Annotation>>,com.sun.jersey.core.spi.component.ComponentContext,<A>,<C>,java.util.List<com.sun.jersey.core.spi.component.ComponentScope>)"
            + " com.sun.jersey.spi.inject.InjectableProviderContext.getInjectable(java.lang.Class<<?+java.lang.annotation.Annotation>>,com.sun.jersey.core.spi.component.ComponentContext,<A>,<C>,java.util.List<com.sun.jersey.core.spi.component.ComponentScope>) ";

    Map<String, String> mapFqn = new HashMap<String, String>();
    mapFqn.put("extractSig", "0");
    mapFqn.put("shortNamesOnly", "1");

    fqnFactory.init(mapFqn);

    // is = new WhitespaceTokenizer(new StringReader(fqns));
    is = new SingleSpaceTokenizer(new StringReader(fqns));

    FqnFilter fqnf = fqnFactory.create(is);
    NonAlphaNumTokenizerFilterFactory ntf = new NonAlphaNumTokenizerFilterFactory();
    NonAlphaNumTokenizerFilter nf = ntf.create(fqnf);
    CamelCaseSplitFilterFactory ccf = new CamelCaseSplitFilterFactory();
    CamelCaseSplitFilter cf = ccf.create(nf);
    LetterDigitSplitFilterFactory lff = new LetterDigitSplitFilterFactory();
    Map<String, String> args = new HashMap<String, String>();
    args.put("preserveOriginal", "1");
    lff.init(args);
    LetterDigitSplitFilter lf = lff.create(cf);
    LowerCaseFilterFactory loff = new LowerCaseFilterFactory();
    LowerCaseFilter lof = loff.create(lf);

    final Token reusableToken = new Token();
    Token nextToken = lof.next(reusableToken);
    assertEquals("get", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("binding", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("types", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("init", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("set", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("annotations", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("get", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("bean", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("configurations", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("get", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("bean", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("configurations", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("get", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("bean", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("configurations", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("match", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("init", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("get", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("injectable", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("get", nextToken.term());
    nextToken = lof.next(reusableToken);
    assertEquals("injectable", nextToken.term());
    nextToken = lof.next(reusableToken);

    // note the trailing space in the input string above
    assertEquals(1, nextToken.term().length());
    nextToken = lof.next(reusableToken);

    assertNull(nextToken);
  }
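
The long run of assertEquals calls in this last example follows a single pattern: advance the stream and compare the next term. A helper in the spirit of the sketch below could express the expected terms as one list; this assumes the pre-2.9 Lucene Token API (next(Token), term()) already used throughout these tests, and the method name assertTerms is hypothetical:

  // Hypothetical helper: drains a TokenStream and checks each produced term in
  // order, failing with a message if the stream ends before all expected terms
  // are seen. Relies on the same reusable-Token next(Token) API as the tests.
  private static void assertTerms(org.apache.lucene.analysis.TokenStream stream,
      String... expected) throws java.io.IOException {
    final Token reusableToken = new Token();
    for (String want : expected) {
      Token nextToken = stream.next(reusableToken);
      assertNotNull("stream ended before expected term: " + want, nextToken);
      assertEquals(want, nextToken.term());
    }
  }

The assertions above would then collapse to a call such as assertTerms(lof, "get", "binding", "types", "init", "set", "annotations", ...), followed by the existing length check for the trailing-space token and the final assertNull.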