// Returns the full wiki path of the closest inherited SetUp/TearDown page, or null if none exists.
private String getPathForSetUpTearDown(WikiPage page, String setUpTearDownName) throws Exception {
   String path = null;
   WikiPage suiteSetUpTearDown = PageCrawlerImpl.getClosestInheritedPage(setUpTearDownName, page);
   if (suiteSetUpTearDown != null)
     path = suiteSetUpTearDown.getPageCrawler().getFullPath(suiteSetUpTearDown).toString();
   return path;
 }
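  // Builds the redirection URL: just newName when the new parent is the root,
  // otherwise the rendered full path of newParent with newName appended.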
 protected String createRedirectionUrl(WikiPage newParent, String newName) throws Exception {
   PageCrawler crawler = newParent.getPageCrawler();
   if (crawler.isRoot(newParent)) {
     return newName;
   }
   return PathParser.render(crawler.getFullPath(newParent).addNameToEnd(newName));
 }
  @Ignore
  @Test
  public void MoreComplexDependency() throws Exception {
    // Complex test : Full tree resolved from
    // http://repository.jboss.org/maven2/

    WikiPage root = InMemoryPage.makeRoot("RooT");

    PageCrawler crawler = root.getPageCrawler();
    // http://repository.jboss.org/nexus/content/groups/public
    WikiPage page =
        crawler.addPage(
            root,
            PathParser.parse("ClassPath"),
            "!define REMOTE_REPO {http://maven.springframework.org/milestone;  }\n!define LOCAL_REPO {target/repo}\n!artifact org.springframework:spring-core:3.0.0.RC1\n");

    List<String> paths = page.getData().getClasspaths();
    System.out.println(paths);
    assertEquals(
        repoDir
            + "/org/springframework/spring-core/3.0.0.RC1/spring-core-3.0.0.RC1.jar:"
            + repoDir
            + "/org/springframework/spring-asm/3.0.0.RC1/spring-asm-3.0.0.RC1.jar:"
            + repoDir
            + "/commons-logging/commons-logging/1.1.1/commons-logging-1.1.1.jar:"
            + repoDir
            + "/org/jboss/logging/com.springsource.org.jboss.logging/2.0.5.GA/com.springsource.org.jboss.logging-2.0.5.GA.jar:"
            + repoDir
            + "/org/jboss/util/com.springsource.org.jboss.util/2.2.9.GA/com.springsource.org.jboss.util-2.2.9.GA.jar",
        paths.get(0));
  }
 @Before
 public void setUp() throws Exception {
   PageCrawler crawler = root.getPageCrawler();
   crawler.addPage(root, PathParser.parse("PageOne"), "PageOne");
   WikiPage pageTwo = crawler.addPage(root, PathParser.parse("PageTwo"), "PageOne");
   crawler.addPage(pageTwo, PathParser.parse("ChildPage"), ".PageOne");
 }
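   // Resolves a symbolic link target to its absolute wiki path; returns an empty
   // path when the target page does not exist and null when linkPath cannot be parsed.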
  private String makePathForSymbolicLink(String linkPath) {
    WikiPagePath wikiPagePath = PathParser.parse(linkPath);

    if (wikiPagePath != null) {
      WikiPage parent = wikiPagePath.isRelativePath() ? page.getParent() : page;
      PageCrawler crawler = parent.getPageCrawler();
      WikiPage target = crawler.getPage(wikiPagePath);
      WikiPagePath fullPath;
      if (target != null) {
        fullPath = target.getPageCrawler().getFullPath();
        fullPath.makeAbsolute();
      } else fullPath = new WikiPagePath();
      return fullPath.toString();
    }
    return null;
  }
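  // Builds the history file name as <test history directory>/<full wiki path of the page>/<result file name>.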
 public static String makePageHistoryFileName(
     FitNesseContext context, WikiPage page, TestSummary counts, long time) {
   return String.format(
       "%s/%s/%s",
       context.getTestHistoryDirectory(),
       page.getPageCrawler().getFullPath().toString(),
       makeResultFileName(counts, time));
 }
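  // Creates an in-memory root with a few sample pages to serve as the remote wiki.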
 public WikiPage createRemoteRoot() throws Exception {
   remoteRoot = InMemoryPage.makeRoot("RooT");
   PageCrawler crawler = remoteRoot.getPageCrawler();
   crawler.addPage(remoteRoot, PathParser.parse("PageOne"), "page one");
   crawler.addPage(remoteRoot, PathParser.parse("PageOne.ChildOne"), "child one");
   crawler.addPage(remoteRoot, PathParser.parse("PageTwo"), "page two");
   return remoteRoot;
 }
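  // Returns true when the (prefix-expanded) link path cannot be parsed or does not resolve to an existing page.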
 private boolean isInternalPageThatDoesntExist(String linkPath) {
   String expandedPath = WikiWordReference.expandPrefix(page, linkPath);
   WikiPagePath path = PathParser.parse(expandedPath);
   if (path == null) {
     return true;
   }
   WikiPage start = path.isRelativePath() ? page.getParent() : page; // TODO -AcD- a better way?
   return !start.getPageCrawler().pageExists(path);
 }
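   // Builds the suite overview: collects the suite's pages, attaches their latest test results, and renders the tree.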
  public Response makeResponse(FitNesseContext context, Request request) {
    this.context = context;
    WikiPage root = context.root;
    WikiPage page = root.getPageCrawler().getPage(PathParser.parse(request.getResource()));

    SuiteFilter filter =
        TestResponder.createSuiteFilter(request, page.getPageCrawler().getFullPath().toString());
    SuiteContentsFinder suiteTestFinder = new SuiteContentsFinder(page, filter, root);

    List<WikiPage> pagelist = suiteTestFinder.makePageList();

    SuiteOverviewTree treeview = new SuiteOverviewTree(pagelist);
    treeview.findLatestResults(context.getTestHistoryDirectory());
    treeview.countResults();

    WikiPagePath path = PathParser.parse(request.getResource());
    SimpleResponse response = makeResponse(treeview, path, request);
    return response;
  }
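   // Converts each WikiPage to its full path string; pages whose path cannot be determined are reported with an error message.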
  private static List<String> convertToPageList(List<WikiPage> wikiPagelist) {
    List<String> allPages = new LinkedList<String>();

    for (WikiPage aPage : wikiPagelist) {
      try {
        allPages.add(aPage.getPageCrawler().getFullPath(aPage).toString());
      } catch (Exception e) {
        allPages.add("There was also a problem getting the path of one page.");
      }
    }
    return allPages;
  }
Example #11
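   // Reads the current page content and the submitted content, then returns the rendered page HTML.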
  public Response makeResponse(Request request) throws Exception {
    SimpleResponse response = new SimpleResponse();
    resource = this.request.getResource();
    WikiPagePath path = PathParser.parse(resource);
    WikiPage page = root.getPageCrawler().getPage(root, path);
    existingContent = page.getData().getContent();
    newContent = (String) this.request.getInput(EditResponder.CONTENT_INPUT_NAME);

    response.setContent(makePageHtml());

    return response;
  }
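  // Resolves linkPath (relative paths start from the parent's parent) and, when the
  // target exists, wraps it in a SymbolicPage named linkName.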
 protected WikiPage createInternalSymbolicPage(
     String linkPath, String linkName, WikiPage parent) {
   WikiPagePath path = PathParser.parse(linkPath);
   if (path == null) {
     return null;
   }
   WikiPage start =
       (path.isRelativePath()) ? parent.getParent() : parent; // TODO -AcD- a better way?
   WikiPage wikiPage = start.getPageCrawler().getPage(path);
   if (wikiPage != null) wikiPage = new SymbolicPage(linkName, wikiPage, parent);
   return wikiPage;
 }
Example #13
  @Before
  public void setUp() throws Exception {
    root = InMemoryPage.makeRoot("RooT");
    crawler = root.getPageCrawler();
    errorLogsParentPage = crawler.addPage(root, PathParser.parse("ErrorLogs"));
    request = new MockRequest();
    responder = new TestResponder();
    responder.setFastTest(true);
    context = new FitNesseContext(root);

    receiver = new FitSocketReceiver(0, context.socketDealer);
    context.port = receiver.receiveSocket();
  }
 @Before
 public void setUp() throws Exception {
   suitePageName = "SuitePage";
   root = InMemoryPage.makeRoot("RooT");
   context = FitNesseUtil.makeTestContext(root);
   crawler = root.getPageCrawler();
   PageData data = root.getData();
   data.setContent(classpathWidgets());
   root.commit(data);
   suite = crawler.addPage(root, PathParser.parse(suitePageName), "This is the test suite\n");
   testPages = new LinkedList<WikiPage>();
   testPage = addTestPage(suite, "TestOne", "My test");
 }
  private WikiPage addLocalPageWithImportProperty(
      WikiPage parentPage, String pageName, boolean isRoot) throws Exception {
    WikiPage page = parentPage.addChildPage(pageName);
    PageData data = page.getData();

    WikiPagePath pagePath = localRoot.getPageCrawler().getFullPath(page);
    WikiImportProperty importProps =
        new WikiImportProperty(
            "http://localhost:" + FitNesseUtil.port + "/" + PathParser.render(pagePath));
    if (isRoot) importProps.setRoot(true);
    importProps.addTo(data.getProperties());
    page.commit(data);

    return page;
  }
Example #16
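  // Collects ScenarioLibrary pages from the source page's uncle pages when scenario libraries are enabled.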
 private List<WikiPage> findScenarioLibraries() {
   final LinkedList<WikiPage> uncles = new LinkedList<WikiPage>();
   if (shouldIncludeScenarioLibraries()) {
     sourcePage
         .getPageCrawler()
         .traverseUncles(
             "ScenarioLibrary",
             new TraversalListener<WikiPage>() {
               @Override
               public void process(WikiPage page) {
                 uncles.addFirst(page);
               }
             });
   }
   return uncles;
 }
  /**
   * com.sun.jdmk:jmxtools:jar:1.2.1 and com.sun.jmx:jmxri:jar:1.2.1 are not available on any
   * repository: too bad...
   *
   * @throws Exception
   */
  @Ignore("This test is time consuming and not very usefull on Jenkins...")
  @Test(expected = DependencyResolutionException.class)
  public void Spring() throws Exception {
    // Complex test : Full tree resolved from

    WikiPage root = InMemoryPage.makeRoot("RooT");

    PageCrawler crawler = root.getPageCrawler();
    // http://repository.springsource.com/maven/bundles/external;
    WikiPage page =
        crawler.addPage(
            root,
            PathParser.parse("ClassPath"),
            "!define REMOTE_REPO {http://repo1.maven.org/maven2;http://oss.sonatype.org/content/repositories/JBoss/;https://oss.sonatype.org/content/repositories/appfuse-releases/}\n!define LOCAL_REPO {target/repo}\n!artifact org.springframework:spring:2.5.6.SEC02\n");

    List<String> paths = page.getData().getClasspaths();
    System.out.println(paths);
  }
  @Ignore
  @Test
  public void Junit382NoRemoteRepo() throws Exception {
    // Very simple test : only 1 dependency resolved, jar is a dependency of
    // the current module

    WikiPage root = InMemoryPage.makeRoot("RooT");

    PageCrawler crawler = root.getPageCrawler();

    WikiPage page =
        crawler.addPage(
            root,
            PathParser.parse("ClassPath"),
            "!define LOCAL_REPO {target/repo}\n!artifact junit:junit:3.8.1\n");

    List<?> paths = page.getData().getClasspaths();

    assertEquals(repoDir + "/junit/junit/3.8.1/junit-3.8.1.jar", paths.get(0));
  }
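   // Variant of makePathForSymbolicLink that passes the page explicitly to the PageCrawler calls.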
  private String makePathForSymbolicLink(String linkPath) throws Exception {
    WikiPagePath wikiPagePath = PathParser.parse(linkPath);

    if (wikiPagePath != null) {
      WikiPage parent = wikiPagePath.isRelativePath() ? page.getParent() : page; // TODO -AcD- a better way?
      PageCrawler crawler = parent.getPageCrawler();
      WikiPage target = crawler.getPage(parent, wikiPagePath);
      WikiPagePath fullPath;
      if (target != null) {
        fullPath = crawler.getFullPath(target);
        fullPath.makeAbsolute();
      } else fullPath = new WikiPagePath();
      return fullPath.toString();
    }
    return null;
  }
  @Ignore
  @Test
  public void multiplePathIssues() throws Exception {
    // Very simple test : only 1 dependency resolved, jar is a dependency of
    // the current module

    WikiPage root = InMemoryPage.makeRoot("RooT");

    PageCrawler crawler = root.getPageCrawler();

    WikiPage page =
        crawler.addPage(
            root,
            PathParser.parse("ClassPath"),
            "!define REMOTE_REPO {http://repo1.maven.org/maven2/}\n!define LOCAL_REPO {target/repo}\n!artifact junit:junit:3.8.2\n!path target/classes\n");

    List<String> paths = page.getData().getClasspaths();

    assertEquals(repoDir + "/junit/junit/3.8.2/junit-3.8.2.jar", paths.get(0));
    assertEquals("target/classes", paths.get(1));
  }
  @Ignore
  @Test
  public void ComplexDependency() throws Exception {
    // Complex test : Full tree resolved from
    // http://repository.jboss.org/nexus/content/groups/public

    WikiPage root = InMemoryPage.makeRoot("RooT");

    PageCrawler crawler = root.getPageCrawler();

    WikiPage page =
        crawler.addPage(
            root,
            PathParser.parse("ClassPath"),
            "!define REMOTE_REPO { http://repository.jboss.org/nexus/content/groups/public }\n!define LOCAL_REPO {target/repo}\n!artifact org.hibernate:hibernate-core:3.3.0.CR1\n");

    List<String> paths = page.getData().getClasspaths();

    assertEquals(
        repoDir
            + "/org/hibernate/hibernate-core/3.3.0.CR1/hibernate-core-3.3.0.CR1.jar:"
            + repoDir
            + "/antlr/antlr/2.7.6/antlr-2.7.6.jar:"
            + repoDir
            + "/commons-collections/commons-collections/3.1/commons-collections-3.1.jar:"
            + repoDir
            + "/dom4j/dom4j/1.6.1/dom4j-1.6.1.jar:"
            + repoDir
            + "/xml-apis/xml-apis/1.0.b2/xml-apis-1.0.b2.jar:"
            + repoDir
            + "/javax/transaction/jta/1.1/jta-1.1.jar:"
            + repoDir
            + "/asm/asm/1.5.3/asm-1.5.3.jar:"
            + repoDir
            + "/org/slf4j/slf4j-api/1.4.2/slf4j-api-1.4.2.jar",
        paths.get(0));
  }
Example #22
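  // Renders the source page's full wiki path.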
 @Override
 public String getFullPath() {
   return PathParser.render(sourcePage.getPageCrawler().getFullPath());
 }
Example #23
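  // Finds the closest inherited page with the given name, searching up from the source page.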
 protected WikiPage findInheritedPage(String pageName) {
   return sourcePage.getPageCrawler().getClosestInheritedPage(pageName);
 }
Example #24
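  // Renders a page's full wiki path as a string.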
 private String getPathNameForPage(WikiPage page) {
   WikiPagePath pagePath = page.getPageCrawler().getFullPath();
   return PathParser.render(pagePath);
 }
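  // Adds the group's SetUp page (first element of the comma-separated key) to the page list if it exists.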
 private void insertSetUpForThisGroup(String setUpAndTearDown) throws Exception {
   String setUpPath = setUpAndTearDown.split(",")[0];
   WikiPage setUpPage = root.getPageCrawler().getPage(root, PathParser.parse(setUpPath));
   if (setUpPage != null) pageList.add(setUpPage);
 }
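  // Adds the group's TearDown page (second element of the comma-separated key) to the page list if it exists.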
 private void insertTearDownForThisGroup(String setUpAndTearDownGroupKey) throws Exception {
   String tearDownPath = setUpAndTearDownGroupKey.split(",")[1];
   WikiPage tearDownPage = root.getPageCrawler().getPage(root, PathParser.parse(tearDownPath));
   if (tearDownPage != null) pageList.add(tearDownPage);
 }
 public void setUp() throws Exception {
   root = InMemoryPage.makeRoot("RooT");
   crawler = root.getPageCrawler();
 }
 public void setUp() throws Exception {
   root = InMemoryPage.makeRoot("RooT");
   page = root.getPageCrawler().addPage(root, PathParser.parse("PageOne"), "some content");
 }
 @Before
 public void setUp() throws Exception {
   crawler = root.getPageCrawler();
   somePage = crawler.addPage(root, PathParser.parse("SomePage"), "some page");
 }
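  // Applies this update to every page under the root, then marks it as applied in the properties.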
 public void doUpdate() throws Exception {
   root.getPageCrawler().traverse(root, this);
   properties.setProperty(getName(), "applied");
 }