/**
 * Recursively follows the links found on {@code page}, registering discovered repository
 * paths into {@code parentOMatic}, until {@code context.getScrapeDepth()} is reached.
 */
protected void diveIn(
     final ScrapeContext context,
     final Page page,
     final int currentDepth,
     final ParentOMatic parentOMatic,
     final Node<Payload> currentNode)
     throws IOException {
    // entry protection: stop once the configured scrape depth is reached
   if (currentDepth >= context.getScrapeDepth()) {
     return;
   }
    // cancellation check
   CancelableUtil.checkInterruption();
   getLogger().debug("Processing page response from URL {}", page.getUrl());
   final Elements elements = page.getDocument().getElementsByTag("a");
   final List<String> pathElements = currentNode.getPathElements();
   final String currentPath = currentNode.getPath();
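    // walk every anchor on the page; only links pointing deeper into the repository are followed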
   for (Element element : elements) {
     if (isDeeperRepoLink(context, pathElements, element)) {
       if (element.text().startsWith(".")) {
         // skip hidden paths
         continue;
       }
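        // register the discovered path; the leaf paths of this tree become the scraped entries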
       final Node<Payload> newSibling = parentOMatic.addPath(currentPath + "/" + element.text());
       if (element.absUrl("href").endsWith("/")) {
         // "cut" recursion preemptively to save remote fetch (and then stop recursion due to
         // depth)
         final int siblingDepth = currentDepth + 1;
         if (siblingDepth < context.getScrapeDepth()) {
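            // be nice to the remote server: may pause between successive fetches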
           maySleepBeforeSubsequentFetch();
           final String newSiblingEncodedUrl =
               getRemoteUrlForRepositoryPath(context, newSibling.getPathElements()) + "/";
           final Page siblingPage = Page.getPageFor(context, newSiblingEncodedUrl);
           if (siblingPage.getHttpResponse().getStatusLine().getStatusCode() == 200) {
             diveIn(context, siblingPage, siblingDepth, parentOMatic, newSibling);
            } else {
              // we strictly expect 200 here; report the failing sibling URL, not the parent page
              throw new UnexpectedPageResponse(
                  siblingPage.getUrl(), siblingPage.getHttpResponse().getStatusLine());
            }
         }
       }
     }
   }
 }
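
/**
 * Scrapes the remote index starting at {@code page} and returns all discovered leaf paths,
 * or {@code null} (after stopping the context) when the scrape yields zero entries, which
 * is treated as a failure.
 */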
 @Override
 protected List<String> diveIn(final ScrapeContext context, final Page page) throws IOException {
    // we use the great and almighty ParentOMatic
   final ParentOMatic parentOMatic = new ParentOMatic();
   diveIn(context, page, 0, parentOMatic, parentOMatic.getRoot());
    // Special case: a scrape that found 0 entries is considered an error.
    // Remote repo empty? Why are you proxying it? Or worse, did we scrape some
    // exotic index page and end up with 0 entries by mistake?
   if (parentOMatic.getRoot().isLeaf()) {
     context.stop(
         "Remote recognized as "
             + getTargetedServer()
             + ", but scraped 0 entries. This is considered a failure.");
     return null;
   }
    return parentOMatic.getAllLeafPaths();
 }