// Real onFailure
 protected void onFailure(
     FetchException e, Object token, ObjectContainer container, ClientContext context) {
   if (persistent) {
     container.activate(segment, 1);
     container.activate(parent, 1);
     container.activate(segment.errors, 1);
   }
   boolean forceFatal = false;
   if (parent.isCancelled()) {
     if (Logger.shouldLog(Logger.MINOR, this)) Logger.minor(this, "Failing: cancelled");
     e = new FetchException(FetchException.CANCELLED);
     forceFatal = true;
   }
   segment.errors.inc(e.getMode());
   if (e.isFatal() && token == null) {
     segment.fail(e, container, context, false);
   } else if (e.isFatal() || forceFatal) {
     segment.onFatalFailure(e, ((MySendableRequestItem) token).x, this, container, context);
   } else {
     segment.onNonFatalFailure(e, ((MySendableRequestItem) token).x, this, container, context);
   }
    // Guard the cast: token is null on the fatal path already handled by segment.fail() above.
    if (token != null) removeBlockNum(((MySendableRequestItem) token).x, container, false);
   if (persistent) {
     container.deactivate(segment, 1);
     container.deactivate(parent, 1);
     container.deactivate(segment.errors, 1);
   }
 }
 public void onFailure(
     FetchException e, Object token, ObjectContainer container, ClientContext context) {
   if (persistent) {
     container.activate(segment, 1);
     container.activate(parent, 1);
     container.activate(segment.errors, 1);
   }
   boolean forceFatal = false;
   if (parent.isCancelled()) {
     if (logMINOR) Logger.minor(this, "Failing: cancelled");
     e = new FetchException(FetchException.CANCELLED);
     forceFatal = true;
   }
   segment.errors.inc(e.getMode());
   if (persistent) segment.errors.storeTo(container);
   if (e.isFatal() && token == null) {
     segment.fail(e, container, context, false);
   } else if (e.isFatal() || forceFatal) {
     segment.onFatalFailure(
         e, ((SplitFileFetcherSegmentSendableRequestItem) token).blockNum, container, context);
   } else {
     segment.onNonFatalFailure(
         e, ((SplitFileFetcherSegmentSendableRequestItem) token).blockNum, container, context);
   }
   if (persistent) {
     container.deactivate(segment, 1);
     container.deactivate(parent, 1);
     container.deactivate(segment.errors, 1);
   }
 }
Example 3
  /**
   * Simply check whether the block exists, in such a way that we don't fetch the full content. If
   * it does exist, then the USK tracker, and therefore any fetchers, will be updated. You can pass
   * either an SSK or a USK. (A hypothetical usage sketch follows this method.)
   */
  public void hintCheck(
      FreenetURI uri,
      final Object token,
      ClientContext context,
      short priority,
      final HintCallback cb)
      throws MalformedURLException {
    final FreenetURI origURI = uri;
    if (uri.isUSK()) uri = uri.sskForUSK();
    if (logMINOR) Logger.minor(this, "Doing hint fetch for " + uri);
    final ClientGetter get =
        new ClientGetter(
            new ClientGetCallback() {

              @Override
              public void onMajorProgress(ObjectContainer container) {
                // Ignore
              }

              @Override
              public void onSuccess(
                  FetchResult result, ClientGetter state, ObjectContainer container) {
                cb.success(origURI, token);
              }

              @Override
              public void onFailure(
                  FetchException e, ClientGetter state, ObjectContainer container) {
                if (e.isDataFound()) cb.success(origURI, token);
                else if (e.isDNF()) cb.dnf(origURI, token, e);
                else cb.failed(origURI, token, e);
              }
            },
            uri,
            new FetchContext(backgroundFetchContext, FetchContext.IDENTICAL_MASK, false, null),
            priority,
            rcBulk,
            new NullBucket(),
            null,
            null);
    try {
      get.start(null, context);
    } catch (FetchException e) {
      if (logMINOR) Logger.minor(this, "Cannot start hint fetch for " + uri + " : " + e, e);
      if (e.isDataFound()) cb.success(origURI, token);
      else if (e.isDNF()) cb.dnf(origURI, token, e);
      else cb.failed(origURI, token, e);
    }
  }
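
A hypothetical usage sketch for the hintCheck API above. Only the hintCheck signature and the three HintCallback methods it invokes (success, dnf, failed) come from the snippet; the hintChecker field, clientContext, the placeholder key, and the priority value are assumptions for illustration.

  // Hypothetical usage sketch: hintChecker (the object exposing hintCheck above),
  // clientContext, the placeholder key and the priority value are all assumptions.
  void checkHintExample() throws MalformedURLException {
    FreenetURI hinted = new FreenetURI("USK@.../mysite/5/"); // placeholder key
    hintChecker.hintCheck(
        hinted,
        null, // token: handed back unchanged to the callback
        clientContext,
        (short) 1, // arbitrary priority class
        new HintCallback() {

          @Override
          public void success(FreenetURI origURI, Object token) {
            System.out.println("Hint exists: " + origURI);
          }

          @Override
          public void dnf(FreenetURI origURI, Object token, FetchException e) {
            System.out.println("Hint not found (DNF): " + origURI);
          }

          @Override
          public void failed(FreenetURI origURI, Object token, FetchException e) {
            System.err.println("Hint check failed for " + origURI + ": " + e);
          }
        });
  }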
 private void addRequest(FreenetURI uri) {
   if (isRunning) {
     FetchContext mFetchContext = mFetcher.getFetchContext();
      mFetchContext.allowSplitfiles = true; // FIXME: disable as soon as it's fixed!
     mFetchContext.canWriteClientCache = true;
     mFetchContext.dontEnterImplicitArchives = true; // ?
     mFetchContext.filterData = false; // ?
     mFetchContext.followRedirects = false;
     mFetchContext.ignoreStore = false;
     // final? mFetchContext.ignoreTooManyPathComponents = false;
     mFetchContext.ignoreUSKDatehints = true; // ?
     mFetchContext.localRequestOnly = false;
     mFetchContext.maxArchiveLevels = 0; // ?
     mFetchContext.maxArchiveRestarts = 0; // ?
     mFetchContext.maxCheckBlocksPerSegment = 0; // ?
     mFetchContext.maxDataBlocksPerSegment = 0; // ?
     // mFetchContext.maxMetadataSize = ?
      // The 30-minute cooldown is far too long for a real-time chat plugin.
     // mFetchContext.maxNonSplitfileRetries = -1;
     mFetchContext.maxNonSplitfileRetries = 2;
     // mFetchContext.maxOutputLength = 1024 ?
     mFetchContext.maxRecursionLevel = 0; // ?
     mFetchContext.maxSplitfileBlockRetries = 0;
     // mFetchContext.maxTempLength = ?
     // final? mFetchContext.maxUSKRetries = -1; //?
     // mFetchContext.overrideMIME = "text/plain"; //?
     // mFetchContext.prefetchHook = ?
     // mFetchContext.returnZIPManifests = true ?
     // mFetchContext.tagReplacer = ?
     // mFetchContext.setCooldownRetries(cooldownRetries);
     // mFetchContext.setCooldownTime(cooldownTime);
     try {
       mFetcher.fetch(uri, this, this, mFetchContext, (short) 1);
     } catch (FetchException e) {
       System.err.println(
           "[Async_AnnounceFetcher]::addRequest() FetchException: " + e.getMessage());
     }
   }
 }
Example 5
  private void makeMainPage(
      ToadletContext ctx,
      List<String> errors,
      String key,
      int hexWidth,
      boolean automf,
      boolean deep,
      boolean ml)
      throws ToadletContextClosedException, IOException, RedirectException, URISyntaxException {
    PageNode page = pluginContext.pageMaker.getPageNode(i18n("KeyExplorer.PageTitle"), ctx);
    HTMLNode pageNode = page.outer;
    HTMLNode contentNode = page.content;

    byte[] data = null;
    GetResult getresult = null;
    String extraParams = "&hexwidth=" + hexWidth;
    if (automf) {
      extraParams += "&automf=checked";
    }
    if (deep) {
      extraParams += "&deep=checked";
    }
    if (ml) {
      extraParams += "&ml=checked";
    }
    FreenetURI furi = null;
    FreenetURI retryUri = null;

    try {
      if (key != null && (key.trim().length() > 0)) {
        furi =
            URISanitizer.sanitizeURI(
                errors,
                key,
                false,
                URISanitizer.Options.NOMETASTRINGS,
                URISanitizer.Options.SSKFORUSK);
        retryUri = furi;
        if (ml) { // multilevel is requested
          Metadata tempMD =
              KeyExplorerUtils.simpleManifestGet(pluginContext.pluginRespirator, furi);
          FetchResult tempResult =
              KeyExplorerUtils.splitGet(pluginContext.pluginRespirator, tempMD);
          getresult = new GetResult(tempResult.asBucket(), true);
          data = tempResult.asByteArray();
        } else { // normal get
          getresult = KeyExplorerUtils.simpleGet(pluginContext.pluginRespirator, furi);
          data = BucketTools.toByteArray(getresult.getData());
        }
      }
    } catch (MalformedURLException e) {
      errors.add("MalformedURL: " + key);
    } catch (IOException e) {
      Logger.error(this, "500", e);
      errors.add("IO Error: " + e.getMessage());
    } catch (MetadataParseException e) {
      errors.add("Metadata Parse Error: " + e.getMessage());
    } catch (FetchException e) {
      errors.add("Get failed (" + e.mode + "): " + e.getMessage());
    } catch (KeyListenerConstructionException e) {
      Logger.error(this, "Hu?", e);
      errors.add("Internal Error: " + e.getMessage());
    } finally {
      if (getresult != null) getresult.free();
    }

    HTMLNode uriBox =
        createUriBox(
            pluginContext,
            ((furi == null) ? null : furi.toString(false, false)),
            hexWidth,
            automf,
            deep,
            errors);

    if (errors.size() > 0) {
      contentNode.addChild(createErrorBox(errors, path(), retryUri, extraParams));
      errors.clear();
    }

    contentNode.addChild(uriBox);

    if (data != null) {
      Metadata md = null;

      if (getresult.isMetaData()) {
        try {
          md = Metadata.construct(data);
        } catch (MetadataParseException e) {
          errors.add("Metadata parse error: " + e.getMessage());
        }
        if (md != null) {
          if (automf && md.isArchiveManifest()) {
            if (md.getArchiveType() == ARCHIVE_TYPE.TAR) {
              writeTemporaryRedirect(
                  ctx,
                  "",
                  KeyUtilsPlugin.PLUGIN_URI
                      + "/Site/?mftype=TARmanifest&key="
                      + furi
                      + extraParams);
              return;
            } else if (md.getArchiveType() == ARCHIVE_TYPE.ZIP) {
              writeTemporaryRedirect(
                  ctx,
                  "",
                  KeyUtilsPlugin.PLUGIN_URI
                      + "/Site/?mftype=ZIPmanifest&key="
                      + furi
                      + extraParams);
              return;
            } else {
              errors.add("Unknown Archive Type: " + md.getArchiveType().name());
            }
          }
          if (automf && md.isSimpleManifest()) {
            writeTemporaryRedirect(
                ctx,
                "",
                KeyUtilsPlugin.PLUGIN_URI
                    + "/Site/?mftype=simplemanifest&key="
                    + furi
                    + extraParams);
            return;
          }
        }
      }

      String title = "Key: " + furi.toString(false, false);
      if (getresult.isMetaData()) title = title + "\u00a0(MetaData)";
      HTMLNode dataBox2 = pluginContext.pageMaker.getInfobox("#", title, contentNode);

      dataBox2.addChild("%", "<pre lang=\"en\" style=\"font-family: monospace;\">\n");
      dataBox2.addChild("#", hexDump(data, hexWidth));
      dataBox2.addChild("%", "\n</pre>");

      if (getresult.isMetaData()) {
        if (md != null) {
          HTMLNode metaBox =
              pluginContext.pageMaker.getInfobox("#", "Decomposed metadata", contentNode);

          metaBox.addChild("#", "Metadata version " + Short.toString(md.getParsedVersion()));
          metaBox.addChild("br");
          metaBox.addChild("#", "Document type:\u00a0");
          if (md.isSimpleRedirect()) {
            metaBox.addChild("#", "SimpleRedirect");
          } else if (md.isSimpleManifest()) {
            metaBox.addChild("#", "SimpleManifest");
          } else if (md.isArchiveInternalRedirect()) {
            metaBox.addChild("#", "ArchiveInternalRedirect");
          } else if (md.isArchiveMetadataRedirect()) {
            metaBox.addChild("#", "ArchiveMetadataRedirect");
          } else if (md.isArchiveManifest()) {
            metaBox.addChild("#", "ArchiveManifest");
          } else if (md.isMultiLevelMetadata()) {
            metaBox.addChild("#", "MultiLevelMetadata");
          } else if (md.isSymbolicShortlink()) {
            metaBox.addChild("#", "SymbolicShortlink");
          } else {
            metaBox.addChild("#", "<Unknown document type>");
          }
          metaBox.addChild("br");

          final String MIMEType = md.getMIMEType();
          if (MIMEType != null) {
            metaBox.addChild("#", "MIME Type: " + MIMEType);
            metaBox.addChild("br");
          }

          if (md.haveFlags()) {
            metaBox.addChild("#", "Flags:\u00a0");
            boolean isFirst = true;

            if (md.isSplitfile()) {
              metaBox.addChild("#", "SplitFile");
              isFirst = false;
            }
            if (md.isCompressed()) {
              if (isFirst) isFirst = false;
              else metaBox.addChild("#", "\u00a0");
              metaBox.addChild("#", "Compressed (" + md.getCompressionCodec().name + ")");
            }
            if (md.hasTopData()) {
              if (isFirst) isFirst = false;
              else metaBox.addChild("#", "\u00a0");
              metaBox.addChild("#", "HasTopData");
            }
            if (isFirst) metaBox.addChild("#", "<No flag set>");
          }
          metaBox.addChild("br");

          if (md.isCompressed()) {
            metaBox.addChild("#", "Decompressed size: " + md.uncompressedDataLength() + " bytes.");
          } else {
            metaBox.addChild("#", "Uncompressed");
          }

          metaBox.addChild("br");

          if (md.topCompatibilityMode != 0) {
            metaBox.addChild("#", "Compatibility mode: " + md.getTopCompatibilityMode().toString());
            metaBox.addChild("br");
          }

          if (md.hasTopData()) {
            metaBox.addChild("#", "Top Block Data:");
            metaBox.addChild("br");
            metaBox.addChild(
                "#", "\u00a0\u00a0DontCompress: " + Boolean.toString(md.topDontCompress));
            metaBox.addChild("br");
            metaBox.addChild(
                "#",
                "\u00a0\u00a0Compressed size: " + Long.toString(md.topCompressedSize) + " bytes.");
            metaBox.addChild("br");
            metaBox.addChild(
                "#", "\u00a0\u00a0Decompressed Size: " + Long.toString(md.topSize) + " bytes.");
            metaBox.addChild("br");
            metaBox.addChild(
                "#",
                "\u00a0\u00a0Blocks: "
                    + Integer.toString(md.topBlocksRequired)
                    + " required, "
                    + Integer.toString(md.topBlocksTotal)
                    + " total.");
            metaBox.addChild("br");
          }
          final HashResult[] hashes = md.getHashes();
          if (hashes != null && hashes.length > 0) {
            metaBox.addChild("#", "Hashes:");
            metaBox.addChild("br");
            for (final HashResult hash : hashes) {
              metaBox.addChild(
                  "#", "\u00a0\u00a0" + hash.type.name() + ": " + HexUtil.bytesToHex(hash.result));
              metaBox.addChild("br");
            }
          }

          if (md.isSplitfile()) {
            metaBox.addChild("#", "Splitfile size\u00a0=\u00a0" + md.dataLength() + " bytes.");
            metaBox.addChild("br");

            byte[] splitfileCryptoKey = md.getCustomSplitfileKey();
            if (splitfileCryptoKey != null) {
              metaBox.addChild(
                  "#", "Splitfile CryptoKey\u00a0=\u00a0" + HexUtil.bytesToHex(splitfileCryptoKey));
              metaBox.addChild("br");
            }
          }

          metaBox.addChild("#", "Options:");
          metaBox.addChild("br");

          if (md.isSimpleManifest()) {
            metaBox.addChild(
                new HTMLNode(
                    "a",
                    "href",
                    KeyUtilsPlugin.PLUGIN_URI
                        + "/Site/?mftype=simplemanifest&key="
                        + furi
                        + extraParams,
                    "reopen as manifest"));
            metaBox.addChild("br");
          }
          if (md.isArchiveManifest()) {
            metaBox.addChild(
                new HTMLNode(
                    "a",
                    "href",
                    KeyUtilsPlugin.PLUGIN_URI
                        + "/Site/?mftype="
                        + md.getArchiveType().name()
                        + "manifest&key="
                        + furi
                        + extraParams,
                    "reopen as manifest"));
            metaBox.addChild("br");
          }
          if (md.isMultiLevelMetadata()) {
            if (ml)
              metaBox.addChild(
                  new HTMLNode(
                      "a",
                      "href",
                      KeyUtilsPlugin.PLUGIN_URI + "/?key=" + furi + extraParams,
                      "explore multilevel"));
            else
              metaBox.addChild(
                  new HTMLNode(
                      "a",
                      "href",
                      KeyUtilsPlugin.PLUGIN_URI + "/?ml=checked&key=" + furi + extraParams,
                      "explore multilevel"));
            metaBox.addChild("br");
          }

          FreenetURI uri = md.getSingleTarget();
          if (uri != null) {
            String sfrUri = uri.toString(false, false);
            metaBox.addChild("#", sfrUri);
            metaBox.addChild("#", "\u00a0");
            metaBox.addChild(new HTMLNode("a", "href", "/?key=" + sfrUri, "open"));
            metaBox.addChild("#", "\u00a0");
            metaBox.addChild(
                new HTMLNode(
                    "a",
                    "href",
                    KeyUtilsPlugin.PLUGIN_URI + "/?key=" + sfrUri + extraParams,
                    "explore"));
          } else {
            metaBox.addChild(new HTMLNode("a", "href", "/?key=" + furi, "reopen normal"));
          }
          metaBox.addChild("br");

          if ((uri == null) && md.isSplitfile()) {
            metaBox.addChild(
                new HTMLNode(
                    "a",
                    "href",
                    KeyUtilsPlugin.PLUGIN_URI + "/Split?key=" + furi.toString(false, false),
                    "reopen as splitfile"));
            metaBox.addChild("br");
            metaBox.addChild(
                new HTMLNode(
                    "a",
                    "href",
                    KeyUtilsPlugin.PLUGIN_URI
                        + "/Download?action=splitdownload&key="
                        + furi.toString(false, false),
                    "split-download"));
            metaBox.addChild("br");
          }
        }
      }
      if (errors.size() > 0) contentNode.addChild(createErrorBox(errors));
    }
    contentNode.addChild(Utils.makeDonateFooter(_intl));
    writeHTMLReply(ctx, 200, "OK", pageNode.generate());
  }
  /**
   * @param args
   * @throws InvalidThresholdException
   * @throws IOException
   * @throws NodeInitException
   * @throws InterruptedException
   */
  public static void main(String[] args)
      throws InvalidThresholdException, IOException, NodeInitException, InterruptedException {
    Node secondNode = null;
    try {
      String ipOverride = null;
      if (args.length > 0) ipOverride = args[0];
      File dir = new File("bootstrap-pull-test");
      FileUtil.removeAll(dir);
      RandomSource random =
          NodeStarter.globalTestInit(dir.getPath(), false, Logger.ERROR, "", false);
      byte[] seed = new byte[64];
      random.nextBytes(seed);
      MersenneTwister fastRandom = new MersenneTwister(seed);
      File seednodes = new File("seednodes.fref");
      if (!seednodes.exists() || seednodes.length() == 0 || !seednodes.canRead()) {
        System.err.println("Unable to read seednodes.fref, it doesn't exist, or is empty");
        System.exit(EXIT_NO_SEEDNODES);
      }
      File secondInnerDir = new File(dir, Integer.toString(DARKNET_PORT));
      secondInnerDir.mkdir();
      FileInputStream fis = new FileInputStream(seednodes);
      FileUtil.writeTo(fis, new File(secondInnerDir, "seednodes.fref"));
      fis.close();

      // Create the test data
      System.out.println("Creating test data.");
      File dataFile = File.createTempFile("testdata", ".tmp", dir);
      OutputStream os = new FileOutputStream(dataFile);
      byte[] buf = new byte[4096];
      for (long written = 0; written < TEST_SIZE; ) {
        fastRandom.nextBytes(buf);
        int toWrite = (int) Math.min(TEST_SIZE - written, buf.length);
        os.write(buf, 0, toWrite);
        written += toWrite;
      }
      os.close();

      // Insert it to the established node.
      System.out.println("Inserting test data to an established node.");
      FreenetURI uri = insertData(dataFile);

      // Bootstrap a second node.
      secondInnerDir.mkdir();
      fis = new FileInputStream(seednodes);
      FileUtil.writeTo(fis, new File(secondInnerDir, "seednodes.fref"));
      fis.close();
      PooledExecutor executor = new PooledExecutor();
      secondNode =
          NodeStarter.createTestNode(
              DARKNET_PORT,
              OPENNET_PORT,
              dir.getPath(),
              false,
              Node.DEFAULT_MAX_HTL,
              0,
              random,
              executor,
              1000,
              5 * 1024 * 1024,
              true,
              true,
              true,
              true,
              true,
              true,
              true,
              12 * 1024,
              false,
              true,
              ipOverride);
      secondNode.start(true);

      if (!TestUtil.waitForNodes(secondNode, TARGET_PEERS)) {
        secondNode.park();
        System.exit(EXIT_FAILED_TARGET);
      }

      // Fetch the data
      long startFetchTime = System.currentTimeMillis();
      HighLevelSimpleClient client = secondNode.clientCore.makeClient((short) 0);
      try {
        client.fetch(uri);
      } catch (FetchException e) {
        System.err.println("FETCH FAILED: " + e);
        e.printStackTrace();
        System.exit(EXIT_FETCH_FAILED);
        return;
      }
      long endFetchTime = System.currentTimeMillis();
      System.out.println(
          "RESULT: Fetch took "
              + (endFetchTime - startFetchTime)
              + "ms ("
              + TimeUtil.formatTime(endFetchTime - startFetchTime)
              + ") of "
              + uri
              + " .");
      secondNode.park();
      System.exit(0);
    } catch (Throwable t) {
      System.err.println("CAUGHT: " + t);
      t.printStackTrace();
      try {
        if (secondNode != null) secondNode.park();
      } catch (Throwable t1) {
      }
      System.exit(EXIT_THREW_SOMETHING);
    }
  }
  public static void main(String[] args)
      throws InvalidThresholdException, IOException, NodeInitException, InterruptedException {
    Node node = null;
    Node secondNode = null;
    try {
      String ipOverride = null;
      if (args.length > 0) ipOverride = args[0];
      File dir = new File("bootstrap-push-pull-test");
      FileUtil.removeAll(dir);
      RandomSource random =
          NodeStarter.globalTestInit(dir.getPath(), false, LogLevel.ERROR, "", false);
      File seednodes = new File("seednodes.fref");
      if (!seednodes.exists() || seednodes.length() == 0 || !seednodes.canRead()) {
        System.err.println("Unable to read seednodes.fref, it doesn't exist, or is empty");
        System.exit(EXIT_NO_SEEDNODES);
      }
      File innerDir = new File(dir, Integer.toString(DARKNET_PORT1));
      innerDir.mkdir();
      FileInputStream fis = new FileInputStream(seednodes);
      FileUtil.writeTo(fis, new File(innerDir, "seednodes.fref"));
      fis.close();
      // Create one node
      Executor executor = new PooledExecutor();
      node =
          NodeStarter.createTestNode(
              DARKNET_PORT1,
              OPENNET_PORT1,
              dir.getPath(),
              false,
              Node.DEFAULT_MAX_HTL,
              0,
              random,
              executor,
              1000,
              5 * 1024 * 1024,
              true,
              true,
              true,
              true,
              true,
              true,
              true,
              12 * 1024,
              false,
              true,
              false,
              false,
              ipOverride);
      // NodeCrypto.DISABLE_GROUP_STRIP = true;
      // Logger.setupStdoutLogging(LogLevel.MINOR,
      // "freenet:NORMAL,freenet.node.NodeDispatcher:MINOR,freenet.node.FNPPacketMangler:MINOR");
      Logger.getChain().setThreshold(LogLevel.ERROR); // kill logging
      // Start it
      node.start(true);
      if (!TestUtil.waitForNodes(node)) {
        node.park();
        System.exit(EXIT_FAILED_TARGET);
      }
      System.err.println("Creating test data: " + TEST_SIZE + " bytes.");
      Bucket data = node.clientCore.tempBucketFactory.makeBucket(TEST_SIZE);
      OutputStream os = data.getOutputStream();
      byte[] buf = new byte[4096];
      for (long written = 0; written < TEST_SIZE; ) {
        node.fastWeakRandom.nextBytes(buf);
        int toWrite = (int) Math.min(TEST_SIZE - written, buf.length);
        os.write(buf, 0, toWrite);
        written += toWrite;
      }
      os.close();
      System.err.println("Inserting test data.");
      HighLevelSimpleClient client = node.clientCore.makeClient((short) 0);
      InsertBlock block = new InsertBlock(data, new ClientMetadata(), FreenetURI.EMPTY_CHK_URI);
      long startInsertTime = System.currentTimeMillis();
      FreenetURI uri;
      try {
        uri = client.insert(block, false, null);
      } catch (InsertException e) {
        System.err.println("INSERT FAILED: " + e);
        e.printStackTrace();
        System.exit(EXIT_INSERT_FAILED);
        return;
      }
      long endInsertTime = System.currentTimeMillis();
      System.err.println(
          "RESULT: Insert took "
              + (endInsertTime - startInsertTime)
              + "ms ("
              + TimeUtil.formatTime(endInsertTime - startInsertTime)
              + ") to "
              + uri
              + " .");
      node.park();

      // Bootstrap a second node.
      File secondInnerDir = new File(dir, Integer.toString(DARKNET_PORT2));
      secondInnerDir.mkdir();
      fis = new FileInputStream(seednodes);
      FileUtil.writeTo(fis, new File(secondInnerDir, "seednodes.fref"));
      fis.close();
      executor = new PooledExecutor();
      secondNode =
          NodeStarter.createTestNode(
              DARKNET_PORT2,
              OPENNET_PORT2,
              dir.getPath(),
              false,
              Node.DEFAULT_MAX_HTL,
              0,
              random,
              executor,
              1000,
              5 * 1024 * 1024,
              true,
              true,
              true,
              true,
              true,
              true,
              true,
              12 * 1024,
              false,
              true,
              false,
              false,
              ipOverride);
      secondNode.start(true);
      if (!TestUtil.waitForNodes(secondNode)) {
        secondNode.park();
        System.exit(EXIT_FAILED_TARGET);
      }

      // Fetch the data
      long startFetchTime = System.currentTimeMillis();
      client = secondNode.clientCore.makeClient((short) 0);
      try {
        client.fetch(uri);
      } catch (FetchException e) {
        System.err.println("FETCH FAILED: " + e);
        e.printStackTrace();
        System.exit(EXIT_FETCH_FAILED);
        return;
      }
      long endFetchTime = System.currentTimeMillis();
      System.err.println(
          "RESULT: Fetch took "
              + (endFetchTime - startFetchTime)
              + "ms ("
              + TimeUtil.formatTime(endFetchTime - startFetchTime)
              + ") of "
              + uri
              + " .");
      secondNode.park();
      System.exit(0);
    } catch (Throwable t) {
      System.err.println("CAUGHT: " + t);
      t.printStackTrace();
      try {
        if (node != null) node.park();
      } catch (Throwable t1) {
      }
      try {
        if (secondNode != null) secondNode.park();
      } catch (Throwable t1) {
      }

      System.exit(EXIT_THREW_SOMETHING);
    }
  }
  public static void main(String[] args) {
    if (args.length < 1 || args.length > 2) {
      System.err.println(
          "Usage: java freenet.node.simulator.LongTermPushPullTest <unique identifier>");
      System.exit(1);
    }
    String uid = args[0];

    List<String> csvLine = new ArrayList<String>();
    System.out.println("DATE:" + dateFormat.format(today.getTime()));
    csvLine.add(dateFormat.format(today.getTime()));

    System.out.println("Version:" + Version.buildNumber());
    csvLine.add(String.valueOf(Version.buildNumber()));

    int exitCode = 0;
    Node node = null;
    Node node2 = null;
    FileInputStream fis = null;
    File file = new File("many-single-blocks-test-" + uid + ".csv");
    long t1, t2;

    try {

      // INSERT STUFF

      final File dir = new File("longterm-mhk-test-" + uid);
      FileUtil.removeAll(dir);
      RandomSource random =
          NodeStarter.globalTestInit(dir.getPath(), false, LogLevel.ERROR, "", false);
      File seednodes = new File("seednodes.fref");
      if (!seednodes.exists() || seednodes.length() == 0 || !seednodes.canRead()) {
        System.err.println("Unable to read seednodes.fref, it doesn't exist, or is empty");
        System.exit(EXIT_NO_SEEDNODES);
      }

      final File innerDir = new File(dir, Integer.toString(DARKNET_PORT1));
      innerDir.mkdir();
      fis = new FileInputStream(seednodes);
      FileUtil.writeTo(fis, new File(innerDir, "seednodes.fref"));
      fis.close();

      // Create one node
      node =
          NodeStarter.createTestNode(
              DARKNET_PORT1,
              OPENNET_PORT1,
              dir.getPath(),
              false,
              Node.DEFAULT_MAX_HTL,
              0,
              random,
              new PooledExecutor(),
              1000,
              4 * 1024 * 1024,
              true,
              true,
              true,
              true,
              true,
              true,
              true,
              12 * 1024,
              true,
              true,
              false,
              false,
              null);
      Logger.getChain().setThreshold(LogLevel.ERROR);

      // Start it
      node.start(true);
      t1 = System.currentTimeMillis();
      if (!TestUtil.waitForNodes(node)) {
        exitCode = EXIT_FAILED_TARGET;
        return;
      }

      t2 = System.currentTimeMillis();
      System.out.println("SEED-TIME:" + (t2 - t1));
      csvLine.add(String.valueOf(t2 - t1));

      HighLevelSimpleClient client = node.clientCore.makeClient((short) 0, false, false);

      int successes = 0;

      long startInsertsTime = System.currentTimeMillis();

      InsertBatch batch = new InsertBatch(client);

      // Inserts are sloooooow so do them in parallel.

      for (int i = 0; i < INSERTED_BLOCKS; i++) {

        System.err.println("Inserting block " + i);

        RandomAccessBucket single = randomData(node);

        InsertBlock block = new InsertBlock(single, new ClientMetadata(), FreenetURI.EMPTY_CHK_URI);

        batch.startInsert(block);
      }

      batch.waitUntilFinished();
      FreenetURI[] uris = batch.getURIs();
      long[] times = batch.getTimes();
      InsertException[] errors = batch.getErrors();

      for (int i = 0; i < INSERTED_BLOCKS; i++) {
        if (uris[i] != null) {
          csvLine.add(String.valueOf(times[i]));
          csvLine.add(uris[i].toASCIIString());
          System.out.println("Pushed block " + i + " : " + uris[i] + " in " + times[i]);
          successes++;
        } else {
          csvLine.add(InsertException.getShortMessage(errors[i].getMode()));
          csvLine.add("N/A");
          System.out.println("Failed to push block " + i + " : " + errors[i]);
        }
      }

      long endInsertsTime = System.currentTimeMillis();

      System.err.println(
          "Succeeded inserts: "
              + successes
              + " of "
              + INSERTED_BLOCKS
              + " in "
              + (endInsertsTime - startInsertsTime)
              + "ms");

      FetchContext fctx = client.getFetchContext();
      fctx.maxNonSplitfileRetries = 0;
      fctx.maxSplitfileBlockRetries = 0;
      RequestClient requestContext = new RequestClientBuilder().build();

      // PARSE FILE AND FETCH OLD STUFF IF APPROPRIATE

      FreenetURI[] mhkURIs = new FreenetURI[3];
      fis = new FileInputStream(file);
      BufferedReader br = new BufferedReader(new InputStreamReader(fis, ENCODING));
      String line = null;
      GregorianCalendar target = (GregorianCalendar) today.clone();
      target.set(Calendar.HOUR_OF_DAY, 0);
      target.set(Calendar.MINUTE, 0);
      target.set(Calendar.MILLISECOND, 0);
      target.set(Calendar.SECOND, 0);
      GregorianCalendar[] targets = new GregorianCalendar[MAX_N + 1];
      for (int i = 0; i < targets.length; i++) {
        targets[i] = ((GregorianCalendar) target.clone());
        targets[i].add(Calendar.DAY_OF_MONTH, -((1 << i) - 1));
        targets[i].getTime();
      }
      int[] totalFetchesByDelta = new int[MAX_N + 1];
      int[] totalSuccessfulFetchesByDelta = new int[MAX_N + 1];
      long[] totalFetchTimeByDelta = new long[MAX_N + 1];

      loopOverLines:
      while ((line = br.readLine()) != null) {

        for (int i = 0; i < mhkURIs.length; i++) mhkURIs[i] = null;
        // System.out.println("LINE: "+line);
        String[] split = line.split("!");
        Date date = dateFormat.parse(split[0]);
        GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("GMT"));
        calendar.setTime(date);
        System.out.println("Date: " + dateFormat.format(calendar.getTime()));
        calendar.set(Calendar.HOUR_OF_DAY, 0);
        calendar.set(Calendar.MINUTE, 0);
        calendar.set(Calendar.MILLISECOND, 0);
        calendar.set(Calendar.SECOND, 0);
        calendar.getTime();
        FreenetURI[] insertedURIs = new FreenetURI[INSERTED_BLOCKS];
        int[] insertTimes = new int[INSERTED_BLOCKS];
        if (split.length < 3) continue;
        int seedTime = Integer.parseInt(split[2]);
        System.out.println("Seed time: " + seedTime);
        if (split.length < 4) continue;

        int token = 3;

        if (split.length < token + INSERTED_BLOCKS * 2) continue;

        for (int i = 0; i < INSERTED_BLOCKS; i++) {
          try {
            insertTimes[i] = Integer.parseInt(split[token]);
          } catch (NumberFormatException e) {
            insertTimes[i] = -1;
          }
          token++;
          try {
            insertedURIs[i] = new FreenetURI(split[token]);
          } catch (MalformedURLException e) {
            insertedURIs[i] = null;
          }
          token++;
          System.out.println("Key insert " + i + " : " + insertedURIs[i] + " in " + insertTimes[i]);
        }
        for (int i = 0; i < targets.length; i++) {
          if (Math.abs(targets[i].getTimeInMillis() - calendar.getTimeInMillis())
              < HOURS.toMillis(12)) {
            System.out.println("Found row for target date " + ((1 << i) - 1) + " days ago.");
            System.out.println("Version: " + split[1]);
            csvLine.add(Integer.toString(i));
            int pulled = 0;
            int inserted = 0;
            for (int j = 0; j < INSERTED_BLOCKS; j++) {
              if (insertedURIs[j] == null) {
                csvLine.add("INSERT FAILED");
                continue;
              }
              inserted++;
              try {
                t1 = System.currentTimeMillis();
                FetchWaiter fw = new FetchWaiter(requestContext);
                client.fetch(insertedURIs[j], 32768, fw, fctx);
                fw.waitForCompletion();
                t2 = System.currentTimeMillis();

                System.out.println("PULL-TIME FOR BLOCK " + j + ": " + (t2 - t1));
                csvLine.add(String.valueOf(t2 - t1));
                pulled++;
              } catch (FetchException e) {
                if (e.getMode() != FetchExceptionMode.ALL_DATA_NOT_FOUND
                    && e.getMode() != FetchExceptionMode.DATA_NOT_FOUND) e.printStackTrace();
                csvLine.add(FetchException.getShortMessage(e.getMode()));
                System.err.println("FAILED PULL FOR BLOCK " + j + ": " + e);
              }
            }
            System.out.println(
                "Pulled "
                    + pulled
                    + " blocks of "
                    + inserted
                    + " from "
                    + ((1 << i) - 1)
                    + " days ago.");
          }
        }

        while (split.length > token + INSERTED_BLOCKS) {
          int delta;
          try {
            delta = Integer.parseInt(split[token]);
          } catch (NumberFormatException e) {
            System.err.println("Unable to parse token " + token + " = \"" + token + "\"");
            System.err.println("This is supposed to be a delta");
            System.err.println(
                "Skipping the rest of the line for date " + dateFormat.format(calendar.getTime()));
            continue loopOverLines;
          }
          System.out.println("Delta: " + ((1 << delta) - 1) + " days");
          token++;
          int totalFetchTime = 0;
          int totalSuccesses = 0;
          int totalFetches = 0;
          for (int i = 0; i < INSERTED_BLOCKS; i++) {
            if (split[token].equals("")) continue;
            int mhkFetchTime = -1;
            totalFetches++;
            try {
              mhkFetchTime = Integer.parseInt(split[token]);
              System.out.println(
                  "Fetched block #" + i + " on " + date + " in " + mhkFetchTime + "ms");
              totalSuccesses++;
              totalFetchTime += mhkFetchTime;
            } catch (NumberFormatException e) {
              System.out.println("Failed block #" + i + " on " + date + " : " + split[token]);
            }
            token++;
          }
          totalFetchesByDelta[delta] += totalFetches;
          totalSuccessfulFetchesByDelta[delta] += totalSuccesses;
          totalFetchTimeByDelta[delta] += totalFetchTime;
          System.err.println(
              "Succeeded: "
                  + totalSuccesses
                  + " of "
                  + totalFetches
                  + " average "
                  + ((double) totalFetchTime) / ((double) totalSuccesses)
                  + "ms for delta "
                  + delta
                  + " on "
                  + dateFormat.format(date));
        }
      }

      System.out.println();
      System.out.println();

      for (int i = 0; i < MAX_N + 1; i++) {
        System.out.println(
            "DELTA: "
                + i
                + " days: Total fetches: "
                + totalFetchesByDelta[i]
                + " total successes "
                + totalSuccessfulFetchesByDelta[i]
                + " = "
                + ((totalSuccessfulFetchesByDelta[i] * 100.0) / totalFetchesByDelta[i])
                + "% in "
                + (totalFetchTimeByDelta[i] * 1.0) / totalSuccessfulFetchesByDelta[i]
                + "ms");
      }

      fis.close();
      fis = null;

    } catch (Throwable t) {
      t.printStackTrace();
      exitCode = EXIT_THREW_SOMETHING;
    } finally {
      try {
        if (node != null) node.park();
      } catch (Throwable tt) {
      }
      try {
        if (node2 != null) node2.park();
      } catch (Throwable tt) {
      }
      Closer.close(fis);
      writeToStatusLog(file, csvLine);

      System.out.println("Exiting with status " + exitCode);
      System.exit(exitCode);
    }
  }
 @Override
 public void onFailure(FetchException e, ClientGetter state, ObjectContainer container) {
   switch (e.getMode()) {
     case FetchException.RECENTLY_FAILED:
        // pretty normal for polling; just add the request again
       if (state.getURI().toString().contains(mStorage.getCurrentDateString())) {
         restartRequest(state.getURI());
       } else {
         startRequestForNewEdition();
       }
       break;
     case FetchException.DATA_NOT_FOUND:
        // pretty normal for polling; just add the request again
       if (state.getURI().toString().contains(mStorage.getCurrentDateString())) {
         restartRequest(state.getURI());
       } else {
         startRequestForNewEdition();
       }
       break;
     case FetchException.ALL_DATA_NOT_FOUND:
        // should not be possible while fetching KSKs without following redirects
       System.err.println(
           "[Async_AnnounceFetcher] ALL_DATA_NOT_FOUND. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     case FetchException.ROUTE_NOT_FOUND:
        // If we hit this, we are trying to fetch something but the node does not have a proper
        // connection; just add the request again.
       if (state.getURI().toString().contains(mStorage.getCurrentDateString())) {
         restartRequest(state.getURI());
       } else {
         startRequestForNewEdition();
       }
       break;
     case FetchException.REJECTED_OVERLOAD:
       // just add the request again
       if (state.getURI().toString().contains(mStorage.getCurrentDateString())) {
         restartRequest(state.getURI());
       } else {
         startRequestForNewEdition();
       }
       break;
     case FetchException.INVALID_METADATA:
       // wtf?
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher] INVALID_METADATA. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     case FetchException.TOO_BIG_METADATA:
       // wtf?
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher] TOO_BIG_METADATA. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     case FetchException.TOO_BIG:
        // should not be possible while polling KSKs without following redirects
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher] TOO_BIG. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     case FetchException.TOO_MANY_REDIRECTS:
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher] TOO_MANY_REDIRECTS. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     case FetchException.TOO_MUCH_RECURSION:
       // FIXME: wtf?
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher] TOO_MUCH_RECURSION. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     case FetchException.PERMANENT_REDIRECT:
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher] TOO_MUCH_RECURSION. you should not see me. ignoring this announce. "
               + e.getMessage()
               + " "
               + state.getURI().toString());
       startRequestForNewEdition();
       break;
     default:
       // now we have a serious problem.
       mStorage.announce_ddos += 1;
       System.err.println(
           "[Async_AnnounceFetcher]::onFailure() unknown error: "
               + e.getMessage()
               + " mode="
               + e.getMode()
               + " ignoring this announce. uri="
               + state.getURI().toString());
       e.printStackTrace();
       startRequestForNewEdition();
       break;
   }
 }
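
Most of the cases above reduce to two outcomes: retry (for transient failures while polling) or bump the announce_ddos counter (for anything unexpected). A condensed, hypothetical rewrite using case fall-through is sketched below; it assumes the same surrounding fields and helpers (mStorage, restartRequest, startRequestForNewEdition) and omits the per-case System.err logging for brevity.

  // Hypothetical condensed variant of the handler above: same modes, same actions,
  // per-case logging omitted. Not the original author's code.
  private void handleFailureCondensed(FetchException e, ClientGetter state) {
    switch (e.getMode()) {
      case FetchException.RECENTLY_FAILED:
      case FetchException.DATA_NOT_FOUND:
      case FetchException.ROUTE_NOT_FOUND:
      case FetchException.REJECTED_OVERLOAD:
        // Transient while polling: retry today's edition, or start looking for the next one.
        if (state.getURI().toString().contains(mStorage.getCurrentDateString())) {
          restartRequest(state.getURI());
        } else {
          startRequestForNewEdition();
        }
        break;
      case FetchException.ALL_DATA_NOT_FOUND:
        // Should not happen for a KSK fetch without redirects; skip this announce.
        startRequestForNewEdition();
        break;
      default:
        // INVALID_METADATA, TOO_BIG, TOO_MANY_REDIRECTS, etc.: count as suspicious and move on.
        mStorage.announce_ddos += 1;
        startRequestForNewEdition();
        break;
    }
  }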