@Override
public void onBlocksDownloaded(Peer peer, Block block, int blocksLeft) {
    if (caughtUp)
        return;

    if (blocksLeft == 0) {
        caughtUp = true;
        doneDownload();
        done.release();
    }

    if (blocksLeft < 0 || originalBlocksLeft <= 0)
        return;

    double pct = 100.0 - (100.0 * (blocksLeft / (double) originalBlocksLeft));
    if ((int) pct != lastPercent) {
        progress(pct, blocksLeft, new Date(block.getTimeSeconds() * 1000));
        lastPercent = (int) pct;
    }
}
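// Illustrative usage sketch, not part of the original source: the callback above is the kind of
// listener bitcoinj's PeerGroup drives during initial block download. The helper below is
// hypothetical and assumes a DownloadListener-style tracker whose await() blocks on the "done"
// semaphore released once blocksLeft reaches 0.
static void downloadChainAndWait(PeerGroup peerGroup, DownloadListener tracker)
        throws InterruptedException {
    // Assumes the peer group is already running and connected to peers.
    peerGroup.startBlockChainDownload(tracker); // tracker receives onBlocksDownloaded() callbacks
    tracker.await();                            // returns after doneDownload() / done.release()
}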
/**
 * Called as part of connecting a block when the new block results in a different chain having
 * higher total work.
 *
 * <p>If shouldVerifyTransactions() is true, either newChainHead needs to be in the block store as
 * a FullStoredBlock, or (block != null && block.transactions != null).
 */
private void handleNewBestChain(
        StoredBlock storedPrev, StoredBlock newChainHead, Block block, boolean expensiveChecks)
        throws BlockStoreException, VerificationException, PrunedException {
    checkState(lock.isHeldByCurrentThread());
    // This chain has overtaken the one we currently believe is best. A reorganize is required.
    //
    // Firstly, calculate the block at which the chain diverged. We only need to examine the
    // chain from beyond this block to find differences.
    StoredBlock head = getChainHead();
    final StoredBlock splitPoint = findSplit(newChainHead, head, blockStore);
    log.info("Re-organize after split at height {}", splitPoint.getHeight());
    log.info("Old chain head: {}", head.getHeader().getHashAsString());
    log.info("New chain head: {}", newChainHead.getHeader().getHashAsString());
    log.info("Split at block: {}", splitPoint.getHeader().getHashAsString());
    // Then build a list of all blocks in the old part of the chain and the new part.
    final LinkedList<StoredBlock> oldBlocks = getPartialChain(head, splitPoint, blockStore);
    final LinkedList<StoredBlock> newBlocks = getPartialChain(newChainHead, splitPoint, blockStore);
    // Disconnect each transaction in the previous main chain that is no longer in the new main
    // chain.
    StoredBlock storedNewHead = splitPoint;
    if (shouldVerifyTransactions()) {
        for (StoredBlock oldBlock : oldBlocks) {
            try {
                disconnectTransactions(oldBlock);
            } catch (PrunedException e) {
                // We threw away the data we need to re-org this deep! We need to go back to a
                // peer with full block contents and ask them for the relevant data, then rebuild
                // the indexes. Or we could just give up and ask the human operator to help get us
                // unstuck (eg, rescan from the genesis block).
                // TODO: Retry adding this block when we get a block with hash e.getHash()
                throw e;
            }
        }
        StoredBlock cursor;
        // Walk in ascending chronological order.
        for (Iterator<StoredBlock> it = newBlocks.descendingIterator(); it.hasNext(); ) {
            cursor = it.next();
            Block cursorBlock = cursor.getHeader();
            if (expensiveChecks
                    && cursorBlock.getTimeSeconds()
                        <= getMedianTimestampOfRecentBlocks(cursor.getPrev(blockStore), blockStore))
                throw new VerificationException("Block's timestamp is too early during reorg");
            TransactionOutputChanges txOutChanges;
            if (cursor != newChainHead || block == null)
                txOutChanges = connectTransactions(cursor);
            else
                txOutChanges = connectTransactions(newChainHead.getHeight(), block);
            storedNewHead = addToBlockStore(storedNewHead, cursorBlock.cloneAsHeader(), txOutChanges);
        }
    } else {
        // (Finally) write the block to the block store.
        storedNewHead = addToBlockStore(storedPrev, newChainHead.getHeader());
    }
    // Now inform the listeners. This is necessary so the set of currently active transactions
    // (that we can spend) can be updated to take into account the re-organize. We might also have
    // received new coins we didn't have before, and our previous spends might have been undone.
    for (final ListenerRegistration<BlockChainListener> registration : listeners) {
        if (registration.executor == Threading.SAME_THREAD) {
            // Short circuit the executor so we can propagate any exceptions.
            // TODO: Do we really need to do this or should it be irrelevant?
            registration.listener.reorganize(splitPoint, oldBlocks, newBlocks);
        } else {
            registration.executor.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        registration.listener.reorganize(splitPoint, oldBlocks, newBlocks);
                    } catch (VerificationException e) {
                        log.error("Block chain listener threw exception during reorg", e);
                    }
                }
            });
        }
    }
    // Update the pointer to the best known block.
    setChainHead(storedNewHead);
}
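// Illustrative sketch, not from the original source: a listener registered with the chain is what
// receives the reorganize() call made above. Assuming bitcoinj's AbstractBlockChainListener base
// class; ReorgLoggingListener is a hypothetical name, and the summary printed is one possible
// interpretation of the old/new block lists (each runs from its head down to, but excluding, the
// split point).
class ReorgLoggingListener extends AbstractBlockChainListener {
    @Override
    public void reorganize(StoredBlock splitPoint, List<StoredBlock> oldBlocks,
            List<StoredBlock> newBlocks) throws VerificationException {
        System.out.println("Reorg at height " + splitPoint.getHeight()
                + ": disconnected " + oldBlocks.size() + " block(s), connected " + newBlocks.size());
    }
}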
// expensiveChecks enables checks that require looking at blocks further back in the chain than
// the previous one when connecting (eg the median timestamp check).
// It could be exposed, but for now we just set it to shouldVerifyTransactions().
private void connectBlock(
        final Block block,
        StoredBlock storedPrev,
        boolean expensiveChecks,
        @Nullable final List<Sha256Hash> filteredTxHashList,
        @Nullable final Map<Sha256Hash, Transaction> filteredTxn)
        throws BlockStoreException, VerificationException, PrunedException {
    checkState(lock.isHeldByCurrentThread());
    boolean filtered = filteredTxHashList != null && filteredTxn != null;
    // Check that we aren't connecting a block that fails a checkpoint check.
    if (!params.passesCheckpoint(storedPrev.getHeight() + 1, block.getHash()))
        throw new VerificationException(
                "Block failed checkpoint lockin at " + (storedPrev.getHeight() + 1));
    if (shouldVerifyTransactions()) {
        checkNotNull(block.transactions);
        for (Transaction tx : block.transactions)
            if (!tx.isFinal(storedPrev.getHeight() + 1, block.getTimeSeconds()))
                throw new VerificationException("Block contains non-final transaction");
    }

    StoredBlock head = getChainHead();
    if (storedPrev.equals(head)) {
        if (filtered && filteredTxn.size() > 0) {
            log.debug(
                    "Block {} connects to top of best chain with {} transaction(s) of which we were sent {}",
                    block.getHashAsString(), filteredTxHashList.size(), filteredTxn.size());
            for (Sha256Hash hash : filteredTxHashList)
                log.debug(" matched tx {}", hash);
        }
        if (expensiveChecks
                && block.getTimeSeconds() <= getMedianTimestampOfRecentBlocks(head, blockStore))
            throw new VerificationException("Block's timestamp is too early");

        // This block connects to the best known block; it is a normal continuation of the system.
        TransactionOutputChanges txOutChanges = null;
        if (shouldVerifyTransactions())
            txOutChanges = connectTransactions(storedPrev.getHeight() + 1, block);
        StoredBlock newStoredBlock = addToBlockStore(
                storedPrev, block.transactions == null ? block : block.cloneAsHeader(), txOutChanges);
        setChainHead(newStoredBlock);
        log.debug("Chain is now {} blocks high, running listeners", newStoredBlock.getHeight());
        informListenersForNewBlock(
                block, NewBlockType.BEST_CHAIN, filteredTxHashList, filteredTxn, newStoredBlock);
    } else {
        // This block connects to somewhere other than the top of the best known chain. We treat
        // these differently.
        //
        // Note that we send the transactions to the wallet FIRST, even if we're about to
        // re-organize this block to become the new best chain head. This simplifies handling of
        // the re-org in the Wallet class.
        StoredBlock newBlock = storedPrev.build(block);
        boolean haveNewBestChain = newBlock.moreWorkThan(head);
        if (haveNewBestChain) {
            log.info("Block is causing a re-organize");
        } else {
            StoredBlock splitPoint = findSplit(newBlock, head, blockStore);
            if (splitPoint != null && splitPoint.equals(newBlock)) {
                // newStoredBlock is a part of the same chain; there's no fork. This happens when
                // we receive a block that we already saw and linked into the chain previously,
                // which isn't the chain head. Re-processing it is confusing for the wallet, so
                // just skip it.
                log.warn(
                        "Saw duplicated block in main chain at height {}: {}",
                        newBlock.getHeight(), newBlock.getHeader().getHash());
                return;
            }
            if (splitPoint == null) {
                // This should absolutely never happen.
                // (Let's not write the full block to disk, to keep any bugs which allow this to
                // happen from writing unreasonable amounts of data to disk.)
                throw new VerificationException("Block forks the chain but splitPoint is null");
            } else {
                // We aren't actually spending any transactions (yet) because we are on a fork.
                addToBlockStore(storedPrev, block);
                int splitPointHeight = splitPoint.getHeight();
                String splitPointHash = splitPoint.getHeader().getHashAsString();
                log.info(
                        "Block forks the chain at height {}/block {}, but it did not cause a reorganize:\n{}",
                        splitPointHeight, splitPointHash, newBlock.getHeader().getHashAsString());
            }
        }

        // We may not have any transactions if we received only a header, which can happen during
        // fast catchup. If we do, send them to the wallet but state that they are on a side chain
        // so it knows not to try and spend them until they become activated.
        if (block.transactions != null || filtered) {
            informListenersForNewBlock(
                    block, NewBlockType.SIDE_CHAIN, filteredTxHashList, filteredTxn, newBlock);
        }

        if (haveNewBestChain)
            handleNewBestChain(storedPrev, newBlock, block, expensiveChecks);
    }
}
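// Illustrative sketch, not from the original source: the haveNewBestChain decision above is a
// cumulative-work comparison. Assuming bitcoinj's StoredBlock.getChainWork(), an equivalent
// standalone check (hypothetical helper name) would be:
static boolean hasMoreWork(StoredBlock candidate, StoredBlock currentHead) {
    // Chain work accumulates each block's expected hashing effort from genesis onwards, so the
    // candidate only displaces the current head if its total work is strictly greater.
    return candidate.getChainWork().compareTo(currentHead.getChainWork()) > 0;
}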
@Override
protected TransactionOutputChanges connectTransactions(int height, Block block)
        throws VerificationException, BlockStoreException {
    checkState(lock.isHeldByCurrentThread());
    if (block.transactions == null)
        throw new RuntimeException("connectTransactions called with Block that didn't have transactions!");
    if (!params.passesCheckpoint(height, block.getHash()))
        throw new VerificationException("Block failed checkpoint lockin at " + height);

    blockStore.beginDatabaseBatchWrite();

    LinkedList<StoredTransactionOutput> txOutsSpent = new LinkedList<StoredTransactionOutput>();
    LinkedList<StoredTransactionOutput> txOutsCreated = new LinkedList<StoredTransactionOutput>();
    long sigOps = 0;
    final boolean enforcePayToScriptHash =
            block.getTimeSeconds() >= NetworkParameters.BIP16_ENFORCE_TIME;

    if (scriptVerificationExecutor.isShutdown())
        scriptVerificationExecutor =
                Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

    List<Future<VerificationException>> listScriptVerificationResults =
            new ArrayList<Future<VerificationException>>(block.transactions.size());
    try {
        if (!params.isCheckpoint(height)) {
            // BIP30 violator blocks are ones that contain a duplicated transaction. They are all
            // in the checkpoints list, and we therefore only check non-checkpoints for duplicated
            // transactions here. See the BIP30 document for more details on this:
            // https://github.com/bitcoin/bips/blob/master/bip-0030.mediawiki
            for (Transaction tx : block.transactions) {
                Sha256Hash hash = tx.getHash();
                // If we already have unspent outputs for this hash, we saw the tx already. Either
                // the block is being added twice (bug) or the block is a BIP30 violator.
                if (blockStore.hasUnspentOutputs(hash, tx.getOutputs().size()))
                    throw new VerificationException("Block failed BIP30 test!");
                if (enforcePayToScriptHash) // We already check non-BIP16 sigops in Block.verifyTransactions(true)
                    sigOps += tx.getSigOpCount();
            }
        }

        Coin totalFees = Coin.ZERO;
        Coin coinbaseValue = null;
        for (final Transaction tx : block.transactions) {
            boolean isCoinBase = tx.isCoinBase();
            Coin valueIn = Coin.ZERO;
            Coin valueOut = Coin.ZERO;
            final List<Script> prevOutScripts = new LinkedList<Script>();
            if (!isCoinBase) {
                // For each input of the transaction, remove the corresponding output from the set
                // of unspent outputs.
                for (int index = 0; index < tx.getInputs().size(); index++) {
                    TransactionInput in = tx.getInputs().get(index);
                    StoredTransactionOutput prevOut = blockStore.getTransactionOutput(
                            in.getOutpoint().getHash(), in.getOutpoint().getIndex());
                    if (prevOut == null)
                        throw new VerificationException(
                                "Attempted to spend a non-existent or already spent output!");
                    // Coinbases can't be spent until they mature, to avoid re-orgs destroying
                    // entire transaction chains. The assumption is there will ~never be re-orgs
                    // deeper than the spendable coinbase chain depth.
                    if (height - prevOut.getHeight() < params.getSpendableCoinbaseDepth())
                        throw new VerificationException(
                                "Tried to spend coinbase at depth " + (height - prevOut.getHeight()));
                    // TODO: Check we're not spending the genesis transaction here. Satoshi's code won't allow it.
                    valueIn = valueIn.add(prevOut.getValue());
                    if (enforcePayToScriptHash) {
                        if (new Script(prevOut.getScriptBytes()).isPayToScriptHash())
                            sigOps += Script.getP2SHSigOpCount(in.getScriptBytes());
                        if (sigOps > Block.MAX_BLOCK_SIGOPS)
                            throw new VerificationException("Too many P2SH SigOps in block");
                    }
                    prevOutScripts.add(new Script(prevOut.getScriptBytes()));
                    // in.getScriptSig().correctlySpends(tx, index, new Script(params,
                    //         prevOut.getScriptBytes(), 0, prevOut.getScriptBytes().length));
                    blockStore.removeUnspentTransactionOutput(prevOut);
                    txOutsSpent.add(prevOut);
                }
            }
            Sha256Hash hash = tx.getHash();
            for (TransactionOutput out : tx.getOutputs()) {
                valueOut = valueOut.add(out.getValue());
                // For each output, add it to the set of unspent outputs so it can be consumed in future.
                StoredTransactionOutput newOut = new StoredTransactionOutput(
                        hash, out.getIndex(), out.getValue(), height, isCoinBase, out.getScriptBytes());
                blockStore.addUnspentTransactionOutput(newOut);
                txOutsCreated.add(newOut);
            }
            // All values were already checked for being non-negative (as it is verified in
            // Transaction.verify()), but we check again here just for defence in depth.
            // Transactions with zero output value are OK.
            if (valueOut.signum() < 0 || valueOut.compareTo(NetworkParameters.MAX_MONEY) > 0)
                throw new VerificationException("Transaction output value out of range");
            if (isCoinBase) {
                coinbaseValue = valueOut;
            } else {
                if (valueIn.compareTo(valueOut) < 0 || valueIn.compareTo(NetworkParameters.MAX_MONEY) > 0)
                    throw new VerificationException("Transaction input value out of range");
                totalFees = totalFees.add(valueIn.subtract(valueOut));
            }

            if (!isCoinBase && runScripts) {
                // Because correctlySpends modifies transactions, this must come after we are done with tx.
                FutureTask<VerificationException> future = new FutureTask<VerificationException>(
                        new Verifier(tx, prevOutScripts, enforcePayToScriptHash));
                scriptVerificationExecutor.execute(future);
                listScriptVerificationResults.add(future);
            }
        }
        if (totalFees.compareTo(NetworkParameters.MAX_MONEY) > 0
                || block.getBlockInflation(height).add(totalFees).compareTo(coinbaseValue) < 0)
            throw new VerificationException("Transaction fees out of range");
        for (Future<VerificationException> future : listScriptVerificationResults) {
            VerificationException e;
            try {
                e = future.get();
            } catch (InterruptedException thrownE) {
                throw new RuntimeException(thrownE); // Shouldn't happen
            } catch (ExecutionException thrownE) {
                log.error("Script.correctlySpends threw a non-normal exception: " + thrownE.getCause());
                throw new VerificationException(
                        "Bug in Script.correctlySpends, likely script malformed in some new and interesting way.",
                        thrownE);
            }
            if (e != null)
                throw e;
        }
    } catch (VerificationException e) {
        scriptVerificationExecutor.shutdownNow();
        blockStore.abortDatabaseBatchWrite();
        throw e;
    } catch (BlockStoreException e) {
        scriptVerificationExecutor.shutdownNow();
        blockStore.abortDatabaseBatchWrite();
        throw e;
    }
    return new TransactionOutputChanges(txOutsCreated, txOutsSpent);
}
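// Illustrative sketch, not from the original source: the fee check above enforces that the
// coinbase output value cannot exceed the block subsidy (block.getBlockInflation(height)) plus
// the fees collected from the block's other transactions. The helper below is hypothetical and
// uses Bitcoin's 50-coin / 210,000-block halving schedule purely for illustration; the chain this
// code targets may use a different schedule via getBlockInflation().
static void checkCoinbaseValue(int height, Coin coinbaseValue, Coin totalFees)
        throws VerificationException {
    Coin subsidy = Coin.FIFTY_COINS.shiftRight(height / 210000); // halves every 210,000 blocks
    if (subsidy.add(totalFees).compareTo(coinbaseValue) < 0)
        throw new VerificationException("Coinbase claims more value than subsidy plus fees");
}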
private static void kimotoGravityWellCheck(
        StoredBlock prevBlock, Block added, BlockStore store, NetworkParameters params)
        throws BlockStoreException {
    final long blocksTargetSpacing = (long) (2.5 * 60); // 2.5 minutes
    int timeDaySeconds = 60 * 60 * 24;
    long pastSecondsMin = timeDaySeconds / 40;
    long pastSecondsMax = timeDaySeconds * 7;
    long pastBlocksMin = pastSecondsMin / blocksTargetSpacing;
    long pastBlocksMax = pastSecondsMax / blocksTargetSpacing;

    StoredBlock blockReading = prevBlock;

    long pastBlocksMass = 0;
    long pastRateActualSeconds = 0;
    long pastRateTargetSeconds = 0;
    double pastRateAdjustmentRatio = 1.0f;
    BigInteger pastDifficultyAverage = BigInteger.valueOf(0);
    BigInteger pastDifficultyAveragePrev = BigInteger.valueOf(0);
    double eventHorizonDeviation;
    double eventHorizonDeviationFast;
    double eventHorizonDeviationSlow;

    if (prevBlock == null
            || prevBlock.getHeight() == 0
            || (long) prevBlock.getHeight() < pastBlocksMin) {
        verifyDifficulty(prevBlock, added, params.getMaxTarget(), params);
        return;
    }

    final Block prevHeader = prevBlock.getHeader();
    long latestBlockTime = prevHeader.getTimeSeconds();

    for (int i = 1; blockReading.getHeight() > 0; i++) {
        if (pastBlocksMax > 0 && i > pastBlocksMax) {
            break;
        }
        pastBlocksMass++;

        if (i == 1) {
            pastDifficultyAverage = blockReading.getHeader().getDifficultyTargetAsInteger();
        } else {
            pastDifficultyAverage = (blockReading
                    .getHeader()
                    .getDifficultyTargetAsInteger()
                    .subtract(pastDifficultyAveragePrev))
                    .divide(BigInteger.valueOf(i))
                    .add(pastDifficultyAveragePrev);
        }
        pastDifficultyAveragePrev = pastDifficultyAverage;

        if (blockReading.getHeight() > 646120
                && latestBlockTime < blockReading.getHeader().getTimeSeconds()) {
            // eliminates the ability to go back in time
            latestBlockTime = blockReading.getHeader().getTimeSeconds();
        }

        pastRateActualSeconds = prevHeader.getTimeSeconds() - blockReading.getHeader().getTimeSeconds();
        pastRateTargetSeconds = blocksTargetSpacing * pastBlocksMass;
        if (blockReading.getHeight() > 646120) {
            // this should slow down the upward difficulty change
            if (pastRateActualSeconds < 5) {
                pastRateActualSeconds = 5;
            }
        } else {
            if (pastRateActualSeconds < 0) {
                pastRateActualSeconds = 0;
            }
        }
        if (pastRateActualSeconds != 0 && pastRateTargetSeconds != 0) {
            pastRateAdjustmentRatio = (double) pastRateTargetSeconds / pastRateActualSeconds;
        }
        eventHorizonDeviation = 1 + 0.7084 * Math.pow((double) pastBlocksMass / 28.2d, -1.228);
        eventHorizonDeviationFast = eventHorizonDeviation;
        eventHorizonDeviationSlow = 1 / eventHorizonDeviation;

        if (pastBlocksMass >= pastBlocksMin) {
            if (pastRateAdjustmentRatio <= eventHorizonDeviationSlow
                    || pastRateAdjustmentRatio >= eventHorizonDeviationFast) {
                break;
            }
        }
        blockReading = store.get(blockReading.getHeader().getPrevBlockHash());
        if (blockReading == null) {
            return;
        }
    }

    BigInteger newDifficulty = pastDifficultyAverage;
    if (pastRateActualSeconds != 0 && pastRateTargetSeconds != 0) {
        newDifficulty = newDifficulty.multiply(BigInteger.valueOf(pastRateActualSeconds));
        newDifficulty = newDifficulty.divide(BigInteger.valueOf(pastRateTargetSeconds));
    }

    if (newDifficulty.compareTo(params.getMaxTarget()) > 0) {
        log.info("Difficulty hit proof of work limit: {}", newDifficulty.toString(16));
        newDifficulty = params.getMaxTarget();
    }

    verifyDifficulty(prevBlock, added, newDifficulty, params);
}
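// Illustrative sketch, not from the original source: once the loop above has produced
// pastDifficultyAverage and the actual/target rate totals, the retarget reduces to the scaling
// performed at the end of the method. A distilled, hypothetical version of that final step:
static BigInteger kgwRetarget(BigInteger pastDifficultyAverage, long pastRateActualSeconds,
        long pastRateTargetSeconds, BigInteger maxTarget) {
    BigInteger newTarget = pastDifficultyAverage;
    if (pastRateActualSeconds != 0 && pastRateTargetSeconds != 0) {
        // Blocks arriving faster than the target spacing shrink the target (raising difficulty);
        // slower arrival grows it (lowering difficulty), clamped at the proof-of-work limit.
        newTarget = newTarget
                .multiply(BigInteger.valueOf(pastRateActualSeconds))
                .divide(BigInteger.valueOf(pastRateTargetSeconds));
    }
    return newTarget.min(maxTarget);
}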