mirror of https://github.com/bitcoin/bitcoin.git
Merge 45bd891465 into b510893d00
commit 6b43abaa8d
@@ -2422,34 +2422,43 @@ bool Chainstate::ConnectBlock(const CBlock& block, BlockValidationState& state,
return true;
}

bool fScriptChecks = true;
if (!m_chainman.AssumedValidBlock().IsNull()) {
const char* script_check_reason;
if (m_chainman.AssumedValidBlock().IsNull()) {
script_check_reason = "assumevalid=0 (always verify)";
} else {
constexpr int64_t TWO_WEEKS_IN_SECONDS{60 * 60 * 24 * 7 * 2};
// We've been configured with the hash of a block which has been externally verified to have a valid history.
// A suitable default value is included with the software and updated from time to time. Because validity
// relative to a piece of software is an objective fact these defaults can be easily reviewed.
// This setting doesn't force the selection of any particular chain but makes validating some faster by
// effectively caching the result of part of the verification.
BlockMap::const_iterator it{m_blockman.m_block_index.find(m_chainman.AssumedValidBlock())};
if (it != m_blockman.m_block_index.end()) {
if (it->second.GetAncestor(pindex->nHeight) == pindex &&
m_chainman.m_best_header->GetAncestor(pindex->nHeight) == pindex &&
m_chainman.m_best_header->nChainWork >= m_chainman.MinimumChainWork()) {
// This block is a member of the assumed verified chain and an ancestor of the best header.
// Script verification is skipped when connecting blocks under the
// assumevalid block. Assuming the assumevalid block is valid this
// is safe because block merkle hashes are still computed and checked,
// Of course, if an assumed valid block is invalid due to false scriptSigs
// this optimization would allow an invalid chain to be accepted.
// The equivalent time check discourages hash power from extorting the network via DOS attack
// into accepting an invalid block through telling users they must manually set assumevalid.
// Requiring a software change or burying the invalid block, regardless of the setting, makes
// it hard to hide the implication of the demand. This also avoids having release candidates
// that are hardly doing any signature verification at all in testing without having to
// artificially set the default assumed verified block further back.
// The test against the minimum chain work prevents the skipping when denied access to any chain at
// least as good as the expected chain.
fScriptChecks = (GetBlockProofEquivalentTime(*m_chainman.m_best_header, *pindex, *m_chainman.m_best_header, params.GetConsensus()) <= 60 * 60 * 24 * 7 * 2);
}
if (it == m_blockman.m_block_index.end()) {
script_check_reason = "assumevalid hash not in headers";
} else if (it->second.GetAncestor(pindex->nHeight) != pindex) {
script_check_reason = (pindex->nHeight > it->second.nHeight) ? "block height above assumevalid height" : "block not in assumevalid chain";
} else if (m_chainman.m_best_header->GetAncestor(pindex->nHeight) != pindex) {
script_check_reason = "block not in best header chain";
} else if (m_chainman.m_best_header->nChainWork < m_chainman.MinimumChainWork()) {
script_check_reason = "best header chainwork below minimumchainwork";
} else if (GetBlockProofEquivalentTime(*m_chainman.m_best_header, *pindex, *m_chainman.m_best_header, params.GetConsensus()) <= TWO_WEEKS_IN_SECONDS) {
script_check_reason = "block too recent relative to best header";
} else {
// This block is a member of the assumed verified chain and an ancestor of the best header.
// Script verification is skipped when connecting blocks under the
// assumevalid block. Assuming the assumevalid block is valid this
// is safe because block merkle hashes are still computed and checked,
// Of course, if an assumed valid block is invalid due to false scriptSigs
// this optimization would allow an invalid chain to be accepted.
// The equivalent time check discourages hash power from extorting the network via DOS attack
// into accepting an invalid block through telling users they must manually set assumevalid.
// Requiring a software change or burying the invalid block, regardless of the setting, makes
// it hard to hide the implication of the demand. This also avoids having release candidates
// that are hardly doing any signature verification at all in testing without having to
// artificially set the default assumed verified block further back.
// The test against the minimum chain work prevents the skipping when denied access to any chain at
// least as good as the expected chain.
script_check_reason = nullptr;
}
}
}
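Condensed, the hunk above replaces the single fScriptChecks boolean with a reason string: a non-null reason means script verification must run for this block, while nullptr means it may be skipped. A minimal Python sketch of that decision ladder, using simplified stand-in names rather than the actual C++ API:

# Illustrative sketch of the reason-selection ladder above; None plays the
# role of nullptr ("scripts may be skipped"). All names are simplified stand-ins.
TWO_WEEKS_IN_SECONDS = 60 * 60 * 24 * 7 * 2

def script_check_reason(assumed_valid_hash, block_index, pindex, best_header,
                        minimum_chain_work, proof_equivalent_time):
    """Return why scripts must be verified for pindex, or None to allow skipping."""
    if assumed_valid_hash is None:
        return "assumevalid=0 (always verify)"
    assumed = block_index.get(assumed_valid_hash)
    if assumed is None:
        return "assumevalid hash not in headers"
    if assumed.get_ancestor(pindex.height) is not pindex:
        return ("block height above assumevalid height" if pindex.height > assumed.height
                else "block not in assumevalid chain")
    if best_header.get_ancestor(pindex.height) is not pindex:
        return "block not in best header chain"
    if best_header.chain_work < minimum_chain_work:
        return "best header chainwork below minimumchainwork"
    if proof_equivalent_time(best_header, pindex) <= TWO_WEEKS_IN_SECONDS:
        return "block too recent relative to best header"
    return None  # assumed-valid ancestor, buried by more than two weeks of work

fScriptChecks then falls out as "reason is not None", which is exactly what the logging hunk below computes.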
@@ -2562,9 +2571,16 @@ bool Chainstate::ConnectBlock(const CBlock& block, BlockValidationState& state,
Ticks<SecondsDouble>(m_chainman.time_forks),
Ticks<MillisecondsDouble>(m_chainman.time_forks) / m_chainman.num_blocks_total);

if (fScriptChecks != m_prev_script_checks_logged && GetRole() == ChainstateRole::NORMAL) {
LogInfo("%s signature validations at block #%d (%s).", fScriptChecks ? "Enabling" : "Disabling", pindex->nHeight, block_hash.ToString());
m_prev_script_checks_logged = fScriptChecks;
const bool fScriptChecks{!!script_check_reason};
if (script_check_reason != m_last_script_check_reason_logged && GetRole() == ChainstateRole::NORMAL) {
if (fScriptChecks) {
LogInfo("Enabling script verification at block #%d (%s): %s.",
pindex->nHeight, block_hash.ToString(), script_check_reason);
} else {
LogInfo("Disabling script verification at block #%d (%s).",
pindex->nHeight, block_hash.ToString());
}
m_last_script_check_reason_logged = script_check_reason;
}

CBlockUndo blockundo;
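The logging above fires only when the reason differs from the previously logged one (and only for the normal chainstate role), so a long run of blocks in the same verification state produces a single line rather than one per block. The last-logged value lives in a std::optional (declared in the header hunk below) so that the very first connected block always logs, whatever the reason turns out to be. A rough Python analogue of that transition-only logging, with a private sentinel standing in for an empty optional (illustrative only):

# Illustrative sketch of "log only on transitions"; _UNSET mirrors an empty
# std::optional so the first connected block always produces a log line.
_UNSET = object()

class ScriptCheckLogger:
    def __init__(self):
        self._last_reason_logged = _UNSET

    def maybe_log(self, height, block_hash, reason):
        if reason == self._last_reason_logged:
            return  # same state as the last connected block: stay quiet
        if reason is not None:
            print(f"Enabling script verification at block #{height} ({block_hash}): {reason}.")
        else:
            print(f"Disabling script verification at block #{height} ({block_hash}).")
        self._last_reason_logged = reason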
@@ -561,7 +561,7 @@ protected:
//! Cached result of LookupBlockIndex(*m_from_snapshot_blockhash)
mutable const CBlockIndex* m_cached_snapshot_base GUARDED_BY(::cs_main){nullptr};

std::atomic_bool m_prev_script_checks_logged{true};
std::optional<const char*> m_last_script_check_reason_logged GUARDED_BY(::cs_main){};

public:
//! Reference to a BlockManager instance which itself is shared across all
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
# Copyright (c) 2014-2022 The Bitcoin Core developers
# Copyright (c) 2014-present The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test logic for skipping signature validation on old blocks.
@@ -7,8 +7,7 @@
Test logic for skipping signature validation on blocks which we've assumed
valid (https://github.com/bitcoin/bitcoin/pull/9484)

We build a chain that includes and invalid signature for one of the
transactions:
We build a chain that includes an invalid signature for one of the transactions:

0: genesis block
1: block 1 with coinbase transaction output.
@@ -19,7 +18,7 @@ transactions:
103-2202: bury the bad block with just over two weeks' worth of blocks
(2100 blocks)

Start three nodes:
Start a few nodes:

- node0 has no -assumevalid parameter. Try to sync to block 2202. It will
reject block 102 and only sync as far as block 101
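A quick sanity check on the "just over two weeks' worth" figure: GetBlockProofEquivalentTime expresses burial depth as work converted to seconds at the nominal block spacing, and on regtest every block carries the same proof, so N blocks of burial correspond to roughly N times the 600-second target spacing (regtest keeps the 10-minute value). In numbers:

# Back-of-the-envelope check (assumes regtest's nominal 600 s target spacing and
# uniform per-block work, which is how the proof-equivalent-time gate sees it).
SPACING = 600                        # target seconds per block
TWO_WEEKS = 60 * 60 * 24 * 7 * 2     # 1,209,600 seconds
assert TWO_WEEKS // SPACING == 2016  # blocks needed to reach exactly two weeks
assert 2100 * SPACING > TWO_WEEKS    # 2100 blocks bury the bad block past the gate

Because the gate in the validation hunk uses <=, a burial of exactly 2016 blocks would still verify scripts; hence "just over" two weeks.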
@@ -28,6 +27,12 @@ Start three nodes:
- node2 has -assumevalid set to the hash of block 102. Try to sync to
block 200. node2 will reject block 102 since it's assumed valid, but it
isn't buried by at least two weeks' work.
- node3 has -assumevalid set to the hash of block 102. Feed a longer
competing headers-only branch so block #1 is not on the best header chain.
- node4 has -assumevalid set to the hash of block 102. Submit an alternative
block #1 that is not part of the assumevalid chain.
- node5 starts with no -assumevalid parameter. Reindex to hit
"assumevalid hash not in headers" and "below minimum chainwork".
"""

from test_framework.blocktools import (
@@ -64,11 +69,11 @@ class BaseNode(P2PInterface):
class AssumeValidTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
self.num_nodes = 6
self.rpc_timeout = 120

def setup_network(self):
self.add_nodes(3)
self.add_nodes(self.num_nodes)
# Start node0. We don't start the other nodes yet since
# we need to pre-mine a block with an invalid transaction
# signature so we can pass in the block hash as assumevalid.
@@ -136,38 +141,117 @@ class AssumeValidTest(BitcoinTestFramework):
self.tip = block.hash_int
self.block_time += 1
height += 1
block_1_hash = self.blocks[0].hash_hex

# Start node1 and node2 with assumevalid so they accept a block with a bad signature.
self.start_node(1, extra_args=["-assumevalid=" + block102.hash_hex])
self.start_node(2, extra_args=["-assumevalid=" + block102.hash_hex])
self.start_node(1, extra_args=[f"-assumevalid={block102.hash_hex}"])
self.start_node(2, extra_args=[f"-assumevalid={block102.hash_hex}"])
self.start_node(3, extra_args=[f"-assumevalid={block102.hash_hex}"])
self.start_node(4, extra_args=[f"-assumevalid={block102.hash_hex}"])
self.start_node(5)

p2p0 = self.nodes[0].add_p2p_connection(BaseNode())
p2p0.send_header_for_blocks(self.blocks[0:2000])
p2p0.send_header_for_blocks(self.blocks[2000:])

# nodes[0]
# Send blocks to node0. Block 102 will be rejected.
self.send_blocks_until_disconnected(p2p0)
self.wait_until(lambda: self.nodes[0].getblockcount() >= COINBASE_MATURITY + 1)
assert_equal(self.nodes[0].getblockcount(), COINBASE_MATURITY + 1)
with self.nodes[0].assert_debug_log(expected_msgs=[
f"Enabling script verification at block #1 ({block_1_hash}): assumevalid=0 (always verify).",
"Block validation error: block-script-verify-flag-failed",
]):
p2p0 = self.nodes[0].add_p2p_connection(BaseNode())

p2p1 = self.nodes[1].add_p2p_connection(BaseNode())
p2p1.send_header_for_blocks(self.blocks[0:2000])
p2p1.send_header_for_blocks(self.blocks[2000:])
with self.nodes[1].assert_debug_log(expected_msgs=['Disabling signature validations at block #1', 'Enabling signature validations at block #103']):
p2p0.send_header_for_blocks(self.blocks[0:2000])
p2p0.send_header_for_blocks(self.blocks[2000:])

self.send_blocks_until_disconnected(p2p0)
self.wait_until(lambda: self.nodes[0].getblockcount() >= COINBASE_MATURITY + 1)
assert_equal(self.nodes[0].getblockcount(), COINBASE_MATURITY + 1)

# nodes[1]
with self.nodes[1].assert_debug_log(expected_msgs=[
f"Disabling script verification at block #1 ({self.blocks[0].hash_hex}).",
f"Enabling script verification at block #103 ({self.blocks[102].hash_hex}): block height above assumevalid height.",
]):
p2p1 = self.nodes[1].add_p2p_connection(BaseNode())

p2p1.send_header_for_blocks(self.blocks[0:2000])
p2p1.send_header_for_blocks(self.blocks[2000:])
# Send all blocks to node1. All blocks will be accepted.
for i in range(2202):
p2p1.send_without_ping(msg_block(self.blocks[i]))
# Syncing 2200 blocks can take a while on slow systems. Give it plenty of time to sync.
p2p1.sync_with_ping(timeout=960)
assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)
assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)

p2p2 = self.nodes[2].add_p2p_connection(BaseNode())
p2p2.send_header_for_blocks(self.blocks[0:200])

# nodes[2]
# Send blocks to node2. Block 102 will be rejected.
self.send_blocks_until_disconnected(p2p2)
self.wait_until(lambda: self.nodes[2].getblockcount() >= COINBASE_MATURITY + 1)
assert_equal(self.nodes[2].getblockcount(), COINBASE_MATURITY + 1)
with self.nodes[2].assert_debug_log(expected_msgs=[
f"Enabling script verification at block #1 ({block_1_hash}): block too recent relative to best header.",
"Block validation error: block-script-verify-flag-failed",
]):
p2p2 = self.nodes[2].add_p2p_connection(BaseNode())
p2p2.send_header_for_blocks(self.blocks[0:200])

self.send_blocks_until_disconnected(p2p2)

self.wait_until(lambda: self.nodes[2].getblockcount() >= COINBASE_MATURITY + 1)
assert_equal(self.nodes[2].getblockcount(), COINBASE_MATURITY + 1)

# nodes[3]
with self.nodes[3].assert_debug_log(expected_msgs=[
f"Enabling script verification at block #1 ({block_1_hash}): block not in best header chain.",
]):
best_hash = self.nodes[3].getbestblockhash()
tip_block = self.nodes[3].getblock(best_hash)
second_chain_tip, second_chain_time, second_chain_height = int(best_hash, 16), tip_block["time"] + 1, tip_block["height"] + 1
second_chain = []
for _ in range(150):
block = create_block(second_chain_tip, create_coinbase(second_chain_height), second_chain_time)
block.solve()
second_chain.append(block)
second_chain_tip, second_chain_time, second_chain_height = block.hash_int, second_chain_time + 1, second_chain_height + 1

p2p3 = self.nodes[3].add_p2p_connection(BaseNode())

p2p3.send_header_for_blocks(second_chain)
p2p3.send_header_for_blocks(self.blocks[0:103])

p2p3.send_without_ping(msg_block(self.blocks[0]))
self.wait_until(lambda: self.nodes[3].getblockcount() == 1)

# nodes[4]
genesis_hash = self.nodes[4].getbestblockhash()
genesis_time = self.nodes[4].getblock(genesis_hash)['time']
alt1 = create_block(int(genesis_hash, 16), create_coinbase(1), genesis_time + 2)
alt1.solve()
with self.nodes[4].assert_debug_log(expected_msgs=[
f"Enabling script verification at block #1 ({alt1.hash_hex}): block not in assumevalid chain.",
]):
p2p4 = self.nodes[4].add_p2p_connection(BaseNode())
p2p4.send_header_for_blocks(self.blocks[0:103])

p2p4.send_without_ping(msg_block(alt1))
self.wait_until(lambda: self.nodes[4].getblockcount() == 1)

# nodes[5]
# Reindex to hit specific assumevalid gates (no races with header downloads/chainwork during startup).
p2p5 = self.nodes[5].add_p2p_connection(BaseNode())
p2p5.send_header_for_blocks(self.blocks[0:200])
p2p5.send_without_ping(msg_block(self.blocks[0]))
self.wait_until(lambda: self.nodes[5].getblockcount() == 1)
with self.nodes[5].assert_debug_log(expected_msgs=[
f"Enabling script verification at block #1 ({block_1_hash}): assumevalid hash not in headers.",
]):
self.restart_node(5, extra_args=["-reindex-chainstate", "-assumevalid=1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"])
assert_equal(self.nodes[5].getblockcount(), 1)
with self.nodes[5].assert_debug_log(expected_msgs=[
f"Enabling script verification at block #1 ({block_1_hash}): best header chainwork below minimumchainwork.",
]):
self.restart_node(5, extra_args=["-reindex-chainstate", f"-assumevalid={block102.hash_hex}", "-minimumchainwork=0xffff"])
assert_equal(self.nodes[5].getblockcount(), 1)

if __name__ == '__main__':
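For quick orientation, each node in the test exercises one branch of the reason ladder introduced in the validation hunk; the mapping below simply restates the expected_msgs the test asserts:

# Reader aid only (not part of the patch): which gate each node exercises,
# restating the expected_msgs asserted in the test above.
GATE_BY_NODE = {
    0: "assumevalid=0 (always verify)",
    1: "block height above assumevalid height",   # logged at block #103
    2: "block too recent relative to best header",
    3: "block not in best header chain",
    4: "block not in assumevalid chain",
    5: "assumevalid hash not in headers / best header chainwork below minimumchainwork",
}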