Merge #9221: [qa] Get rid of duplicate code

facbfa5 [qa] Get rid of duplicate code (MarcoFalke)
Merge commit 9e4bb312e6 by MarcoFalke, 2016-12-02 20:17:07 +01:00
Signed with GPG key ID 2D7F2372E50FE137
3 changed files with 19 additions and 82 deletions

qa/rpc-tests/maxuploadtarget.py

@@ -81,49 +81,16 @@ class TestNode(NodeConnCB):
 class MaxUploadTest(BitcoinTestFramework):
-    def add_options(self, parser):
-        parser.add_option("--testbinary", dest="testbinary",
-                          default=os.getenv("BITCOIND", "bitcoind"),
-                          help="bitcoind binary to test")
-
     def __init__(self):
         super().__init__()
         self.setup_clean_chain = True
         self.num_nodes = 1
-        self.utxo = []
-        self.txouts = gen_return_txouts()
 
     def setup_network(self):
         # Start a node with maxuploadtarget of 200 MB (/24h)
         self.nodes = []
         self.nodes.append(start_node(0, self.options.tmpdir, ["-debug", "-maxuploadtarget=800", "-blockmaxsize=999000"]))
 
-    def mine_full_block(self, node, address):
-        # Want to create a full block
-        # We'll generate a 66k transaction below, and 14 of them is close to the 1MB block limit
-        for j in range(14):
-            if len(self.utxo) < 14:
-                self.utxo = node.listunspent()
-            inputs=[]
-            outputs = {}
-            t = self.utxo.pop()
-            inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
-            remchange = t["amount"] - Decimal("0.001000")
-            outputs[address]=remchange
-            # Create a basic transaction that will send change back to ourself after account for a fee
-            # And then insert the 128 generated transaction outs in the middle rawtx[92] is where the #
-            # of txouts is stored and is the only thing we overwrite from the original transaction
-            rawtx = node.createrawtransaction(inputs, outputs)
-            newtx = rawtx[0:92]
-            newtx = newtx + self.txouts
-            newtx = newtx + rawtx[94:]
-            # Appears to be ever so slightly faster to sign with SIGHASH_NONE
-            signresult = node.signrawtransaction(newtx,None,None,"NONE")
-            txid = node.sendrawtransaction(signresult["hex"], True)
-        # Mine a full sized block which will be these transactions we just created
-        node.generate(1)
-
     def run_test(self):
         # Before we connect anything, we first set the time on the node
         # to be in the past, otherwise things break because the CNode

@@ -151,7 +118,7 @@ class MaxUploadTest(BitcoinTestFramework):
         # Test logic begins here
 
         # Now mine a big block
-        self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
+        mine_large_block(self.nodes[0])
 
         # Store the hash; we'll request this later
         big_old_block = self.nodes[0].getbestblockhash()

@@ -162,11 +129,10 @@ class MaxUploadTest(BitcoinTestFramework):
         self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
 
         # Mine one more block, so that the prior block looks old
-        self.mine_full_block(self.nodes[0], self.nodes[0].getnewaddress())
+        mine_large_block(self.nodes[0])
 
         # We'll be requesting this new block too
         big_new_block = self.nodes[0].getbestblockhash()
-        new_block_size = self.nodes[0].getblock(big_new_block)['size']
         big_new_block = int(big_new_block, 16)
 
         # test_nodes[0] will test what happens if we just keep requesting the

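The splice in the removed helper works because an unsigned raw transaction with exactly one input is laid out as 4 version bytes, a 1-byte input count, the 32-byte txid, the 4-byte vout, a 1-byte empty-scriptSig length and 4 sequence bytes: 46 bytes, or 92 hex characters, before the output-count varint. That is why rawtx[92:94] is what gets overwritten by the generated txouts blob. A minimal sketch of the arithmetic (editor's illustration, not part of the diff):

    # hex offset of the output-count varint in an unsigned 1-input transaction:
    # version + input count + txid + vout + empty scriptSig length + sequence
    offset = 2 * (4 + 1 + 32 + 4 + 1 + 4)
    assert offset == 92  # so newtx = rawtx[0:92] + txouts + rawtx[94:]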
qa/rpc-tests/pruning.py

@@ -24,10 +24,6 @@ class PruneTest(BitcoinTestFramework):
         self.setup_clean_chain = True
         self.num_nodes = 3
 
-        self.utxo = []
-        self.address = ["",""]
-        self.txouts = gen_return_txouts()
-
     def setup_network(self):
         self.nodes = []
         self.is_network_split = False

@@ -40,12 +36,6 @@ class PruneTest(BitcoinTestFramework):
         self.nodes.append(start_node(2, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-prune=550"], timewait=900))
         self.prunedir = self.options.tmpdir+"/node2/regtest/blocks/"
 
-        self.address[0] = self.nodes[0].getnewaddress()
-        self.address[1] = self.nodes[1].getnewaddress()
-
-        # Determine default relay fee
-        self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
-
         connect_nodes(self.nodes[0], 1)
         connect_nodes(self.nodes[1], 2)
         connect_nodes(self.nodes[2], 0)

@@ -58,7 +48,7 @@ class PruneTest(BitcoinTestFramework):
         self.nodes[0].generate(150)
         # Then mine enough full blocks to create more than 550MiB of data
         for i in range(645):
-            self.mine_full_block(self.nodes[0], self.address[0])
+            mine_large_block(self.nodes[0])
 
         sync_blocks(self.nodes[0:3])

@@ -70,7 +60,7 @@ class PruneTest(BitcoinTestFramework):
         print("Mining 25 more blocks should cause the first block file to be pruned")
         # Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
         for i in range(25):
-            self.mine_full_block(self.nodes[0],self.address[0])
+            mine_large_block(self.nodes[0])
 
         waitstart = time.time()
         while os.path.isfile(self.prunedir+"blk00000.dat"):

@@ -95,17 +85,15 @@ class PruneTest(BitcoinTestFramework):
         stop_node(self.nodes[0],0)
         self.nodes[0]=start_node(0, self.options.tmpdir, ["-debug","-maxreceivebuffer=20000","-blockmaxsize=999000", "-checkblocks=5"], timewait=900)
         # Mine 24 blocks in node 1
-        self.utxo = self.nodes[1].listunspent()
         for i in range(24):
             if j == 0:
-                self.mine_full_block(self.nodes[1],self.address[1])
+                mine_large_block(self.nodes[1])
             else:
                 self.nodes[1].generate(1) #tx's already in mempool from previous disconnects
 
         # Reorg back with 25 block chain from node 0
-        self.utxo = self.nodes[0].listunspent()
         for i in range(25):
-            self.mine_full_block(self.nodes[0],self.address[0])
+            mine_large_block(self.nodes[0])
 
         # Create connections in the order so both nodes can see the reorg at the same time
         connect_nodes(self.nodes[1], 0)

@@ -217,31 +205,6 @@ class PruneTest(BitcoinTestFramework):
         # Verify we can now have the data for a block previously pruned
         assert(self.nodes[2].getblock(self.forkhash)["height"] == self.forkheight)
 
-    def mine_full_block(self, node, address):
-        # Want to create a full block
-        # We'll generate a 66k transaction below, and 14 of them is close to the 1MB block limit
-        for j in range(14):
-            if len(self.utxo) < 14:
-                self.utxo = node.listunspent()
-            inputs=[]
-            outputs = {}
-            t = self.utxo.pop()
-            inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
-            remchange = t["amount"] - 100*self.relayfee # Fee must be above min relay rate for 66kb tx
-            outputs[address]=remchange
-            # Create a basic transaction that will send change back to ourself after account for a fee
-            # And then insert the 128 generated transaction outs in the middle rawtx[92] is where the #
-            # of txouts is stored and is the only thing we overwrite from the original transaction
-            rawtx = node.createrawtransaction(inputs, outputs)
-            newtx = rawtx[0:92]
-            newtx = newtx + self.txouts
-            newtx = newtx + rawtx[94:]
-            # Appears to be ever so slightly faster to sign with SIGHASH_NONE
-            signresult = node.signrawtransaction(newtx,None,None,"NONE")
-            txid = node.sendrawtransaction(signresult["hex"], True)
-        # Mine a full sized block which will be these transactions we just created
-        node.generate(1)
-
     def run_test(self):
         print("Warning! This test requires 4GB of disk space and takes over 30 mins (up to 2 hours)")

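The two deleted copies priced these transactions differently: maxuploadtarget.py subtracted a hard-coded Decimal("0.001000"), while pruning.py used 100*self.relayfee. The shared helper keeps the latter form, which at regtest's default relay fee works out to the same 0.001 BTC and comfortably clears the minimum relay rate for a ~66 kB transaction. A rough check, assuming the default minrelaytxfee of 0.00001 BTC/kB (editor's illustration only):

    relayfee = 0.00001               # BTC/kB, assumed regtest default
    tx_size_kb = 67                  # each stuffed transaction is roughly 66-67 kB
    min_fee = tx_size_kb * relayfee  # ~0.00067 BTC needed to meet the relay rate
    fee_paid = 100 * relayfee        # 0.001 BTC, what mine_large_block pays
    assert fee_paid > min_fee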
qa/rpc-tests/test_framework/util.py

@@ -657,13 +657,12 @@ def create_tx(node, coinbase, to_address, amount):
 def create_lots_of_big_transactions(node, txouts, utxos, fee):
     addr = node.getnewaddress()
     txids = []
-    for i in range(len(utxos)):
+    for _ in range(len(utxos)):
         t = utxos.pop()
-        inputs = []
-        inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
+        inputs=[{ "txid" : t["txid"], "vout" : t["vout"]}]
         outputs = {}
-        send_value = t['amount'] - fee
-        outputs[addr] = satoshi_round(send_value)
+        change = t['amount'] - fee
+        outputs[addr] = satoshi_round(change)
         rawtx = node.createrawtransaction(inputs, outputs)
         newtx = rawtx[0:92]
         newtx = newtx + txouts

@@ -673,6 +672,15 @@ def create_lots_of_big_transactions(node, txouts, utxos, fee):
         txids.append(txid)
     return txids
 
+def mine_large_block(node):
+    # generate a 66k transaction,
+    # and 14 of them is close to the 1MB block limit
+    txouts = gen_return_txouts()
+    utxos = node.listunspent()[:14]
+    fee = 100 * node.getnetworkinfo()["relayfee"]
+    create_lots_of_big_transactions(node, txouts, utxos, fee=fee)
+    node.generate(1)
+
 def get_bip9_status(node, key):
     info = node.getblockchaininfo()
     return info['bip9_softforks'][key]
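
The new mine_large_block leans on gen_return_txouts, which this diff does not show: it returns a hex blob of 128 large OP_RETURN outputs (prefixed with the new output count) that create_lots_of_big_transactions splices into each transaction, pushing each one to roughly 66 kB so that 14 of them nearly fill a 1 MB block. A sketch of the idea, not the exact util.py code:

    def sketch_return_txouts():
        # OP_RETURN OP_PUSHDATA2 <512 bytes of 0x01>: a 516-byte unspendable scriptPubKey
        script_pubkey = "6a4d0200" + "01" * 512
        txouts = "81"                     # varint output count: 129 (128 big outputs + 1 change output)
        for _ in range(128):
            txouts += "0000000000000000"  # 8-byte zero value
            txouts += "fd0402"            # varint: 516-byte scriptPubKey length
            txouts += script_pubkey
        return txouts                     # ~527 bytes per output, ~67 kB per transaction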