Browse Source

Merge #9580: Fix various minor linearization script issues

b9d95bd Fix various minor linearization script issues (Douglas Roark)
0.14
Wladimir J. van der Laan 8 years ago
parent
commit
ae972a5e99
No known key found for this signature in database
GPG Key ID: 74810B012346C9A6
  1. 10
      contrib/linearize/README.md
  2. 13
      contrib/linearize/example-linearize.cfg
  3. 36
      contrib/linearize/linearize-data.py

10
contrib/linearize/README.md

@@ -32,8 +32,11 @@ Required configuration file settings:
* `output`: Output directory for linearized `blocks/blkNNNNN.dat` output. * `output`: Output directory for linearized `blocks/blkNNNNN.dat` output.
Optional config file setting for linearize-data: Optional config file setting for linearize-data:
* `file_timestamp`: Set each file's last-modified time to that of the most * `debug_output`: Some printouts may not always be desired. If true, such output
recent block in that file. will be printed.
* `file_timestamp`: Set each file's last-accessed and last-modified times,
respectively, to the current time and to the timestamp of the most recent block
written to the script's blockchain.
* `genesis`: The hash of the genesis block in the blockchain. * `genesis`: The hash of the genesis block in the blockchain.
* `input`: bitcoind blocks/ directory containing blkNNNNN.dat * `input`: bitcoind blocks/ directory containing blkNNNNN.dat
* `hashlist`: text file containing list of block hashes created by * `hashlist`: text file containing list of block hashes created by
@@ -41,6 +44,9 @@ linearize-hashes.py.
* `max_out_sz`: Maximum size for files created by the `output_file` option. * `max_out_sz`: Maximum size for files created by the `output_file` option.
(Default: `1000*1000*1000 bytes`) (Default: `1000*1000*1000 bytes`)
* `netmagic`: Network magic number. * `netmagic`: Network magic number.
* `out_of_order_cache_sz`: If out-of-order blocks are being read, the block can
be written to a cache so that the blockchain doesn't have to be sought again.
This option specifies the cache size. (Default: `100*1000*1000 bytes`)
* `rev_hash_bytes`: If true, the block hash list written by linearize-hashes.py * `rev_hash_bytes`: If true, the block hash list written by linearize-hashes.py
will be byte-reversed when read by linearize-data.py. See the linearize-hashes will be byte-reversed when read by linearize-data.py. See the linearize-hashes
entry for more information. entry for more information.

13
contrib/linearize/example-linearize.cfg

@@ -1,4 +1,3 @@
# bitcoind RPC settings (linearize-hashes) # bitcoind RPC settings (linearize-hashes)
rpcuser=someuser rpcuser=someuser
rpcpassword=somepassword rpcpassword=somepassword
@@ -21,6 +20,9 @@ input=/home/example/.bitcoin/blocks
#genesis=000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943 #genesis=000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943
#input=/home/example/.bitcoin/testnet3/blocks #input=/home/example/.bitcoin/testnet3/blocks
# "output" option causes blockchain files to be written to the given location,
# with "output_file" ignored. If not used, "output_file" is used instead.
# output=/home/example/blockchain_directory
output_file=/home/example/Downloads/bootstrap.dat output_file=/home/example/Downloads/bootstrap.dat
hashlist=hashlist.txt hashlist=hashlist.txt
@@ -29,3 +31,12 @@ out_of_order_cache_sz = 100000000
# Do we want to reverse the hash bytes coming from getblockhash? # Do we want to reverse the hash bytes coming from getblockhash?
rev_hash_bytes = False rev_hash_bytes = False
# On a new month, do we want to set the access and modify times of the new
# blockchain file?
file_timestamp = 0
# Do we want to split the blockchain files given a new month or specific height?
split_timestamp = 0
# Do we want debug printouts?
debug_output = False

36
contrib/linearize/linearize-data.py

@@ -134,7 +134,7 @@ class BlockDataCopier:
if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz): if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz):
self.outF.close() self.outF.close()
if self.setFileTime: if self.setFileTime:
os.utime(outFname, (int(time.time()), highTS)) os.utime(self.outFname, (int(time.time()), self.highTS))
self.outF = None self.outF = None
self.outFname = None self.outFname = None
self.outFn = self.outFn + 1 self.outFn = self.outFn + 1
@@ -142,12 +142,12 @@ class BlockDataCopier:
(blkDate, blkTS) = get_blk_dt(blk_hdr) (blkDate, blkTS) = get_blk_dt(blk_hdr)
if self.timestampSplit and (blkDate > self.lastDate): if self.timestampSplit and (blkDate > self.lastDate):
print("New month " + blkDate.strftime("%Y-%m") + " @ " + hash_str) print("New month " + blkDate.strftime("%Y-%m") + " @ " + self.hash_str)
lastDate = blkDate self.lastDate = blkDate
if outF: if self.outF:
outF.close() self.outF.close()
if setFileTime: if self.setFileTime:
os.utime(outFname, (int(time.time()), highTS)) os.utime(self.outFname, (int(time.time()), self.highTS))
self.outF = None self.outF = None
self.outFname = None self.outFname = None
self.outFn = self.outFn + 1 self.outFn = self.outFn + 1
@@ -155,11 +155,11 @@ class BlockDataCopier:
if not self.outF: if not self.outF:
if self.fileOutput: if self.fileOutput:
outFname = self.settings['output_file'] self.outFname = self.settings['output_file']
else: else:
outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn) self.outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn)
print("Output file " + outFname) print("Output file " + self.outFname)
self.outF = open(outFname, "wb") self.outF = open(self.outFname, "wb")
self.outF.write(inhdr) self.outF.write(inhdr)
self.outF.write(blk_hdr) self.outF.write(blk_hdr)
@@ -223,13 +223,16 @@ class BlockDataCopier:
blk_hdr = self.inF.read(80) blk_hdr = self.inF.read(80)
inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen) inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen)
hash_str = calc_hash_str(blk_hdr) self.hash_str = calc_hash_str(blk_hdr)
if not hash_str in blkmap: if not self.hash_str in blkmap:
print("Skipping unknown block " + hash_str) # Because blocks can be written to files out-of-order as of 0.10, the script
# may encounter blocks it doesn't know about. Treat as debug output.
if settings['debug_output'] == 'true':
print("Skipping unknown block " + self.hash_str)
self.inF.seek(inLen, os.SEEK_CUR) self.inF.seek(inLen, os.SEEK_CUR)
continue continue
blkHeight = self.blkmap[hash_str] blkHeight = self.blkmap[self.hash_str]
self.blkCountIn += 1 self.blkCountIn += 1
if self.blkCountOut == blkHeight: if self.blkCountOut == blkHeight:
@@ -295,12 +298,15 @@ if __name__ == '__main__':
settings['max_out_sz'] = 1000 * 1000 * 1000 settings['max_out_sz'] = 1000 * 1000 * 1000
if 'out_of_order_cache_sz' not in settings: if 'out_of_order_cache_sz' not in settings:
settings['out_of_order_cache_sz'] = 100 * 1000 * 1000 settings['out_of_order_cache_sz'] = 100 * 1000 * 1000
if 'debug_output' not in settings:
settings['debug_output'] = 'false'
settings['max_out_sz'] = int(settings['max_out_sz']) settings['max_out_sz'] = int(settings['max_out_sz'])
settings['split_timestamp'] = int(settings['split_timestamp']) settings['split_timestamp'] = int(settings['split_timestamp'])
settings['file_timestamp'] = int(settings['file_timestamp']) settings['file_timestamp'] = int(settings['file_timestamp'])
settings['netmagic'] = unhexlify(settings['netmagic'].encode('utf-8')) settings['netmagic'] = unhexlify(settings['netmagic'].encode('utf-8'))
settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz']) settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz'])
settings['debug_output'] = settings['debug_output'].lower()
if 'output_file' not in settings and 'output' not in settings: if 'output_file' not in settings and 'output' not in settings:
print("Missing output file / directory") print("Missing output file / directory")

Loading…
Cancel
Save