diff --git a/database/cmd/dbtool/insecureimport.go b/database/cmd/dbtool/insecureimport.go index 5d0715d10..f60274ca5 100644 --- a/database/cmd/dbtool/insecureimport.go +++ b/database/cmd/dbtool/insecureimport.go @@ -131,11 +131,11 @@ func (bi *blockImporter) processBlock(serializedBlock []byte) (bool, error) { } // Don't bother trying to process orphans. - prevHash := &block.MsgBlock().Header.PrevBlock - if !prevHash.IsEqual(&zeroHash) { + prevHashes := block.MsgBlock().Header.PrevBlocks + for _, prevHash := range prevHashes { var exists bool err := bi.db.View(func(tx database.Tx) error { - exists, err = tx.HasBlock(prevHash) + exists, err = tx.HasBlock(&prevHash) return err }) if err != nil { diff --git a/database/example_test.go b/database/example_test.go index 4184a970a..2deef653f 100644 --- a/database/example_test.go +++ b/database/example_test.go @@ -173,5 +173,5 @@ func Example_blockStorageAndRetrieval() { fmt.Printf("Serialized block size: %d bytes\n", len(loadedBlockBytes)) // Output: - // Serialized block size: 285 bytes + // Serialized block size: 254 bytes } diff --git a/database/ffldb/db.go b/database/ffldb/db.go index a935b7726..834621219 100644 --- a/database/ffldb/db.go +++ b/database/ffldb/db.go @@ -14,11 +14,6 @@ import ( "sort" "sync" - "github.com/daglabs/btcd/dagconfig/daghash" - "github.com/daglabs/btcd/database" - "github.com/daglabs/btcd/database/internal/treap" - "github.com/daglabs/btcd/wire" - "github.com/daglabs/btcutil" "github.com/btcsuite/goleveldb/leveldb" "github.com/btcsuite/goleveldb/leveldb/comparer" ldberrors "github.com/btcsuite/goleveldb/leveldb/errors" @@ -26,17 +21,17 @@ import ( "github.com/btcsuite/goleveldb/leveldb/iterator" "github.com/btcsuite/goleveldb/leveldb/opt" "github.com/btcsuite/goleveldb/leveldb/util" + "github.com/daglabs/btcd/dagconfig/daghash" + "github.com/daglabs/btcd/database" + "github.com/daglabs/btcd/database/internal/treap" + "github.com/daglabs/btcd/wire" + "github.com/daglabs/btcutil" ) const ( // 
metadataDbName is the name used for the metadata database. metadataDbName = "metadata" - // blockHdrSize is the size of a block header. This is simply the - // constant from wire and is only provided here for convenience since - // wire.MaxBlockHeaderPayload is quite long. - blockHdrSize = wire.MaxBlockHeaderPayload - // blockHdrOffset defines the offsets into a block index row for the // block header. // @@ -1238,6 +1233,25 @@ func (tx *transaction) fetchBlockRow(hash *daghash.Hash) ([]byte, error) { return blockRow, nil } +// The offset in a block header at which numPrevBlocks resides. +const numPrevBlocksOffset = 4 + +// fetchBlockHeaderSize fetches the numPrevBlocks field out of the block header +// and uses it to compute the total size of the block header +func (tx *transaction) fetchBlockHeaderSize(hash *daghash.Hash) (byte, error) { + r, err := tx.FetchBlockRegion(&database.BlockRegion{ + Hash: hash, + Offset: numPrevBlocksOffset, + Len: 1, + }) + if err != nil { + return 0, err + } + + numPrevBlocks := r[0] + return numPrevBlocks*daghash.HashSize + wire.BaseBlockHeaderPayload, nil +} + // FetchBlockHeader returns the raw serialized bytes for the block header // identified by the given hash. The raw bytes are in the format returned by // Serialize on a wire.BlockHeader. @@ -1255,13 +1269,41 @@ func (tx *transaction) fetchBlockRow(hash *daghash.Hash) ([]byte, error) { // // This function is part of the database.Tx interface implementation. 
+// fetchBlockHeadersSizes fetches the numPrevBlocks fields out of the block headers +// and uses them to compute the total sizes of the block headers
func (tx *transaction) FetchBlockHeaders(hashes []daghash.Hash) ([][]byte, error) { + headerSizes, err := tx.fetchBlockHeadersSizes(hashes) + if err != nil { + return nil, err + } + regions := make([]database.BlockRegion, len(hashes)) for i := range hashes { regions[i].Hash = &hashes[i] regions[i].Offset = 0 - regions[i].Len = blockHdrSize + regions[i].Len = uint32(headerSizes[i]) } return tx.FetchBlockRegions(regions) } diff --git a/database/ffldb/interface_test.go b/database/ffldb/interface_test.go index ff1e1d8a3..f488e59bf 100644 --- a/database/ffldb/interface_test.go +++ b/database/ffldb/interface_test.go @@ -1226,6 +1226,7 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool { allBlockBytes := make([][]byte, len(tc.blocks)) allBlockTxLocs := make([][]wire.TxLoc, len(tc.blocks)) allBlockRegions := make([]database.BlockRegion, len(tc.blocks)) + allBlockHeaderSizes := make([]int, len(tc.blocks)) for i, block := range tc.blocks { blockHash := block.Hash() allBlockHashes[i] = *blockHash @@ -1238,6 +1239,8 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool { } allBlockBytes[i] = blockBytes + allBlockHeaderSizes[i] = block.MsgBlock().Header.SerializeSize() + txLocs, err := block.TxLoc() if err != nil { tc.t.Errorf("block.TxLoc(%d): unexpected error: %v", i, @@ -1260,9 +1263,7 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool { return false } - // Ensure the block header fetched from the database matches the - // expected bytes. 
+ panic(fmt.Errorf("error creating target file: %s", err))
Nonce: 0xc0192550, // 3222873424
0x6f, 0x6e, 0x20, 0x62, 0x72, 0x69, 0x6e, /* | on brin| */ + 0x6b, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x65, 0x63, /* |k of sec|*/ + 0x6f, 0x6e, 0x64, 0x20, 0x62, 0x61, 0x69, 0x6c, /* |ond bail| */ + 0x6f, 0x75, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, /* |out for |*/ + 0x62, 0x61, 0x6e, 0x6b, 0x73, /* |banks| */ + }, + Sequence: 0xffffffff, + }, + }, + TxOut: []*wire.TxOut{ + { + Value: 0x12a05f200, + PkScript: []byte{ + 0x41, 0x04, 0x67, 0x8a, 0xfd, 0xb0, 0xfe, 0x55, /* |A.g....U| */ + 0x48, 0x27, 0x19, 0x67, 0xf1, 0xa6, 0x71, 0x30, /* |H'.g..q0| */ + 0xb7, 0x10, 0x5c, 0xd6, 0xa8, 0x28, 0xe0, 0x39, /* |..\..(.9| */ + 0x09, 0xa6, 0x79, 0x62, 0xe0, 0xea, 0x1f, 0x61, /* |..yb...a| */ + 0xde, 0xb6, 0x49, 0xf6, 0xbc, 0x3f, 0x4c, 0xef, /* |..I..?L.| */ + 0x38, 0xc4, 0xf3, 0x55, 0x04, 0xe5, 0x1e, 0xc1, /* |8..U....| */ + 0x12, 0xde, 0x5c, 0x38, 0x4d, 0xf7, 0xba, 0x0b, /* |..\8M...| */ + 0x8d, 0x57, 0x8a, 0x4c, 0x70, 0x2b, 0x6b, 0xf1, /* |.W.Lp+k.| */ + 0x1d, 0x5f, 0xac, /* |._.| */ + }, + }, + }, + LockTime: 0, +} + +var genesisMerkleRoot = daghash.Hash([daghash.HashSize]byte{ // Make go vet happy. + 0x3b, 0xa3, 0xed, 0xfd, 0x7a, 0x7b, 0x12, 0xb2, + 0x7a, 0xc7, 0x2c, 0x3e, 0x67, 0x76, 0x8f, 0x61, + 0x7f, 0xc8, 0x1b, 0xc3, 0x88, 0x8a, 0x51, 0x32, + 0x3a, 0x9f, 0xb8, 0xaa, 0x4b, 0x1e, 0x5e, 0x4a, +})