Merge branch 'dev-20-primitive-blockdag' into dev-34-update-blockdag

This commit is contained in:
Stas Boutenko 2018-06-21 12:29:12 +03:00
commit ab75fcf5fb
6 changed files with 222 additions and 20 deletions

View File

@ -131,11 +131,11 @@ func (bi *blockImporter) processBlock(serializedBlock []byte) (bool, error) {
} }
// Don't bother trying to process orphans. // Don't bother trying to process orphans.
prevHash := &block.MsgBlock().Header.PrevBlock prevHashes := block.MsgBlock().Header.PrevBlocks
if !prevHash.IsEqual(&zeroHash) { for _, prevHash := range prevHashes {
var exists bool var exists bool
err := bi.db.View(func(tx database.Tx) error { err := bi.db.View(func(tx database.Tx) error {
exists, err = tx.HasBlock(prevHash) exists, err = tx.HasBlock(&prevHash)
return err return err
}) })
if err != nil { if err != nil {

View File

@ -173,5 +173,5 @@ func Example_blockStorageAndRetrieval() {
fmt.Printf("Serialized block size: %d bytes\n", len(loadedBlockBytes)) fmt.Printf("Serialized block size: %d bytes\n", len(loadedBlockBytes))
// Output: // Output:
// Serialized block size: 285 bytes // Serialized block size: 254 bytes
} }

View File

@ -14,11 +14,6 @@ import (
"sort" "sort"
"sync" "sync"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/database"
"github.com/daglabs/btcd/database/internal/treap"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcutil"
"github.com/btcsuite/goleveldb/leveldb" "github.com/btcsuite/goleveldb/leveldb"
"github.com/btcsuite/goleveldb/leveldb/comparer" "github.com/btcsuite/goleveldb/leveldb/comparer"
ldberrors "github.com/btcsuite/goleveldb/leveldb/errors" ldberrors "github.com/btcsuite/goleveldb/leveldb/errors"
@ -26,17 +21,17 @@ import (
"github.com/btcsuite/goleveldb/leveldb/iterator" "github.com/btcsuite/goleveldb/leveldb/iterator"
"github.com/btcsuite/goleveldb/leveldb/opt" "github.com/btcsuite/goleveldb/leveldb/opt"
"github.com/btcsuite/goleveldb/leveldb/util" "github.com/btcsuite/goleveldb/leveldb/util"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/database"
"github.com/daglabs/btcd/database/internal/treap"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcutil"
) )
const ( const (
// metadataDbName is the name used for the metadata database. // metadataDbName is the name used for the metadata database.
metadataDbName = "metadata" metadataDbName = "metadata"
// blockHdrSize is the size of a block header. This is simply the
// constant from wire and is only provided here for convenience since
// wire.MaxBlockHeaderPayload is quite long.
blockHdrSize = wire.MaxBlockHeaderPayload
// blockHdrOffset defines the offsets into a block index row for the // blockHdrOffset defines the offsets into a block index row for the
// block header. // block header.
// //
@ -1238,6 +1233,25 @@ func (tx *transaction) fetchBlockRow(hash *daghash.Hash) ([]byte, error) {
return blockRow, nil return blockRow, nil
} }
// The offset in a block header at which numPrevBlocks resides.
// The 4 bytes preceding it are the header's version field.
const numPrevBlocksOffset = 4
// fetchBlockHeaderSize fetches the numPrevBlocks field out of the block header
// and uses it to compute the total size of the block header.
//
// NOTE(review): the size is computed and returned in byte (uint8) arithmetic,
// so numPrevBlocks*daghash.HashSize + wire.BaseBlockHeaderPayload silently
// wraps once it exceeds 255 — TODO confirm the maximum allowed parent count
// and widen the return type (e.g. to uint32) if larger headers are possible.
func (tx *transaction) fetchBlockHeaderSize(hash *daghash.Hash) (byte, error) {
// Read exactly one byte at the numPrevBlocks offset of the stored block.
r, err := tx.FetchBlockRegion(&database.BlockRegion{
Hash:   hash,
Offset: numPrevBlocksOffset,
Len:    1,
})
if err != nil {
return 0, err
}
numPrevBlocks := r[0]
// Header size = fixed base payload + one hash per previous block.
return numPrevBlocks*daghash.HashSize + wire.BaseBlockHeaderPayload, nil
}
// FetchBlockHeader returns the raw serialized bytes for the block header // FetchBlockHeader returns the raw serialized bytes for the block header
// identified by the given hash. The raw bytes are in the format returned by // identified by the given hash. The raw bytes are in the format returned by
// Serialize on a wire.BlockHeader. // Serialize on a wire.BlockHeader.
@ -1255,13 +1269,41 @@ func (tx *transaction) fetchBlockRow(hash *daghash.Hash) ([]byte, error) {
// //
// This function is part of the database.Tx interface implementation. // This function is part of the database.Tx interface implementation.
func (tx *transaction) FetchBlockHeader(hash *daghash.Hash) ([]byte, error) { func (tx *transaction) FetchBlockHeader(hash *daghash.Hash) ([]byte, error) {
headerSize, err := tx.fetchBlockHeaderSize(hash)
if err != nil {
return nil, err
}
return tx.FetchBlockRegion(&database.BlockRegion{ return tx.FetchBlockRegion(&database.BlockRegion{
Hash: hash, Hash: hash,
Offset: 0, Offset: 0,
Len: blockHdrSize, Len: uint32(headerSize),
}) })
} }
// fetchBlockHeadersSizes fetches the numPrevBlocks fields out of the block headers
// and uses them to compute the total sizes of the block headers.
//
// NOTE(review): like fetchBlockHeaderSize, the sizes are computed in byte
// (uint8) arithmetic and wrap silently above 255 — TODO confirm the bound on
// numPrevBlocks or widen the element type.
func (tx *transaction) fetchBlockHeadersSizes(hashes []daghash.Hash) ([]byte, error) {
// Build one single-byte region per hash, each pointing at numPrevBlocks.
regions := make([]database.BlockRegion, len(hashes))
for i := range hashes {
regions[i].Hash = &hashes[i]
regions[i].Offset = numPrevBlocksOffset
regions[i].Len = 1
}
rs, err := tx.FetchBlockRegions(regions)
if err != nil {
return nil, err
}
// Header size = fixed base payload + one hash per previous block.
sizes := make([]byte, len(hashes))
for i, r := range rs {
numPrevBlocks := r[0]
sizes[i] = numPrevBlocks*daghash.HashSize + wire.BaseBlockHeaderPayload
}
return sizes, nil
}
// FetchBlockHeaders returns the raw serialized bytes for the block headers // FetchBlockHeaders returns the raw serialized bytes for the block headers
// identified by the given hashes. The raw bytes are in the format returned by // identified by the given hashes. The raw bytes are in the format returned by
// Serialize on a wire.BlockHeader. // Serialize on a wire.BlockHeader.
@ -1278,11 +1320,16 @@ func (tx *transaction) FetchBlockHeader(hash *daghash.Hash) ([]byte, error) {
// //
// This function is part of the database.Tx interface implementation. // This function is part of the database.Tx interface implementation.
func (tx *transaction) FetchBlockHeaders(hashes []daghash.Hash) ([][]byte, error) { func (tx *transaction) FetchBlockHeaders(hashes []daghash.Hash) ([][]byte, error) {
headerSizes, err := tx.fetchBlockHeadersSizes(hashes)
if err != nil {
return nil, err
}
regions := make([]database.BlockRegion, len(hashes)) regions := make([]database.BlockRegion, len(hashes))
for i := range hashes { for i := range hashes {
regions[i].Hash = &hashes[i] regions[i].Hash = &hashes[i]
regions[i].Offset = 0 regions[i].Offset = 0
regions[i].Len = blockHdrSize regions[i].Len = uint32(headerSizes[i])
} }
return tx.FetchBlockRegions(regions) return tx.FetchBlockRegions(regions)
} }

View File

@ -1226,6 +1226,7 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool {
allBlockBytes := make([][]byte, len(tc.blocks)) allBlockBytes := make([][]byte, len(tc.blocks))
allBlockTxLocs := make([][]wire.TxLoc, len(tc.blocks)) allBlockTxLocs := make([][]wire.TxLoc, len(tc.blocks))
allBlockRegions := make([]database.BlockRegion, len(tc.blocks)) allBlockRegions := make([]database.BlockRegion, len(tc.blocks))
allBlockHeaderSizes := make([]int, len(tc.blocks))
for i, block := range tc.blocks { for i, block := range tc.blocks {
blockHash := block.Hash() blockHash := block.Hash()
allBlockHashes[i] = *blockHash allBlockHashes[i] = *blockHash
@ -1238,6 +1239,8 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool {
} }
allBlockBytes[i] = blockBytes allBlockBytes[i] = blockBytes
allBlockHeaderSizes[i] = block.MsgBlock().Header.SerializeSize()
txLocs, err := block.TxLoc() txLocs, err := block.TxLoc()
if err != nil { if err != nil {
tc.t.Errorf("block.TxLoc(%d): unexpected error: %v", i, tc.t.Errorf("block.TxLoc(%d): unexpected error: %v", i,
@ -1260,9 +1263,7 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool {
return false return false
} }
// Ensure the block header fetched from the database matches the wantHeaderBytes := blockBytes[0:allBlockHeaderSizes[i]]
// expected bytes.
wantHeaderBytes := blockBytes[0:wire.MaxBlockHeaderPayload]
gotHeaderBytes, err := tx.FetchBlockHeader(blockHash) gotHeaderBytes, err := tx.FetchBlockHeader(blockHash)
if err != nil { if err != nil {
tc.t.Errorf("FetchBlockHeader(%s): unexpected error: %v", tc.t.Errorf("FetchBlockHeader(%s): unexpected error: %v",
@ -1405,7 +1406,7 @@ func testFetchBlockIO(tc *testContext, tx database.Tx) bool {
} }
for i := 0; i < len(blockHeaderData); i++ { for i := 0; i < len(blockHeaderData); i++ {
blockHash := allBlockHashes[i] blockHash := allBlockHashes[i]
wantHeaderBytes := allBlockBytes[i][0:wire.MaxBlockHeaderPayload] wantHeaderBytes := allBlockBytes[i][0:allBlockHeaderSizes[i]]
gotHeaderBytes := blockHeaderData[i] gotHeaderBytes := blockHeaderData[i]
if !bytes.Equal(gotHeaderBytes, wantHeaderBytes) { if !bytes.Equal(gotHeaderBytes, wantHeaderBytes) {
tc.t.Errorf("FetchBlockHeaders(%s): bytes mismatch: "+ tc.t.Errorf("FetchBlockHeaders(%s): bytes mismatch: "+

Binary file not shown.

154
database/testdata/generator.go vendored Normal file
View File

@ -0,0 +1,154 @@
// This is a small tool to generate testdata blocks file
package main
import (
"bytes"
"encoding/binary"
"fmt"
"os"
"strconv"
"time"
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/wire"
)
// main parses the command-line arguments, creates the target file and
// streams the requested number of generated test blocks into it.
func main() {
	targetFile, numBlocks := parseArgs()

	out, err := os.Create(targetFile)
	if err != nil {
		// Bug fix: this is a create failure, not a read failure.
		panic(fmt.Errorf("error creating target file: %s", err))
	}
	defer func() {
		err := out.Close()
		if err != nil {
			panic(fmt.Errorf("error closing target file: %s", err))
		}
	}()

	generateBlocks(out, numBlocks)
}
// generateBlocks writes a chain of numBlocks blocks to out, each new block
// referencing the previous one, starting from the mainnet genesis block.
func generateBlocks(out *os.File, numBlocks int) {
	parent := dagconfig.MainNetParams.GenesisBlock
	for blockNum := 0; blockNum < numBlocks; blockNum++ {
		block := generateBlock(parent)
		writeBlock(out, block)
		parent = block
	}
}
// generateBlock builds a new block whose single parent is the given block.
// Every header field other than the parent hash is a fixed constant so the
// generated test data is deterministic.
func generateBlock(parent *wire.MsgBlock) *wire.MsgBlock {
	return &wire.MsgBlock{
		Header: wire.BlockHeader{
			Version:       1,
			NumPrevBlocks: 1,
			PrevBlocks:    []daghash.Hash{parent.BlockHash()},
			MerkleRoot:    genesisMerkleRoot,
			Timestamp:     time.Unix(0x5b28c4c8, 0), // 2018-06-19 08:54:32 +0000 UTC
			Bits:          0x1e00ffff,               // 503382015 [000000ffff000000000000000000000000000000000000000000000000000000]
			Nonce:         0xc0192550,               // 3222873424 (comment fix: previous decimal 2148484547 did not match the hex)
		},
		Transactions: []*wire.MsgTx{&genesisCoinbaseTx},
	}
}
// writeBlock appends one block record to out in the testdata format:
// network magic, then the serialized block length as a little-endian
// uint32, then the serialized block bytes.
func writeBlock(out *os.File, block *wire.MsgBlock) {
	writeNet(out)

	var serialized bytes.Buffer
	serialized.Grow(block.SerializeSize())
	if err := block.Serialize(&serialized); err != nil {
		panic(fmt.Errorf("error serializing block: %s", err))
	}

	// Length prefix first, then the payload itself.
	if err := binary.Write(out, binary.LittleEndian, uint32(serialized.Len())); err != nil {
		panic(fmt.Errorf("error writing blockLen: %s", err))
	}
	if _, err := out.Write(serialized.Bytes()); err != nil {
		panic(fmt.Errorf("error writing block: %s", err))
	}
}
// writeNet writes the mainnet network magic to out in little-endian order.
func writeNet(out *os.File) {
	if err := binary.Write(out, binary.LittleEndian, wire.MainNet); err != nil {
		panic(fmt.Errorf("error writing net to file: %s", err))
	}
}
// parseArgs extracts the target file path and the block count from the
// command line. On malformed input it prints usage and exits the process.
func parseArgs() (targetFile string, numBlocks int) {
	if len(os.Args) != 3 {
		printUsage()
	}

	targetFile = os.Args[1]

	var err error
	numBlocks, err = strconv.Atoi(os.Args[2])
	if err != nil {
		printUsage()
	}

	return targetFile, numBlocks
}
// printUsage prints this tool's command-line usage and terminates the
// process with a non-zero exit code.
func printUsage() {
	fmt.Printf("Usage: generator [targetFile] [numBlocks]\n")
	os.Exit(1)
}
// genesisCoinbaseTx is the coinbase transaction embedded in every generated
// block. Its signature script carries the well-known "The Times 03/Jan/2009
// Chancellor on brink of second bailout for banks" text (see the byte
// comments below), and its single output pays 50 coins to a pay-to-pubkey
// script.
var genesisCoinbaseTx = wire.MsgTx{
Version: 1,
TxIn: []*wire.TxIn{
{
PreviousOutPoint: wire.OutPoint{
Hash:  daghash.Hash{},
Index: 0xffffffff,
},
SignatureScript: []byte{
0x04, 0xff, 0xff, 0x00, 0x1d, 0x01, 0x04, 0x45, /* |.......E| */
0x54, 0x68, 0x65, 0x20, 0x54, 0x69, 0x6d, 0x65, /* |The Time| */
0x73, 0x20, 0x30, 0x33, 0x2f, 0x4a, 0x61, 0x6e, /* |s 03/Jan| */
0x2f, 0x32, 0x30, 0x30, 0x39, 0x20, 0x43, 0x68, /* |/2009 Ch| */
0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x6f, 0x72, /* |ancellor| */
0x20, 0x6f, 0x6e, 0x20, 0x62, 0x72, 0x69, 0x6e, /* | on brin| */
0x6b, 0x20, 0x6f, 0x66, 0x20, 0x73, 0x65, 0x63, /* |k of sec|*/
0x6f, 0x6e, 0x64, 0x20, 0x62, 0x61, 0x69, 0x6c, /* |ond bail| */
0x6f, 0x75, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, /* |out for |*/
0x62, 0x61, 0x6e, 0x6b, 0x73, /* |banks| */
},
Sequence: 0xffffffff,
},
},
TxOut: []*wire.TxOut{
{
// 0x12a05f200 == 5,000,000,000 base units (50 coins at 1e8).
Value: 0x12a05f200,
PkScript: []byte{
0x41, 0x04, 0x67, 0x8a, 0xfd, 0xb0, 0xfe, 0x55, /* |A.g....U| */
0x48, 0x27, 0x19, 0x67, 0xf1, 0xa6, 0x71, 0x30, /* |H'.g..q0| */
0xb7, 0x10, 0x5c, 0xd6, 0xa8, 0x28, 0xe0, 0x39, /* |..\..(.9| */
0x09, 0xa6, 0x79, 0x62, 0xe0, 0xea, 0x1f, 0x61, /* |..yb...a| */
0xde, 0xb6, 0x49, 0xf6, 0xbc, 0x3f, 0x4c, 0xef, /* |..I..?L.| */
0x38, 0xc4, 0xf3, 0x55, 0x04, 0xe5, 0x1e, 0xc1, /* |8..U....| */
0x12, 0xde, 0x5c, 0x38, 0x4d, 0xf7, 0xba, 0x0b, /* |..\8M...| */
0x8d, 0x57, 0x8a, 0x4c, 0x70, 0x2b, 0x6b, 0xf1, /* |.W.Lp+k.| */
0x1d, 0x5f, 0xac, /* |._.| */
},
},
},
LockTime: 0,
}
// genesisMerkleRoot is the merkle root stamped into every generated block
// header. It is constant because each generated block carries the same
// single transaction (genesisCoinbaseTx).
var genesisMerkleRoot = daghash.Hash([daghash.HashSize]byte{ // Make go vet happy.
0x3b, 0xa3, 0xed, 0xfd, 0x7a, 0x7b, 0x12, 0xb2,
0x7a, 0xc7, 0x2c, 0x3e, 0x67, 0x76, 0x8f, 0x61,
0x7f, 0xc8, 0x1b, 0xc3, 0x88, 0x8a, 0x51, 0x32,
0x3a, 0x9f, 0xb8, 0xaa, 0x4b, 0x1e, 0x5e, 0x4a,
})