[NOD-87] Rename hash to txID when necessary (#283)

* [NOD-87] Rename hash to txID when necessary

* [NOD-87] Fix NewTxIDFromStr error messages in msgtx_test.go
Authored by Ori Newman on 2019-05-02 14:59:59 +03:00, committed by Evgeny Khirin
parent 22046bebc5
commit 6163d3b4ec
7 changed files with 28 additions and 25 deletions

View File

@@ -866,10 +866,10 @@ func (dus *DiffUTXOSet) collection() utxoCollection {
 }
 
 func TestUTXOSetAddEntry(t *testing.T) {
-	hash0, _ := daghash.NewTxIDFromStr("0000000000000000000000000000000000000000000000000000000000000000")
-	hash1, _ := daghash.NewTxIDFromStr("1111111111111111111111111111111111111111111111111111111111111111")
-	outPoint0 := wire.NewOutPoint(hash0, 0)
-	outPoint1 := wire.NewOutPoint(hash1, 0)
+	txID0, _ := daghash.NewTxIDFromStr("0000000000000000000000000000000000000000000000000000000000000000")
+	txID1, _ := daghash.NewTxIDFromStr("1111111111111111111111111111111111111111111111111111111111111111")
+	outPoint0 := wire.NewOutPoint(txID0, 0)
+	outPoint1 := wire.NewOutPoint(txID1, 0)
 	utxoEntry0 := NewUTXOEntry(&wire.TxOut{PkScript: []byte{}, Value: 10}, true, 0)
 	utxoEntry1 := NewUTXOEntry(&wire.TxOut{PkScript: []byte{}, Value: 20}, false, 1)

View File

@@ -16,6 +16,9 @@ import (
 // HashSize of array used to store hashes. See Hash.
 const HashSize = 32
 
+// TxIDSize of array used to store TxID. See TxID.
+const TxIDSize = HashSize
+
 // MaxHashStringSize is the maximum length of a Hash hash string.
 const MaxHashStringSize = HashSize * 2
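For orientation, here is a minimal self-contained sketch of how the new constant relates to the existing one, assuming (as the comment suggests) that TxID is a 32-byte array type mirroring Hash; the local names below only imitate the daghash package and are not part of this commit:

package main

import "fmt"

// Stand-ins for the presumed daghash definitions: Hash is a 32-byte array
// and TxID shares the same underlying array, so TxIDSize is HashSize under
// a name that matches the type it describes.
const HashSize = 32
const TxIDSize = HashSize

type Hash [HashSize]byte
type TxID Hash

func main() {
	var id TxID
	// Code that walks a transaction ID byte by byte (e.g. the reversal
	// loop in txsort further down) can now name TxIDSize instead of
	// borrowing HashSize.
	for i := 0; i < TxIDSize/2; i++ {
		id[i], id[TxIDSize-1-i] = id[TxIDSize-1-i], id[i]
	}
	fmt.Println(len(id) == TxIDSize) // true
}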

View File

@@ -77,7 +77,7 @@ func makeTestOutput(r *rpctest.Harness, t *testing.T,
 	}
 
 	utxo := &wire.OutPoint{
-		Hash:  fundTx.TxID(),
+		TxID:  fundTx.TxID(),
 		Index: outputIndex,
 	}
@@ -282,7 +282,7 @@ func createCSVOutput(r *rpctest.Harness, t *testing.T,
 	}
 
 	utxo := &wire.OutPoint{
-		Hash:  tx.TxID(),
+		TxID:  tx.TxID(),
 		Index: outputIndex,
 	}

View File

@@ -837,8 +837,8 @@ func (mp *TxPool) maybeAcceptTransaction(tx *util.Tx, isNew, rateLimit, rejectDu
 			// is replaced and taking its address directly would
 			// result in all of the entries pointing to the same
 			// memory location and thus all be the final hash.
-			hashCopy := txIn.PreviousOutPoint.TxID
-			missingParents = append(missingParents, &hashCopy)
+			txIDCopy := txIn.PreviousOutPoint.TxID
+			missingParents = append(missingParents, &txIDCopy)
 		}
 		if mp.isTransactionInPool(&txIn.PreviousOutPoint.TxID) {
 			parentsInPool = append(parentsInPool, &txIn.PreviousOutPoint)
@@ -1293,10 +1293,10 @@ func (mp *TxPool) RawMempoolVerbose() map[string]*btcjson.GetRawMempoolVerboseRe
 			Depends: make([]string, 0),
 		}
 		for _, txIn := range tx.MsgTx().TxIn {
-			hash := &txIn.PreviousOutPoint.TxID
-			if mp.haveTransaction(hash) {
+			txID := &txIn.PreviousOutPoint.TxID
+			if mp.haveTransaction(txID) {
 				mpd.Depends = append(mpd.Depends,
-					hash.String())
+					txID.String())
 			}
 		}
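The comment kept in the first hunk describes a general Go pitfall rather than anything specific to this rename: when pointers are collected inside a loop, each one must point at a per-iteration copy, or every stored pointer ends up referring to the same final value. A standalone sketch of both versions, with a small local type standing in for daghash.TxID (the behaviour shown assumes the pre-Go 1.22 loop-variable semantics this code was written under):

package main

import "fmt"

type txID [32]byte

func main() {
	ids := []txID{{1}, {2}, {3}}

	// Buggy: id is a single variable reused on every iteration, so every
	// appended pointer refers to the same memory and all end up holding
	// the final ID.
	var bad []*txID
	for _, id := range ids {
		bad = append(bad, &id)
	}

	// Correct, matching the pattern preserved above: copy into a fresh
	// variable first and take the address of the copy.
	var good []*txID
	for _, id := range ids {
		idCopy := id
		good = append(good, &idCopy)
	}

	fmt.Println(bad[0][0], bad[1][0], bad[2][0])    // 3 3 3 (before Go 1.22)
	fmt.Println(good[0][0], good[1][0], good[2][0]) // 1 2 3
}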

View File

@@ -605,11 +605,11 @@ func TestProcessTransaction(t *testing.T) {
 		t.Fatalf("Script: error creating wrappedP2shNonSigScript: %v", err)
 	}
 
-	dummyPrevOutHash, err := daghash.NewTxIDFromStr("01")
+	dummyPrevOutTxID, err := daghash.NewTxIDFromStr("01")
 	if err != nil {
 		t.Fatalf("NewShaHashFromStr: unexpected error: %v", err)
 	}
-	dummyPrevOut := wire.OutPoint{TxID: *dummyPrevOutHash, Index: 1}
+	dummyPrevOut := wire.OutPoint{TxID: *dummyPrevOutTxID, Index: 1}
 	dummySigScript := bytes.Repeat([]byte{0x00}, 65)
 	addrHash := [20]byte{0x01}
@@ -766,7 +766,7 @@ func TestAddrIndex(t *testing.T) {
 	})
 	defer guard.Unpatch()
 
 	enteredRemoveUnconfirmedTx := false
-	guard = monkey.Patch((*indexers.AddrIndex).RemoveUnconfirmedTx, func(idx *indexers.AddrIndex, hash *daghash.TxID) {
+	guard = monkey.Patch((*indexers.AddrIndex).RemoveUnconfirmedTx, func(_ *indexers.AddrIndex, _ *daghash.TxID) {
 		enteredRemoveUnconfirmedTx = true
 	})
 	defer guard.Unpatch()

View File

@@ -69,20 +69,20 @@ func (s sortableInputSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
 // First sort based on input hash (reversed / rpc-style), then index.
 func (s sortableInputSlice) Less(i, j int) bool {
 	// Input hashes are the same, so compare the index.
-	ihash := s[i].PreviousOutPoint.TxID
-	jhash := s[j].PreviousOutPoint.TxID
-	if ihash == jhash {
+	iTxID := s[i].PreviousOutPoint.TxID
+	jTxID := s[j].PreviousOutPoint.TxID
+	if iTxID == jTxID {
 		return s[i].PreviousOutPoint.Index < s[j].PreviousOutPoint.Index
 	}
 
 	// At this point, the hashes are not equal, so reverse them to
 	// big-endian and return the result of the comparison.
-	const hashSize = daghash.HashSize
-	for b := 0; b < hashSize/2; b++ {
-		ihash[b], ihash[hashSize-1-b] = ihash[hashSize-1-b], ihash[b]
-		jhash[b], jhash[hashSize-1-b] = jhash[hashSize-1-b], jhash[b]
+	const txIDSize = daghash.TxIDSize
+	for b := 0; b < txIDSize/2; b++ {
+		iTxID[b], iTxID[txIDSize-1-b] = iTxID[txIDSize-1-b], iTxID[b]
+		jTxID[b], jTxID[txIDSize-1-b] = jTxID[txIDSize-1-b], jTxID[b]
 	}
-	return bytes.Compare(ihash[:], jhash[:]) == -1
+	return bytes.Compare(iTxID[:], jTxID[:]) == -1
 }
 
 // Output comparison function.
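For readers unfamiliar with the "reversed / rpc-style" wording in the comment above: transaction IDs are rendered as strings in reverse byte order, so the sort reverses both IDs into that display ordering before comparing them bytewise. A hedged, self-contained sketch of the same comparison, with a local type standing in for daghash.TxID:

package main

import (
	"bytes"
	"fmt"
)

const txIDSize = 32

type txID [txIDSize]byte

// lessRPCStyle mirrors sortableInputSlice.Less above: reverse both IDs into
// the big-endian order used when IDs are printed, then compare bytewise.
// The parameters are array copies, so the in-place reversal does not touch
// the callers' IDs.
func lessRPCStyle(a, b txID) bool {
	for i := 0; i < txIDSize/2; i++ {
		a[i], a[txIDSize-1-i] = a[txIDSize-1-i], a[i]
		b[i], b[txIDSize-1-i] = b[txIDSize-1-i], b[i]
	}
	return bytes.Compare(a[:], b[:]) == -1
}

func main() {
	// a's last byte (most significant in display order) is smaller than
	// b's, so a sorts first even though its first byte is larger.
	var a, b txID
	a[0], a[txIDSize-1] = 0xff, 0x01
	b[0], b[txIDSize-1] = 0x00, 0x02
	fmt.Println(lessRPCStyle(a, b)) // true
}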

View File

@@ -132,7 +132,7 @@ func TestTxHashAndID(t *testing.T) {
 	txID1Str := "5b92e6ed52bc78745905e0d104069e46407f62ea8d7d2bce78cd13f80ce220dc"
 	wantTxID1, err := daghash.NewTxIDFromStr(txID1Str)
 	if err != nil {
-		t.Errorf("NewHashFromStr: %v", err)
+		t.Errorf("NewTxIDFromStr: %v", err)
 		return
 	}
@@ -180,14 +180,14 @@ func TestTxHashAndID(t *testing.T) {
 	hash2Str := "b11924b7eeffea821522222576c53dc5b8ddd97602f81e5e124d2626646d74ca"
 	wantHash2, err := daghash.NewHashFromStr(hash2Str)
 	if err != nil {
-		t.Errorf("NewHashFromStr: %v", err)
+		t.Errorf("NewTxIDFromStr: %v", err)
 		return
 	}
 
 	id2Str := "750499ae9e6d44961ef8bad8af27a44dd4bcbea166b71baf181e8d3997e1ff72"
 	wantID2, err := daghash.NewTxIDFromStr(id2Str)
 	if err != nil {
-		t.Errorf("NewHashFromStr: %v", err)
+		t.Errorf("NewTxIDFromStr: %v", err)
 		return
 	}
 
 	payload := []byte{1, 2, 3}