[DEV-134] Implement Continuous Integration (#105)

* [DEV-134] Implement Continuous Integration

Squashed commit:

[5e41d830] Dev 223 fix txindex (#100)

* [DEV-201] In handleGetBlockDAGInfo calculate difficulty by the tip with the lowest bits

* [DEV-202] Move VirtualBlock.GetUTXOEntry to BlockDAG

* [DEV-203] Move VirtualBlock.SelectedTip() to BlockDAG

* [DEV-203] Move VirtualBlock.SelectedTip() to BlockDAG

* [DEV-204] Unexport VirtualBlock() and add CalcMedianTime method for DAG

* [DEV-204] add explanation about difficulty in CurrentBits() comment

* [DEV-204] unexport VirtualBlock type

* [DEV-223] make applyUTXOChanges return pastUTXOResults

* [DEV-223] add bluestxdata for current block as well

* [DEV-223] re-design tx index

* [DEV-223] edit txindex comments

* [DEV-223] rename BluesTxData -> AcceptedTxData, and return from applyUTXOChanges only transactions that got accepted

* [DEV-223] add unit test for txindex

* [DEV-223] fix comments and unite blueTransaction and AcceptedTxData to one type

* [DEV-223] use bucket cursor for dbFetchFirstTxRegion

* [DEV-223] use the same cursor instance for dbFetchFirstTxRegion

* [DEV-223] write in dbFetchFirstTxRegion's comment that it returns the first block region

* [DEV-223] rename type BlueBlockTransaction to TxWithBlockHash

* [DEV-223] add named return value for applyUTXOChanges

[4c95e293] [DEV-134] Made golint ignore the vendor directory.
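
A minimal sketch of that exclusion, assuming golint is installed and all vendored code lives under vendor/:

    golint -set_exit_status $(go list -f '{{.Dir}}' ./... | grep -v '/vendor/')   # lint only non-vendor packages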

[21736dbc] [DEV-134] Renamed ExampleBlockChain_ProcessBlock to ExampleBlockDAG_ProcessBlock to satisfy go vet.

[beea6486] [DEV-134] Removed pushing the built docker image to a remote repository. That's unnecessary at this stage.

[bee911ed] [DEV-134] Made all precompilation checks run on everything instead of only the root dir.

[585f92ae] [DEV-134] Added "github.com/pkg/errors" to dep.

[5f02f570] [DEV-134] -vendor-only is written with only one hyphen.

[3eee7f95] [DEV-134] go vet instead of go tool vet.
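
The standalone invocation go tool vet is deprecated in newer Go toolchains in favour of go vet, and the ./... pattern makes the check cover every package rather than only the root directory. A rough before/after sketch, run from the repository root:

    # old form used in the Dockerfile: standalone vet tool, root directory only
    go tool vet -all .
    # new form: driven by the go command across all packages
    go vet ./...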

[0c2d4343] [DEV-134] Split all the pre-compile checks to separate lines to be able to tell which of them is failing.

[780519c8] [DEV-134] Ran gofmt on everything.

[8247146b] Dev 223 fix txindex (#100)

* [DEV-201] In handleGetBlockDAGInfo calculate difficulty by the tip with the lowest bits

* [DEV-202] Move VirtualBlock.GetUTXOEntry to BlockDAG

* [DEV-203] Move VirtualBlock.SelectedTip() to BlockDAG

* [DEV-203] Move VirtualBlock.SelectedTip() to BlockDAG

* [DEV-204] Unexport VirtualBlock() and add CalcMedianTime method for DAG

* [DEV-204] add explanation about difficulty in CurrentBits() comment

* [DEV-204] unexport VirtualBlock type

* [DEV-223] make applyUTXOChanges return pastUTXOResults

* [DEV-223] add bluestxdata for current block as well

* [DEV-223] re-design tx index

* [DEV-223] edit txindex comments

* [DEV-223] rename BluesTxData -> AcceptedTxData, and return from applyUTXOChanges only transactions that got accepted

* [DEV-223] add unit test for txindex

* [DEV-223] fix comments and unite blueTransaction and AcceptedTxData to one type

* [DEV-223] use bucket cursor for dbFetchFirstTxRegion

* [DEV-223] use the same cursor instance for dbFetchFirstTxRegion

* [DEV-223] write in dbFetchFirstTxRegion's comment that it returns the first block region

* [DEV-223] rename type BlueBlockTransaction to TxWithBlockHash

* [DEV-223] add named return value for applyUTXOChanges

[bff68aa3] [DEV-134] Gave executable permission to deploy.sh
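
For the Jenkins checkout to be able to run the script, the executable bit has to be recorded in git itself; one common way to do that is:

    chmod +x deploy.sh                        # set the mode locally
    git update-index --chmod=+x deploy.sh     # make sure git records mode 100755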

[638a99d9] [DEV-134] Added jenkinsfile and deploy script.

* [DEV-134] Added a robust testing script.

* [DEV-134] Fixed a bash-ism.

* [DEV-134] Disabled testing with coverage for now.

* [DEV-134] Disabled golint and removed removing debug symbols.

* [DEV-134] Disabled aligncheck.

* [DEV-134] Disabled structcheck and varcheck.

* [DEV-134] Added "don't inline functions" to compiler flags for testing.

* [DEV-134] Made build fail if gofmt prints out anything.

* [DEV-134] Fixed misleading comment.

* [DEV-134] Added comments to test.sh.

* [DEV-134] Renamed tm to measure_runtime and removed do_ prefixes from functions.

* [DEV-134] Fixed gofmt line in build script.

* [DEV-134] Fixed gofmt some more.

* [DEV-134]  Fixed gofmt not actually failing due to logical or.
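
Two of the testing-related items above deserve a short illustration. gofmt exits 0 even when files need reformatting, so a check of the form "gofmt -l . || exit 1" never fails; the build has to fail on non-empty output instead. Separately, function inlining interferes with the monkey patching used in the tests, which is why -gcflags='-l' is passed. A condensed sketch of both ideas, assuming a POSIX shell:

    # fail the build if gofmt lists any file that needs reformatting
    GOFMT_RESULT=$(gofmt -l .)
    echo "$GOFMT_RESULT"
    test -z "$GOFMT_RESULT"

    # run the tests with function inlining disabled so monkey patching keeps working
    go test -gcflags='-l' ./...

The Dockerfile and test.sh changes further down apply the same approach to the non-vendor packages only.
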
Author: stasatdaglabs, 2018-10-25 18:58:41 +03:00 (committed by Svarog)
Parent: c4a541d093
Commit: 3ace16ad23
73 changed files with 301 additions and 98 deletions

Gopkg.lock (generated): 8 changed lines
View File

@ -92,6 +92,12 @@
packages = ["."]
revision = "f71540b9dfdcfe64dbf2818e9b66423c6aafcacd"
[[projects]]
branch = "master"
name = "github.com/pkg/errors"
packages = ["."]
revision = "d58f94251046e7f70ac45aceea6cf6f61415ccca"
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
@ -101,6 +107,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "5976f1edbea819ab26c66eb5e5604c255a72f174b7a47bb218f3b8a6733d0c3a"
inputs-digest = "8dc0933d88a4dda8879b373d68550cf2211ad8850c9b091d9894c5d21a92a086"
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -63,6 +63,10 @@
branch = "master"
name = "bou.ke/monkey"
[[constraint]]
branch = "master"
name = "github.com/pkg/errors"
[prune]
go-tests = true
unused-packages = true

Jenkinsfile (new file): 10 added lines
View File

@ -0,0 +1,10 @@
node {
stage 'Checkout'
checkout scm
stage 'Version'
sh './deploy.sh version'
stage 'Build'
sh "./deploy.sh build"
}

View File

@ -44,8 +44,8 @@ func NewHeap() BlockHeap {
return h
}
// Pop removes the block with lowest height from this heap and returns it
func (bh BlockHeap) Pop() *blockNode {
// pop removes the block with lowest height from this heap and returns it
func (bh BlockHeap) pop() *blockNode {
return heap.Pop(bh.impl).(*blockNode)
}

View File

@ -76,7 +76,7 @@ func TestBlockHeap(t *testing.T) {
var poppedBlock *blockNode
if test.expectedPop != nil {
poppedBlock = heap.Pop()
poppedBlock = heap.pop()
}
if heap.Len() != test.expectedLength {
t.Errorf("unexpected heap length in test \"%s\". "+

View File

@ -23,10 +23,10 @@ func TestHashes(t *testing.T) {
)
expected := []daghash.Hash{
daghash.Hash{0},
daghash.Hash{1},
daghash.Hash{2},
daghash.Hash{3},
{0},
{1},
{2},
{3},
}
if !daghash.AreEqual(bs.hashes(), expected) {

View File

@ -22,7 +22,7 @@ import (
// overview documentation describes, this includes all of the Bitcoin consensus
// rules. This example intentionally attempts to insert a duplicate genesis
// block to illustrate how an invalid block is handled.
func ExampleBlockChain_ProcessBlock() {
func ExampleBlockDAG_ProcessBlock() {
// Create a new database to store the accepted blocks into. Typically
// this would be opening an existing database and would not be deleting
// and creating a new database like this, but it is done here so this is

View File

@ -85,7 +85,7 @@ func traverseCandidates(newBlock *blockNode, candidates blockSet, selectedParent
}
for queue.Len() > 0 {
current := queue.Pop()
current := queue.pop()
if candidates.contains(current) {
if current == selectedParent || selectedParentPast.anyChildInSet(current) {
selectedParentPast.add(current)

View File

@ -696,7 +696,7 @@ func validateParents(blockHeader *wire.BlockHeader, parents blockSet) error {
}
}
for queue.Len() > 0 {
current := queue.Pop()
current := queue.pop()
if parents.contains(current) {
return fmt.Errorf("Block %s is both a parent of %s and an"+
" ancestor of another parent",

View File

@ -23,8 +23,8 @@ import (
"github.com/daglabs/btcd/logger"
"github.com/daglabs/btcd/server"
"github.com/daglabs/btcd/signal"
"github.com/daglabs/btcd/version"
"github.com/daglabs/btcd/util/fs"
"github.com/daglabs/btcd/version"
)
const (

View File

@ -12,8 +12,8 @@ import (
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/database"
_ "github.com/daglabs/btcd/database/ffldb"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
flags "github.com/jessevdk/go-flags"
)

View File

@ -14,8 +14,8 @@ import (
"github.com/daglabs/btcd/blockdag"
"github.com/daglabs/btcd/blockdag/indexers"
"github.com/daglabs/btcd/database"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// importResults houses the stats and result as an import operation.

View File

@ -10,8 +10,8 @@ import (
"net"
"net/http"
"github.com/daglabs/btcd/btcjson"
"github.com/btcsuite/go-socks/socks"
"github.com/daglabs/btcd/btcjson"
)
// newHTTPClient returns a new HTTP client that is configured according to the

View File

@ -12,8 +12,8 @@ import (
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/database"
_ "github.com/daglabs/btcd/database/ffldb"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
flags "github.com/jessevdk/go-flags"
)

View File

@ -13,8 +13,8 @@ import (
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/database"
_ "github.com/daglabs/btcd/database/ffldb"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
var (

View File

@ -14,8 +14,8 @@ import (
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/database"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// importCmd defines the configuration options for the insecureimport command.

View File

@ -10,10 +10,10 @@ import (
"sync"
"time"
"github.com/daglabs/btcd/database/internal/treap"
"github.com/btcsuite/goleveldb/leveldb"
"github.com/btcsuite/goleveldb/leveldb/iterator"
"github.com/btcsuite/goleveldb/leveldb/util"
"github.com/daglabs/btcd/database/internal/treap"
)
const (

View File

@ -5,9 +5,9 @@
package ffldb
import (
"github.com/daglabs/btcd/database/internal/treap"
"github.com/btcsuite/goleveldb/leveldb/iterator"
"github.com/btcsuite/goleveldb/leveldb/util"
"github.com/daglabs/btcd/database/internal/treap"
)
// ldbTreapIter wraps a treap iterator to provide the additional functionality

View File

@ -17,12 +17,12 @@ import (
"path/filepath"
"testing"
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/database"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/btcsuite/goleveldb/leveldb"
ldberrors "github.com/btcsuite/goleveldb/leveldb/errors"
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/database"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
var (

deploy.sh (new executable file): 146 added lines
View File

@ -0,0 +1,146 @@
#!/bin/sh
export ENVIRONMENT_NAME=${ENVIRONMENT_NAME:-"dev"}
export CF_STACK_NAME=${CF_STACK_NAME:-"${ENVIRONMENT_NAME}-ECS-BTCD"}
export SERVICE_NAME=${SERVICE_NAME:-"btcd"}
export IMAGE_TAG=${IMAGE_TAG:-"latest"}
# GIT_COMMIT is set by Jenkins
export COMMIT=${COMMIT:-$GIT_COMMIT}
AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION:-eu-central-1}
export AWS_DEFAULT_REGION
AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query 'Account' --output=text)
export AWS_ACCOUNT_ID
ECR_SERVER=${ECR_SERVER:-"$AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com"}
export ECR_SERVER
CF_PARAM=TaskImage
IMAGE_NAME=${ECR_SERVER}/${SERVICE_NAME}
trap "exit 1" INT
fatal() { echo "ERROR: $*" >&2; exit 1; }
measure_runtime() {
START=$(date +%s)
echo "--> $*" >&2
"$@"
rc=$?
echo "--> took $(($(date +%s) - START))s" >&2
return $rc
}
test_git_cli() {
git --version >/dev/null || fatal 'The "git" CLI tool is not available.'
}
test_aws_cli() {
aws --version >/dev/null || fatal 'The "aws" CLI tool is not available.'
aws sts get-caller-identity >/dev/null || fatal 'The "aws" CLI tool is not configured.'
}
test_docker_cli() {
docker --version >/dev/null || fatal 'The "docker" CLI tool is not available.'
}
test_docker_server() {
docker version -f 'Docker server version {{.Server.Version}}, build {{.Server.GitCommit}}' >/dev/null \
|| fatal 'The "docker" server is not available'
}
# fix $COMMIT if executed without Jenkins
if [ -z "$COMMIT" ]; then
test_git_cli
COMMIT=$(git rev-parse --short=7 HEAD)
export COMMIT
fi
version() {
test_git_cli
# place environment variables set by Jenkins into a metadata file
cat <<-EOF > version.txt
GIT_BRANCH=$BRANCH_NAME
GIT_COMMIT=$(git rev-parse --short=12 HEAD)
GIT_AUTHOR_EMAIL=$(git log -1 --pretty='format:%ae')
GIT_AUTHOR_NAME=$(git log -1 --pretty='format:%an')
GIT_AUTHOR_DATE=$(git log -1 --pretty='format:%aI')
EOF
}
login() {
test_aws_cli
eval "$(aws ecr get-login --no-include-email)"
}
build() {
login
test_docker_cli
version
measure_runtime docker build -t "${SERVICE_NAME}:${COMMIT}" . \
-f docker/Dockerfile \
|| fatal 'Failed to build the docker image'
}
create_ecr() {
echo "==> Checking for existance of ECR repository..."
measure_runtime aws ecr describe-repositories --query 'repositories[].repositoryName' \
| grep -E "\"$SERVICE_NAME\"" >/dev/null \
|| {
echo "==> ECR for $SERVICE_NAME does not exist. Creating ..."
measure_runtime aws ecr create-repository --repository-name "$SERVICE_NAME" \
|| fatal 'Failed to create ECR repository'
}
}
push() {
test_aws_cli
test_docker_cli
test_docker_server
build
measure_runtime docker tag "${SERVICE_NAME}:${COMMIT}" "${IMAGE_NAME}:${COMMIT}" || fatal 'Failed to tag docker image'
measure_runtime docker tag "${SERVICE_NAME}:${COMMIT}" "${IMAGE_NAME}:latest" || fatal 'Failed to tag docker image to :latest'
create_ecr
login
measure_runtime docker push "${IMAGE_NAME}:${COMMIT}" || fatal 'Failed to push docker image to ECR'
measure_runtime docker push "${IMAGE_NAME}:latest" || fatal 'Failed to push docker image :latest to ECR'
}
deploy() {
measure_runtime aws cloudformation \
update-stack \
--stack-name "$CF_STACK_NAME" \
--capabilities CAPABILITY_NAMED_IAM \
--use-previous-template \
--parameters "ParameterKey=EnvironmentName,UsePreviousValue=true \
ParameterKey=$CF_PARAM,ParameterValue=${IMAGE_NAME}:$COMMIT" \
|| fatal "Failed to update CloudFormation stack $STACK_NAME."
}
usage() {
echo "Usage: $0 <build|login|push|deploy>"
echo " version - create a version.txt file with some meta data"
echo " build - create docker image named $SERVICE_NAME with tag \$COMMIT"
echo " login - configure docker push credentials to use AWS ECR"
echo " push - tag image as :latest and push both :\$COMMIT and :latest to ECR"
echo " push_all - push for all AWS regions"
echo " deploy - update CloudFormation stack '$CF_STACK_NAME' with ECR image '${SERVICE_NAME}:${COMMIT}'"
}
push_all() {
for AWS_DEFAULT_REGION in 'us-east-1' 'us-east-2'; do
export AWS_DEFAULT_REGION
ECR_SERVER="$AWS_ACCOUNT_ID.dkr.ecr.$AWS_DEFAULT_REGION.amazonaws.com"
export ECR_SERVER
IMAGE_NAME=${ECR_SERVER}/${SERVICE_NAME}
export IMAGE_NAME
push
done
}
case $1 in
version) version ;;
build) build ;;
login) login ;;
push) push ;;
push_all) push_all ;;
deploy) deploy ;;
*) usage ;;
esac
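
Taken together, a typical local run of this script (assuming configured AWS credentials, the aws and docker CLI tools, and a reachable Docker daemon) would be:

    ./deploy.sh build      # builds the image and writes version.txt
    ./deploy.sh push       # tags the image and pushes :$COMMIT and :latest to ECR
    ./deploy.sh deploy     # points the CloudFormation stack at the new image tag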

View File

@ -15,20 +15,22 @@ RUN go get -u github.com/golang/lint/golint \
COPY ./Gopkg.* ./
RUN dep ensure -v --vendor-only
RUN dep ensure -v -vendor-only
COPY . .
RUN gofmt -d -e -s . \
&& go tool vet -all . \
&& golint -set_exit_status . \
&& aligncheck . \
&& structcheck -e . \
&& varcheck -e .
RUN TEST_DIRS=`go list -f {{.Dir}} ./... | grep -v /vendor/`
RUN GOFMT_RESULT=`gofmt -l $TEST_DIRS`; echo $GOFMT_RESULT; test -z "$GOFMT_RESULT"
RUN go vet ./...
# RUN golint -set_exit_status $TEST_DIRS
# RUN aligncheck ./...
# RUN structcheck -e ./...
# RUN varcheck -e ./...
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o btcd .
RUN strip btcd
RUN go test ./...
# Remove the line below and uncomment the line after it for testing with coverage
RUN go test -gcflags='-l' ./...
# RUN ./test.sh
# --- multistage docker build: stage #2: runtime image
FROM alpine

View File

@ -20,8 +20,8 @@ import (
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/integration/rpctest"
"github.com/daglabs/btcd/txscript"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// makeTestOutput creates an on-chain output paying to a freshly generated

View File

@ -18,8 +18,8 @@ import (
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/rpcclient"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
const (

View File

@ -16,8 +16,8 @@ import (
"github.com/daglabs/btcd/dagconfig"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/txscript"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
func testSendOutputs(r *Harness, t *testing.T) {

View File

@ -556,7 +556,7 @@ func TestProcessTransaction(t *testing.T) {
nonStdSigScriptTx := util.NewTx(&wire.MsgTx{
Version: 1,
TxIn: []*wire.TxIn{&wire.TxIn{
TxIn: []*wire.TxIn{{
PreviousOutPoint: wire.OutPoint{Hash: *p2shTx.Hash(), Index: 0},
SignatureScript: wrappedP2SHNonStdSigScript,
Sequence: wire.MaxTxInSequenceNum,
@ -603,7 +603,7 @@ func TestProcessTransaction(t *testing.T) {
//Checks that a transaction with no outputs will get rejected
noOutsTx := util.NewTx(&wire.MsgTx{
Version: 1,
TxIn: []*wire.TxIn{&wire.TxIn{
TxIn: []*wire.TxIn{{
PreviousOutPoint: dummyPrevOut,
SignatureScript: dummySigScript,
Sequence: wire.MaxTxInSequenceNum,
@ -676,7 +676,7 @@ func TestProcessTransaction(t *testing.T) {
tx = util.NewTx(&wire.MsgTx{
Version: 1,
TxIn: []*wire.TxIn{&wire.TxIn{
TxIn: []*wire.TxIn{{
PreviousOutPoint: spendableOuts[5].outPoint,
SignatureScript: []byte{02, 01}, //Unparsable script
Sequence: wire.MaxTxInSequenceNum,

View File

@ -10,8 +10,8 @@ import (
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/mempool"
"github.com/daglabs/btcd/peer"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// PeerNotifier exposes methods to notify peers of status changes to

View File

@ -11,8 +11,8 @@ import (
"time"
"github.com/daglabs/btcd/rpcclient"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
func main() {

View File

@ -14,8 +14,8 @@ import (
"github.com/daglabs/btcd/btcjson"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// FutureDebugLevelResult is a future promise to deliver the result of a

View File

@ -23,9 +23,9 @@ import (
"sync/atomic"
"time"
"github.com/daglabs/btcd/btcjson"
"github.com/btcsuite/go-socks/socks"
"github.com/btcsuite/websocket"
"github.com/daglabs/btcd/btcjson"
)
var (

View File

@ -15,8 +15,8 @@ import (
"github.com/daglabs/btcd/btcjson"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
var (

View File

@ -4214,7 +4214,7 @@ func NewRPCServer(
gbtWorkState: newGbtWorkState(cfg.TimeSource),
helpCacher: newHelpCacher(),
requestProcessShutdown: make(chan struct{}),
quit: make(chan int),
quit: make(chan int),
}
if config.MainConfig().RPCUser != "" && config.MainConfig().RPCPass != "" {
login := config.MainConfig().RPCUser + ":" + config.MainConfig().RPCPass

View File

@ -13,10 +13,10 @@ import (
"github.com/btcsuite/winsvc/eventlog"
"github.com/btcsuite/winsvc/mgr"
"github.com/btcsuite/winsvc/svc"
"github.com/daglabs/btcd/signal"
"github.com/daglabs/btcd/config"
"github.com/daglabs/btcd/version"
"github.com/daglabs/btcd/server"
"github.com/daglabs/btcd/signal"
"github.com/daglabs/btcd/version"
)
const (

test.sh (new executable file): 35 added lines
View File

@ -0,0 +1,35 @@
#!/bin/sh
set -e
export COVERAGE_PATH="./coverage.txt"
export COVERAGE_TEMP_PATH="./coverage.tmp"
# Remove the old coverage file if exists
rm -f ${COVERAGE_PATH}
# Create a new coverage file
echo 'mode: atomic' > ${COVERAGE_PATH}
# Test each package (excluding vendor packages) separately
# Function inlining messes with monkey patching so we disable it by passing -gcflags='-l'
# Running tests with -covermode=atomic saves us from race conditions unique to the testing environment
# We write coverage for every package to a temporary file so that we may append it to one global coverage file
go list ./... | \
grep -v "vendor" | \
xargs -n1 -I{} sh -c "go test -gcflags='-l' -timeout 20s -covermode=atomic -coverprofile=${COVERAGE_TEMP_PATH} {} && tail -n +2 ${COVERAGE_TEMP_PATH} >> ${COVERAGE_PATH}" | \
tee /tmp/test
# Remove the temporary coverage file
rm -f ${COVERAGE_TEMP_PATH}
# Succeed only if everything is 100% covered
grep "ok .* 100.0% of statements" -v /tmp/test > /tmp/test2 || true
if [ -s /tmp/test2 ]
then
echo " >> tests failed or not 100% coverage"
exit 1
else
echo " >> tests completed successfully"
exit 0
fi

View File

@ -7,8 +7,8 @@ package base58_test
import (
"bytes"
"encoding/hex"
"testing"
"github.com/daglabs/btcd/util/base58"
"testing"
)
var stringTests = []struct {

View File

@ -6,8 +6,8 @@ package base58_test
import (
"bytes"
"testing"
"github.com/daglabs/btcd/util/base58"
"testing"
)
func BenchmarkBase58Encode(b *testing.B) {

View File

@ -5,8 +5,8 @@
package base58_test
import (
"testing"
"github.com/daglabs/btcd/util/base58"
"testing"
)
var checkEncodingStringTests = []struct {

View File

@ -5,8 +5,8 @@
package bech32_test
import (
"testing"
"github.com/daglabs/btcd/util/bech32"
"testing"
)
var checkEncodingStringTests = []struct {

View File

@ -1,8 +1,8 @@
package bech32
import (
"testing"
"strings"
"testing"
)
func TestBech32(t *testing.T) {
@ -20,13 +20,13 @@ func TestBech32(t *testing.T) {
{"abcdef:qpzry9x8gf2tvdw0s3jn54khce6mua7:nw2t26kg", true},
{"::qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq40ku0e3z", true},
{"split:checkupstagehandshakeupstreamerranterredcaperred3za27wc5", true},
{"aaa:bbb", false}, // too short
{"split:checkupstagehandshakeupstreamerranterredCaperred3za27wc5", false}, // mixed uppercase and lowercase
{"split:checkupstagehandshakeupstreamerranterredcaperred3za28wc5", false}, // invalid checksum
{"s lit:checkupstagehandshakeupstreamerranterredcaperred3za27wc5", false}, // invalid character (space) in prefix
{"spl" + string(127) + "t:checkupstagehandshakeupstreamerranterredcaperred3za27wc5", false}, // invalid character (DEL) in prefix
{"split:cheosgds2s3c", false}, // invalid character (o) in data part
{"split:te5peu7", false}, // too short data part
{"aaa:bbb", false}, // too short
{"split:checkupstagehandshakeupstreamerranterredCaperred3za27wc5", false}, // mixed uppercase and lowercase
{"split:checkupstagehandshakeupstreamerranterredcaperred3za28wc5", false}, // invalid checksum
{"s lit:checkupstagehandshakeupstreamerranterredcaperred3za27wc5", false}, // invalid character (space) in prefix
{"spl" + string(127) + "t:checkupstagehandshakeupstreamerranterredcaperred3za27wc5", false}, // invalid character (DEL) in prefix
{"split:cheosgds2s3c", false}, // invalid character (o) in data part
{"split:te5peu7", false}, // too short data part
{":checkupstagehandshakeupstreamerranterredcaperred3za27wc5", false}, // empty prefix
{"::qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq40ku0e3z", false}, // too long
{"bitcoincash:qr6m7j9njldwwzlg9v7v53unlr4jkmx6eylep8ekg2", true},

View File

@ -9,9 +9,9 @@ import (
"math/rand"
"time"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/util/bloom"
"github.com/daglabs/btcd/wire"
)
// This example demonstrates how to create a new bloom filter, add a transaction

View File

@ -9,10 +9,10 @@ import (
"math"
"sync"
"github.com/daglabs/btcd/txscript"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/txscript"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// ln2Squared is simply the square of the natural log of 2.

View File

@ -7,8 +7,8 @@ package bloom
import (
"github.com/daglabs/btcd/blockdag"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// merkleBlock is used to house intermediate information needed to generate a

View File

@ -5,8 +5,8 @@
package bloom_test
import (
"testing"
"github.com/daglabs/btcd/util/bloom"
"testing"
)
// TestMurmurHash3 ensure the MurmurHash3 function produces the correct hash

View File

@ -110,7 +110,7 @@ func NewTLSCertPair(organization string, validUntil time.Time, extraHosts []stri
KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature |
x509.KeyUsageCertSign,
IsCA: true, // so can sign self.
IsCA: true, // so can sign self.
BasicConstraintsValid: true,
DNSNames: dnsNames,

View File

@ -10,8 +10,8 @@ import (
"sort"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/wire"
)
// Coin represents a spendable transaction outpoint

View File

@ -12,9 +12,9 @@ import (
"testing"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util"
"github.com/daglabs/btcd/util/coinset"
"github.com/daglabs/btcd/wire"
)
type TestCoin struct {

View File

@ -9,10 +9,10 @@ import (
"crypto/rand"
"encoding/binary"
"github.com/daglabs/btcd/txscript"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util/gcs"
"github.com/daglabs/btcd/dagconfig/daghash"
"github.com/daglabs/btcd/txscript"
"github.com/daglabs/btcd/util/gcs"
"github.com/daglabs/btcd/wire"
)
// DefaultP is the default collision probability (2^-20)

View File

@ -8,9 +8,9 @@ package gcs_test
import (
"bytes"
"encoding/binary"
"github.com/daglabs/btcd/util/gcs"
"math/rand"
"testing"
"github.com/daglabs/btcd/util/gcs"
)
var (

View File

@ -7,9 +7,9 @@ package gcs_test
import (
"encoding/binary"
"github.com/daglabs/btcd/util/gcs"
"math/rand"
"testing"
"github.com/daglabs/btcd/util/gcs"
)
func genRandFilterElements(numElements uint) ([][]byte, error) {

View File

@ -5,8 +5,8 @@
package hdkeychain_test
import (
"testing"
"github.com/daglabs/btcd/util/hdkeychain"
"testing"
)
// bip0032MasterPriv1 is the master private extended key from the first set of

View File

@ -11,8 +11,8 @@ import (
"path/filepath"
"testing"
"github.com/daglabs/btcd/wire"
"github.com/daglabs/btcd/util/txsort"
"github.com/daglabs/btcd/wire"
)
// TestSort ensures the transaction sorting works according to the BIP.