Increase headers proof timeout and add more progress logs (#1939)

* Increase timeout for pruning proof and add some logs

* Show resolving-virtual progress as whole percentages
This commit is contained in:
Ori Newman 2022-02-06 12:42:28 +02:00 committed by GitHub
parent 27ba9d0374
commit 1cd712a63e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 36 additions and 6 deletions

View File

@ -566,7 +566,7 @@ func (flow *handleIBDFlow) syncMissingBlockBodies(highHash *externalapi.DomainHa
progressReporter.reportProgress(len(hashesToRequest), highestProcessedDAAScore)
}
return flow.resolveVirtual()
return flow.resolveVirtual(highestProcessedDAAScore)
}
func (flow *handleIBDFlow) banIfBlockIsHeaderOnly(block *externalapi.DomainBlock) error {
@ -578,10 +578,20 @@ func (flow *handleIBDFlow) banIfBlockIsHeaderOnly(block *externalapi.DomainBlock
return nil
}
func (flow *handleIBDFlow) resolveVirtual() error {
func (flow *handleIBDFlow) resolveVirtual(estimatedVirtualDAAScoreTarget uint64) error {
virtualDAAScoreStart, err := flow.Domain().Consensus().GetVirtualDAAScore()
if err != nil {
return err
}
for i := 0; ; i++ {
if i%10 == 0 {
log.Infof("Resolving virtual. This may take some time...")
virtualDAAScore, err := flow.Domain().Consensus().GetVirtualDAAScore()
if err != nil {
return err
}
log.Infof("Resolving virtual. Estimated progress: %d%%",
int(float64(virtualDAAScore-virtualDAAScoreStart)/float64(estimatedVirtualDAAScoreTarget-virtualDAAScoreStart)*100))
}
virtualChangeSet, isCompletelyResolved, err := flow.Domain().Consensus().ResolveVirtual()
if err != nil {

View File

@ -9,6 +9,7 @@ import (
"github.com/kaspanet/kaspad/domain/consensus/ruleerrors"
"github.com/kaspanet/kaspad/domain/consensus/utils/consensushashing"
"github.com/pkg/errors"
"time"
)
func (flow *handleIBDFlow) ibdWithHeadersProof(highHash *externalapi.DomainHash, highBlockDAAScore uint64) error {
@ -87,7 +88,7 @@ func (flow *handleIBDFlow) syncAndValidatePruningPointProof() (*externalapi.Doma
if err != nil {
return nil, err
}
message, err := flow.incomingRoute.DequeueWithTimeout(common.DefaultTimeout)
message, err := flow.incomingRoute.DequeueWithTimeout(10 * time.Minute)
if err != nil {
return nil, err
}

View File

@ -741,14 +741,28 @@ func (s *consensus) ValidatePruningPointProof(pruningPointProof *externalapi.Pru
s.lock.Lock()
defer s.lock.Unlock()
return s.pruningProofManager.ValidatePruningPointProof(pruningPointProof)
log.Infof("Validating the pruning point proof")
err := s.pruningProofManager.ValidatePruningPointProof(pruningPointProof)
if err != nil {
return err
}
log.Infof("Done validating the pruning point proof")
return nil
}
// ApplyPruningPointProof applies the given (already validated) pruning point
// proof to the consensus state under the consensus lock. Applying a large
// proof can take a long time, so progress is logged before and after the call.
func (s *consensus) ApplyPruningPointProof(pruningPointProof *externalapi.PruningPointProof) error {
	s.lock.Lock()
	defer s.lock.Unlock()

	log.Infof("Applying the pruning point proof")
	// The superseded single-line `return s.pruningProofManager.ApplyPruningPointProof(...)`
	// was removed: leaving it in place made the logging below unreachable.
	err := s.pruningProofManager.ApplyPruningPointProof(pruningPointProof)
	if err != nil {
		return err
	}
	log.Infof("Done applying the pruning point proof")
	return nil
}
func (s *consensus) BlockDAAWindowHashes(blockHash *externalapi.DomainHash) ([]*externalapi.DomainHash, error) {

View File

@ -347,6 +347,7 @@ func (ppm *pruningProofManager) ValidatePruningPointProof(pruningPointProof *ext
selectedTipByLevel := make([]*externalapi.DomainHash, maxLevel+1)
for blockLevel := maxLevel; blockLevel >= 0; blockLevel-- {
log.Infof("Validating level %d from the pruning point proof", blockLevel)
headers := make([]externalapi.BlockHeader, len(pruningPointProof.Headers[blockLevel]))
copy(headers, pruningPointProof.Headers[blockLevel])
@ -617,8 +618,12 @@ func (ppm *pruningProofManager) ApplyPruningPointProof(pruningPointProof *extern
defer onEnd()
for blockLevel, headers := range pruningPointProof.Headers {
log.Infof("Applying level %d from the pruning point proof", blockLevel)
var selectedTip *externalapi.DomainHash
for i, header := range headers {
if i%1000 == 0 {
log.Infof("Applying level %d from the pruning point proof - applied %d headers out of %d", blockLevel, i, len(headers))
}
stagingArea := model.NewStagingArea()
blockHash := consensushashing.HeaderHash(header)