Mirror of https://github.com/bigchaindb/bigchaindb.git (synced 2024-10-13 13:34:05 +00:00)

Commit 3811e9ff74: Merge remote-tracking branch 'ssh_upstream/master' into iss/std-upsert-validator
@@ -91,10 +91,8 @@ def get_assets(conn, asset_ids):


 @register_query(LocalMongoDBConnection)
 def get_spent(conn, transaction_id, output):
-    query = {'inputs.fulfills': {
-        'transaction_id': transaction_id,
-        'output_index': output}}
+    query = {'inputs.fulfills': {'transaction_id': transaction_id,
+                                 'output_index': output}}

     return conn.run(
         conn.collection('transactions')
         .find(query, {'_id': 0}))

@@ -296,7 +294,7 @@ def get_validator_set(conn, height=None):
         .limit(1)
     )

-    return list(cursor)[0]
+    return next(cursor, None)


 @register_query(LocalMongoDBConnection)

@@ -322,3 +320,23 @@ def get_asset_tokens_for_public_key(conn, asset_id, public_key):
         {'$project': {'_id': False}}
     ]))
     return cursor
+
+
+@register_query(LocalMongoDBConnection)
+def store_abci_chain(conn, height, chain_id, is_synced=True):
+    return conn.run(
+        conn.collection('abci_chains').replace_one(
+            {'height': height},
+            {'height': height, 'chain_id': chain_id,
+             'is_synced': is_synced},
+            upsert=True,
+        )
+    )
+
+
+@register_query(LocalMongoDBConnection)
+def get_latest_abci_chain(conn):
+    return conn.run(
+        conn.collection('abci_chains')
+        .find_one(projection={'_id': False}, sort=[('height', DESCENDING)])
+    )
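The new store_abci_chain query leans on MongoDB's replace-with-upsert semantics, so a single call covers both the first insert and every later update of a chain record. A minimal sketch of that behavior with bare pymongo (database name and connection details here are assumptions for illustration, not the diff's configuration):

    from pymongo import MongoClient

    chains = MongoClient('localhost', 27017)['bigchain']['abci_chains']

    # No document with height 0 yet: upsert=True inserts one.
    chains.replace_one({'height': 0},
                       {'height': 0, 'chain_id': 'chain-X', 'is_synced': True},
                       upsert=True)

    # A document with height 0 now exists: the same call replaces it whole.
    chains.replace_one({'height': 0},
                       {'height': 0, 'chain_id': 'chain-Y', 'is_synced': False},
                       upsert=True)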
@@ -47,6 +47,7 @@ def create_indexes(conn, dbname):
     create_utxos_secondary_index(conn, dbname)
     create_pre_commit_secondary_index(conn, dbname)
     create_validators_secondary_index(conn, dbname)
+    create_abci_chains_indexes(conn, dbname)


 @register_schema(LocalMongoDBConnection)

@@ -133,6 +134,16 @@ def create_validators_secondary_index(conn, dbname):
     conn.conn[dbname]['validators'].create_index('height',
                                                  name='height',
                                                  unique=True,)
-    conn.conn[dbname]['validators'].create_index('election_id',
-                                                 name='election_id',
-                                                 unique=True,)
+
+
+def create_abci_chains_indexes(conn, dbname):
+    logger.info('Create `abci_chains.height` secondary index.')
+
+    conn.conn[dbname]['abci_chains'].create_index('height',
+                                                  name='height',
+                                                  unique=True,)
+
+    logger.info('Create `abci_chains.chain_id` secondary index.')
+    conn.conn[dbname]['abci_chains'].create_index('chain_id',
+                                                  name='chain_id',
+                                                  unique=True)
@@ -380,3 +380,23 @@ def get_asset_tokens_for_public_key(connection, asset_id,
         Iterator of transaction that list given owner in conditions.
     """
     raise NotImplementedError
+
+
+@singledispatch
+def store_abci_chain(conn, height, chain_id, is_synced=True):
+    """Create or update an ABCI chain at the given height.
+
+    Usually invoked at the beginning of the ABCI communications (height=0)
+    or when the ABCI client (e.g. Tendermint) is migrated (any height).
+
+    Args:
+        is_synced: True if the chain is known by both ABCI client and server
+    """
+    raise NotImplementedError
+
+
+@singledispatch
+def get_latest_abci_chain(conn):
+    """Returns the ABCI chain stored at the biggest height, if any,
+    None otherwise.
+    """
+    raise NotImplementedError
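These generic functions are dispatch stubs: the base implementation raises NotImplementedError, and each backend registers a concrete implementation keyed on its connection class. A self-contained sketch of the pattern (the class body and return value below are placeholders, not the real BigchainDB API):

    from functools import singledispatch


    class LocalMongoDBConnection:
        """Placeholder connection type used only for dispatch."""


    @singledispatch
    def get_latest_abci_chain(conn):
        raise NotImplementedError


    @get_latest_abci_chain.register(LocalMongoDBConnection)
    def _(conn):
        # A real backend would query its `abci_chains` table here.
        return {'height': 0, 'chain_id': 'chain-X', 'is_synced': True}


    print(get_latest_abci_chain(LocalMongoDBConnection()))
    # Calling it with any unregistered connection type raises NotImplementedError.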
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)

 # Tables/collections that every backend database must create
 TABLES = ('transactions', 'blocks', 'assets', 'metadata',
-          'validators', 'pre_commit', 'utxos')
+          'validators', 'pre_commit', 'utxos', 'abci_chains')

 VALID_LANGUAGES = ('danish', 'dutch', 'english', 'finnish', 'french', 'german',
                    'hungarian', 'italian', 'norwegian', 'portuguese', 'romanian',
bigchaindb/common/memoize.py (new file, 58 lines)
@@ -0,0 +1,58 @@
+import functools
+import codecs
+from functools import lru_cache
+
+
+class HDict(dict):
+    def __hash__(self):
+        return hash(codecs.decode(self['id'], 'hex'))
+
+
+@lru_cache(maxsize=16384)
+def from_dict(func, *args, **kwargs):
+    return func(*args, **kwargs)
+
+
+def memoize_from_dict(func):
+
+    @functools.wraps(func)
+    def memoized_func(*args, **kwargs):
+
+        if args[1].get('id', None):
+            args = list(args)
+            args[1] = HDict(args[1])
+            new_args = tuple(args)
+            return from_dict(func, *new_args, **kwargs)
+        else:
+            return func(*args, **kwargs)
+
+    return memoized_func
+
+
+class ToDictWrapper():
+    def __init__(self, tx):
+        self.tx = tx
+
+    def __eq__(self, other):
+        return self.tx.id == other.tx.id
+
+    def __hash__(self):
+        return hash(self.tx.id)
+
+
+@lru_cache(maxsize=16384)
+def to_dict(func, tx_wrapped):
+    return func(tx_wrapped.tx)
+
+
+def memoize_to_dict(func):
+
+    @functools.wraps(func)
+    def memoized_func(*args, **kwargs):
+
+        if args[0].id:
+            return to_dict(func, ToDictWrapper(args[0]))
+        else:
+            return func(*args, **kwargs)
+
+    return memoized_func
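The HDict and ToDictWrapper wrappers exist because functools.lru_cache keys its cache on the call arguments, and plain dicts are unhashable. A minimal, self-contained sketch of the same trick (names here are illustrative, not the module's API):

    from functools import lru_cache


    class HashableDict(dict):
        # Hash on a stable identifier, as HDict hashes on the transaction id.
        def __hash__(self):
            return hash(self['id'])


    @lru_cache(maxsize=2)
    def parse(d):
        print('cache miss for', d['id'])
        return dict(d)


    parse(HashableDict({'id': 'abc'}))  # prints: cache miss for abc
    parse(HashableDict({'id': 'abc'}))  # served from the cache, no print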
@@ -12,7 +12,8 @@ Attributes:
 """
 from collections import namedtuple
 from copy import deepcopy
-from functools import reduce
+from functools import reduce, lru_cache
+import rapidjson

 import base58
 from cryptoconditions import Fulfillment, ThresholdSha256, Ed25519Sha256

@@ -27,6 +28,7 @@ from bigchaindb.common.exceptions import (KeypairMismatchException,
                                           AmountError, AssetIdMismatch,
                                           ThresholdTooDeep)
 from bigchaindb.common.utils import serialize
+from .memoize import memoize_from_dict, memoize_to_dict


 UnspentOutput = namedtuple(

@@ -82,6 +84,11 @@ class Input(object):
         # TODO: If `other !== Fulfillment` return `False`
         return self.to_dict() == other.to_dict()

+    # NOTE: This function is used to provide a unique key for a given
+    # Input to supplement memoization
+    def __hash__(self):
+        return hash((self.fulfillment, self.fulfills))
+
     def to_dict(self):
         """Transforms the object to a Python dictionary.

@@ -94,7 +101,7 @@ class Input(object):
         """
         try:
             fulfillment = self.fulfillment.serialize_uri()
-        except (TypeError, AttributeError, ASN1EncodeError):
+        except (TypeError, AttributeError, ASN1EncodeError, ASN1DecodeError):
             fulfillment = _fulfillment_to_details(self.fulfillment)

         try:

@@ -161,7 +168,7 @@ def _fulfillment_to_details(fulfillment):
     if fulfillment.type_name == 'ed25519-sha-256':
         return {
             'type': 'ed25519-sha-256',
-            'public_key': base58.b58encode(fulfillment.public_key),
+            'public_key': base58.b58encode(fulfillment.public_key).decode(),
         }

     if fulfillment.type_name == 'threshold-sha-256':

@@ -500,7 +507,7 @@ class Transaction(object):
     VERSION = '2.0'

     def __init__(self, operation, asset, inputs=None, outputs=None,
-                 metadata=None, version=None, hash_id=None):
+                 metadata=None, version=None, hash_id=None, tx_dict=None):
         """The constructor allows to create a customizable Transaction.

         Note:

@@ -553,6 +560,7 @@ class Transaction(object):
         self.outputs = outputs or []
         self.metadata = metadata
         self._id = hash_id
+        self.tx_dict = tx_dict

     @property
     def unspent_outputs(self):

@@ -990,7 +998,7 @@ class Transaction(object):
             raise ValueError('Inputs and '
                              'output_condition_uris must have the same count')

-        tx_dict = self.to_dict()
+        tx_dict = self.tx_dict if self.tx_dict else self.to_dict()
         tx_dict = Transaction._remove_signatures(tx_dict)
         tx_dict['id'] = None
         tx_serialized = Transaction._to_str(tx_dict)

@@ -1003,6 +1011,7 @@ class Transaction(object):
         return all(validate(i, cond)
                    for i, cond in enumerate(output_condition_uris))

+    @lru_cache(maxsize=16384)
     def _input_valid(self, input_, operation, message, output_condition_uri=None):
         """Validates a single Input against a single Output.

@@ -1048,6 +1057,11 @@ class Transaction(object):
         ffill_valid = parsed_ffill.validate(message=message.digest())
         return output_valid and ffill_valid

+    # This function is required by `lru_cache` to create a key for memoization
+    def __hash__(self):
+        return hash(self.id)
+
+    @memoize_to_dict
     def to_dict(self):
         """Transforms the object to a Python dictionary.

@@ -1150,7 +1164,9 @@ class Transaction(object):
             tx_body (dict): The Transaction to be transformed.
         """
         # NOTE: Remove reference to avoid side effects
-        tx_body = deepcopy(tx_body)
+        # tx_body = deepcopy(tx_body)
+        tx_body = rapidjson.loads(rapidjson.dumps(tx_body))

         try:
             proposed_tx_id = tx_body['id']
         except KeyError:

@@ -1167,6 +1183,7 @@ class Transaction(object):
             raise InvalidHash(err_msg.format(proposed_tx_id))

     @classmethod
+    @memoize_from_dict
     def from_dict(cls, tx, skip_schema_validation=True):
         """Transforms a Python dictionary to a Transaction object.

@@ -1184,7 +1201,7 @@ class Transaction(object):
         inputs = [Input.from_dict(input_) for input_ in tx['inputs']]
         outputs = [Output.from_dict(output) for output in tx['outputs']]
         return cls(tx['operation'], tx['asset'], inputs, outputs,
-                   tx['metadata'], tx['version'], hash_id=tx['id'])
+                   tx['metadata'], tx['version'], hash_id=tx['id'], tx_dict=tx)

     @classmethod
     def from_db(cls, bigchain, tx_dict_list):
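Putting @lru_cache directly on _input_valid works because the cache key includes self, which is why Transaction gains a __hash__ here. A small sketch of the mechanism, with illustrative names only:

    from functools import lru_cache


    class Tx:
        def __init__(self, tx_id):
            self.id = tx_id

        # Equal ids -> equal objects -> one shared cache slot.
        def __hash__(self):
            return hash(self.id)

        def __eq__(self, other):
            return self.id == other.id

        @lru_cache(maxsize=128)
        def validate_against(self, condition_uri):
            print('validating', self.id)
            return True


    Tx('aa').validate_against('ni:///sha-256;xyz')  # cache miss, prints
    Tx('aa').validate_against('ni:///sha-256;xyz')  # cache hit, silent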
@@ -6,6 +6,7 @@
 with Tendermint.
 """
 import logging
+import sys

 from abci.application import BaseApplication
 from abci.types_pb2 import (

@@ -47,22 +48,76 @@ class App(BaseApplication):
         self.block_transactions = []
         self.validators = None
         self.new_height = None
+        self.chain = self.bigchaindb.get_latest_abci_chain()
+
+    def log_abci_migration_error(self, chain_id, validators):
+        logger.error('An ABCI chain migration is in process. '
+                     'Download the new ABCI client and configure it with '
+                     f'chain_id={chain_id} and validators={validators}.')
+
+    def abort_if_abci_chain_is_not_synced(self):
+        if self.chain is None or self.chain['is_synced']:
+            return
+
+        validators = self.bigchaindb.get_validators()
+        self.log_abci_migration_error(self.chain['chain_id'], validators)
+        sys.exit(1)

     def init_chain(self, genesis):
-        """Initialize chain with block of height 0"""
+        """Initialize chain upon genesis or a migration"""
+
+        app_hash = ''
+        height = 0
+
+        known_chain = self.bigchaindb.get_latest_abci_chain()
+        if known_chain is not None:
+            chain_id = known_chain['chain_id']
+
+            if known_chain['is_synced']:
+                msg = (f'Got invalid InitChain ABCI request ({genesis}) - '
+                       f'the chain {chain_id} is already synced.')
+                logger.error(msg)
+                sys.exit(1)
+
+            if chain_id != genesis.chain_id:
+                validators = self.bigchaindb.get_validators()
+                self.log_abci_migration_error(chain_id, validators)
+                sys.exit(1)
+
+            # set migration values for app hash and height
+            block = self.bigchaindb.get_latest_block()
+            app_hash = '' if block is None else block['app_hash']
+            height = 0 if block is None else block['height'] + 1
+
+        known_validators = self.bigchaindb.get_validators()
+        validator_set = [vutils.decode_validator(v)
+                         for v in genesis.validators]

-        validator_set = [vutils.decode_validator(v) for v in genesis.validators]
-        block = Block(app_hash='', height=0, transactions=[])
+        if known_validators and known_validators != validator_set:
+            self.log_abci_migration_error(known_chain['chain_id'],
+                                          known_validators)
+            sys.exit(1)
+
+        block = Block(app_hash=app_hash, height=height, transactions=[])
         self.bigchaindb.store_block(block._asdict())
-        self.bigchaindb.store_validator_set(1, validator_set, None)
+        self.bigchaindb.store_validator_set(height + 1, validator_set, None)
+        abci_chain_height = 0 if known_chain is None else known_chain['height']
+        self.bigchaindb.store_abci_chain(abci_chain_height,
+                                         genesis.chain_id, True)
+        self.chain = {'height': abci_chain_height, 'is_synced': True,
+                      'chain_id': genesis.chain_id}
         return ResponseInitChain()

     def info(self, request):
         """Return height of the latest committed block."""
+
+        self.abort_if_abci_chain_is_not_synced()
+
         r = ResponseInfo()
         block = self.bigchaindb.get_latest_block()
         if block:
-            r.last_block_height = block['height']
+            chain_shift = 0 if self.chain is None else self.chain['height']
+            r.last_block_height = block['height'] - chain_shift
             r.last_block_app_hash = block['app_hash'].encode('utf-8')
         else:
             r.last_block_height = 0

@@ -77,6 +132,8 @@
             raw_tx: a raw string (in bytes) transaction.
         """

+        self.abort_if_abci_chain_is_not_synced()
+
         logger.benchmark('CHECK_TX_INIT')
         logger.debug('check_tx: %s', raw_transaction)
         transaction = decode_transaction(raw_transaction)

@@ -95,8 +152,11 @@
             req_begin_block: block object which contains block header
                              and block hash.
         """
+        self.abort_if_abci_chain_is_not_synced()
+
+        chain_shift = 0 if self.chain is None else self.chain['height']
         logger.benchmark('BEGIN BLOCK, height:%s, num_txs:%s',
-                         req_begin_block.header.height,
+                         req_begin_block.header.height + chain_shift,
                          req_begin_block.header.num_txs)

         self.block_txn_ids = []

@@ -109,6 +169,9 @@
         Args:
             raw_tx: a raw string (in bytes) transaction.
         """

+        self.abort_if_abci_chain_is_not_synced()
+
         logger.debug('deliver_tx: %s', raw_transaction)
         transaction = self.bigchaindb.is_valid_transaction(
             decode_transaction(raw_transaction), self.block_transactions)

@@ -130,7 +193,11 @@
             height (int): new height of the chain.
         """

-        height = request_end_block.height
+        self.abort_if_abci_chain_is_not_synced()
+
+        chain_shift = 0 if self.chain is None else self.chain['height']
+
+        height = request_end_block.height + chain_shift
         self.new_height = height
         block_txn_hash = calculate_hash(self.block_txn_ids)
         block = self.bigchaindb.get_latest_block()

@@ -158,6 +225,8 @@
     def commit(self):
         """Store the new height along with the block hash."""

+        self.abort_if_abci_chain_is_not_synced()
+
         data = self.block_txn_hash.encode('utf-8')

         # register a new block only when new transactions are received
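The chain_shift bookkeeping above reduces to plain arithmetic; the concrete numbers below are only an illustration. After a migration, Tendermint restarts counting from scratch while BigchainDB keeps its cumulative height, so incoming ABCI heights are shifted up before being stored, and outgoing heights are shifted down before being reported:

    chain = {'height': 100, 'chain_id': 'new-chain', 'is_synced': True}
    chain_shift = 0 if chain is None else chain['height']

    # end_block: Tendermint reports height 1 on the new chain...
    stored_height = 1 + chain_shift          # ...BigchainDB stores 101

    # info: the latest stored block has height 101...
    reported_height = 101 - chain_shift      # ...Tendermint is told 1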
@@ -9,6 +9,7 @@ MongoDB.
 import logging
 from collections import namedtuple
 from uuid import uuid4
+import rapidjson

 try:
     from hashlib import sha3_256

@@ -77,10 +78,11 @@ class BigchainDB(object):
             raise ValidationError('Mode must be one of the following {}.'
                                   .format(', '.join(self.mode_list)))

+        tx_dict = transaction.tx_dict if transaction.tx_dict else transaction.to_dict()
         payload = {
             'method': mode,
             'jsonrpc': '2.0',
-            'params': [encode_transaction(transaction.to_dict())],
+            'params': [encode_transaction(tx_dict)],
             'id': str(uuid4())
         }
         # TODO: handle connection errors!

@@ -122,10 +124,9 @@
         txns = []
         assets = []
         txn_metadatas = []
-        for transaction_obj in transactions:
-            # self.update_utxoset(transaction)
-            transaction = transaction_obj.to_dict()
-            if transaction['operation'] == transaction_obj.CREATE:
+        for t in transactions:
+            transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict()))
+            if transaction['operation'] == t.CREATE:
                 asset = transaction.pop('asset')
                 asset['id'] = transaction['id']
                 assets.append(asset)

@@ -224,6 +225,10 @@
         return backend.query.delete_unspent_outputs(
             self.connection, *unspent_outputs)

+    def is_committed(self, transaction_id):
+        transaction = backend.query.get_transaction(self.connection, transaction_id)
+        return bool(transaction)
+
     def get_transaction(self, transaction_id):
         transaction = backend.query.get_transaction(self.connection, transaction_id)

@@ -426,8 +431,7 @@
     def get_validators(self, height=None):
         result = self.get_validator_change(height)
-        validators = result['validators']
-        return validators
+        return [] if result is None else result['validators']

     def get_validators_by_election_id(self, election_id):
         result = backend.query.get_validator_set_by_election_id(self.connection, election_id)

@@ -448,6 +452,37 @@
                            'validators': validators,
                            'election_id': election_id})

+    def store_abci_chain(self, height, chain_id, is_synced=True):
+        return backend.query.store_abci_chain(self.connection, height,
+                                              chain_id, is_synced)
+
+    def get_latest_abci_chain(self):
+        return backend.query.get_latest_abci_chain(self.connection)
+
+    def migrate_abci_chain(self):
+        """Generate and record a new ABCI chain ID. New blocks are not
+        accepted until we receive an InitChain ABCI request with
+        the matching chain ID and validator set.
+
+        Chain ID is generated based on the current chain and height.
+        `chain-X` => `chain-X-migrated-at-height-5`.
+        `chain-X-migrated-at-height-5` => `chain-X-migrated-at-height-21`.
+
+        If there is no known chain (we are at genesis), the function returns.
+        """
+        latest_chain = self.get_latest_abci_chain()
+        if latest_chain is None:
+            return
+
+        block = self.get_latest_block()
+
+        suffix = '-migrated-at-height-'
+        chain_id = latest_chain['chain_id']
+        block_height_str = str(block['height'])
+        new_chain_id = chain_id.split(suffix)[0] + suffix + block_height_str
+
+        self.store_abci_chain(block['height'] + 1, new_chain_id, False)
+

 Block = namedtuple('Block', ('app_hash', 'height', 'transactions'))
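The docstring's chain-ID rule is easy to check in isolation; this standalone sketch mirrors the string handling in migrate_abci_chain above:

    SUFFIX = '-migrated-at-height-'

    def new_chain_id(chain_id, block_height):
        # Strip any previous migration suffix, then append the new one.
        return chain_id.split(SUFFIX)[0] + SUFFIX + str(block_height)

    assert new_chain_id('chain-X', 5) == 'chain-X-migrated-at-height-5'
    assert new_chain_id('chain-X-migrated-at-height-5', 21) == \
        'chain-X-migrated-at-height-21'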
@@ -27,7 +27,7 @@ class Transaction(Transaction):

         if self.operation == Transaction.CREATE:
             duplicates = any(txn for txn in current_transactions if txn.id == self.id)
-            if bigchain.get_transaction(self.to_dict()['id']) or duplicates:
+            if bigchain.is_committed(self.id) or duplicates:
                 raise DuplicateTransaction('transaction `{}` already exists'
                                            .format(self.id))
@@ -35,12 +35,6 @@ class ValidatorElection(Transaction):
     INCONCLUSIVE = 'inconclusive'
     ELECTION_THRESHOLD = 2 / 3

-    def __init__(self, operation, asset, inputs, outputs,
-                 metadata=None, version=None, hash_id=None):
-        # operation `CREATE` is being passed as argument as `VALIDATOR_ELECTION` is an extension
-        # of `CREATE` and any validation on `CREATE` in the parent class should apply to it
-        super().__init__(operation, asset, inputs, outputs, metadata, version, hash_id)
-
     @classmethod
     def get_validator_change(cls, bigchain, height=None):
         """Return the latest change to the validator set

@@ -172,7 +166,7 @@
     @classmethod
     def to_public_key(cls, election_id):
-        return base58.b58encode(bytes.fromhex(election_id))
+        return base58.b58encode(bytes.fromhex(election_id)).decode()

     @classmethod
     def count_votes(cls, election_pk, transactions, getter=getattr):
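The added .decode() here (and in _fulfillment_to_details earlier) reflects that recent versions of the base58 package return bytes from b58encode, while these call sites expect plain strings; that reading is our inference, not stated in the diff itself:

    import base58

    encoded = base58.b58encode(bytes.fromhex('00ff'))
    print(type(encoded))      # <class 'bytes'> on base58 >= 1.0
    print(encoded.decode())   # '15Q', a str suitable for JSON schemas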
@@ -11,9 +11,6 @@ Appendices

    json-serialization
    cryptography
-   the-bigchaindb-class
-   backend
-   commands
    aws-setup
    generate-key-pair-for-ssh
    firewall-notes
docs/server/source/code-reference/index.rst (new file, 24 lines)
@@ -0,0 +1,24 @@
+
+.. Copyright BigchainDB GmbH and BigchainDB contributors
+   SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
+   Code is Apache-2.0 and docs are CC-BY-4.0
+
+Code Reference
+==============
+
+This section contains auto-generated documentation of various functions, classes and methods
+in the BigchainDB Server code, based on Python docstrings in the code itself.
+
+.. warning::
+
+   While we try to keep docstrings accurate,
+   if you want to know *for sure* what the code does,
+   then you have to read the code itself.
+
+.. toctree::
+   :maxdepth: 1
+
+   the-bigchaindb-class
+   backend
+   commands
@@ -23,4 +23,5 @@ BigchainDB Server Documentation
    data-models/index
    k8s-deployment-template/index
    release-notes
+   code-reference/index
    appendices/index
setup.py
@@ -79,7 +79,7 @@ install_requires = [
     # TODO Consider not installing the db drivers, or putting them in extras.
     'pymongo~=3.6',
     'pysha3~=1.0.2',
-    'cryptoconditions~=0.6.0.dev',
+    'cryptoconditions==0.7.2',
     'python-rapidjson~=0.6.0',
     'logstats~=0.2.1',
     'flask>=0.10.1',
tests/README.md
@@ -4,103 +4,13 @@ SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
 Code is Apache-2.0 and docs are CC-BY-4.0
 --->

-# BigchainDB Server Tests
-
-## The tests/ Folder
-
-The `tests/` folder is where all the tests for BigchainDB Server live. Most of them are unit tests. Integration tests are in the [`tests/integration/` folder](./integration/).
-
-## Writing Tests
-
-We write unit and integration tests for our Python code using the [pytest](http://pytest.org/latest/) framework. You can use the tests in the `tests/` folder as templates or examples.
-
-## Running Tests
-
-### Running Tests Directly
-
-If you installed BigchainDB Server using `pip install bigchaindb`, then you
-didn't install the tests. Before you can run all the tests, you must install
-BigchainDB from source. The [`CONTRIBUTING.md` file](../CONTRIBUTING.md) has
-instructions for how to do that.
-
-Next, make sure you have Local MongoDB running in the background. You
-can run MongoDB using `mongod`.
-
-Now you can run all tests using:
-```text
-pytest -v
-```
-
-or, if that doesn't work, try:
-```text
-python -m pytest -v
-```
-
-or:
-```text
-python setup.py test
-```
-
-How does `python setup.py test` work? The documentation for [pytest-runner](https://pypi.python.org/pypi/pytest-runner) explains.
-
-The `pytest` command has many options. If you want to learn about all the things you can do with pytest, see [the pytest documentation](http://pytest.org/latest/). We've also added a customization to pytest:
-
-`--database-backend`: Defines the backend to use for the tests. It defaults to
-`localmongodb`.
-
-### Running Tests with Docker Compose
-
-You can also use [Docker Compose](https://docs.docker.com/compose/) to run all the tests.
-
-First, bring up all the services BigchainDB, MongoDB, Tendermint in the background:
-
-```text
-$ docker-compose up -d bdb
-```
-
-then run the tests using:
-
-```text
-$ docker-compose run --rm --no-deps bigchaindb pytest -v
-```
-
-## Automated Testing of All Pull Requests
-
-We use [Travis CI](https://travis-ci.com/), so that whenever someone creates a new BigchainDB pull request on GitHub, Travis CI gets the new code and does _a bunch of stuff_. We use the same `docker-compose.yml` for tests. It tells Travis CI how to install BigchainDB, how to run all the tests, and what to do "after success" (e.g. run `codecov`). (We use [Codecov](https://codecov.io/) to get a rough estimate of our test coverage.)
-
-### Tox
-
-We use [tox](https://tox.readthedocs.io/en/latest/) to run multiple suites of tests against multiple environments during automated testing. Generally you don't need to run this yourself, but it might be useful when troubleshooting a failing Travis CI build.
-
-To run all the tox tests, use:
-```text
-tox
-```
-
-or:
-```text
-python -m tox
-```
-
-To run only a few environments, use the `-e` flag:
-```text
-tox -e {ENVLIST}
-```
-
-where `{ENVLIST}` is one or more of the environments specified in the [tox.ini file](../tox.ini).
+# BigchainDB Server Unit Tests
+
+Most of the tests in the `tests/` folder are unit tests. For info about how to write and run tests, see [the docs about contributing to BigchainDB](http://docs.bigchaindb.com/projects/contributing/en/latest/index.html), especially:
+
+- [Write Code - Remember to Write Tests](http://docs.bigchaindb.com/projects/contributing/en/latest/dev-setup-coding-and-contribution-process/write-code.html#remember-to-write-tests)
+- [Notes on Running a Local Dev Node with Docker Compose](http://docs.bigchaindb.com/projects/contributing/en/latest/dev-setup-coding-and-contribution-process/run-node-with-docker-compose.html), especially `make test`
+- [Notes on Running a Local Dev Node as Processes (and Running All Tests)](http://docs.bigchaindb.com/projects/contributing/en/latest/dev-setup-coding-and-contribution-process/run-node-as-processes.html)
+
+A few notes:
+
+- [`tests/common/`](./common/) contains self-contained tests only testing
+  [`bigchaindb/common/`](../bigchaindb/common/)
+- [`tests/backend/`](./backend/) contains tests requiring
+  the database backend (MongoDB)
+
+Note: There are acceptance tests in the `acceptance/` folder (at the same level in the hierarchy as the `tests/` folder).
@@ -7,6 +7,9 @@ from copy import deepcopy
 import pytest
+import pymongo
+
 from bigchaindb.backend import connect, query

 pytestmark = [pytest.mark.tendermint, pytest.mark.bdb]

@@ -202,7 +205,7 @@ def test_get_owned_ids(signed_create_tx, user_pk):
     conn = connect()

     # insert a transaction
-    conn.db.transactions.insert_one(signed_create_tx.to_dict())
+    conn.db.transactions.insert_one(deepcopy(signed_create_tx.to_dict()))

     txns = list(query.get_owned_ids(conn, user_pk))

@@ -221,7 +224,7 @@ def test_get_spending_transactions(user_pk, user_sk):
     tx2 = Transaction.transfer([inputs[0]], out, tx1.id).sign([user_sk])
     tx3 = Transaction.transfer([inputs[1]], out, tx1.id).sign([user_sk])
     tx4 = Transaction.transfer([inputs[2]], out, tx1.id).sign([user_sk])
-    txns = [tx.to_dict() for tx in [tx1, tx2, tx3, tx4]]
+    txns = [deepcopy(tx.to_dict()) for tx in [tx1, tx2, tx3, tx4]]
     conn.db.transactions.insert_many(txns)

     links = [inputs[0].fulfills.to_dict(), inputs[2].fulfills.to_dict()]

@@ -394,3 +397,51 @@ def test_validator_update():
     v91 = query.get_validator_set(conn)
     assert v91['height'] == 91
+
+
+@pytest.mark.parametrize('description,stores,expected', [
+    (
+        'Query empty database.',
+        [],
+        None,
+    ),
+    (
+        'Store one chain with the default value for `is_synced`.',
+        [
+            {'height': 0, 'chain_id': 'some-id'},
+        ],
+        {'height': 0, 'chain_id': 'some-id', 'is_synced': True},
+    ),
+    (
+        'Store one chain with a custom value for `is_synced`.',
+        [
+            {'height': 0, 'chain_id': 'some-id', 'is_synced': False},
+        ],
+        {'height': 0, 'chain_id': 'some-id', 'is_synced': False},
+    ),
+    (
+        'Store one chain, then update it.',
+        [
+            {'height': 0, 'chain_id': 'some-id', 'is_synced': True},
+            {'height': 0, 'chain_id': 'new-id', 'is_synced': False},
+        ],
+        {'height': 0, 'chain_id': 'new-id', 'is_synced': False},
+    ),
+    (
+        'Store a chain, update it, store another chain.',
+        [
+            {'height': 0, 'chain_id': 'some-id', 'is_synced': True},
+            {'height': 0, 'chain_id': 'some-id', 'is_synced': False},
+            {'height': 10, 'chain_id': 'another-id', 'is_synced': True},
+        ],
+        {'height': 10, 'chain_id': 'another-id', 'is_synced': True},
+    ),
+])
+def test_store_abci_chain(description, stores, expected):
+    conn = connect()
+
+    for store in stores:
+        query.store_abci_chain(conn, **store)
+
+    actual = query.get_latest_abci_chain(conn)
+    assert expected == actual, description
@@ -24,7 +24,7 @@ def test_init_creates_db_tables_and_indexes():
     collection_names = conn.conn[dbname].collection_names()
     assert set(collection_names) == {
         'transactions', 'assets', 'metadata', 'blocks', 'utxos', 'pre_commit',
-        'validators'
+        'validators', 'abci_chains',
     }

     indexes = conn.conn[dbname]['assets'].index_information().keys()

@@ -44,7 +44,10 @@ def test_init_creates_db_tables_and_indexes():
     assert set(indexes) == {'_id_', 'pre_commit_id'}

     indexes = conn.conn[dbname]['validators'].index_information().keys()
-    assert set(indexes) == {'_id_', 'height', 'election_id'}
+    assert set(indexes) == {'_id_', 'height'}
+
+    indexes = conn.conn[dbname]['abci_chains'].index_information().keys()
+    assert set(indexes) == {'_id_', 'height', 'chain_id'}


 def test_init_database_fails_if_db_exists():

@@ -79,7 +82,8 @@ def test_create_tables():
     collection_names = conn.conn[dbname].collection_names()
     assert set(collection_names) == {
         'transactions', 'assets', 'metadata', 'blocks', 'utxos', 'validators',
-        'pre_commit'}
+        'pre_commit', 'abci_chains',
+    }


 def test_create_secondary_indexes():
tests/common/test_memoize.py (new file, 92 lines)
@@ -0,0 +1,92 @@
+# Copyright BigchainDB GmbH and BigchainDB contributors
+# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
+# Code is Apache-2.0 and docs are CC-BY-4.0
+
+import pytest
+from copy import deepcopy
+
+from bigchaindb.models import Transaction
+from bigchaindb.common.crypto import generate_key_pair
+from bigchaindb.common.memoize import to_dict, from_dict
+
+
+pytestmark = [pytest.mark.tendermint, pytest.mark.bdb]
+
+
+def test_memoize_to_dict(b):
+    alice = generate_key_pair()
+    asset = {
+        'data': {'id': 'test_id'},
+    }
+
+    assert to_dict.cache_info().hits == 0
+    assert to_dict.cache_info().misses == 0
+
+    tx = Transaction.create([alice.public_key],
+                            [([alice.public_key], 1)],
+                            asset=asset,)\
+                    .sign([alice.private_key])
+
+    tx.to_dict()
+
+    assert to_dict.cache_info().hits == 0
+    assert to_dict.cache_info().misses == 1
+
+    tx.to_dict()
+    tx.to_dict()
+
+    assert to_dict.cache_info().hits == 2
+    assert to_dict.cache_info().misses == 1
+
+
+def test_memoize_from_dict(b):
+    alice = generate_key_pair()
+    asset = {
+        'data': {'id': 'test_id'},
+    }
+
+    assert from_dict.cache_info().hits == 0
+    assert from_dict.cache_info().misses == 0
+
+    tx = Transaction.create([alice.public_key],
+                            [([alice.public_key], 1)],
+                            asset=asset,)\
+                    .sign([alice.private_key])
+    tx_dict = deepcopy(tx.to_dict())
+
+    Transaction.from_dict(tx_dict)
+
+    assert from_dict.cache_info().hits == 0
+    assert from_dict.cache_info().misses == 1
+
+    Transaction.from_dict(tx_dict)
+    Transaction.from_dict(tx_dict)
+
+    assert from_dict.cache_info().hits == 2
+    assert from_dict.cache_info().misses == 1
+
+
+def test_memoize_input_valid(b):
+    alice = generate_key_pair()
+    asset = {
+        'data': {'id': 'test_id'},
+    }
+
+    assert Transaction._input_valid.cache_info().hits == 0
+    assert Transaction._input_valid.cache_info().misses == 0
+
+    tx = Transaction.create([alice.public_key],
+                            [([alice.public_key], 1)],
+                            asset=asset,)\
+                    .sign([alice.private_key])
+
+    tx.inputs_valid()
+
+    assert Transaction._input_valid.cache_info().hits == 0
+    assert Transaction._input_valid.cache_info().misses == 1
+
+    tx.inputs_valid()
+    tx.inputs_valid()
+
+    assert Transaction._input_valid.cache_info().hits == 2
+    assert Transaction._input_valid.cache_info().misses == 1
@@ -13,7 +13,7 @@ from cryptoconditions import Ed25519Sha256
 from pytest import mark, raises
 from sha3 import sha3_256

-pytestmark = mark.tendermint
+pytestmark = [mark.tendermint, mark.bdb]


 def test_input_serialization(ffill_uri, user_pub):

@@ -93,7 +93,7 @@ def test_output_serialization(user_Ed25519, user_pub):
             'uri': user_Ed25519.condition_uri,
             'details': {
                 'type': 'ed25519-sha-256',
-                'public_key': b58encode(user_Ed25519.public_key),
+                'public_key': b58encode(user_Ed25519.public_key).decode(),
             },
         },
         'public_keys': [user_pub],

@@ -114,7 +114,7 @@ def test_output_deserialization(user_Ed25519, user_pub):
             'uri': user_Ed25519.condition_uri,
             'details': {
                 'type': 'ed25519-sha-256',
-                'public_key': b58encode(user_Ed25519.public_key),
+                'public_key': b58encode(user_Ed25519.public_key).decode(),
             },
         },
         'public_keys': [user_pub],

@@ -533,7 +533,7 @@ def test_validate_input_with_invalid_parameters(utx):
     input_conditions = [out.fulfillment.condition_uri for out in utx.outputs]
     tx_dict = utx.to_dict()
     tx_serialized = Transaction._to_str(tx_dict)
-    valid = utx._input_valid(utx.inputs[0], tx_serialized, input_conditions)
+    valid = utx._input_valid(utx.inputs[0], tx_serialized, input_conditions[0])
     assert not valid
@@ -144,11 +144,17 @@ def _bdb(_setup_database, _configure_bigchaindb):
     from bigchaindb import config
     from bigchaindb.backend import connect
     from .utils import flush_db
+    from bigchaindb.common.memoize import to_dict, from_dict
+    from bigchaindb.models import Transaction
     conn = connect()
     yield
     dbname = config['database']['name']
     flush_db(conn, dbname)

+    to_dict.cache_clear()
+    from_dict.cache_clear()
+    Transaction._input_valid.cache_clear()
+

 # We need this function to avoid loading an existing
 # conf file located in the home of the user running
@@ -16,7 +16,8 @@ def validator_pub_key():

 @pytest.fixture
 def init_chain_request():
     addr = codecs.decode(b'9FD479C869C7D7E7605BF99293457AA5D80C3033', 'hex')
-    pk = codecs.decode(b'VAgFZtYw8bNR5TMZHFOBDWk9cAmEu3/c6JgRBmddbbI=', 'base64')
+    pk = codecs.decode(b'VAgFZtYw8bNR5TMZHFOBDWk9cAmEu3/c6JgRBmddbbI=',
+                       'base64')
     val_a = types.Validator(address=addr, power=10,
                             pub_key=types.PubKey(type='ed25519', data=pk))
@@ -2,17 +2,28 @@
 # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
 # Code is Apache-2.0 and docs are CC-BY-4.0

+import codecs
 import json
+import pytest
+import random

 from abci.types_pb2 import (
+    PubKey,
     ResponseInitChain,
+    RequestInitChain,
     RequestInfo,
     RequestBeginBlock,
-    RequestEndBlock
+    RequestEndBlock,
+    Validator,
 )

 from bigchaindb import App
+from bigchaindb.backend.localmongodb import query
+from bigchaindb.common.crypto import generate_key_pair
 from bigchaindb.core import (CodeTypeOk,
                              CodeTypeError,
                              )
 from bigchaindb.lib import Block
+from bigchaindb.upsert_validator.validator_utils import new_validator_set
+from bigchaindb.tendermint_utils import public_key_to_base64

@@ -24,6 +35,173 @@ def encode_tx_to_bytes(transaction):
     return json.dumps(transaction.to_dict()).encode('utf8')


+def generate_address():
+    return ''.join(random.choices('1,2,3,4,5,6,7,8,9,A,B,C,D,E,F'.split(','),
+                                  k=40)).encode()
+
+
+def generate_validator():
+    addr = codecs.decode(generate_address(), 'hex')
+    pk, _ = generate_key_pair()
+    pub_key = PubKey(type='ed25519', data=pk.encode())
+    val = Validator(address=addr, power=10, pub_key=pub_key)
+    return val
+
+
+def generate_init_chain_request(chain_id, vals=None):
+    vals = vals if vals is not None else [generate_validator()]
+    return RequestInitChain(validators=vals, chain_id=chain_id)
+
+
+def test_init_chain_successfully_registers_chain(b):
+    request = generate_init_chain_request('chain-XYZ')
+    res = App(b).init_chain(request)
+    assert res == ResponseInitChain()
+    chain = query.get_latest_abci_chain(b.connection)
+    assert chain == {'height': 0, 'chain_id': 'chain-XYZ', 'is_synced': True}
+    assert query.get_latest_block(b.connection) == {
+        'height': 0,
+        'app_hash': '',
+        'transactions': [],
+    }
+
+
+def test_init_chain_ignores_invalid_init_chain_requests(b):
+    validators = [generate_validator()]
+    request = generate_init_chain_request('chain-XYZ', validators)
+    res = App(b).init_chain(request)
+    assert res == ResponseInitChain()
+
+    validator_set = query.get_validator_set(b.connection)
+
+    invalid_requests = [
+        request,  # the same request again
+        # different validator set
+        generate_init_chain_request('chain-XYZ'),
+        # different chain ID
+        generate_init_chain_request('chain-ABC', validators),
+    ]
+    for r in invalid_requests:
+        with pytest.raises(SystemExit):
+            App(b).init_chain(r)
+        # assert nothing changed - neither validator set, nor chain ID
+        new_validator_set = query.get_validator_set(b.connection)
+        assert new_validator_set == validator_set
+        new_chain_id = query.get_latest_abci_chain(b.connection)['chain_id']
+        assert new_chain_id == 'chain-XYZ'
+        assert query.get_latest_block(b.connection) == {
+            'height': 0,
+            'app_hash': '',
+            'transactions': [],
+        }
+
+
+def test_init_chain_recognizes_new_chain_after_migration(b):
+    validators = [generate_validator()]
+    request = generate_init_chain_request('chain-XYZ', validators)
+    res = App(b).init_chain(request)
+    assert res == ResponseInitChain()
+
+    validator_set = query.get_validator_set(b.connection)['validators']
+
+    # simulate a migration
+    query.store_block(b.connection, Block(app_hash='', height=1,
+                                          transactions=[])._asdict())
+    b.migrate_abci_chain()
+
+    # the same or other mismatching requests are ignored
+    invalid_requests = [
+        request,
+        generate_init_chain_request('unknown', validators),
+        generate_init_chain_request('chain-XYZ'),
+        generate_init_chain_request('chain-XYZ-migrated-at-height-1'),
+    ]
+    for r in invalid_requests:
+        with pytest.raises(SystemExit):
+            App(b).init_chain(r)
+        assert query.get_latest_abci_chain(b.connection) == {
+            'chain_id': 'chain-XYZ-migrated-at-height-1',
+            'is_synced': False,
+            'height': 2,
+        }
+        new_validator_set = query.get_validator_set(b.connection)['validators']
+        assert new_validator_set == validator_set
+
+    # a request with the matching chain ID and matching validator set
+    # completes the migration
+    request = generate_init_chain_request('chain-XYZ-migrated-at-height-1',
+                                          validators)
+    res = App(b).init_chain(request)
+    assert res == ResponseInitChain()
+    assert query.get_latest_abci_chain(b.connection) == {
+        'chain_id': 'chain-XYZ-migrated-at-height-1',
+        'is_synced': True,
+        'height': 2,
+    }
+    assert query.get_latest_block(b.connection) == {
+        'height': 2,
+        'app_hash': '',
+        'transactions': [],
+    }
+
+    # requests with the old chain ID and other requests are ignored
+    invalid_requests = [
+        request,
+        generate_init_chain_request('chain-XYZ', validators),
+        generate_init_chain_request('chain-XYZ-migrated-at-height-1'),
+    ]
+    for r in invalid_requests:
+        with pytest.raises(SystemExit):
+            App(b).init_chain(r)
+        assert query.get_latest_abci_chain(b.connection) == {
+            'chain_id': 'chain-XYZ-migrated-at-height-1',
+            'is_synced': True,
+            'height': 2,
+        }
+        new_validator_set = query.get_validator_set(b.connection)['validators']
+        assert new_validator_set == validator_set
+        assert query.get_latest_block(b.connection) == {
+            'height': 2,
+            'app_hash': '',
+            'transactions': [],
+        }
+
+
+def test_info(b):
+    r = RequestInfo()
+    app = App(b)
+
+    res = app.info(r)
+    assert res.last_block_height == 0
+    assert res.last_block_app_hash == b''
+
+    b.store_block(Block(app_hash='1', height=1, transactions=[])._asdict())
+    res = app.info(r)
+    assert res.last_block_height == 1
+    assert res.last_block_app_hash == b'1'
+
+    # simulate a migration and assert the height is shifted
+    b.store_abci_chain(2, 'chain-XYZ')
+    app = App(b)
+    b.store_block(Block(app_hash='2', height=2, transactions=[])._asdict())
+    res = app.info(r)
+    assert res.last_block_height == 0
+    assert res.last_block_app_hash == b'2'
+
+    b.store_block(Block(app_hash='3', height=3, transactions=[])._asdict())
+    res = app.info(r)
+    assert res.last_block_height == 1
+    assert res.last_block_app_hash == b'3'
+
+    # it's always the latest migration that is taken into account
+    b.store_abci_chain(4, 'chain-XYZ-new')
+    app = App(b)
+    b.store_block(Block(app_hash='4', height=4, transactions=[])._asdict())
+    res = app.info(r)
+    assert res.last_block_height == 0
+    assert res.last_block_app_hash == b'4'
+
+
 def test_check_tx__signed_create_is_ok(b):
     from bigchaindb import App
     from bigchaindb.models import Transaction

@@ -57,7 +235,6 @@ def test_check_tx__unsigned_create_is_error(b):
     assert result.code == CodeTypeError


-@pytest.mark.bdb
 def test_deliver_tx__valid_create_updates_db(b, init_chain_request):
     from bigchaindb import App
     from bigchaindb.models import Transaction

@@ -225,6 +402,17 @@ def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request):
     assert resp['height'] == 100
     assert resp['transactions'] == [tx.id]

+    # simulate a chain migration and assert the height is shifted
+    b.store_abci_chain(100, 'new-chain')
+    app = App(b)
+    app.begin_block(begin_block)
+    app.deliver_tx(encode_tx_to_bytes(tx))
+    app.end_block(RequestEndBlock(height=1))
+    resp = query.get_pre_commit_state(b.connection, PRE_COMMIT_ID)
+    assert resp['commit_id'] == PRE_COMMIT_ID
+    assert resp['height'] == 101
+    assert resp['transactions'] == [tx.id]
+

 def test_new_validator_set(b):
     node1 = {'public_key': {'type': 'ed25519-base64',

@@ -249,3 +437,45 @@ def test_new_validator_set(b):
                                    'voting_power': u['power']})

     assert updated_validator_set == updated_validators
+
+
+def test_info_aborts_if_chain_is_not_synced(b):
+    b.store_abci_chain(0, 'chain-XYZ', False)
+
+    with pytest.raises(SystemExit):
+        App(b).info(RequestInfo())
+
+
+def test_check_tx_aborts_if_chain_is_not_synced(b):
+    b.store_abci_chain(0, 'chain-XYZ', False)
+
+    with pytest.raises(SystemExit):
+        App(b).check_tx('some bytes')
+
+
+def test_begin_aborts_if_chain_is_not_synced(b):
+    b.store_abci_chain(0, 'chain-XYZ', False)
+
+    with pytest.raises(SystemExit):
+        App(b).begin_block(RequestBeginBlock())
+
+
+def test_deliver_tx_aborts_if_chain_is_not_synced(b):
+    b.store_abci_chain(0, 'chain-XYZ', False)
+
+    with pytest.raises(SystemExit):
+        App(b).deliver_tx('some bytes')
+
+
+def test_end_block_aborts_if_chain_is_not_synced(b):
+    b.store_abci_chain(0, 'chain-XYZ', False)
+
+    with pytest.raises(SystemExit):
+        App(b).end_block(RequestEndBlock())
+
+
+def test_commit_aborts_if_chain_is_not_synced(b):
+    b.store_abci_chain(0, 'chain-XYZ', False)
+
+    with pytest.raises(SystemExit):
+        App(b).commit()
@@ -15,6 +15,7 @@
 from pymongo import MongoClient

 from bigchaindb import backend
+from bigchaindb.lib import Block


 pytestmark = pytest.mark.tendermint

@@ -441,3 +442,35 @@ def test_validation_with_transaction_buffer(b):
     assert not b.is_valid_transaction(create_tx, [create_tx])
     assert not b.is_valid_transaction(transfer_tx, [create_tx, transfer_tx])
     assert not b.is_valid_transaction(double_spend, [create_tx, transfer_tx])
+
+
+@pytest.mark.bdb
+def test_migrate_abci_chain_yields_on_genesis(b):
+    b.migrate_abci_chain()
+    latest_chain = b.get_latest_abci_chain()
+    assert latest_chain is None
+
+
+@pytest.mark.bdb
+@pytest.mark.parametrize('chain,block_height,expected', [
+    (
+        (1, 'chain-XYZ', True),
+        4,
+        {'height': 5, 'chain_id': 'chain-XYZ-migrated-at-height-4',
+         'is_synced': False},
+    ),
+    (
+        (5, 'chain-XYZ-migrated-at-height-4', True),
+        13,
+        {'height': 14, 'chain_id': 'chain-XYZ-migrated-at-height-13',
+         'is_synced': False},
+    ),
+])
+def test_migrate_abci_chain_generates_new_chains(b, chain, block_height,
+                                                 expected):
+    b.store_abci_chain(*chain)
+    b.store_block(Block(app_hash='', height=block_height,
+                        transactions=[])._asdict())
+    b.migrate_abci_chain()
+    latest_chain = b.get_latest_abci_chain()
+    assert latest_chain == expected
@@ -12,6 +12,7 @@ except ImportError:

 import pytest

+pytestmark = pytest.mark.tendermint

@@ -5,6 +5,7 @@
 from functools import singledispatch

 from bigchaindb.backend.localmongodb.connection import LocalMongoDBConnection
+from bigchaindb.backend.schema import TABLES


 @singledispatch

@@ -14,13 +15,8 @@ def flush_db(connection, dbname):

 @flush_db.register(LocalMongoDBConnection)
 def flush_localmongo_db(connection, dbname):
-    connection.conn[dbname].bigchain.delete_many({})
-    connection.conn[dbname].blocks.delete_many({})
-    connection.conn[dbname].transactions.delete_many({})
-    connection.conn[dbname].assets.delete_many({})
-    connection.conn[dbname].metadata.delete_many({})
-    connection.conn[dbname].utxos.delete_many({})
-    connection.conn[dbname].validators.delete_many({})
+    for t in TABLES:
+        getattr(connection.conn[dbname], t).delete_many({})


 def generate_block(bigchain):