Merge pull request #1027 from bigchaindb/feat/1020/fix-tests-mongodb

Feat/1020/fix tests mongodb
Rodolphe Marques authored on 2017-01-10 15:38:53 +01:00; committed by GitHub
commit b4602b6283
13 changed files with 163 additions and 71 deletions


@@ -4,6 +4,9 @@ set -e -x

 if [[ "${TOXENV}" == *-rdb ]]; then
     rethinkdb --daemon
-elif [[ "${TOXENV}" == *-mdb ]]; then
-    sudo service mongod start
+elif [[ "${BIGCHAINDB_DATABASE_BACKEND}" == mongodb ]]; then
+    wget http://downloads.mongodb.org/linux/mongodb-linux-x86_64-3.4.1.tgz -O /tmp/mongodb.tgz
+    tar -xvf /tmp/mongodb.tgz
+    mkdir /tmp/mongodb-data
+    ${PWD}/mongodb-linux-x86_64-3.4.1/bin/mongod --dbpath=/tmp/mongodb-data --replSet=rs0 &> /dev/null &
 fi
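The mongod above is launched in the background with its output thrown away, so the script itself never confirms the server actually came up, and the replica set named by --replSet still has to be initialized before use (this commit does that from the test fixtures in conftest.py, further down). A sketch of what a readiness probe could look like if one were added; host, port, and retry budget are assumptions, not part of this commit:

    import time

    from pymongo import MongoClient
    from pymongo.errors import ConnectionFailure

    # Poll the freshly started mongod until it answers a ping.
    client = MongoClient('localhost', 27017, serverSelectionTimeoutMS=500)
    for _ in range(60):
        try:
            client.admin.command('ping')
            break
        except ConnectionFailure:
            time.sleep(1)
    else:
        raise RuntimeError('mongod did not come up in time')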


@@ -4,6 +4,8 @@ set -e -x

 if [[ -n ${TOXENV} ]]; then
     tox -e ${TOXENV}
+elif [[ "${BIGCHAINDB_DATABASE_BACKEND}" == mongodb ]]; then
+    pytest -v --database-backend=mongodb --cov=bigchaindb
 else
     pytest -v -n auto --cov=bigchaindb
 fi
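Note that the MongoDB run drops pytest-xdist's -n auto, presumably because all workers would share the single mongod started above. The --database-backend flag is a custom pytest option; its registration is not part of this diff, but the conventional conftest.py hook would look roughly like this (the default value shown is an assumption):

    # Hypothetical registration of the --database-backend CLI option.
    def pytest_addoption(parser):
        parser.addoption(
            '--database-backend',
            action='store',
            default='rethinkdb',  # assumed default, not confirmed by this diff
            help='Database backend to run the tests against.',
        )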


@@ -12,8 +12,6 @@ env:

 matrix:
   fast_finish: true
-  allow_failures:
-    - env: BIGCHAINDB_DATABASE_BACKEND=mongodb
   exclude:
     - python: 3.4
       env: TOXENV=flake8
@@ -26,18 +24,19 @@ matrix:
       addons:
         rethinkdb: '2.3.5'
       env: BIGCHAINDB_DATABASE_BACKEND=rethinkdb
-    - python: 3.5
-      services: mongodb
-      env: BIGCHAINDB_DATABASE_BACKEND=mongodb
     - python: 3.5
       addons:
         rethinkdb: '2.3.5'
       env: BIGCHAINDB_DATABASE_BACKEND=rethinkdb
+    - python: 3.5
+      env: BIGCHAINDB_DATABASE_BACKEND=mongodb

 before_install: sudo .ci/travis-before-install.sh
 install: .ci/travis-install.sh
+before_script: .ci/travis-before-script.sh
 script: .ci/travis_script.sh
 after_success: .ci/travis-after-success.sh


@@ -67,10 +67,10 @@ class MongoDBChangeFeed(ChangeFeed):

             # See https://github.com/bigchaindb/bigchaindb/issues/992
             if is_insert and (self.operation & ChangeFeed.INSERT):
                 record['o'].pop('_id', None)
-                doc = record['o']
+                self.outqueue.put(record['o'])
             elif is_delete and (self.operation & ChangeFeed.DELETE):
                 # on delete it only returns the id of the document
-                doc = record['o']
+                self.outqueue.put(record['o'])
             elif is_update and (self.operation & ChangeFeed.UPDATE):
                 # the oplog entry for updates only returns the update
                 # operations to apply to the document and not the
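For context, MongoDBChangeFeed gets these records by tailing MongoDB's oplog; the fix puts matching documents on the outqueue inside the insert and delete branches instead of only binding them to a local doc. A stripped-down sketch of the underlying oplog tailing (connection details and the filter are illustrative, not taken from this diff):

    import pymongo

    client = pymongo.MongoClient('localhost', 27017)
    oplog = client.local['oplog.rs']  # replica-set oplog; requires --replSet
    cursor = oplog.find({'op': {'$in': ['i', 'd', 'u']}},
                        cursor_type=pymongo.CursorType.TAILABLE_AWAIT)
    while cursor.alive:
        for record in cursor:
            if record['op'] == 'i':           # insert
                record['o'].pop('_id', None)  # as in the fix above
                print(record['o'])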


@@ -1,8 +1,10 @@
 """Query implementation for MongoDB"""

 from time import time
+from itertools import chain

 from pymongo import ReturnDocument
+from pymongo import errors

 from bigchaindb import backend
 from bigchaindb.common.exceptions import CyclicBlockchainError
@@ -15,7 +17,10 @@ register_query = module_dispatch_registrar(backend.query)

 @register_query(MongoDBConnection)
 def write_transaction(conn, signed_transaction):
-    return conn.db['backlog'].insert_one(signed_transaction)
+    try:
+        return conn.db['backlog'].insert_one(signed_transaction)
+    except errors.DuplicateKeyError:
+        return


 @register_query(MongoDBConnection)
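The swallowed DuplicateKeyError comes from the unique `transaction_id` index this same commit adds in create_backlog_secondary_index (below): re-submitting a transaction already in the backlog now returns None instead of raising. A self-contained sketch of that contract against a throwaway collection (names are illustrative):

    from pymongo import MongoClient, errors

    backlog = MongoClient().demo_db['backlog']
    backlog.create_index('id', name='transaction_id', unique=True)

    def write_transaction_sketch(signed_transaction):
        # mirrors the except-and-return-None behaviour above
        try:
            return backlog.insert_one(dict(signed_transaction))
        except errors.DuplicateKeyError:
            return

    assert write_transaction_sketch({'id': 'tx1'}) is not None  # inserted
    assert write_transaction_sketch({'id': 'tx1'}) is None      # duplicate swallowed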
@@ -36,7 +41,8 @@ def delete_transaction(conn, *transaction_id):

 @register_query(MongoDBConnection)
 def get_stale_transactions(conn, reassign_delay):
     return conn.db['backlog']\
-            .find({'assignment_timestamp': {'$lt': time() - reassign_delay}})
+            .find({'assignment_timestamp': {'$lt': time() - reassign_delay}},
+                  projection={'_id': False})


 @register_query(MongoDBConnection)
@@ -58,7 +64,10 @@ def get_transaction_from_block(conn, transaction_id, block_id):

 @register_query(MongoDBConnection)
 def get_transaction_from_backlog(conn, transaction_id):
-    return conn.db['backlog'].find_one({'id': transaction_id})
+    return conn.db['backlog']\
+            .find_one({'id': transaction_id},
+                      projection={'_id': False, 'assignee': False,
+                                  'assignment_timestamp': False})


 @register_query(MongoDBConnection)
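The projection={...: False} arguments added here and in get_stale_transactions exclude MongoDB's _id plus the backlog's bookkeeping fields, so callers get back the bare transaction they wrote. In miniature, with a throwaway collection:

    from pymongo import MongoClient

    backlog = MongoClient().demo_db['backlog_demo']
    backlog.insert_one({'id': 'tx1', 'assignee': 'node-a',
                        'assignment_timestamp': 123})
    doc = backlog.find_one({'id': 'tx1'},
                           projection={'_id': False, 'assignee': False,
                                       'assignment_timestamp': False})
    print(doc)  # {'id': 'tx1'}: the bookkeeping fields are stripped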
@@ -70,33 +79,83 @@ def get_blocks_status_from_transaction(conn, transaction_id):

 @register_query(MongoDBConnection)
 def get_txids_by_asset_id(conn, asset_id):
-    return conn.db['bigchain']\
-            .find({'block.transactions.asset.id': asset_id},
-                  projection=['id'])
+    # get the txid of the create transaction for asset_id
+    cursor = conn.db['bigchain'].aggregate([
+        {'$match': {
+            'block.transactions.id': asset_id,
+            'block.transactions.operation': 'CREATE'
+        }},
+        {'$unwind': '$block.transactions'},
+        {'$match': {
+            'block.transactions.id': asset_id,
+            'block.transactions.operation': 'CREATE'
+        }},
+        {'$project': {'block.transactions.id': True}}
+    ])
+    create_tx_txids = (elem['block']['transactions']['id'] for elem in cursor)
+
+    # get txids of transfer transaction with asset_id
+    cursor = conn.db['bigchain'].aggregate([
+        {'$match': {
+            'block.transactions.asset.id': asset_id
+        }},
+        {'$unwind': '$block.transactions'},
+        {'$match': {
+            'block.transactions.asset.id': asset_id
+        }},
+        {'$project': {'block.transactions.id': True}}
+    ])
+    transfer_tx_ids = (elem['block']['transactions']['id'] for elem in cursor)
+
+    return chain(create_tx_txids, transfer_tx_ids)


 @register_query(MongoDBConnection)
 def get_asset_by_id(conn, asset_id):
-    return conn.db['bigchain']\
-            .find_one({'block.transactions.asset.id': asset_id,
-                       'block.transactions.asset.operation': 'CREATE'},
-                      projection=['block.transactions.asset'])
+    cursor = conn.db['bigchain'].aggregate([
+        {'$match': {
+            'block.transactions.id': asset_id,
+            'block.transactions.operation': 'CREATE'
+        }},
+        {'$unwind': '$block.transactions'},
+        {'$match': {
+            'block.transactions.id': asset_id,
+            'block.transactions.operation': 'CREATE'
+        }},
+        {'$project': {'block.transactions.asset': True}}
+    ])
+    # we need to access some nested fields before returning so lets use a
+    # generator to avoid having to read all records on the cursor at this point
+    return (elem['block']['transactions'] for elem in cursor)


 @register_query(MongoDBConnection)
-def get_spent(conn, transaction_id, condition_id):
-    return conn.db['bigchain']\
-            .find_one({'block.transactions.fulfillments.input.txid':
-                       transaction_id,
-                       'block.transactions.fulfillments.input.cid':
-                       condition_id})
+def get_spent(conn, transaction_id, output):
+    cursor = conn.db['bigchain'].aggregate([
+        {'$unwind': '$block.transactions'},
+        {'$match': {
+            'block.transactions.inputs.fulfills.txid': transaction_id,
+            'block.transactions.inputs.fulfills.output': output
+        }}
+    ])
+    # we need to access some nested fields before returning so lets use a
+    # generator to avoid having to read all records on the cursor at this point
+    return (elem['block']['transactions'] for elem in cursor)


 @register_query(MongoDBConnection)
 def get_owned_ids(conn, owner):
-    return conn.db['bigchain']\
-            .find({'block.transactions.transaction.conditions.owners_after':
-                   owner})
+    cursor = conn.db['bigchain'].aggregate([
+        {'$unwind': '$block.transactions'},
+        {'$match': {
+            'block.transactions.outputs.public_keys': {
+                '$elemMatch': {'$eq': owner}
+            }
+        }}
+    ])
+    # we need to access some nested fields before returning so lets use a
+    # generator to avoid having to read all records on the cursor at this point
+    return (elem['block']['transactions'] for elem in cursor)
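The rewritten queries above share the same $match, $unwind, $match shape: the first $match selects whole block documents that contain at least one matching transaction (and can use indexes), $unwind then emits one document per element of the transactions array, and the second $match keeps only the matching elements. In miniature, with a throwaway collection:

    from pymongo import MongoClient

    db = MongoClient().demo_db
    db.demo.insert_one({'block': {'transactions': [{'id': 'a'}, {'id': 'b'}]}})
    pipeline = [
        {'$match': {'block.transactions.id': 'a'}},  # blocks containing a hit
        {'$unwind': '$block.transactions'},          # one doc per transaction
        {'$match': {'block.transactions.id': 'a'}},  # drop non-matching elements
    ]
    for doc in db.demo.aggregate(pipeline):
        print(doc['block']['transactions'])          # only {'id': 'a'}

The $elemMatch/$eq filter in get_owned_ids is the array-aware way of asking whether public_keys contains the given key; a bare equality query would behave the same for scalar elements, but $elemMatch is explicit about matching individual elements.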
@register_query(MongoDBConnection) @register_query(MongoDBConnection)
@ -121,7 +180,8 @@ def write_block(conn, block):
@register_query(MongoDBConnection) @register_query(MongoDBConnection)
def get_block(conn, block_id): def get_block(conn, block_id):
return conn.db['bigchain'].find_one({'id': block_id}) return conn.db['bigchain'].find_one({'id': block_id},
projection={'_id': False})
@register_query(MongoDBConnection) @register_query(MongoDBConnection)
@@ -184,4 +244,18 @@ def get_last_voted_block(conn, node_pubkey):

 @register_query(MongoDBConnection)
 def get_unvoted_blocks(conn, node_pubkey):
-    pass
+    return conn.db['bigchain'].aggregate([
+        {'$lookup': {
+            'from': 'votes',
+            'localField': 'id',
+            'foreignField': 'vote.voting_for_block',
+            'as': 'votes'
+        }},
+        {'$match': {
+            'votes.node_pubkey': {'$ne': node_pubkey},
+            'block.transactions.operation': {'$ne': 'GENESIS'}
+        }},
+        {'$project': {
+            'votes': False, '_id': False
+        }}
+    ])
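$lookup (available since MongoDB 3.2, so fine on the 3.4.1 installed by the CI script above) performs a left outer join: every block document gains a votes array holding the vote documents whose vote.voting_for_block equals the block's id, which the following $match uses to drop blocks this node has already voted on. The join in miniature, with throwaway collections:

    from pymongo import MongoClient

    db = MongoClient().demo_db
    db.blocks_demo.insert_one({'id': 'block1'})
    db.votes_demo.insert_one({'node_pubkey': 'me',
                              'vote': {'voting_for_block': 'block1'}})
    joined = list(db.blocks_demo.aggregate([
        {'$lookup': {'from': 'votes_demo', 'localField': 'id',
                     'foreignField': 'vote.voting_for_block', 'as': 'votes'}},
    ]))
    print(joined[0]['votes'])  # the matching votes, joined in as an array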


@@ -72,6 +72,12 @@ def create_bigchain_secondary_index(conn, dbname):

 def create_backlog_secondary_index(conn, dbname):
     logger.info('Create `backlog` secondary index.')

+    # secondary index on the transaction id with a uniqueness constraint
+    # to make sure there are no duplicated transactions in the backlog
+    conn.conn[dbname]['backlog'].create_index('id',
+                                              name='transaction_id',
+                                              unique=True)
+
     # compound index to read transactions from the backlog per assignee
     conn.conn[dbname]['backlog']\
         .create_index([('assignee', ASCENDING),
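The index name passed here ('transaction_id') is what shows up in index_information(), which is exactly what the updated schema tests below assert on. A quick illustration against a throwaway collection:

    from pymongo import MongoClient

    backlog = MongoClient().demo_db['backlog_demo']
    backlog.create_index('id', name='transaction_id', unique=True)
    print(sorted(backlog.index_information()))  # ['_id_', 'transaction_id']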


@@ -0,0 +1 @@
+"""MongoDB changefeed tests"""


@@ -25,7 +25,8 @@ def test_init_creates_db_tables_and_indexes():
                                'transaction_id']

     indexes = conn.conn[dbname]['backlog'].index_information().keys()
-    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp']
+    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
+                               'transaction_id']

     indexes = conn.conn[dbname]['votes'].index_information().keys()
     assert sorted(indexes) == ['_id_', 'block_and_voter']
@@ -85,26 +86,23 @@ def test_create_secondary_indexes():

     # Backlog table
     indexes = conn.conn[dbname]['backlog'].index_information().keys()
-    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp']
+    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
+                               'transaction_id']

     # Votes table
     indexes = conn.conn[dbname]['votes'].index_information().keys()
     assert sorted(indexes) == ['_id_', 'block_and_voter']


-def test_drop():
+def test_drop(dummy_db):
     import bigchaindb
     from bigchaindb import backend
     from bigchaindb.backend import schema

     conn = backend.connect()
-    dbname = bigchaindb.config['database']['name']
-
-    # The db is set up by fixtures
-    assert dbname in conn.conn.database_names()
-
-    schema.drop_database(conn, dbname)
-    assert dbname not in conn.conn.database_names()
+    assert dummy_db in conn.conn.database_names()
+    schema.drop_database(conn, dummy_db)
+    assert dummy_db not in conn.conn.database_names()


 def test_get_replica_set_name_not_enabled():


@@ -136,11 +136,18 @@ def _configure_bigchaindb(request):
 def _setup_database(_configure_bigchaindb):
     from bigchaindb import config
     from bigchaindb.backend import connect, schema
+    from bigchaindb.backend.mongodb.schema import initialize_replica_set
     from bigchaindb.common.exceptions import DatabaseDoesNotExist
     print('Initializing test db')
     dbname = config['database']['name']
     conn = connect()

+    # if we are setting up mongodb for the first time we need to make sure
+    # that the replica set is initialized before doing any operation in the
+    # database
+    if config['database']['backend'] == 'mongodb':
+        initialize_replica_set(conn)
+
     try:
         schema.drop_database(conn, dbname)
     except DatabaseDoesNotExist:
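The body of initialize_replica_set is not shown in this diff; in essence, initializing a single-node replica set with pymongo boils down to something like the following sketch (the hard-coded host/port and the naive readiness wait are assumptions):

    import time

    from pymongo import MongoClient
    from pymongo.errors import OperationFailure

    def initialize_replica_set_sketch(host='localhost', port=27017):
        client = MongoClient(host, port)
        try:
            client.admin.command('replSetInitiate',
                                 {'_id': 'rs0',  # must match mongod's --replSet
                                  'members': [{'_id': 0,
                                               'host': '{}:{}'.format(host, port)}]})
        except OperationFailure:
            pass  # replica set already initialized
        while not client.is_primary:  # writes succeed only on a primary
            time.sleep(0.1)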
@@ -315,10 +322,10 @@ def dummy_db(request):
     if xdist_suffix:
         dbname = '{}_{}'.format(dbname, xdist_suffix)
     try:
-        schema.create_database(conn, dbname)
+        schema.init_database(conn, dbname)
     except DatabaseAlreadyExists:
         schema.drop_database(conn, dbname)
-        schema.create_database(conn, dbname)
+        schema.init_database(conn, dbname)
     yield dbname
     try:
         schema.drop_database(conn, dbname)


@@ -97,18 +97,18 @@ class TestBigchainApi(object):
         tx = Transaction.create([b.me], [([b.me], 1)])
         tx = tx.sign([b.me_private])

-        monkeypatch.setattr('time.time', lambda: 1)
+        monkeypatch.setattr('time.time', lambda: 1000000000)
         block1 = b.create_block([tx])
         b.write_block(block1)

-        monkeypatch.setattr('time.time', lambda: 2)
+        monkeypatch.setattr('time.time', lambda: 1000000020)
         transfer_tx = Transaction.transfer(tx.to_inputs(), [([b.me], 1)],
                                            asset_id=tx.id)
         transfer_tx = transfer_tx.sign([b.me_private])
         block2 = b.create_block([transfer_tx])
         b.write_block(block2)

-        monkeypatch.setattr('time.time', lambda: 3333333333)
+        monkeypatch.setattr('time.time', lambda: 1000000030)
         transfer_tx2 = Transaction.transfer(tx.to_inputs(), [([b.me], 1)],
                                             asset_id=tx.id)
         transfer_tx2 = transfer_tx2.sign([b.me_private])
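The timestamps move from toy values (1, 2) to realistic epochs spaced a few seconds apart; the mechanism itself is plain monkeypatching of time.time, which every create_block/write_block call in the test then observes. Reduced to its essence (a hypothetical test, not from this diff):

    def test_time_is_frozen(monkeypatch):
        import time
        monkeypatch.setattr('time.time', lambda: 1000000000)
        assert time.time() == 1000000000  # every timestamp taken below is known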
@@ -132,11 +132,11 @@ class TestBigchainApi(object):
         tx = Transaction.create([b.me], [([b.me], 1)])
         tx = tx.sign([b.me_private])

-        monkeypatch.setattr('time.time', lambda: 1)
+        monkeypatch.setattr('time.time', lambda: 1000000000)
         block1 = b.create_block([tx])
         b.write_block(block1)

-        monkeypatch.setattr('time.time', lambda: 2222222222)
+        monkeypatch.setattr('time.time', lambda: 1000000020)
         block2 = b.create_block([tx])
         b.write_block(block2)
@@ -160,7 +160,7 @@ class TestBigchainApi(object):
         block1 = b.create_block([tx1])
         b.write_block(block1)

-        monkeypatch.setattr('time.time', lambda: 2000000000)
+        monkeypatch.setattr('time.time', lambda: 1000000020)
         tx2 = Transaction.create([b.me], [([b.me], 1)],
                                  metadata={'msg': random.random()})
         tx2 = tx2.sign([b.me_private])
@@ -180,6 +180,7 @@ class TestBigchainApi(object):
     @pytest.mark.usefixtures('inputs')
     def test_write_transaction(self, b, user_pk, user_sk):
+        from bigchaindb import Bigchain
         from bigchaindb.models import Transaction

         input_tx = b.get_owned_ids(user_pk).pop()
@@ -190,12 +191,10 @@ class TestBigchainApi(object):
         tx = tx.sign([user_sk])
         response = b.write_transaction(tx)

-        assert response['skipped'] == 0
-        assert response['deleted'] == 0
-        assert response['unchanged'] == 0
-        assert response['errors'] == 0
-        assert response['replaced'] == 0
-        assert response['inserted'] == 1
+        tx_from_db, status = b.get_transaction(tx.id, include_status=True)
+
+        assert tx_from_db.to_dict() == tx.to_dict()
+        assert status == Bigchain.TX_IN_BACKLOG

     @pytest.mark.usefixtures('inputs')
     def test_read_transaction(self, b, user_pk, user_sk):


@@ -279,7 +279,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
     tx = Transaction.create([b.me], [([test_user_pub], 1)])
     tx = tx.sign([b.me_private])

-    monkeypatch.setattr('time.time', lambda: 1111111111)
+    monkeypatch.setattr('time.time', lambda: 1000000000)
     block = b.create_block([tx])
     b.write_block(block)
@@ -289,7 +289,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
                                asset_id=tx.id)
     tx2 = tx2.sign([test_user_priv])

-    monkeypatch.setattr('time.time', lambda: 2222222222)
+    monkeypatch.setattr('time.time', lambda: 2000000000)
     block2 = b.create_block([tx2])
     b.write_block(block2)
@@ -314,7 +314,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
                 'previous_block': genesis_block.id,
                 'is_block_valid': True,
                 'invalid_reason': None,
-                'timestamp': '2222222222'}
+                'timestamp': '2000000000'}

     serialized_vote = utils.serialize(vote_doc['vote']).encode()
     assert vote_doc['node_pubkey'] == b.me
@@ -328,7 +328,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
                 'previous_block': block.id,
                 'is_block_valid': True,
                 'invalid_reason': None,
-                'timestamp': '2222222222'}
+                'timestamp': '2000000000'}

     serialized_vote2 = utils.serialize(vote2_doc['vote']).encode()
     assert vote2_doc['node_pubkey'] == b.me
@@ -498,15 +498,15 @@ def test_voter_considers_unvoted_blocks_when_single_node(monkeypatch, b):
     outpipe = Pipe()

-    monkeypatch.setattr('time.time', lambda: 1111111111)
+    monkeypatch.setattr('time.time', lambda: 1000000000)

     block_ids = []
     # insert blocks in the database while the voter process is not listening
     # (these blocks won't appear in the changefeed)
-    monkeypatch.setattr('time.time', lambda: 2222222222)
+    monkeypatch.setattr('time.time', lambda: 1000000020)
     block_1 = dummy_block(b)
     block_ids.append(block_1.id)
-    monkeypatch.setattr('time.time', lambda: 3333333333)
+    monkeypatch.setattr('time.time', lambda: 1000000030)
     b.write_block(block_1)
     block_2 = dummy_block(b)
     block_ids.append(block_2.id)
@@ -522,7 +522,7 @@ def test_voter_considers_unvoted_blocks_when_single_node(monkeypatch, b):
     outpipe.get()

     # create a new block that will appear in the changefeed
-    monkeypatch.setattr('time.time', lambda: 4444444444)
+    monkeypatch.setattr('time.time', lambda: 1000000040)
     block_3 = dummy_block(b)
     block_ids.append(block_3.id)
     b.write_block(block_3)
@@ -546,15 +546,15 @@ def test_voter_chains_blocks_with_the_previous_ones(monkeypatch, b):
     outpipe = Pipe()

-    monkeypatch.setattr('time.time', lambda: 1111111111)
+    monkeypatch.setattr('time.time', lambda: 1000000000)

     block_ids = []
-    monkeypatch.setattr('time.time', lambda: 2222222222)
+    monkeypatch.setattr('time.time', lambda: 1000000020)
     block_1 = dummy_block(b)
     block_ids.append(block_1.id)
     b.write_block(block_1)

-    monkeypatch.setattr('time.time', lambda: 3333333333)
+    monkeypatch.setattr('time.time', lambda: 1000000030)
     block_2 = dummy_block(b)
     block_ids.append(block_2.id)
     b.write_block(block_2)
@@ -588,9 +588,9 @@ def test_voter_checks_for_previous_vote(monkeypatch, b):
     inpipe = Pipe()
     outpipe = Pipe()

-    monkeypatch.setattr('time.time', lambda: 1111111111)
+    monkeypatch.setattr('time.time', lambda: 1000000000)

-    monkeypatch.setattr('time.time', lambda: 2222222222)
+    monkeypatch.setattr('time.time', lambda: 1000000020)
     block_1 = dummy_block(b)
     inpipe.put(block_1.to_dict())
     assert len(list(query.get_votes_by_block_id(b.connection, block_1.id))) == 0
@@ -603,11 +603,11 @@ def test_voter_checks_for_previous_vote(monkeypatch, b):
     outpipe.get()

     # queue block for voting AGAIN
-    monkeypatch.setattr('time.time', lambda: 3333333333)
+    monkeypatch.setattr('time.time', lambda: 1000000030)
     inpipe.put(block_1.to_dict())

     # queue another block
-    monkeypatch.setattr('time.time', lambda: 4444444444)
+    monkeypatch.setattr('time.time', lambda: 1000000040)
     block_2 = dummy_block(b)
     inpipe.put(block_2.to_dict())


@@ -102,7 +102,10 @@ def test_env_config(monkeypatch):

 def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request):
     file_config = {
-        'database': {'host': 'test-host'},
+        'database': {
+            'host': 'test-host',
+            'backend': request.config.getoption('--database-backend')
+        },
         'backlog_reassign_delay': 5
     }
     monkeypatch.setattr('bigchaindb.config_utils.file_config', lambda *args, **kwargs: file_config)