Mirror of https://github.com/bigchaindb/bigchaindb.git
commit 7f05974f0f
parent 595f7dc701

    Added a secondary index with a uniqueness constraint on the backlog.
    Several test fixes.
@@ -67,10 +67,10 @@ class MongoDBChangeFeed(ChangeFeed):
             # See https://github.com/bigchaindb/bigchaindb/issues/992
             if is_insert and (self.operation & ChangeFeed.INSERT):
                 record['o'].pop('_id', None)
-                doc = record['o']
+                self.outqueue.put(record['o'])
             elif is_delete and (self.operation & ChangeFeed.DELETE):
                 # on delete it only returns the id of the document
-                doc = record['o']
+                self.outqueue.put(record['o'])
             elif is_update and (self.operation & ChangeFeed.UPDATE):
                 # the oplog entry for updates only returns the update
                 # operations to apply to the document and not the
@@ -78,7 +78,7 @@ class MongoDBChangeFeed(ChangeFeed):
                 # and then return it.
                 doc = self.connection.conn[dbname][table]\
                     .find_one(record['o2'], projection={'_id': False})
-            self.outqueue.put(doc)
+                self.outqueue.put(doc)


 @register_changefeed(MongoDBConnection)
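Why the update branch needs the extra read: for inserts and deletes the oplog entry already carries the data the change feed emits, but for updates MongoDB's oplog records only the update operators ('o') plus the criteria identifying the target document ('o2'), so the feed re-reads the document with find_one. A rough sketch of the three record shapes, with invented values; only the op/o/o2 layout is MongoDB's:

    # illustrative replica-set oplog entries (values are made up)
    insert_entry = {'op': 'i', 'ns': 'bigchain.backlog',
                    'o': {'_id': '...', 'id': 'tx1'}}      # the full document
    delete_entry = {'op': 'd', 'ns': 'bigchain.backlog',
                    'o': {'_id': '...'}}                   # only the doc id
    update_entry = {'op': 'u', 'ns': 'bigchain.backlog',
                    'o': {'$set': {'assignee': 'node2'}},  # ops, not the doc
                    'o2': {'_id': '...'}}                  # which doc changed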
@@ -4,6 +4,7 @@ from time import time
 from itertools import chain

 from pymongo import ReturnDocument
+from pymongo import errors

 from bigchaindb import backend
 from bigchaindb.common.exceptions import CyclicBlockchainError
@@ -16,7 +17,10 @@ register_query = module_dispatch_registrar(backend.query)

 @register_query(MongoDBConnection)
 def write_transaction(conn, signed_transaction):
-    return conn.db['backlog'].insert_one(signed_transaction)
+    try:
+        return conn.db['backlog'].insert_one(signed_transaction)
+    except errors.DuplicateKeyError:
+        return


 @register_query(MongoDBConnection)
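Together with the unique index added below, this makes re-submitting a transaction that is already in the backlog a silent no-op instead of an unhandled error. A minimal sketch of the resulting behaviour, assuming a local MongoDB; the database name 'bigchain' and the transaction contents are illustrative:

    from pymongo import MongoClient, errors

    conn = MongoClient()
    backlog = conn['bigchain']['backlog']
    backlog.create_index('id', name='transaction_id', unique=True)

    tx = {'id': 'abc123', 'assignee': 'node1'}
    backlog.insert_one(dict(tx))      # first write succeeds

    try:
        backlog.insert_one(dict(tx))  # same transaction id again
    except errors.DuplicateKeyError:
        pass  # write_transaction() now swallows this and returns None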
@@ -72,6 +72,12 @@ def create_bigchain_secondary_index(conn, dbname):
 def create_backlog_secondary_index(conn, dbname):
     logger.info('Create `backlog` secondary index.')

+    # secondary index on the transaction id with a uniqueness constraint
+    # to make sure there are no duplicated transactions in the backlog
+    conn.conn[dbname]['backlog'].create_index('id',
+                                              name='transaction_id',
+                                              unique=True)
+
     # compound index to read transactions from the backlog per assignee
     conn.conn[dbname]['backlog']\
         .create_index([('assignee', ASCENDING),
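Note that MongoDB refuses to build a unique index over a collection that already contains duplicates, so the index has to exist before transactions are written. Once the setup above has run, the index appears in the collection's metadata under the name passed to create_index, which is exactly what the tests below assert. A quick way to inspect it, again assuming a local MongoDB and an illustrative database name:

    from pymongo import MongoClient

    conn = MongoClient()
    indexes = conn['bigchain']['backlog'].index_information()
    # expected keys after setup:
    #   '_id_'                             MongoDB's default index
    #   'assignee__transaction_timestamp'  the compound read index
    #   'transaction_id'                   the new unique index on 'id'
    print(sorted(indexes))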
tests/backend/mongodb/test_changefeed.py (new file)

@@ -0,0 +1 @@
+"""MongoDB changefeed tests"""
@@ -25,7 +25,8 @@ def test_init_creates_db_tables_and_indexes():
                                'transaction_id']

     indexes = conn.conn[dbname]['backlog'].index_information().keys()
-    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp']
+    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
+                               'transaction_id']

     indexes = conn.conn[dbname]['votes'].index_information().keys()
     assert sorted(indexes) == ['_id_', 'block_and_voter']
@@ -85,13 +86,19 @@ def test_create_secondary_indexes():

     # Backlog table
     indexes = conn.conn[dbname]['backlog'].index_information().keys()
-    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp']
+    assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
+                               'transaction_id']

     # Votes table
     indexes = conn.conn[dbname]['votes'].index_information().keys()
     assert sorted(indexes) == ['_id_', 'block_and_voter']


+# The database is set up with a session scope.
+# If we run this test we will remove secondary indexes that are needed for
+# the rest of the tests.
+@pytest.mark.skipif(reason='This will remove the secondary indexes needed'
+                           ' for the rest of the tests')
 def test_drop():
     import bigchaindb
     from bigchaindb import backend
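One detail worth flagging in the new marker: pytest.mark.skipif is normally given a boolean condition as its first argument. For an unconditional skip like this one, pytest.mark.skip states the intent more directly; a sketch of the alternative spelling (not what the commit uses):

    import pytest

    @pytest.mark.skip(reason='This will remove the secondary indexes needed'
                             ' for the rest of the tests')
    def test_drop():
        ...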
@@ -97,18 +97,18 @@ class TestBigchainApi(object):
         tx = Transaction.create([b.me], [([b.me], 1)])
         tx = tx.sign([b.me_private])

-        monkeypatch.setattr('time.time', lambda: 1)
+        monkeypatch.setattr('time.time', lambda: 1000000000)
         block1 = b.create_block([tx])
         b.write_block(block1)

-        monkeypatch.setattr('time.time', lambda: 2)
+        monkeypatch.setattr('time.time', lambda: 1000000020)
         transfer_tx = Transaction.transfer(tx.to_inputs(), [([b.me], 1)],
                                            asset_id=tx.id)
         transfer_tx = transfer_tx.sign([b.me_private])
         block2 = b.create_block([transfer_tx])
         b.write_block(block2)

-        monkeypatch.setattr('time.time', lambda: 3333333333)
+        monkeypatch.setattr('time.time', lambda: 1000000030)
         transfer_tx2 = Transaction.transfer(tx.to_inputs(), [([b.me], 1)],
                                             asset_id=tx.id)
         transfer_tx2 = transfer_tx2.sign([b.me_private])
@@ -132,11 +132,11 @@ class TestBigchainApi(object):
         tx = Transaction.create([b.me], [([b.me], 1)])
         tx = tx.sign([b.me_private])

-        monkeypatch.setattr('time.time', lambda: 1)
+        monkeypatch.setattr('time.time', lambda: 1000000000)
         block1 = b.create_block([tx])
         b.write_block(block1)

-        monkeypatch.setattr('time.time', lambda: 2222222222)
+        monkeypatch.setattr('time.time', lambda: 1000000020)
         block2 = b.create_block([tx])
         b.write_block(block2)

@@ -160,7 +160,7 @@ class TestBigchainApi(object):
         block1 = b.create_block([tx1])
         b.write_block(block1)

-        monkeypatch.setattr('time.time', lambda: 2000000000)
+        monkeypatch.setattr('time.time', lambda: 1000000020)
         tx2 = Transaction.create([b.me], [([b.me], 1)],
                                  metadata={'msg': random.random()})
         tx2 = tx2.sign([b.me_private])
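The clock fixes in the three hunks above replace arbitrary values (1, 2, 3333333333, 2222222222, 2000000000) with realistic, strictly increasing Unix timestamps (1000000000 falls in September 2001), presumably so block and transaction timestamps look and sort like real ones. The mechanism is pytest's built-in monkeypatch fixture; a self-contained sketch of the pattern:

    import time

    def test_frozen_clock(monkeypatch):
        # freeze time.time() at a fixed epoch
        monkeypatch.setattr('time.time', lambda: 1000000000)
        first = time.time()
        # advance the frozen clock before the next write
        monkeypatch.setattr('time.time', lambda: 1000000020)
        assert time.time() - first == 20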
@@ -180,6 +180,7 @@ class TestBigchainApi(object):

     @pytest.mark.usefixtures('inputs')
     def test_write_transaction(self, b, user_pk, user_sk):
+        from bigchaindb import Bigchain
         from bigchaindb.models import Transaction

         input_tx = b.get_owned_ids(user_pk).pop()
@@ -190,12 +191,10 @@ class TestBigchainApi(object):
         tx = tx.sign([user_sk])
         response = b.write_transaction(tx)

-        assert response['skipped'] == 0
-        assert response['deleted'] == 0
-        assert response['unchanged'] == 0
-        assert response['errors'] == 0
-        assert response['replaced'] == 0
-        assert response['inserted'] == 1
+        tx_from_db, status = b.get_transaction(tx.id, include_status=True)
+
+        assert tx_from_db.to_dict() == tx.to_dict()
+        assert status == Bigchain.TX_IN_BACKLOG

     @pytest.mark.usefixtures('inputs')
     def test_read_transaction(self, b, user_pk, user_sk):
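The dropped assertions inspected a RethinkDB-style write result (skipped, deleted, unchanged, errors, replaced, and inserted are RethinkDB's write-response fields, which pymongo's insert_one does not return). The rewritten test checks the observable outcome instead: the transaction can be read back with status TX_IN_BACKLOG, which holds regardless of the backend.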