Merge remote-tracking branch 'origin' into bug/1132/vote-pipeline-validates-transactions-twice

This commit is contained in:
Rodolphe Marques 2017-02-02 13:38:34 +01:00
commit 153b3dd8d4
8 changed files with 59 additions and 33 deletions

View File

@@ -143,6 +143,10 @@ def get_asset_by_id(conn, asset_id):
@register_query(MongoDBConnection) @register_query(MongoDBConnection)
def get_spent(conn, transaction_id, output): def get_spent(conn, transaction_id, output):
cursor = conn.db['bigchain'].aggregate([ cursor = conn.db['bigchain'].aggregate([
{'$match': {
'block.transactions.inputs.fulfills.txid': transaction_id,
'block.transactions.inputs.fulfills.output': output
}},
{'$unwind': '$block.transactions'}, {'$unwind': '$block.transactions'},
{'$match': { {'$match': {
'block.transactions.inputs.fulfills.txid': transaction_id, 'block.transactions.inputs.fulfills.txid': transaction_id,
@@ -157,12 +161,9 @@ def get_spent(conn, transaction_id, output):
@register_query(MongoDBConnection) @register_query(MongoDBConnection)
def get_owned_ids(conn, owner): def get_owned_ids(conn, owner):
cursor = conn.db['bigchain'].aggregate([ cursor = conn.db['bigchain'].aggregate([
{'$match': {'block.transactions.outputs.public_keys': owner}},
{'$unwind': '$block.transactions'}, {'$unwind': '$block.transactions'},
{'$match': { {'$match': {'block.transactions.outputs.public_keys': owner}}
'block.transactions.outputs.public_keys': {
'$elemMatch': {'$eq': owner}
}
}}
]) ])
# we need to access some nested fields before returning so lets use a # we need to access some nested fields before returning so lets use a
# generator to avoid having to read all records on the cursor at this point # generator to avoid having to read all records on the cursor at this point

View File

@@ -63,6 +63,18 @@ def create_bigchain_secondary_index(conn, dbname):
.create_index('block.transactions.transaction.asset.id', .create_index('block.transactions.transaction.asset.id',
name='asset_id') name='asset_id')
# secondary index on the public keys of outputs
conn.conn[dbname]['bigchain']\
.create_index('block.transactions.outputs.public_keys',
name='outputs')
# secondary index on inputs/transaction links (txid, output)
conn.conn[dbname]['bigchain']\
.create_index([
('block.transactions.inputs.fulfills.txid', ASCENDING),
('block.transactions.inputs.fulfills.output', ASCENDING),
], name='inputs')
def create_backlog_secondary_index(conn, dbname): def create_backlog_secondary_index(conn, dbname):
logger.info('Create `backlog` secondary index.') logger.info('Create `backlog` secondary index.')

View File

@@ -111,21 +111,22 @@ def _get_asset_create_tx_query(asset_id):
@register_query(RethinkDBConnection) @register_query(RethinkDBConnection)
def get_spent(connection, transaction_id, output): def get_spent(connection, transaction_id, output):
# TODO: use index!
return connection.run( return connection.run(
r.table('bigchain', read_mode=READ_MODE) r.table('bigchain', read_mode=READ_MODE)
.concat_map(lambda doc: doc['block']['transactions']) .get_all([transaction_id, output], index='inputs')
.filter(lambda transaction: transaction['inputs'].contains( .concat_map(lambda doc: doc['block']['transactions'])
lambda input: input['fulfills'] == {'txid': transaction_id, 'output': output}))) .filter(lambda transaction: transaction['inputs'].contains(
lambda input_: input_['fulfills'] == {'txid': transaction_id, 'output': output})))
@register_query(RethinkDBConnection) @register_query(RethinkDBConnection)
def get_owned_ids(connection, owner): def get_owned_ids(connection, owner):
# TODO: use index!
return connection.run( return connection.run(
r.table('bigchain', read_mode=READ_MODE) r.table('bigchain', read_mode=READ_MODE)
.concat_map(lambda doc: doc['block']['transactions']) .get_all(owner, index='outputs')
.filter(lambda tx: tx['outputs'].contains( .distinct()
.concat_map(lambda doc: doc['block']['transactions'])
.filter(lambda tx: tx['outputs'].contains(
lambda c: c['public_keys'].contains(owner)))) lambda c: c['public_keys'].contains(owner))))

View File

@@ -66,6 +66,31 @@ def create_bigchain_secondary_index(connection, dbname):
.table('bigchain') .table('bigchain')
.index_create('asset_id', r.row['block']['transactions']['asset']['id'], multi=True)) .index_create('asset_id', r.row['block']['transactions']['asset']['id'], multi=True))
# secondary index on the public keys of outputs
# the last reduce operation is to return a flatten list of public_keys
# without it we would need to match exactly the public_keys list.
# For instance querying for `pk1` would not match documents with
# `public_keys: [pk1, pk2, pk3]`
connection.run(
r.db(dbname)
.table('bigchain')
.index_create('outputs',
r.row['block']['transactions']
.concat_map(lambda tx: tx['outputs']['public_keys'])
.reduce(lambda l, r: l + r), multi=True))
# secondary index on inputs/transaction links (txid, output)
connection.run(
r.db(dbname)
.table('bigchain')
.index_create('inputs',
r.row['block']['transactions']
.concat_map(lambda tx: tx['inputs']['fulfills'])
.with_fields('txid', 'output')
.map(lambda fulfills: [fulfills['txid'],
fulfills['output']]),
multi=True))
# wait for rethinkdb to finish creating secondary indexes # wait for rethinkdb to finish creating secondary indexes
connection.run( connection.run(
r.db(dbname) r.db(dbname)

View File

@@ -209,10 +209,6 @@ class Block(object):
return self return self
def _validate_block(self, bigchain): def _validate_block(self, bigchain):
# First, make sure this node hasn't already voted on this block
if bigchain.has_previous_vote(self.id, self.voters):
return self
# Check if the block was created by a federation node # Check if the block was created by a federation node
possible_voters = (bigchain.nodes_except_me + [bigchain.me]) possible_voters = (bigchain.nodes_except_me + [bigchain.me])
if self.node_pubkey not in possible_voters: if self.node_pubkey not in possible_voters:

View File

@@ -21,8 +21,8 @@ def test_init_creates_db_tables_and_indexes():
assert sorted(collection_names) == ['backlog', 'bigchain', 'votes'] assert sorted(collection_names) == ['backlog', 'bigchain', 'votes']
indexes = conn.conn[dbname]['bigchain'].index_information().keys() indexes = conn.conn[dbname]['bigchain'].index_information().keys()
assert sorted(indexes) == ['_id_', 'asset_id', 'block_timestamp', assert sorted(indexes) == ['_id_', 'asset_id', 'block_timestamp', 'inputs',
'transaction_id'] 'outputs', 'transaction_id']
indexes = conn.conn[dbname]['backlog'].index_information().keys() indexes = conn.conn[dbname]['backlog'].index_information().keys()
assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp', assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
@@ -81,8 +81,8 @@ def test_create_secondary_indexes():
# Bigchain table # Bigchain table
indexes = conn.conn[dbname]['bigchain'].index_information().keys() indexes = conn.conn[dbname]['bigchain'].index_information().keys()
assert sorted(indexes) == ['_id_', 'asset_id', 'block_timestamp', assert sorted(indexes) == ['_id_', 'asset_id', 'block_timestamp', 'inputs',
'transaction_id'] 'outputs', 'transaction_id']
# Backlog table # Backlog table
indexes = conn.conn[dbname]['backlog'].index_information().keys() indexes = conn.conn[dbname]['backlog'].index_information().keys()

View File

@@ -85,6 +85,10 @@ def test_create_secondary_indexes():
'transaction_id')) is True 'transaction_id')) is True
assert conn.run(r.db(dbname).table('bigchain').index_list().contains( assert conn.run(r.db(dbname).table('bigchain').index_list().contains(
'asset_id')) is True 'asset_id')) is True
assert conn.run(r.db(dbname).table('bigchain').index_list().contains(
'inputs')) is True
assert conn.run(r.db(dbname).table('bigchain').index_list().contains(
'outputs')) is True
# Backlog table # Backlog table
assert conn.run(r.db(dbname).table('backlog').index_list().contains( assert conn.run(r.db(dbname).table('backlog').index_list().contains(

View File

@@ -163,16 +163,3 @@ class TestBlockModel(object):
public_key = PublicKey(b.me) public_key = PublicKey(b.me)
assert public_key.verify(expected_block_serialized, block.signature) assert public_key.verify(expected_block_serialized, block.signature)
def test_validate_already_voted_on_block(self, b, monkeypatch):
from unittest.mock import Mock
from bigchaindb.models import Transaction
tx = Transaction.create([b.me], [([b.me], 1)])
block = b.create_block([tx])
has_previous_vote = Mock()
has_previous_vote.return_value = True
monkeypatch.setattr(b, 'has_previous_vote', has_previous_vote)
assert block == block.validate(b)
assert has_previous_vote.called is True