Merge branch 'bug/1592/metadata-text-search'

This commit is contained in:
kansi 2017-11-20 19:12:31 +05:30
commit f9d8519832
19 changed files with 503 additions and 36 deletions

View File

@ -265,6 +265,16 @@ def write_assets(conn, assets):
return
@register_query(MongoDBConnection)
def write_metadata(conn, metadata):
    """Insert a batch of metadata documents into the ``metadata`` collection.

    Args:
        metadata (list): metadata documents to insert.

    Returns:
        The pymongo insert result, or ``None`` if the backend raised an
        ``OperationError`` (e.g. duplicate ids already written).
    """
    try:
        collection = conn.collection('metadata')
        # unordered insert: one duplicate does not abort the remaining docs
        return conn.run(collection.insert_many(metadata, ordered=False))
    except OperationError:
        return
@register_query(MongoDBConnection)
def get_assets(conn, asset_ids):
return conn.run(
@ -273,6 +283,14 @@ def get_assets(conn, asset_ids):
projection={'_id': False}))
@register_query(MongoDBConnection)
def get_metadata(conn, txn_ids):
    """Fetch metadata documents whose ``id`` is one of ``txn_ids``.

    The Mongo-internal ``_id`` field is stripped from the results.
    """
    selector = {'id': {'$in': txn_ids}}
    return conn.run(conn.collection('metadata')
                    .find(selector, projection={'_id': False}))
@register_query(MongoDBConnection)
def count_blocks(conn):
return conn.run(
@ -348,9 +366,9 @@ def get_new_blocks_feed(conn, start_block_id):
@register_query(MongoDBConnection)
def text_search(conn, search, *, language='english', case_sensitive=False,
diacritic_sensitive=False, text_score=False, limit=0):
diacritic_sensitive=False, text_score=False, limit=0, table='assets'):
cursor = conn.run(
conn.collection('assets')
conn.collection(table)
.find({'$text': {
'$search': search,
'$language': language,
@ -363,7 +381,7 @@ def text_search(conn, search, *, language='english', case_sensitive=False,
if text_score:
return cursor
return (_remove_text_score(asset) for asset in cursor)
return (_remove_text_score(obj) for obj in cursor)
def _remove_text_score(asset):

View File

@ -27,7 +27,7 @@ def create_database(conn, dbname):
@register_schema(MongoDBConnection)
def create_tables(conn, dbname):
for table_name in ['bigchain', 'backlog', 'votes', 'assets']:
for table_name in ['bigchain', 'backlog', 'votes', 'assets', 'metadata']:
logger.info('Create `%s` table.', table_name)
# create the table
# TODO: read and write concerns can be declared here
@ -40,6 +40,7 @@ def create_indexes(conn, dbname):
create_backlog_secondary_index(conn, dbname)
create_votes_secondary_index(conn, dbname)
create_assets_secondary_index(conn, dbname)
create_metadata_secondary_index(conn, dbname)
@register_schema(MongoDBConnection)
@ -121,3 +122,17 @@ def create_assets_secondary_index(conn, dbname):
# full text search index
conn.conn[dbname]['assets'].create_index([('$**', TEXT)], name='text')
def create_metadata_secondary_index(conn, dbname):
    """Create the secondary indexes on the ``metadata`` collection.

    - ``transaction_id``: unique index on ``id`` (the txid of the
      transaction the metadata was specified for).
    - ``text``: wildcard full-text index over every field.
    """
    logger.info('Create `metadata` secondary index.')
    metadata = conn.conn[dbname]['metadata']
    metadata.create_index('id', name='transaction_id', unique=True)
    metadata.create_index([('$**', TEXT)], name='text')

View File

@ -254,6 +254,19 @@ def write_assets(connection, assets):
raise NotImplementedError
@singledispatch
def write_metadata(connection, metadata):
    """Write a list of metadata documents to the metadata table.

    Backend-specific implementations are registered via ``singledispatch``
    on the connection type; this generic fallback always raises.

    Args:
        metadata (list): the metadata documents to write.

    Returns:
        The database response.

    Raises:
        NotImplementedError: if no backend implementation is registered
            for the given connection type.
    """
    raise NotImplementedError
@singledispatch
def get_assets(connection, asset_ids):
"""Get a list of assets from the assets table.
@ -268,6 +281,20 @@ def get_assets(connection, asset_ids):
raise NotImplementedError
@singledispatch
def get_metadata(connection, txn_ids):
    """Get a list of metadata documents from the metadata table.

    Backend-specific implementations are registered via ``singledispatch``
    on the connection type; this generic fallback always raises.

    Args:
        txn_ids (list): ids of the metadata documents to retrieve from
            the database.

    Returns:
        metadata (list): the metadata documents found.

    Raises:
        NotImplementedError: if no backend implementation is registered
            for the given connection type.
    """
    raise NotImplementedError
@singledispatch
def count_blocks(connection):
"""Count the number of blocks in the bigchain table.
@ -360,7 +387,7 @@ def get_new_blocks_feed(connection, start_block_id):
@singledispatch
def text_search(conn, search, *, language='english', case_sensitive=False,
diacritic_sensitive=False, text_score=False, limit=0):
diacritic_sensitive=False, text_score=False, limit=0, table=None):
"""Return all the assets that match the text search.
The results are sorted by text score.

View File

@ -173,6 +173,13 @@ def write_assets(connection, assets):
.insert(assets, durability=WRITE_DURABILITY))
@register_query(RethinkDBConnection)
def write_metadata(connection, metadata):
    """Insert a batch of metadata documents into the ``metadata`` table."""
    insert_query = r.table('metadata').insert(metadata,
                                              durability=WRITE_DURABILITY)
    return connection.run(insert_query)
@register_query(RethinkDBConnection)
def get_assets(connection, asset_ids):
return connection.run(
@ -180,6 +187,13 @@ def get_assets(connection, asset_ids):
.get_all(*asset_ids))
@register_query(RethinkDBConnection)
def get_metadata(connection, txn_ids):
    """Fetch the metadata documents whose primary key is in ``txn_ids``."""
    fetch_query = r.table('metadata', read_mode=READ_MODE).get_all(*txn_ids)
    return connection.run(fetch_query)
@register_query(RethinkDBConnection)
def count_blocks(connection):
return connection.run(

View File

@ -23,7 +23,7 @@ def create_database(connection, dbname):
@register_schema(RethinkDBConnection)
def create_tables(connection, dbname):
for table_name in ['bigchain', 'backlog', 'votes', 'assets']:
for table_name in ['bigchain', 'backlog', 'votes', 'assets', 'metadata']:
logger.info('Create `%s` table.', table_name)
connection.run(r.db(dbname).table_create(table_name))

View File

@ -21,7 +21,7 @@ from bigchaindb.common.utils import validate_all_values_for_key
logger = logging.getLogger(__name__)
TABLES = ('bigchain', 'backlog', 'votes', 'assets')
TABLES = ('bigchain', 'backlog', 'votes', 'assets', 'metadata')
VALID_LANGUAGES = ('danish', 'dutch', 'english', 'finnish', 'french', 'german',
'hungarian', 'italian', 'norwegian', 'portuguese', 'romanian',
'russian', 'spanish', 'swedish', 'turkish', 'none',

View File

@ -190,10 +190,15 @@ class Bigchain(object):
# get the asset ids from the block
if block_dict:
asset_ids = Block.get_asset_ids(block_dict)
txn_ids = Block.get_txn_ids(block_dict)
# get the assets from the database
assets = self.get_assets(asset_ids)
# get the metadata from the database
metadata = self.get_metadata(txn_ids)
# add the assets to the block transactions
block_dict = Block.couple_assets(block_dict, assets)
# add the metadata to the block transactions
block_dict = Block.couple_metadata(block_dict, metadata)
status = None
if include_status:
@ -379,8 +384,8 @@ class Bigchain(object):
for transaction in transactions:
# ignore transactions in invalid blocks
# FIXME: Isn't there a faster solution than doing I/O again?
_, status = self.get_transaction(transaction['id'],
include_status=True)
txn, status = self.get_transaction(transaction['id'],
include_status=True)
if status == self.TX_VALID:
num_valid_transactions += 1
# `txid` can only have been spent in at most on valid block.
@ -390,6 +395,7 @@ class Bigchain(object):
' with the chain'.format(txid))
# if its not and invalid transaction
if status is not None:
transaction.update({'metadata': txn.metadata})
non_invalid_transactions.append(transaction)
if non_invalid_transactions:
@ -508,10 +514,15 @@ class Bigchain(object):
# Decouple assets from block
assets, block_dict = block.decouple_assets()
metadatas, block_dict = block.decouple_metadata(block_dict)
# write the assets
if assets:
self.write_assets(assets)
if metadatas:
self.write_metadata(metadatas)
# write the block
return backend.query.write_block(self.connection, block_dict)
@ -622,6 +633,19 @@ class Bigchain(object):
"""
return backend.query.get_assets(self.connection, asset_ids)
def get_metadata(self, txn_ids):
    """Return the metadata documents matching the given transaction ids.

    Args:
        txn_ids (:obj:`list` of :obj:`str`): transaction ids whose
            metadata should be retrieved from the database.

    Returns:
        list: the metadata documents returned by the backend.
    """
    return backend.query.get_metadata(self.connection, txn_ids)
def write_assets(self, assets):
"""
Writes a list of assets into the database.
@ -632,7 +656,17 @@ class Bigchain(object):
"""
return backend.query.write_assets(self.connection, assets)
def text_search(self, search, *, limit=0):
def write_metadata(self, metadata):
    """Write a list of metadata documents into the database.

    Args:
        metadata (:obj:`list` of :obj:`dict`): metadata documents to
            persist in the metadata table.
    """
    return backend.query.write_metadata(self.connection, metadata)
def text_search(self, search, *, limit=0, table='assets'):
"""
Return an iterator of assets that match the text search
@ -643,12 +677,13 @@ class Bigchain(object):
Returns:
iter: An iterator of assets that match the text search.
"""
assets = backend.query.text_search(self.connection, search, limit=limit)
objects = backend.query.text_search(self.connection, search, limit=limit,
table=table)
# TODO: This is not efficient. There may be a more efficient way to
# query by storing block ids with the assets and using fastquery.
# See https://github.com/bigchaindb/bigchaindb/issues/1496
for asset in assets:
tx, status = self.get_transaction(asset['id'], True)
for obj in objects:
tx, status = self.get_transaction(obj['id'], True)
if status == self.TX_VALID:
yield asset
yield obj

View File

@ -5,8 +5,7 @@ from bigchaindb.common.exceptions import (InvalidHash, InvalidSignature,
DoubleSpend, InputDoesNotExist,
TransactionNotInValidBlock,
AssetIdMismatch, AmountError,
SybilError,
DuplicateTransaction)
SybilError, DuplicateTransaction)
from bigchaindb.common.transaction import Transaction
from bigchaindb.common.utils import (gen_timestamp, serialize,
validate_txn_obj, validate_key)
@ -114,6 +113,15 @@ class Transaction(Transaction):
del asset['id']
tx_dict.update({'asset': asset})
# get metadata of the transaction
metadata = list(bigchain.get_metadata([tx_dict['id']]))
if 'metadata' not in tx_dict:
metadata = metadata[0] if metadata else None
if metadata:
metadata = metadata.get('metadata')
tx_dict.update({'metadata': metadata})
return cls.from_dict(tx_dict)
@ -352,11 +360,15 @@ class Block(object):
"""
asset_ids = cls.get_asset_ids(block_dict)
assets = bigchain.get_assets(asset_ids)
txn_ids = cls.get_txn_ids(block_dict)
metadata = bigchain.get_metadata(txn_ids)
# reconstruct block
block_dict = cls.couple_assets(block_dict, assets)
block_dict = cls.couple_metadata(block_dict, metadata)
kwargs = from_dict_kwargs or {}
return cls.from_dict(block_dict, **kwargs)
def decouple_assets(self):
def decouple_assets(self, block_dict=None):
"""
Extracts the assets from the ``CREATE`` transactions in the block.
@ -365,7 +377,9 @@ class Block(object):
the block being the dict of the block with no assets in the CREATE
transactions.
"""
block_dict = deepcopy(self.to_dict())
if block_dict is None:
block_dict = deepcopy(self.to_dict())
assets = []
for transaction in block_dict['block']['transactions']:
if transaction['operation'] in [Transaction.CREATE,
@ -376,6 +390,27 @@ class Block(object):
return (assets, block_dict)
def decouple_metadata(self, block_dict=None):
    """Extract the metadata from the transactions in the block.

    Args:
        block_dict (dict, optional): block to strip. Defaults to a deep
            copy of ``self.to_dict()`` so the Block instance is never
            mutated; a dict passed in explicitly IS mutated in place.

    Returns:
        tuple: ``(metadatas, block_dict)`` where ``metadatas`` is a list
        of ``{'id': txid, 'metadata': ...}`` dicts (one per transaction
        with non-empty metadata) and ``block_dict`` is the block with the
        ``metadata`` key removed from every transaction.
    """
    if block_dict is None:
        block_dict = deepcopy(self.to_dict())

    metadatas = []
    for transaction in block_dict['block']['transactions']:
        # default to None so a transaction that already lacks a
        # `metadata` key (e.g. one decoupled earlier) does not raise
        metadata = transaction.pop('metadata', None)
        if metadata:
            metadatas.append({'id': transaction['id'],
                              'metadata': metadata})

    return (metadatas, block_dict)
@staticmethod
def couple_assets(block_dict, assets):
"""
@ -401,6 +436,34 @@ class Block(object):
transaction.update({'asset': assets.get(transaction['id'])})
return block_dict
@staticmethod
def couple_metadata(block_dict, metadatal):
"""
Given a block_dict with no metadata (as returned from a database call)
and a list of metadata, reconstruct the original block by putting the
metadata of each transaction back into its original transaction.
NOTE: Till a transaction gets accepted the `metadata` of the transaction
is not moved outside of the transaction. So, if a transaction is found to
have metadata then it should not be overridden.
Args:
block_dict (:obj:`dict`): The block dict as returned from a
database call.
metadata (:obj:`list` of :obj:`dict`): A list of metadata returned from
a database call.
Returns:
dict: The dict of the reconstructed block.
"""
# create a dict with {'<txid>': metadata}
metadatal = {m.pop('id'): m.pop('metadata') for m in metadatal}
# add the metadata to their corresponding transactions
for transaction in block_dict['block']['transactions']:
metadata = metadatal.get(transaction['id'], None)
transaction.update({'metadata': metadata})
return block_dict
@staticmethod
def get_asset_ids(block_dict):
"""
@ -424,6 +487,25 @@ class Block(object):
return asset_ids
@staticmethod
def get_txn_ids(block_dict):
"""
Given a block_dict return all the transaction ids.
Args:
block_dict (:obj:`dict`): The block dict as returned from a
database call.
Returns:
list: The list of txn_ids in the block.
"""
txn_ids = []
for transaction in block_dict['block']['transactions']:
txn_ids.append(transaction['id'])
return txn_ids
def to_str(self):
return serialize(self.to_dict())

View File

@ -2,6 +2,7 @@
from flask_restful import Api
from bigchaindb.web.views import (
assets,
metadata,
blocks,
info,
statuses,
@ -27,6 +28,7 @@ def r(*args, **kwargs):
ROUTES_API_V1 = [
r('/', info.ApiV1Index),
r('assets/', assets.AssetListApi),
r('metadata/', metadata.MetadataApi),
r('blocks/<string:block_id>', blocks.BlockApi),
r('blocks/', blocks.BlockListApi),
r('statuses/', statuses.StatusApi),

View File

@ -0,0 +1,50 @@
"""This module provides the blueprint for some basic API endpoints.
For more information please refer to the documentation: http://bigchaindb.com/http-api
"""
import logging
from flask_restful import reqparse, Resource
from flask import current_app
from bigchaindb.backend.exceptions import OperationError
from bigchaindb.web.views.base import make_error
logger = logging.getLogger(__name__)
class MetadataApi(Resource):
    def get(self):
        """API endpoint to perform a text search on transaction metadata.

        Args:
            search (str): text search string to query the text index.
            limit (int, optional): limit the number of returned documents.

        Return:
            A list of metadata that match the query.
        """
        parser = reqparse.RequestParser()
        parser.add_argument('search', type=str, required=True)
        parser.add_argument('limit', type=int)
        args = parser.parse_args()

        if not args['search']:
            return make_error(400, 'text_search cannot be empty')
        if not args['limit']:
            # drop the key so text_search falls back to its own default
            del args['limit']

        pool = current_app.config['bigchain_pool']
        with pool() as bigchain:
            args['table'] = 'metadata'
            metadata = bigchain.text_search(**args)

        try:
            # materializing the generator only works with MongoDB as the
            # backend; other backends raise OperationError here
            return list(metadata)
        except OperationError as e:
            return make_error(400,
                              '({}): {}'.format(type(e).__name__, e))

View File

@ -452,6 +452,118 @@ Assets
text search.
Transaction Metadata
--------------------------------
.. http:get:: /api/v1/metadata
Return all the metadata that match a given text search.
:query string text search: Text search string to query.
:query int limit: (Optional) Limit the number of returned metadata objects. Defaults
to ``0`` meaning return all matching objects.
.. note::
Currently this endpoint is only supported if the server is running
MongoDB as the backend.
.. http:get:: /api/v1/metadata/?search={text_search}
Return all metadata that match a given text search. The ``id`` of the metadata
is the same ``id`` of the transaction where it was defined.
If no metadata match the text search it returns an empty list.
If the text string is empty or the server does not support text search,
a ``400`` is returned.
The results are sorted by text score.
For more information about the behavior of text search see `MongoDB text
search behavior <https://docs.mongodb.com/manual/reference/operator/query/text/#behavior>`_
**Example request**:
.. sourcecode:: http
GET /api/v1/metadata/?search=bigchaindb HTTP/1.1
Host: example.com
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-type: application/json
[
{
"metadata": {"metakey1": "Hello BigchainDB 1!"},
"id": "51ce82a14ca274d43e4992bbce41f6fdeb755f846e48e710a3bbb3b0cf8e4204"
},
{
"metadata": {"metakey2": "Hello BigchainDB 2!"},
"id": "b4e9005fa494d20e503d916fa87b74fe61c079afccd6e084260674159795ee31"
},
{
"metadata": {"metakey3": "Hello BigchainDB 3!"},
"id": "fa6bcb6a8fdea3dc2a860fcdc0e0c63c9cf5b25da8b02a4db4fb6a2d36d27791"
}
]
:resheader Content-Type: ``application/json``
:statuscode 200: The query was executed successfully.
:statuscode 400: The query was not executed successfully. Returned if the
text string is empty or the server does not support
text search.
.. http:get:: /api/v1/metadata/?search={text_search}&limit={n_documents}
Return at most ``n`` metadata objects that match a given text search.
If no metadata match the text search it returns an empty list.
If the text string is empty or the server does not support text search,
a ``400`` is returned.
The results are sorted by text score.
For more information about the behavior of text search see `MongoDB text
search behavior <https://docs.mongodb.com/manual/reference/operator/query/text/#behavior>`_
**Example request**:
.. sourcecode:: http
GET /api/v1/metadata/?search=bigchaindb&limit=2 HTTP/1.1
Host: example.com
**Example response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-type: application/json
[
{
"metadata": {"msg": "Hello BigchainDB 1!"},
"id": "51ce82a14ca274d43e4992bbce41f6fdeb755f846e48e710a3bbb3b0cf8e4204"
},
{
"metadata": {"msg": "Hello BigchainDB 2!"},
"id": "b4e9005fa494d20e503d916fa87b74fe61c079afccd6e084260674159795ee31"
},
]
:resheader Content-Type: ``application/json``
:statuscode 200: The query was executed successfully.
:statuscode 400: The query was not executed successfully. Returned if the
text string is empty or the server does not support
text search.
Advanced Usage
--------------------------------

View File

@ -353,6 +353,7 @@ def test_get_genesis_block(genesis_block):
conn = connect()
assets, genesis_block_dict = genesis_block.decouple_assets()
metadata, genesis_block_dict = genesis_block.decouple_metadata(genesis_block_dict)
assert query.get_genesis_block(conn) == genesis_block_dict
@ -420,7 +421,8 @@ def test_get_new_blocks_feed(b, create_tx):
ts = str(random.random())
block = Block(transactions=[create_tx], timestamp=ts)
b.write_block(block)
return block.decouple_assets()[1]
block_dict = block.decouple_assets()[1]
return block.decouple_metadata(block_dict)[1]
create_block()
b1 = create_block()
@ -527,13 +529,14 @@ def test_get_assets():
assert list(cursor.sort('id', pymongo.ASCENDING)) == assets[::2]
def test_text_search():
@pytest.mark.parametrize("table", ['assets', 'metadata'])
def test_text_search(table):
from bigchaindb.backend import connect, query
conn = connect()
# Example data and tests cases taken from the mongodb documentation
# https://docs.mongodb.com/manual/reference/operator/query/text/
assets = [
objects = [
{'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
{'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
{'id': 3, 'subject': 'Baking a cake', 'author': 'abc', 'views': 90},
@ -545,17 +548,17 @@ def test_text_search():
]
# insert the assets
conn.db.assets.insert_many(deepcopy(assets), ordered=False)
conn.db[table].insert_many(deepcopy(objects), ordered=False)
# test search single word
assert list(query.text_search(conn, 'coffee')) == [
assert list(query.text_search(conn, 'coffee', table=table)) == [
{'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
{'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
{'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10},
]
# match any of the search terms
assert list(query.text_search(conn, 'bake coffee cake')) == [
assert list(query.text_search(conn, 'bake coffee cake', table=table)) == [
{'author': 'abc', 'id': 3, 'subject': 'Baking a cake', 'views': 90},
{'author': 'xyz', 'id': 1, 'subject': 'coffee', 'views': 50},
{'author': 'xyz', 'id': 4, 'subject': 'baking', 'views': 100},
@ -564,48 +567,48 @@ def test_text_search():
]
# search for a phrase
assert list(query.text_search(conn, '\"coffee shop\"')) == [
assert list(query.text_search(conn, '\"coffee shop\"', table=table)) == [
{'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
]
# exclude documents that contain a term
assert list(query.text_search(conn, 'coffee -shop')) == [
assert list(query.text_search(conn, 'coffee -shop', table=table)) == [
{'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
{'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10},
]
# search different language
assert list(query.text_search(conn, 'leche', language='es')) == [
assert list(query.text_search(conn, 'leche', language='es', table=table)) == [
{'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
{'id': 8, 'subject': 'Cafe con Leche', 'author': 'xyz', 'views': 10}
]
# case and diacritic insensitive search
assert list(query.text_search(conn, 'сы́рники CAFÉS')) == [
assert list(query.text_search(conn, 'сы́рники CAFÉS', table=table)) == [
{'id': 6, 'subject': 'Сырники', 'author': 'jkl', 'views': 80},
{'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
{'id': 8, 'subject': 'Cafe con Leche', 'author': 'xyz', 'views': 10}
]
# case sensitive search
assert list(query.text_search(conn, 'Coffee', case_sensitive=True)) == [
assert list(query.text_search(conn, 'Coffee', case_sensitive=True, table=table)) == [
{'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
]
# diacritic sensitive search
assert list(query.text_search(conn, 'CAFÉ', diacritic_sensitive=True)) == [
assert list(query.text_search(conn, 'CAFÉ', diacritic_sensitive=True, table=table)) == [
{'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
]
# return text score
assert list(query.text_search(conn, 'coffee', text_score=True)) == [
assert list(query.text_search(conn, 'coffee', text_score=True, table=table)) == [
{'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50, 'score': 1.0},
{'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5, 'score': 0.75},
{'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10, 'score': 0.75},
]
# limit search result
assert list(query.text_search(conn, 'coffee', limit=2)) == [
assert list(query.text_search(conn, 'coffee', limit=2, table=table)) == [
{'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
{'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
]

View File

@ -19,7 +19,7 @@ def test_init_creates_db_tables_and_indexes():
collection_names = conn.conn[dbname].collection_names()
assert sorted(collection_names) == ['assets', 'backlog', 'bigchain',
'votes']
'metadata', 'votes']
indexes = conn.conn[dbname]['bigchain'].index_information().keys()
assert sorted(indexes) == ['_id_', 'asset_id', 'block_id', 'block_timestamp',
@ -35,6 +35,9 @@ def test_init_creates_db_tables_and_indexes():
indexes = conn.conn[dbname]['assets'].index_information().keys()
assert sorted(indexes) == ['_id_', 'asset_id', 'text']
indexes = conn.conn[dbname]['metadata'].index_information().keys()
assert sorted(indexes) == ['_id_', 'text', 'transaction_id']
def test_init_database_fails_if_db_exists():
import bigchaindb
@ -67,7 +70,7 @@ def test_create_tables():
collection_names = conn.conn[dbname].collection_names()
assert sorted(collection_names) == ['assets', 'backlog', 'bigchain',
'votes']
'metadata', 'votes']
def test_create_secondary_indexes():

View File

@ -64,7 +64,8 @@ def test_create_tables():
assert conn.run(r.db(dbname).table_list().contains('backlog')) is True
assert conn.run(r.db(dbname).table_list().contains('votes')) is True
assert conn.run(r.db(dbname).table_list().contains('assets')) is True
assert len(conn.run(r.db(dbname).table_list())) == 4
assert conn.run(r.db(dbname).table_list().contains('metadata')) is True
assert len(conn.run(r.db(dbname).table_list())) == 5
@pytest.mark.bdb

View File

@ -40,6 +40,8 @@ def test_schema(schema_func_name, args_qty):
('get_spending_transactions', 1),
('write_assets', 1),
('get_assets', 1),
('write_metadata', 1),
('get_metadata', 1),
))
def test_query(query_func_name, args_qty):
from bigchaindb.backend import query

View File

@ -26,6 +26,7 @@ def test_double_create(b, user_pk):
assert count_blocks(b.connection) == 2
@pytest.mark.dspend
@pytest.mark.usefixtures('inputs')
def test_get_owned_ids_works_after_double_spend(b, user_pk, user_sk):
""" Test for #633 https://github.com/bigchaindb/bigchaindb/issues/633 """
@ -40,7 +41,7 @@ def test_get_owned_ids_works_after_double_spend(b, user_pk, user_sk):
# write the valid tx and wait for voting/block to catch up
b.write_transaction(tx_valid)
time.sleep(2)
time.sleep(5)
# doesn't throw an exception
b.get_owned_ids(user_pk)

View File

@ -29,6 +29,16 @@ def decouple_assets(b, block):
return block_dict
def decouple_metadata(b, block, block_dict):
    # The block coming from the database does not contain the metadata,
    # so strip it out of block_dict and persist it separately so that the
    # voting pipeline can reconstruct the block later.
    metadata, block_dict = block.decouple_metadata(block_dict)
    if metadata:
        b.write_metadata(metadata)
    return block_dict
DUMMY_SHA3 = '0123456789abcdef' * 4
@ -89,6 +99,7 @@ def test_vote_validate_block(b):
tx = dummy_tx(b)
block = b.create_block([tx])
block_dict = decouple_assets(b, block)
block_dict = decouple_metadata(b, block, block_dict)
vote_obj = vote.Vote()
validation = vote_obj.validate_block(block_dict)
@ -230,6 +241,7 @@ def test_valid_block_voting_multiprocessing(b, genesis_block, monkeypatch):
block = dummy_block(b)
block_dict = decouple_assets(b, block)
block_dict = decouple_metadata(b, block, block_dict)
inpipe.put(block_dict)
vote_pipeline.start()
@ -268,6 +280,7 @@ def test_valid_block_voting_with_create_transaction(b,
monkeypatch.setattr('time.time', lambda: 1111111111)
block = b.create_block([tx])
block_dict = decouple_assets(b, block)
block_dict = decouple_metadata(b, block, block_dict)
inpipe = Pipe()
outpipe = Pipe()

View File

@ -33,6 +33,7 @@ def flush_rethink_db(connection, dbname):
connection.run(r.db(dbname).table('backlog').delete())
connection.run(r.db(dbname).table('votes').delete())
connection.run(r.db(dbname).table('assets').delete())
connection.run(r.db(dbname).table('metadata').delete())
except r.ReqlOpFailedError:
pass
@ -43,6 +44,7 @@ def flush_mongo_db(connection, dbname):
connection.conn[dbname].backlog.delete_many({})
connection.conn[dbname].votes.delete_many({})
connection.conn[dbname].assets.delete_many({})
connection.conn[dbname].metadata.delete_many({})
@singledispatch

View File

@ -0,0 +1,87 @@
import pytest
METADATA_ENDPOINT = '/api/v1/metadata/'
def test_get_metadata_with_empty_text_search(client):
    # an empty `search` value is rejected with a descriptive 400
    res = client.get(METADATA_ENDPOINT + '?search=')
    assert res.status_code == 400
    assert res.json == {'status': 400,
                        'message': 'text_search cannot be empty'}
def test_get_metadata_with_missing_text_search(client):
    # `search` is a required argument, so omitting it is a client error
    res = client.get(METADATA_ENDPOINT)
    assert res.status_code == 400
@pytest.mark.genesis
def test_get_metadata(client, b):
    from bigchaindb.models import Transaction
    from bigchaindb.backend.mongodb.connection import MongoDBConnection

    if isinstance(b.connection, MongoDBConnection):
        # no matching metadata yet -> empty list
        res = client.get(METADATA_ENDPOINT + '?search=abc')
        assert res.status_code == 200
        assert res.json == []

        # create a transaction carrying some metadata
        tx = Transaction.create([b.me], [([b.me], 1)],
                                metadata={'key': 'my_meta'},
                                asset={'msg': 'abc'}).sign([b.me_private])

        # put it in a block and vote the block valid
        block = b.create_block([tx])
        b.write_block(block)
        b.write_vote(b.vote(block.id, b.get_last_voted_block().id, True))

        # the metadata is now returned by the text search
        res = client.get(METADATA_ENDPOINT + '?search=my_meta')
        assert res.status_code == 200
        assert len(res.json) == 1
        assert res.json[0] == {'metadata': {'key': 'my_meta'},
                               'id': tx.id}
    else:
        # backends other than MongoDB do not support text search
        res = client.get(METADATA_ENDPOINT + '?search=my_meta')
        assert res.status_code == 400
        assert res.json['message'].startswith('(OperationError)')
@pytest.mark.genesis
def test_get_metadata_limit(client, b):
    from bigchaindb.models import Transaction
    from bigchaindb.backend.mongodb.connection import MongoDBConnection

    # text search is only supported on the MongoDB backend
    if not isinstance(b.connection, MongoDBConnection):
        return

    # create two transactions whose metadata both contain the word `meta`
    tx1 = Transaction.create([b.me], [([b.me], 1)],
                             metadata={'key': 'meta 1'},
                             asset={'msg': 'abc 1'}).sign([b.me_private])
    tx2 = Transaction.create([b.me], [([b.me], 1)],
                             metadata={'key': 'meta 2'},
                             asset={'msg': 'abc 2'}).sign([b.me_private])

    # put both in a block and vote the block valid
    block = b.create_block([tx1, tx2])
    b.write_block(block)
    b.write_vote(b.vote(block.id, b.get_last_voted_block().id, True))

    # without a limit, both metadata objects are returned
    res = client.get(METADATA_ENDPOINT + '?search=meta')
    assert res.status_code == 200
    assert len(res.json) == 2

    # with limit=1, only one metadata object is returned
    res = client.get(METADATA_ENDPOINT + '?search=meta&limit=1')
    assert res.status_code == 200
    assert len(res.json) == 1