diff --git a/.idea/misc.xml b/.idea/misc.xml
index 3c29c38..aedccf6 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -1,4 +1,4 @@
-
+
\ No newline at end of file
diff --git a/.idea/planetmint.iml b/.idea/planetmint.iml
index acb3bb6..c8ca155 100644
--- a/.idea/planetmint.iml
+++ b/.idea/planetmint.iml
@@ -1,8 +1,10 @@
-
-
+
+
+
+
diff --git a/planetmint/__init__.py b/planetmint/__init__.py
index e211654..11e5051 100644
--- a/planetmint/__init__.py
+++ b/planetmint/__init__.py
@@ -21,34 +21,29 @@ from planetmint.core import App # noqa
# _base_database_localmongodb.keys() because dicts are unordered.
# I tried to configure
-_database_keys_map = {
- 'localmongodb': ('host', 'port', 'name'),
+_database_keys_map = {  # TODO: verify this still works after removing the 'name' field
+ 'tarantool_db': ('host', 'port'),
}
-_base_database_localmongodb = {
+_base_database_tarantool_local_db = {  # TODO: revise these defaults for Tarantool usage
'host': 'localhost',
- 'port': 27017,
- 'name': 'bigchain',
- 'replicaset': None,
- 'login': None,
+ 'port': 3301,
+ 'username': None,
'password': None,
+ "connect_now": True,
+ "encoding": "utf-8"
}
-_database_localmongodb = {
- 'backend': 'localmongodb',
+_database_tarantool = {
+ 'backend': 'tarantool_db',
'connection_timeout': 5000,
'max_tries': 3,
- 'ssl': False,
- 'ca_cert': None,
- 'certfile': None,
- 'keyfile': None,
- 'keyfile_passphrase': None,
- 'crlfile': None,
+ "reconnect_delay": 0.5
}
-_database_localmongodb.update(_base_database_localmongodb)
+_database_tarantool.update(_base_database_tarantool_local_db)
_database_map = {
- 'localmongodb': _database_localmongodb,
+ 'tarantool_db': _database_tarantool,
}
config = {
@@ -73,8 +68,8 @@ config = {
'port': 26657,
'version': 'v0.31.5', # look for __tm_supported_versions__
},
- # FIXME: hardcoding to localmongodb for now
- 'database': _database_map['localmongodb'],
+ # TODO: review whether the hardcoded Tarantool defaults should stay here
+ 'database': _database_map['tarantool_db'],
'log': {
'file': log_config['handlers']['file']['filename'],
'error_file': log_config['handlers']['errors']['filename'],
@@ -93,7 +88,7 @@ config = {
# We need to maintain a backup copy of the original config dict in case
# the user wants to reconfigure the node. Check ``planetmint.config_utils``
# for more info.
-_config = copy.deepcopy(config)
+_config = copy.deepcopy(config)  # TODO: decide what to do with the imports below
from planetmint.common.transaction import Transaction # noqa
from planetmint import models # noqa
from planetmint.upsert_validator import ValidatorElection # noqa
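
Reviewer note: for reference, once this module is imported (and before any config file or environment overrides are applied), the merged defaults above mean that config['database'] resolves to the Tarantool settings. A minimal sketch, using only values that appear in this diff:

    import planetmint

    db = planetmint.config['database']
    assert db['backend'] == 'tarantool_db'
    assert (db['host'], db['port']) == ('localhost', 3301)
    # driver-oriented options carried over from _base_database_tarantool_local_db
    assert db['connect_now'] is True and db['encoding'] == 'utf-8'
    assert db['reconnect_delay'] == 0.5
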
diff --git a/planetmint/backend/__init__.py b/planetmint/backend/__init__.py
index db1e2ac..b1c15e2 100644
--- a/planetmint/backend/__init__.py
+++ b/planetmint/backend/__init__.py
@@ -14,4 +14,4 @@ configuration or the ``PLANETMINT_DATABASE_BACKEND`` environment variable.
# Include the backend interfaces
from planetmint.backend import schema, query # noqa
-from planetmint.backend.connection import connect # noqa
+from planetmint.backend.connection_tarantool import connect # noqa
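
Reviewer note: because the rest of the code base (including the tests later in this diff) obtains its connection through this package-level re-export, this one-line swap is what routes every "from planetmint.backend import connect" caller to the Tarantool factory. Illustrative usage, assuming the keyword defaults discussed below so that a bare call falls back to the configuration:

    from planetmint.backend import connect  # now re-exports connection_tarantool.connect

    conn = connect()  # host/port/credentials resolved from planetmint.config['database']
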
diff --git a/planetmint/backend/connection.py b/planetmint/backend/connection.py
index d92204b..7e76b03 100644
--- a/planetmint/backend/connection.py
+++ b/planetmint/backend/connection.py
@@ -12,7 +12,7 @@ from planetmint.backend.exceptions import ConnectionError
from planetmint.backend.utils import get_planetmint_config_value, get_planetmint_config_value_or_key_error
from planetmint.common.exceptions import ConfigurationError
-BACKENDS = {
+BACKENDS = {  # maps backend names to the dotted path of their connection class
'localmongodb': 'planetmint.backend.localmongodb.connection.LocalMongoDBConnection',
}
@@ -71,7 +71,7 @@ def connect(backend=None, host=None, port=None, name=None, max_tries=None,
keyfile_passphrase = keyfile_passphrase or get_planetmint_config_value('keyfile_passphrase', None)
crlfile = crlfile or get_planetmint_config_value('crlfile')
- try:
+ try:  # resolve the backend class dynamically via import_module and getattr
module_name, _, class_name = BACKENDS[backend].rpartition('.')
Class = getattr(import_module(module_name), class_name)
except KeyError:
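
Reviewer note: the comment added above refers to the dynamic class loading that both connect() implementations share: the dotted path stored in BACKENDS is split with rpartition, the module is imported, and the class is pulled off it with getattr. A standalone sketch of the mechanism, using a stdlib path rather than a Planetmint one:

    from importlib import import_module

    def load_class(dotted_path):
        """Resolve 'package.module.ClassName' to the class object."""
        module_name, _, class_name = dotted_path.rpartition('.')
        return getattr(import_module(module_name), class_name)

    # e.g. load_class('collections.OrderedDict') returns collections.OrderedDict
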
diff --git a/planetmint/backend/connection_tarantool.py b/planetmint/backend/connection_tarantool.py
new file mode 100644
index 0000000..d939ec0
--- /dev/null
+++ b/planetmint/backend/connection_tarantool.py
@@ -0,0 +1,145 @@
+# Copyright © 2020 Interplanetary Database Association e.V.,
+# Planetmint and IPDB software contributors.
+# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
+# Code is Apache-2.0 and docs are CC-BY-4.0
+
+import logging
+from importlib import import_module
+from itertools import repeat
+
+import planetmint
+from planetmint.backend.exceptions import ConnectionError
+from planetmint.backend.utils import get_planetmint_config_value, get_planetmint_config_value_or_key_error
+from planetmint.common.exceptions import ConfigurationError
+
+BACKENDS = {  # maps backend names to the dotted path of their connection class
+ 'tarantool_db': 'planetmint.backend.tarantool.connection_tarantool.TarantoolDB',
+}
+
+logger = logging.getLogger(__name__)
+
+
+def connect(host: str = None, port: int = None, username: str = None, password: str = None, backend: str = None):
+ """Create a new connection to the database backend.
+
+ All arguments default to the current configuration's values if not
+ given.
+
+ Args:
+ backend (str): the name of the backend to use.
+ host (str): the host to connect to.
+ port (int): the port to connect to.
+ username (str): the user to authenticate as, if any.
+ password (str): the password for that user, if any.
+
+ Returns:
+ An instance of :class:`~planetmint.backend.connection.Connection`
+ based on the given (or defaulted) :attr:`backend`.
+
+ Raises:
+ :exc:`~ConnectionError`: If the connection to the database fails.
+ :exc:`~ConfigurationError`: If the given (or defaulted) :attr:`backend`
+ is not supported or could not be loaded.
+ :exc:`~AuthenticationError`: If there is an OperationFailure due to
+ an authentication failure after connecting to the database.
+ """
+
+ backend = backend or get_planetmint_config_value_or_key_error('backend')  # TODO: rewrite config handling for Tarantool
+ host = host or get_planetmint_config_value_or_key_error('host')
+ port = port or get_planetmint_config_value_or_key_error('port')
+ username = username or get_planetmint_config_value('username')  # the Tarantool config stores 'username', not 'login'
+ password = password or get_planetmint_config_value('password')
+
+ try:  # resolve the backend class dynamically via import_module and getattr
+ module_name, _, class_name = BACKENDS[backend].rpartition('.')
+ Class = getattr(import_module(module_name), class_name)
+ except KeyError:
+ raise ConfigurationError('Backend `{}` is not supported. '
+ 'Planetmint currently supports {}'.format(backend, BACKENDS.keys()))
+ except (ImportError, AttributeError) as exc:
+ raise ConfigurationError('Error loading backend `{}`'.format(backend)) from exc
+
+ logger.debug('Connection: {}'.format(Class))
+ return Class(host=host, port=port, username=username, password=password)
+
+
+class Connection:
+ """Connection class interface.
+
+ All backend implementations should provide a connection class that inherits
+ from and implements this class.
+ """
+
+ def __init__(self, host=None, port=None, dbname=None,
+ connection_timeout=None, max_tries=None,
+ **kwargs):
+ """Create a new :class:`~.Connection` instance.
+
+ Args:
+ host (str): the host to connect to.
+ port (int): the port to connect to.
+ dbname (str): the name of the database to use.
+ connection_timeout (int, optional): the milliseconds to wait
+ until timing out the database connection attempt.
+ Defaults to 5000ms.
+ max_tries (int, optional): how many tries before giving up,
+ if 0 then try forever. Defaults to 3.
+ **kwargs: arbitrary keyword arguments provided by the
+ configuration's ``database`` settings
+ """
+
+ dbconf = planetmint.config['database']
+
+ self.host = host or dbconf['host']
+ self.port = port or dbconf['port']
+ self.dbname = dbname or dbconf.get('name')  # 'name' was dropped from the Tarantool defaults, so this may be None
+ self.connection_timeout = connection_timeout if connection_timeout is not None \
+ else dbconf['connection_timeout']
+ self.max_tries = max_tries if max_tries is not None else dbconf['max_tries']
+ self.max_tries_counter = range(self.max_tries) if self.max_tries != 0 else repeat(0)
+ self._conn = None
+
+ @property
+ def conn(self):
+ if self._conn is None:
+ self.connect()
+ return self._conn
+
+ def run(self, query):
+ """Run a query.
+
+ Args:
+ query: the query to run
+ Raises:
+ :exc:`~DuplicateKeyError`: If the query fails because of a
+ duplicate key constraint.
+ :exc:`~OperationFailure`: If the query fails for any other
+ reason.
+ :exc:`~ConnectionError`: If the connection to the database
+ fails.
+ """
+
+ raise NotImplementedError()
+
+ def connect(self):
+ """Try to connect to the database.
+
+ Raises:
+ :exc:`~ConnectionError`: If the connection to the database
+ fails.
+ """
+
+ attempt = 0
+ for i in self.max_tries_counter:
+ attempt += 1
+ try:
+ self._conn = self._connect()
+ except ConnectionError as exc:
+ logger.warning('Attempt %s/%s. Connection to %s:%s failed after %sms.',
+ attempt, self.max_tries if self.max_tries != 0 else '∞',
+ self.host, self.port, self.connection_timeout)
+ if attempt == self.max_tries:
+ logger.critical('Cannot connect to the Database. Giving up.')
+ raise ConnectionError() from exc
+ else:
+ break
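
Reviewer note: the TarantoolDB class that BACKENDS points to is not part of this diff, so the following is only a sketch of how it might plug into the Connection base class above, assuming the tarantool-python driver; the connect_now, encoding, and reconnect_delay keys added to the defaults map onto that driver's Connection parameters. The class and module names come from the BACKENDS entry, everything else here is an assumption:

    import tarantool  # assumed dependency: the tarantool-python driver

    class TarantoolDB(Connection):
        # Illustrative only; the real implementation would live in
        # planetmint/backend/tarantool/connection_tarantool.py and reuse the
        # ConnectionError imported at the top of this module.

        def __init__(self, host=None, port=None, username=None, password=None, **kwargs):
            super().__init__(host=host, port=port, **kwargs)
            self.username = username  # the base Connection does not store credentials
            self.password = password

        def _connect(self):
            try:
                # connect_now/encoding mirror the new config keys introduced above
                return tarantool.Connection(self.host, self.port,
                                            user=self.username,
                                            password=self.password,
                                            connect_now=True,
                                            encoding='utf-8')
            except tarantool.error.NetworkError as exc:
                raise ConnectionError() from exc
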
diff --git a/tests/backend/localmongodb/test_queries.py b/tests/backend/localmongodb/test_queries.py
index ec86410..4a48e02 100644
--- a/tests/backend/localmongodb/test_queries.py
+++ b/tests/backend/localmongodb/test_queries.py
@@ -6,7 +6,7 @@
from copy import deepcopy
import pytest
-import pymongo
+# import pymongo
from planetmint.backend import connect, query
@@ -17,26 +17,26 @@ pytestmark = pytest.mark.bdb
def test_get_txids_filtered(signed_create_tx, signed_transfer_tx):
from planetmint.backend import connect, query
from planetmint.models import Transaction
- conn = connect()
-
- # create and insert two blocks, one for the create and one for the
- # transfer transaction
- conn.db.transactions.insert_one(signed_create_tx.to_dict())
- conn.db.transactions.insert_one(signed_transfer_tx.to_dict())
-
- asset_id = Transaction.get_asset_id([signed_create_tx, signed_transfer_tx])
-
- # Test get by just asset id
- txids = set(query.get_txids_filtered(conn, asset_id))
- assert txids == {signed_create_tx.id, signed_transfer_tx.id}
-
- # Test get by asset and CREATE
- txids = set(query.get_txids_filtered(conn, asset_id, Transaction.CREATE))
- assert txids == {signed_create_tx.id}
-
- # Test get by asset and TRANSFER
- txids = set(query.get_txids_filtered(conn, asset_id, Transaction.TRANSFER))
- assert txids == {signed_transfer_tx.id}
+ conn = connect()  # TODO: first rewrite connect() so that a Tarantool connection is returned here
+ print(conn)
+ # # create and insert two blocks, one for the create and one for the
+ # # transfer transaction
+ # conn.db.transactions.insert_one(signed_create_tx.to_dict())
+ # conn.db.transactions.insert_one(signed_transfer_tx.to_dict())
+ #
+ # asset_id = Transaction.get_asset_id([signed_create_tx, signed_transfer_tx])
+ #
+ # # Test get by just asset id
+ # txids = set(query.get_txids_filtered(conn, asset_id))
+ # assert txids == {signed_create_tx.id, signed_transfer_tx.id}
+ #
+ # # Test get by asset and CREATE
+ # txids = set(query.get_txids_filtered(conn, asset_id, Transaction.CREATE))
+ # assert txids == {signed_create_tx.id}
+ #
+ # # Test get by asset and TRANSFER
+ # txids = set(query.get_txids_filtered(conn, asset_id, Transaction.TRANSFER))
+ # assert txids == {signed_transfer_tx.id}
def test_write_assets():
@@ -482,3 +482,5 @@ def test_store_abci_chain(description, stores, expected):
actual = query.get_latest_abci_chain(conn)
assert expected == actual, description
+
+test_get_txids_filtered(None, None)  # TODO: temporary manual invocation; remove once the test runs under pytest again
diff --git a/tests/backend/tarantool/__init__.py b/tests/backend/tarantool/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/backend/tarantool/test_queries.py b/tests/backend/tarantool/test_queries.py
new file mode 100644
index 0000000..2831243
--- /dev/null
+++ b/tests/backend/tarantool/test_queries.py
@@ -0,0 +1,486 @@
+# Copyright © 2020 Interplanetary Database Association e.V.,
+# Planetmint and IPDB software contributors.
+# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
+# Code is Apache-2.0 and docs are CC-BY-4.0
+
+from copy import deepcopy
+
+import pytest
+import pymongo  # TODO: still needed by the copied MongoDB assertions below; drop once they are ported to Tarantool
+
+from planetmint.backend import connect, query
+
+pytestmark = pytest.mark.bdb
+
+conn = connect()  # TODO: temporary module-level smoke check; remove once fixtures provide the connection
+print(conn)
+
+
+def test_get_txids_filtered(signed_create_tx, signed_transfer_tx):
+ from planetmint.backend import connect, query
+ from planetmint.models import Transaction
+ conn = connect()  # TODO: first rewrite connect() so that a Tarantool connection is returned here
+ print(conn)
+ # # create and insert two blocks, one for the create and one for the
+ # # transfer transaction
+ # conn.db.transactions.insert_one(signed_create_tx.to_dict())
+ # conn.db.transactions.insert_one(signed_transfer_tx.to_dict())
+ #
+ # asset_id = Transaction.get_asset_id([signed_create_tx, signed_transfer_tx])
+ #
+ # # Test get by just asset id
+ # txids = set(query.get_txids_filtered(conn, asset_id))
+ # assert txids == {signed_create_tx.id, signed_transfer_tx.id}
+ #
+ # # Test get by asset and CREATE
+ # txids = set(query.get_txids_filtered(conn, asset_id, Transaction.CREATE))
+ # assert txids == {signed_create_tx.id}
+ #
+ # # Test get by asset and TRANSFER
+ # txids = set(query.get_txids_filtered(conn, asset_id, Transaction.TRANSFER))
+ # assert txids == {signed_transfer_tx.id}
+
+
+def test_write_assets():
+ from planetmint.backend import connect, query
+ conn = connect()
+
+ assets = [
+ {'id': 1, 'data': '1'},
+ {'id': 2, 'data': '2'},
+ {'id': 3, 'data': '3'},
+ # Duplicated id. Should not be written to the database
+ {'id': 1, 'data': '1'},
+ ]
+
+ # write the assets
+ for asset in assets:
+ query.store_asset(conn, deepcopy(asset))
+
+ # check that 3 assets were written to the database
+ cursor = conn.db.assets.find({}, projection={'_id': False}) \
+ .sort('id', pymongo.ASCENDING)
+
+ assert cursor.collection.count_documents({}) == 3
+ assert list(cursor) == assets[:-1]
+
+
+def test_get_assets():
+ from planetmint.backend import connect, query
+ conn = connect()
+
+ assets = [
+ {'id': 1, 'data': '1'},
+ {'id': 2, 'data': '2'},
+ {'id': 3, 'data': '3'},
+ ]
+
+ conn.db.assets.insert_many(deepcopy(assets), ordered=False)
+
+ for asset in assets:
+ assert query.get_asset(conn, asset['id'])
+
+
+@pytest.mark.parametrize('table', ['assets', 'metadata'])
+def test_text_search(table):
+ from planetmint.backend import connect, query
+ conn = connect()
+
+ # Example data and tests cases taken from the mongodb documentation
+ # https://docs.mongodb.com/manual/reference/operator/query/text/
+ objects = [
+ {'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
+ {'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
+ {'id': 3, 'subject': 'Baking a cake', 'author': 'abc', 'views': 90},
+ {'id': 4, 'subject': 'baking', 'author': 'xyz', 'views': 100},
+ {'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
+ {'id': 6, 'subject': 'Сырники', 'author': 'jkl', 'views': 80},
+ {'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10},
+ {'id': 8, 'subject': 'Cafe con Leche', 'author': 'xyz', 'views': 10}
+ ]
+
+ # insert the assets
+ conn.db[table].insert_many(deepcopy(objects), ordered=False)
+
+ # test search single word
+ assert list(query.text_search(conn, 'coffee', table=table)) == [
+ {'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
+ {'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
+ {'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10},
+ ]
+
+ # match any of the search terms
+ assert list(query.text_search(conn, 'bake coffee cake', table=table)) == [
+ {'author': 'abc', 'id': 3, 'subject': 'Baking a cake', 'views': 90},
+ {'author': 'xyz', 'id': 1, 'subject': 'coffee', 'views': 50},
+ {'author': 'xyz', 'id': 4, 'subject': 'baking', 'views': 100},
+ {'author': 'efg', 'id': 2, 'subject': 'Coffee Shopping', 'views': 5},
+ {'author': 'efg', 'id': 7, 'subject': 'coffee and cream', 'views': 10}
+ ]
+
+ # search for a phrase
+ assert list(query.text_search(conn, '\"coffee shop\"', table=table)) == [
+ {'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
+ ]
+
+ # exclude documents that contain a term
+ assert list(query.text_search(conn, 'coffee -shop', table=table)) == [
+ {'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
+ {'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10},
+ ]
+
+ # search different language
+ assert list(query.text_search(conn, 'leche', language='es', table=table)) == [
+ {'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
+ {'id': 8, 'subject': 'Cafe con Leche', 'author': 'xyz', 'views': 10}
+ ]
+
+ # case and diacritic insensitive search
+ assert list(query.text_search(conn, 'сы́рники CAFÉS', table=table)) == [
+ {'id': 6, 'subject': 'Сырники', 'author': 'jkl', 'views': 80},
+ {'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
+ {'id': 8, 'subject': 'Cafe con Leche', 'author': 'xyz', 'views': 10}
+ ]
+
+ # case sensitive search
+ assert list(query.text_search(conn, 'Coffee', case_sensitive=True, table=table)) == [
+ {'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
+ ]
+
+ # diacritic sensitive search
+ assert list(query.text_search(conn, 'CAFÉ', diacritic_sensitive=True, table=table)) == [
+ {'id': 5, 'subject': 'Café Con Leche', 'author': 'abc', 'views': 200},
+ ]
+
+ # return text score
+ assert list(query.text_search(conn, 'coffee', text_score=True, table=table)) == [
+ {'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50, 'score': 1.0},
+ {'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5, 'score': 0.75},
+ {'id': 7, 'subject': 'coffee and cream', 'author': 'efg', 'views': 10, 'score': 0.75},
+ ]
+
+ # limit search result
+ assert list(query.text_search(conn, 'coffee', limit=2, table=table)) == [
+ {'id': 1, 'subject': 'coffee', 'author': 'xyz', 'views': 50},
+ {'id': 2, 'subject': 'Coffee Shopping', 'author': 'efg', 'views': 5},
+ ]
+
+
+def test_write_metadata():
+ from planetmint.backend import connect, query
+ conn = connect()
+
+ metadata = [
+ {'id': 1, 'data': '1'},
+ {'id': 2, 'data': '2'},
+ {'id': 3, 'data': '3'}
+ ]
+
+ # write the assets
+ query.store_metadatas(conn, deepcopy(metadata))
+
+ # check that 3 assets were written to the database
+ cursor = conn.db.metadata.find({}, projection={'_id': False}) \
+ .sort('id', pymongo.ASCENDING)
+
+ assert cursor.collection.count_documents({}) == 3
+ assert list(cursor) == metadata
+
+
+def test_get_metadata():
+ from planetmint.backend import connect, query
+ conn = connect()
+
+ metadata = [
+ {'id': 1, 'metadata': None},
+ {'id': 2, 'metadata': {'key': 'value'}},
+ {'id': 3, 'metadata': '3'},
+ ]
+
+ conn.db.metadata.insert_many(deepcopy(metadata), ordered=False)
+
+ for meta in metadata:
+ assert query.get_metadata(conn, [meta['id']])
+
+
+def test_get_owned_ids(signed_create_tx, user_pk):
+ from planetmint.backend import connect, query
+ conn = connect()
+
+ # insert a transaction
+ conn.db.transactions.insert_one(deepcopy(signed_create_tx.to_dict()))
+
+ txns = list(query.get_owned_ids(conn, user_pk))
+
+ assert txns[0] == signed_create_tx.to_dict()
+
+
+def test_get_spending_transactions(user_pk, user_sk):
+ from planetmint.backend import connect, query
+ from planetmint.models import Transaction
+ conn = connect()
+
+ out = [([user_pk], 1)]
+ tx1 = Transaction.create([user_pk], out * 3)
+ tx1.sign([user_sk])
+ inputs = tx1.to_inputs()
+ tx2 = Transaction.transfer([inputs[0]], out, tx1.id).sign([user_sk])
+ tx3 = Transaction.transfer([inputs[1]], out, tx1.id).sign([user_sk])
+ tx4 = Transaction.transfer([inputs[2]], out, tx1.id).sign([user_sk])
+ txns = [deepcopy(tx.to_dict()) for tx in [tx1, tx2, tx3, tx4]]
+ conn.db.transactions.insert_many(txns)
+
+ links = [inputs[0].fulfills.to_dict(), inputs[2].fulfills.to_dict()]
+ txns = list(query.get_spending_transactions(conn, links))
+
+ # tx3 not a member because input 1 not asked for
+ assert txns == [tx2.to_dict(), tx4.to_dict()]
+
+
+def test_get_spending_transactions_multiple_inputs():
+ from planetmint.backend import connect, query
+ from planetmint.models import Transaction
+ from planetmint.common.crypto import generate_key_pair
+ conn = connect()
+ (alice_sk, alice_pk) = generate_key_pair()
+ (bob_sk, bob_pk) = generate_key_pair()
+ (carol_sk, carol_pk) = generate_key_pair()
+
+ out = [([alice_pk], 9)]
+ tx1 = Transaction.create([alice_pk], out).sign([alice_sk])
+
+ inputs1 = tx1.to_inputs()
+ tx2 = Transaction.transfer([inputs1[0]],
+ [([alice_pk], 6), ([bob_pk], 3)],
+ tx1.id).sign([alice_sk])
+
+ inputs2 = tx2.to_inputs()
+ tx3 = Transaction.transfer([inputs2[0]],
+ [([bob_pk], 3), ([carol_pk], 3)],
+ tx1.id).sign([alice_sk])
+
+ inputs3 = tx3.to_inputs()
+ tx4 = Transaction.transfer([inputs2[1], inputs3[0]],
+ [([carol_pk], 6)],
+ tx1.id).sign([bob_sk])
+
+ txns = [deepcopy(tx.to_dict()) for tx in [tx1, tx2, tx3, tx4]]
+ conn.db.transactions.insert_many(txns)
+
+ links = [
+ ({'transaction_id': tx2.id, 'output_index': 0}, 1, [tx3.id]),
+ ({'transaction_id': tx2.id, 'output_index': 1}, 1, [tx4.id]),
+ ({'transaction_id': tx3.id, 'output_index': 0}, 1, [tx4.id]),
+ ({'transaction_id': tx3.id, 'output_index': 1}, 0, None),
+ ]
+ for li, num, match in links:
+ txns = list(query.get_spending_transactions(conn, [li]))
+ assert len(txns) == num
+ if len(txns):
+ assert [tx['id'] for tx in txns] == match
+
+
+def test_store_block():
+ from planetmint.backend import connect, query
+ from planetmint.lib import Block
+ conn = connect()
+
+ block = Block(app_hash='random_utxo',
+ height=3,
+ transactions=[])
+ query.store_block(conn, block._asdict())
+ cursor = conn.db.blocks.find({}, projection={'_id': False})
+ assert cursor.collection.count_documents({}) == 1
+
+
+def test_get_block():
+ from planetmint.backend import connect, query
+ from planetmint.lib import Block
+ conn = connect()
+
+ block = Block(app_hash='random_utxo',
+ height=3,
+ transactions=[])
+
+ conn.db.blocks.insert_one(block._asdict())
+
+ block = dict(query.get_block(conn, 3))
+ assert block['height'] == 3
+
+
+def test_delete_zero_unspent_outputs(db_context, utxoset):
+ from planetmint.backend import query
+ unspent_outputs, utxo_collection = utxoset
+ delete_res = query.delete_unspent_outputs(db_context.conn)
+ assert delete_res is None
+ assert utxo_collection.count_documents({}) == 3
+ assert utxo_collection.count_documents(
+ {'$or': [
+ {'transaction_id': 'a', 'output_index': 0},
+ {'transaction_id': 'b', 'output_index': 0},
+ {'transaction_id': 'a', 'output_index': 1},
+ ]}
+ ) == 3
+
+
+def test_delete_one_unspent_outputs(db_context, utxoset):
+ from planetmint.backend import query
+ unspent_outputs, utxo_collection = utxoset
+ delete_res = query.delete_unspent_outputs(db_context.conn,
+ unspent_outputs[0])
+ assert delete_res.raw_result['n'] == 1
+ assert utxo_collection.count_documents(
+ {'$or': [
+ {'transaction_id': 'a', 'output_index': 1},
+ {'transaction_id': 'b', 'output_index': 0},
+ ]}
+ ) == 2
+ assert utxo_collection.count_documents(
+ {'transaction_id': 'a', 'output_index': 0}) == 0
+
+
+def test_delete_many_unspent_outputs(db_context, utxoset):
+ from planetmint.backend import query
+ unspent_outputs, utxo_collection = utxoset
+ delete_res = query.delete_unspent_outputs(db_context.conn,
+ *unspent_outputs[::2])
+ assert delete_res.raw_result['n'] == 2
+ assert utxo_collection.count_documents(
+ {'$or': [
+ {'transaction_id': 'a', 'output_index': 0},
+ {'transaction_id': 'b', 'output_index': 0},
+ ]}
+ ) == 0
+ assert utxo_collection.count_documents(
+ {'transaction_id': 'a', 'output_index': 1}) == 1
+
+
+def test_store_zero_unspent_output(db_context, utxo_collection):
+ from planetmint.backend import query
+ res = query.store_unspent_outputs(db_context.conn)
+ assert res is None
+ assert utxo_collection.count_documents({}) == 0
+
+
+def test_store_one_unspent_output(db_context,
+ unspent_output_1, utxo_collection):
+ from planetmint.backend import query
+ res = query.store_unspent_outputs(db_context.conn, unspent_output_1)
+ assert res.acknowledged
+ assert len(res.inserted_ids) == 1
+ assert utxo_collection.count_documents(
+ {'transaction_id': unspent_output_1['transaction_id'],
+ 'output_index': unspent_output_1['output_index']}
+ ) == 1
+
+
+def test_store_many_unspent_outputs(db_context,
+ unspent_outputs, utxo_collection):
+ from planetmint.backend import query
+ res = query.store_unspent_outputs(db_context.conn, *unspent_outputs)
+ assert res.acknowledged
+ assert len(res.inserted_ids) == 3
+ assert utxo_collection.count_documents(
+ {'transaction_id': unspent_outputs[0]['transaction_id']}
+ ) == 3
+
+
+def test_get_unspent_outputs(db_context, utxoset):
+ from planetmint.backend import query
+ cursor = query.get_unspent_outputs(db_context.conn)
+ assert cursor.collection.count_documents({}) == 3
+ retrieved_utxoset = list(cursor)
+ unspent_outputs, utxo_collection = utxoset
+ assert retrieved_utxoset == list(
+ utxo_collection.find(projection={'_id': False}))
+ assert retrieved_utxoset == unspent_outputs
+
+
+def test_store_pre_commit_state(db_context):
+ from planetmint.backend import query
+
+ state = dict(height=3, transactions=[])
+
+ query.store_pre_commit_state(db_context.conn, state)
+ cursor = db_context.conn.db.pre_commit.find({'commit_id': 'test'},
+ projection={'_id': False})
+ assert cursor.collection.count_documents({}) == 1
+
+
+def test_get_pre_commit_state(db_context):
+ from planetmint.backend import query
+
+ state = dict(height=3, transactions=[])
+ db_context.conn.db.pre_commit.insert_one(state)
+ resp = query.get_pre_commit_state(db_context.conn)
+ assert resp == state
+
+
+def test_validator_update():
+ from planetmint.backend import connect, query
+
+ conn = connect()
+
+ def gen_validator_update(height):
+ return {'data': 'somedata', 'height': height, 'election_id': f'election_id_at_height_{height}'}
+
+ for i in range(1, 100, 10):
+ value = gen_validator_update(i)
+ query.store_validator_set(conn, value)
+
+ v1 = query.get_validator_set(conn, 8)
+ assert v1['height'] == 1
+
+ v41 = query.get_validator_set(conn, 50)
+ assert v41['height'] == 41
+
+ v91 = query.get_validator_set(conn)
+ assert v91['height'] == 91
+
+
+@pytest.mark.parametrize('description,stores,expected', [
+ (
+ 'Query empty database.',
+ [],
+ None,
+ ),
+ (
+ 'Store one chain with the default value for `is_synced`.',
+ [
+ {'height': 0, 'chain_id': 'some-id'},
+ ],
+ {'height': 0, 'chain_id': 'some-id', 'is_synced': True},
+ ),
+ (
+ 'Store one chain with a custom value for `is_synced`.',
+ [
+ {'height': 0, 'chain_id': 'some-id', 'is_synced': False},
+ ],
+ {'height': 0, 'chain_id': 'some-id', 'is_synced': False},
+ ),
+ (
+ 'Store one chain, then update it.',
+ [
+ {'height': 0, 'chain_id': 'some-id', 'is_synced': True},
+ {'height': 0, 'chain_id': 'new-id', 'is_synced': False},
+ ],
+ {'height': 0, 'chain_id': 'new-id', 'is_synced': False},
+ ),
+ (
+ 'Store a chain, update it, store another chain.',
+ [
+ {'height': 0, 'chain_id': 'some-id', 'is_synced': True},
+ {'height': 0, 'chain_id': 'some-id', 'is_synced': False},
+ {'height': 10, 'chain_id': 'another-id', 'is_synced': True},
+ ],
+ {'height': 10, 'chain_id': 'another-id', 'is_synced': True},
+ ),
+])
+def test_store_abci_chain(description, stores, expected):
+ conn = connect()
+
+ for store in stores:
+ query.store_abci_chain(conn, **store)
+
+ actual = query.get_latest_abci_chain(conn)
+ assert expected == actual, description
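
Reviewer note: most of the test bodies in this new file are still the MongoDB versions: they reach into conn.db.<collection> and rely on pymongo cursors, so they will not pass against a Tarantool backend as written. As a rough illustration of the direction the port could take, one of the simpler tests could be expressed purely through the backend-agnostic query interface (store_asset/get_asset are already used above; everything else is an assumption, not part of this diff):

    def test_write_and_get_assets_tarantool():
        from planetmint.backend import connect, query
        conn = connect()

        assets = [
            {'id': 1, 'data': '1'},
            {'id': 2, 'data': '2'},
            {'id': 3, 'data': '3'},
        ]
        for asset in assets:
            query.store_asset(conn, dict(asset))

        # assert through the query layer instead of inspecting pymongo cursors,
        # so the same test can run against MongoDB or Tarantool
        for asset in assets:
            assert query.get_asset(conn, asset['id'])
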