Merge pull request #12 from planetmint/planetmint-tarantool

Planetmint tarantool
Lorenz Herzberger 2022-06-13 16:57:20 +02:00 committed by GitHub
commit ff2e429633
7 changed files with 45 additions and 39 deletions

View File

@@ -22,7 +22,7 @@ TABLES = ('transactions', 'blocks', 'assets', 'metadata',
SPACE_NAMES = ("abci_chains", "assets", "blocks", "blocks_tx",
"elections", "meta_data", "pre_commits", "validators",
"transactions", "inputs", "outputs", "keys")
"transactions", "inputs", "outputs", "keys", "utxos")
VALID_LANGUAGES = ('danish', 'dutch', 'english', 'finnish', 'french', 'german',
'hungarian', 'italian', 'norwegian', 'portuguese', 'romanian',

View File

@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
class TarantoolDBConnection(Connection):
def __init__(self, host: str = "localhost", port: int = 3303, user: str = None, password: str = None, **kwargs):
def __init__(self, host: str = "localhost", port: int = 3303, user: str = None, password: str = None, **kwargs):
try:
super().__init__(**kwargs)
self.host = host
@@ -62,7 +62,6 @@ class TarantoolDBConnection(Connection):
except tarantool.error.NetworkError as net_error:
raise net_error
def get_connection(self):
return self.conn

View File

@@ -5,6 +5,7 @@
"""Query implementation for Tarantool"""
from secrets import token_hex
from hashlib import sha256
from operator import itemgetter
import tarantool.error
@@ -15,6 +16,7 @@ from planetmint.backend.tarantool.connection import TarantoolDBConnection
from planetmint.backend.tarantool.transaction.tools import TransactionCompose, TransactionDecompose
from json import dumps, loads
register_query = module_dispatch_registrar(query)
@@ -390,11 +392,11 @@ def store_pre_commit_state(connection, state: dict):
_precommit = connection.run(
connection.space("pre_commits").select([], limit=1)
)
_precommitTuple = (token_hex(8), state["height"], state["transactions"]) if _precommit is None or len(_precommit) == 0 else _precommit[0]
_precommitTuple = (token_hex(8), state["height"], state["transactions"]) if _precommit is None or len(
_precommit) == 0 else _precommit[0]
connection.run(
connection.space("pre_commits").upsert(_precommitTuple,
op_list=[('=', 0, _precommitTuple[0]),
('=', 1, state["height"]),
op_list=[('=', 1, state["height"]),
('=', 2, state["transactions"])],
limit=1),
only_data=False
@@ -420,8 +422,7 @@ def store_validator_set(conn, validators_update: dict):
unique_id = token_hex(8) if _validator is None or len(_validator) == 0 else _validator[0][0]
conn.run(
conn.space("validators").upsert((unique_id, validators_update["height"], validators_update["validators"]),
op_list=[('=', 0, unique_id),
('=', 1, validators_update["height"]),
op_list=[('=', 1, validators_update["height"]),
('=', 2, validators_update["validators"])],
limit=1),
only_data=False
@@ -444,8 +445,7 @@ def delete_validator_set(connection, height: int):
def store_election(connection, election_id: str, height: int, is_concluded: bool):
connection.run(
connection.space("elections").upsert((election_id, height, is_concluded),
op_list=[('=', 0, election_id),
('=', 1, height),
op_list=[('=', 1, height),
('=', 2, is_concluded)],
limit=1),
only_data=False
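
Note on the three upsert hunks above (pre_commits, validators, elections): the only change is that the ('=', 0, ...) operation is dropped from each op_list, so the upsert no longer tries to rewrite field 0, the id field the upsert keys on; presumably Tarantool does not apply update operations to primary-key fields in an upsert. A minimal sketch of the resulting call shape, reusing the connection.run/connection.space helpers that appear in this diff (field numbers follow the tuples shown above):

# --- illustrative sketch, not part of the diff ---
# The tuple is inserted when no row with this id exists yet; otherwise only
# the non-key fields (1 and 2) are updated in place.
connection.run(
    connection.space("elections").upsert(
        (election_id, height, is_concluded),   # candidate row, field 0 is the id
        op_list=[('=', 1, height),             # update height on conflict
                 ('=', 2, is_concluded)],      # update the conclusion flag on conflict
        limit=1),
    only_data=False
)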
@@ -517,28 +517,25 @@ def get_asset_tokens_for_public_key(connection, asset_id: str,
@register_query(TarantoolDBConnection)
def store_abci_chain(connection, height: int, chain_id: str, is_synced: bool = True):
_chain = connection.run(connection.space("abci_chains").select(height, index="height_search", limit=1))
_chainTuple = (height, is_synced, chain_id) if _chain is None or len(_chain) == 0 else _chain[0]
hash_id_primarykey = sha256(dumps(obj={"height": height}).encode()).hexdigest()
connection.run(
connection.space("abci_chains").upsert(_chainTuple,
op_list=[('=', 0, height),
('=', 1, is_synced),
('=', 2, chain_id)],
limit=1),
connection.space("abci_chains").upsert((height, is_synced, chain_id, hash_id_primarykey),
op_list=[
('=', 0, height),
('=', 1, is_synced),
('=', 2, chain_id)
]),
only_data=False
)
@register_query(TarantoolDBConnection)
def delete_abci_chain(connection, height: int):
_chains = connection.run(
connection.space("abci_chains").select(height, index="height_search")
hash_id_primarykey = sha256(dumps(obj={"height": height}).encode()).hexdigest()
connection.run(
connection.space("abci_chains").delete(hash_id_primarykey),
only_data=False
)
for _chain in _chains:
connection.run(
connection.space("abci_chains").delete(_chain[2]),
only_data=False
)
@register_query(TarantoolDBConnection)
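
Both store_abci_chain and delete_abci_chain now derive the row id deterministically from the height instead of selecting by the height_search index first. A self-contained sketch of that derivation (the function name is hypothetical; the expression mirrors the hunk above):

# --- illustrative sketch, not part of the diff ---
from hashlib import sha256
from json import dumps

def abci_chain_row_id(height: int) -> str:
    # The same height always hashes to the same hex digest, which is what lets
    # delete_abci_chain() drop the row directly without a select() round trip.
    return sha256(dumps(obj={"height": height}).encode()).hexdigest()

# e.g. abci_chain_row_id(1) == sha256(b'{"height": 1}').hexdigest()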

View File

@@ -32,8 +32,8 @@ SPACE_COMMANDS = {
INDEX_COMMANDS = {
"abci_chains":
{
"id_search": "abci_chains:create_index('id_search' ,{type='hash', parts={'chain_id'}})",
"height_search": "abci_chains:create_index('height_search' ,{type='tree',unique=false, parts={'height'}})"
"id_search": "abci_chains:create_index('id_search' ,{type='hash', parts={'id'}})",
"height_search": "abci_chains:create_index('height_search' ,{type='tree', unique=false, parts={'height'}})"
},
"assets":
{
@@ -105,7 +105,7 @@ INDEX_COMMANDS = {
SCHEMA_COMMANDS = {
"abci_chains":
"abci_chains:format({{name='height' , type='integer'},{name='is_synched' , type='boolean'},{name='chain_id',type='string'}})",
"abci_chains:format({{name='height' , type='integer'},{name='is_synched' , type='boolean'},{name='chain_id',type='string'}, {name='id', type='string'}})",
"assets":
"assets:format({{name='data' , type='any'}, {name='tx_id', type='string'}, {name='asset_id', type='string'}})",
"blocks":

View File

@@ -552,7 +552,7 @@ def tarantool_client(db_context): # TODO Here add TarantoolConnectionClass
#
@pytest.fixture
def utxo_collection(tarantool_client):
def utxo_collection(tarantool_client, _setup_database):
return tarantool_client.get_space("utxos")
@@ -568,10 +568,12 @@ def dummy_unspent_outputs():
@pytest.fixture
def utxoset(dummy_unspent_outputs, utxo_collection):
from json import dumps
num_rows_before_operation = utxo_collection.select().rowcount
for utxo in dummy_unspent_outputs:
res = utxo_collection.insert((utxo["transaction_id"], utxo["output_index"], dumps(utxo)))
assert res
assert len(utxo_collection.select()) == 3
num_rows_after_operation = utxo_collection.select().rowcount
assert num_rows_after_operation == num_rows_before_operation + 3
return dummy_unspent_outputs, utxo_collection
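
The utxoset fixture no longer assumes the utxos space is empty: it records the row count before inserting, then checks the delta, using the rowcount of the driver's select() response. A sketch of that pattern as a stand-alone helper (the function name is hypothetical):

# --- illustrative sketch, not part of the diff ---
from json import dumps

def insert_utxos(utxo_collection, unspent_outputs):
    # Count rows before and after so the assertion holds even when the space
    # already contains data from an earlier fixture or test run.
    before = utxo_collection.select().rowcount
    for utxo in unspent_outputs:
        assert utxo_collection.insert(
            (utxo["transaction_id"], utxo["output_index"], dumps(utxo)))
    assert utxo_collection.select().rowcount == before + len(unspent_outputs)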

View File

@@ -288,9 +288,11 @@ def test_store_bulk_transaction(mocker, b, signed_create_tx,
@pytest.mark.bdb
def test_delete_zero_unspent_outputs(b, utxoset):
unspent_outputs, utxo_collection = utxoset
num_rows_before_operation = utxo_collection.select().rowcount
delete_res = b.delete_unspent_outputs()
num_rows_after_operation = utxo_collection.select().rowcount
# assert delete_res is None
assert utxo_collection.select().rowcount == 3
assert num_rows_before_operation == num_rows_after_operation
# assert utxo_collection.count_documents(
# {'$or': [
# {'transaction_id': 'a', 'output_index': 0},
@@ -350,9 +352,11 @@ def test_delete_many_unspent_outputs(b, utxoset):
@pytest.mark.bdb
def test_store_zero_unspent_output(b, utxo_collection):
num_rows_before_operation = utxo_collection.select().rowcount
res = b.store_unspent_outputs()
num_rows_after_operation = utxo_collection.select().rowcount
assert res is None
assert utxo_collection.select().rowcount == 0
assert num_rows_before_operation == num_rows_after_operation
@pytest.mark.bdb
@@ -514,11 +518,11 @@ def test_get_spent_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
bob = generate_key_pair()
tx1 = Create.generate([user_pk],
[([alice.public_key], 3), ([user_pk], 2)],
asset=None) \
[([alice.public_key], 3), ([user_pk], 2)],
asset=None) \
.sign([user_sk])
b.store_bulk_transactions([tx1])
assert tx1.validate(b)
inputs = tx1.to_inputs()
tx2 = Transfer.generate([inputs[1]], [([user2_pk], 2)], tx1.id).sign([user_sk])
assert tx2.validate(b)

View File

@@ -39,14 +39,18 @@ def flush_tarantool_db(connection, dbname):
for _id in _all_data:
if "assets" == s:
connection.run(connection.space(s).delete(_id[1]), only_data=False)
elif s in ["blocks", "abci_chains"]:
connection.run(connection.space(s).delete(_id[2], only_data=False))
elif s == "blocks":
connection.run(connection.space(s).delete(_id[2]), only_data=False)
elif s == "inputs":
connection.run(connection.space(s).delete(_id[-2], only_data=False))
connection.run(connection.space(s).delete(_id[-2]), only_data=False)
elif s == "outputs":
connection.run(connection.space(s).delete(_id[-4], only_data=False))
connection.run(connection.space(s).delete(_id[-4]), only_data=False)
elif s == "utxos":
connection.run(connection.space(s).delete([_id[0], _id[1]]), only_data=False)
elif s == "abci_chains":
connection.run(connection.space(s).delete(_id[-1]), only_data=False)
else:
connection.run(connection.space(s).delete(_id[0], only_data=False))
connection.run(connection.space(s).delete(_id[0]), only_data=False)
def generate_block(planet):
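
The flush_tarantool_db hunk fixes a misplaced keyword: only_data=False was previously passed into delete() itself rather than into connection.run(), and dedicated branches are added for the utxos space (deleted by a composite key) and for abci_chains (deleted by its last field, the id column added in this commit). Stripped of the surrounding loop, the corrected call shapes are roughly:

# --- illustrative sketch, not part of the diff; _id is one tuple from the loop above ---
connection.run(connection.space("utxos").delete([_id[0], _id[1]]),  # composite key, assumed (transaction_id, output_index)
               only_data=False)                                      # keyword now goes to run(), not delete()
connection.run(connection.space("abci_chains").delete(_id[-1]),     # last field: the sha256-derived id
               only_data=False)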