Merge pull request #10 from planetmint/planetmint-tarantool
Planetmint tarantool
commit 7759625511
@@ -390,12 +390,12 @@ def get_unspent_outputs(connection, query=None): # for now we don't have implem
 @register_query(TarantoolDBConnection)
 def store_pre_commit_state(connection, state: dict):
     _precommit = connection.run(
-        connection.space("pre_commits").select(state["height"], index="height_search", limit=1)
+        connection.space("pre_commits").select([], limit=1)
     )
-    unique_id = token_hex(8) if _precommit is None or len(_precommit.data) == 0 else _precommit.data[0][0]
+    _precommitTuple = (token_hex(8), state["height"], state["transactions"]) if _precommit is None or len(_precommit) == 0 else _precommit[0]
     connection.run(
-        connection.space("pre_commits").upsert((unique_id, state["height"], state["transactions"]),
-                                               op_list=[('=', 0, unique_id),
+        connection.space("pre_commits").upsert(_precommitTuple,
+                                               op_list=[('=', 0, _precommitTuple[0]),
                                                         ('=', 1, state["height"]),
                                                         ('=', 2, state["transactions"])],
                                                limit=1),
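Note on the hunk above: Tarantool's upsert takes a full tuple plus a list of update operations, inserts the tuple when no matching row exists, and otherwise applies the operations. A minimal, self-contained sketch of that behaviour with the plain tarantool client follows; the space name and field layout are taken from the diff, while the server address and schema are assumptions.

# Illustrative sketch, not part of the diff. Assumes a Tarantool server on
# localhost:3301 with a "pre_commits" space laid out as (id: str, height, transactions).
import tarantool
from secrets import token_hex

conn = tarantool.connect("localhost", 3301)
pre_commits = conn.space("pre_commits")

state = {"height": 3, "transactions": []}

# Reuse the stored tuple if one exists, otherwise build a fresh one,
# mirroring the new store_pre_commit_state() above.
existing = pre_commits.select([], limit=1)
row = (token_hex(8), state["height"], state["transactions"]) if len(existing) == 0 else existing[0]

# upsert: insert `row` when it is missing, otherwise apply the field assignments.
pre_commits.upsert(row, [("=", 1, state["height"]),
                         ("=", 2, state["transactions"])])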
@@ -410,7 +410,7 @@ def get_pre_commit_state(connection):
     )
     if _commit is None or len(_commit) == 0:
         return None
-    _commit = sorted(_commit, key=itemgetter(1), reverse=True)[0]
+    _commit = sorted(_commit, key=itemgetter(1), reverse=False)[0]
     return {"height": _commit[1], "transactions": _commit[2]}


@@ -419,7 +419,7 @@ def store_validator_set(conn, validators_update: dict):
     _validator = conn.run(
         conn.space("validators").select(validators_update["height"], index="height_search", limit=1)
     )
-    unique_id = token_hex(8) if _validator is None or len(_validator) == 0 else _validator.data[0][0]
+    unique_id = token_hex(8) if _validator is None or len(_validator) == 0 else _validator[0][0]
     conn.run(
         conn.space("validators").upsert((unique_id, validators_update["height"], validators_update["validators"]),
                                         op_list=[('=', 0, unique_id),
@@ -519,8 +519,10 @@ def get_asset_tokens_for_public_key(connection, asset_id: str,

 @register_query(TarantoolDBConnection)
 def store_abci_chain(connection, height: int, chain_id: str, is_synced: bool = True):
+    _chain = connection.run(connection.space("abci_chains").select(height, index="height_search", limit=1))
+    _chainTuple = (height, is_synced, chain_id) if _chain is None or len(_chain) == 0 else _chain[0]
     connection.run(
-        connection.space("abci_chains").upsert((height, is_synced, chain_id),
+        connection.space("abci_chains").upsert(_chainTuple,
                                                op_list=[('=', 0, height),
                                                         ('=', 1, is_synced),
                                                         ('=', 2, chain_id)],

@@ -195,7 +195,6 @@ class App(BaseApplication):
         self.abort_if_abci_chain_is_not_synced()

         chain_shift = 0 if self.chain is None else self.chain['height']
-
         height = request_end_block.height + chain_shift
         self.new_height = height

@@ -606,7 +606,6 @@ class Transaction(object):
             tx_body (dict): The Transaction to be transformed.
         """
         # NOTE: Remove reference to avoid side effects
-        print(f"\nbefore deepcopy {tx_body}")
-        tx_body = deepcopy(tx_body)
+        tx_body = rapidjson.loads(rapidjson.dumps(tx_body))


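The change above drops the debug print and swaps `deepcopy` for a rapidjson round trip; serialising and re-parsing a JSON-serialisable dict yields an independent copy. A small sketch of the idea, with a made-up sample dict:

# Illustrative sketch, not part of the diff: a JSON round-trip as a copy of
# JSON-serialisable data, which is what the new from_dict code relies on.
import rapidjson

tx_body = {"id": "abc", "outputs": [{"amount": "1"}]}
tx_copy = rapidjson.loads(rapidjson.dumps(tx_body))

tx_copy["outputs"][0]["amount"] = "2"
assert tx_body["outputs"][0]["amount"] == "1"  # the original dict is untouched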
@@ -6,7 +6,7 @@
 from copy import deepcopy

 import pytest
-
+import tarantool
 from planetmint.transactions.types.assets.create import Create
 from planetmint.transactions.types.assets.transfer import Transfer

@@ -22,37 +22,32 @@ pytestmark = pytest.mark.bdb
 def test_get_txids_filtered(signed_create_tx, signed_transfer_tx, db_conn):
     from planetmint.backend.tarantool import query
     from planetmint.models import Transaction
-    conn = db_conn.get_connection()
     # create and insert two blocks, one for the create and one for the
     # transfer transaction
     create_tx_dict = signed_create_tx.to_dict()
     transfer_tx_dict = signed_transfer_tx.to_dict()
-    print(create_tx_dict)
-    print(" ")
-    print(transfer_tx_dict)
-    query.store_transactions(signed_transactions=[create_tx_dict], connection=conn)
-    query.store_transactions(signed_transactions=[transfer_tx_dict], connection=conn)

+    query.store_transactions(signed_transactions=[create_tx_dict], connection=db_conn)
+    query.store_transactions(signed_transactions=[transfer_tx_dict], connection=db_conn)

     asset_id = Transaction.get_asset_id([signed_create_tx, signed_transfer_tx])

     # Test get by just asset id
-    txids = set(query.get_txids_filtered(connection=conn, asset_id=asset_id))
+    txids = set(query.get_txids_filtered(connection=db_conn, asset_id=asset_id))
     assert txids == {signed_create_tx.id, signed_transfer_tx.id}

     # Test get by asset and CREATE
-    txids = set(query.get_txids_filtered(connection=conn, asset_id=asset_id, operation=Transaction.CREATE))
+    txids = set(query.get_txids_filtered(connection=db_conn, asset_id=asset_id, operation=Transaction.CREATE))
     assert txids == {signed_create_tx.id}

     # Test get by asset and TRANSFER
-    txids = set(query.get_txids_filtered(connection=conn, asset_id=asset_id, operation=Transaction.TRANSFER))
+    txids = set(query.get_txids_filtered(connection=db_conn, asset_id=asset_id, operation=Transaction.TRANSFER))
     assert txids == {signed_transfer_tx.id}


 def test_write_assets(db_conn):
     # from planetmint.backend.connection import Connection
     from planetmint.backend.tarantool import query
     # conn = Connection().get_connection()
-    conn = db_conn.get_connection()

     assets = [
         {'id': '1', 'data': '1'},
         {'id': '2', 'data': '2'},
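The recurring change throughout these tests is that the query functions now receive the backend connection object (`db_conn`) directly instead of the raw driver handle obtained via `db_conn.get_connection()`. A hedged sketch of the two call styles; `db_conn` stands for the fixture used in the diff, and the helper function is hypothetical:

# Hypothetical helper for illustration only; it mirrors the call-site change
# made throughout the tests above.
from planetmint.backend.tarantool import query

def store_one(db_conn, tx_dict):
    # old style: hand the low-level Tarantool connection to the query layer
    # conn = db_conn.get_connection()
    # query.store_transactions(signed_transactions=[tx_dict], connection=conn)

    # new style: pass the backend connection wrapper itself
    query.store_transactions(signed_transactions=[tx_dict], connection=db_conn)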
@@ -63,29 +58,28 @@ def test_write_assets(db_conn):

     # write the assets
     for asset in assets:
-        query.store_asset(connection=conn, asset=asset)
+        query.store_asset(connection=db_conn, asset=asset)

     # check that 3 assets were written to the database
-    documents = query.get_assets(assets_ids=[asset["id"] for asset in assets], connection=conn)
-    print(f"\nDOCUMENTS: {documents}")
-    print(f"\nASSETS: {assets}")
+    documents = query.get_assets(assets_ids=[asset["id"] for asset in assets], connection=db_conn)

     assert len(documents) == 3
     assert list(documents)[0] == assets[:-1][0]


 def test_get_assets(db_conn):
     from planetmint.backend.tarantool import query
-    conn = db_conn.get_connection()

     assets = [
         ("1", '1', '1'),
         ("2", '2', '2'),
         ("3", '3', '3'),
     ]

-    query.store_assets(assets=assets, connection=conn)
+    query.store_assets(assets=assets, connection=db_conn)

     for asset in assets:
-        assert query.get_asset(asset_id=asset[2], connection=conn)
+        assert query.get_asset(asset_id=asset[2], connection=db_conn)


 @pytest.mark.parametrize('table', ['assets', 'metadata'])
@@ -176,21 +170,19 @@ def test_write_metadata(db_conn):
     # from planetmint.backend.connection import Connection
     from planetmint.backend.tarantool import query
     # conn = Connection().get_connection()
-    conn = db_conn.get_connection()

     metadata = [
         {'id': "1", 'data': '1'},
         {'id': "2", 'data': '2'},
         {'id': "3", 'data': '3'}
     ]
     # write the assets
-    query.store_metadatas(connection=conn, metadata=metadata)
+    query.store_metadatas(connection=db_conn, metadata=metadata)

     # check that 3 assets were written to the database
-    space = conn.get_space("meta_data")
     metadatas = []
     for meta in metadata:
-        _data = space.select(meta["id"])
-        _data = _data.data[0]
+        _data = db_conn.run(db_conn.space("meta_data").select(meta["id"]))[0]
         metadatas.append({"id": _data[0], "data": _data[1]})

     metadatas = sorted(metadatas, key=lambda k: k["id"])
@@ -201,7 +193,6 @@ def test_write_metadata(db_conn):

 def test_get_metadata(db_conn):
     from planetmint.backend.tarantool import query
-    conn = db_conn.get_connection()

     metadata = [
         {'id': "dd86682db39e4b424df0eec1413cfad65488fd48712097c5d865ca8e8e059b64", 'metadata': None},
@@ -209,30 +200,26 @@ def test_get_metadata(db_conn):
     ]

-    # conn.db.metadata.insert_many(deepcopy(metadata), ordered=False)
-    query.store_metadatas(connection=conn, metadata=metadata)
+    query.store_metadatas(connection=db_conn, metadata=metadata)

     for meta in metadata:
-        _m = query.get_metadata(connection=conn, transaction_ids=[meta["id"]])
+        _m = query.get_metadata(connection=db_conn, transaction_ids=[meta["id"]])
         assert _m


 def test_get_owned_ids(signed_create_tx, user_pk, db_conn):
     from planetmint.backend.tarantool import query
-    conn = db_conn.get_connection()

     # insert a transaction
-    query.store_transactions(connection=conn, signed_transactions=[signed_create_tx.to_dict()])
-    txns = list(query.get_owned_ids(connection=conn, owner=user_pk))
+    query.store_transactions(connection=db_conn, signed_transactions=[signed_create_tx.to_dict()])
+    txns = list(query.get_owned_ids(connection=db_conn, owner=user_pk))
     tx_dict = signed_create_tx.to_dict()
     founded = [tx for tx in txns if tx["id"] == tx_dict["id"]]
     assert founded[0] == tx_dict


 def test_get_spending_transactions(user_pk, user_sk, db_conn):
     from planetmint.models import Transaction
-    #
     from planetmint.backend.tarantool import query
-    conn = db_conn.get_connection()

     out = [([user_pk], 1)]
     tx1 = Create.generate([user_pk], out * 3)
@@ -242,20 +229,18 @@ def test_get_spending_transactions(user_pk, user_sk, db_conn):
     tx3 = Transfer.generate([inputs[1]], out, tx1.id).sign([user_sk])
     tx4 = Transfer.generate([inputs[2]], out, tx1.id).sign([user_sk])
     txns = [deepcopy(tx.to_dict()) for tx in [tx1, tx2, tx3, tx4]]
-    query.store_transactions(signed_transactions=txns, connection=conn)
+    query.store_transactions(signed_transactions=txns, connection=db_conn)

     links = [inputs[0].fulfills.to_dict(), inputs[2].fulfills.to_dict()]
-    txns = list(query.get_spending_transactions(connection=conn, inputs=links))
+    txns = list(query.get_spending_transactions(connection=db_conn, inputs=links))

     # tx3 not a member because input 1 not asked for
     assert txns == [tx2.to_dict(), tx4.to_dict()]


 def test_get_spending_transactions_multiple_inputs(db_conn):
     from planetmint.models import Transaction
     from planetmint.transactions.common.crypto import generate_key_pair
     from planetmint.backend.tarantool import query
-    conn = db_conn.get_connection()

     (alice_sk, alice_pk) = generate_key_pair()
     (bob_sk, bob_pk) = generate_key_pair()
@@ -280,7 +265,7 @@ def test_get_spending_transactions_multiple_inputs(db_conn):
                              tx1.id).sign([bob_sk])

     txns = [deepcopy(tx.to_dict()) for tx in [tx1, tx2, tx3, tx4]]
-    query.store_transactions(signed_transactions=txns, connection=conn)
+    query.store_transactions(signed_transactions=txns, connection=db_conn)

     links = [
         ({'transaction_id': tx2.id, 'output_index': 0}, 1, [tx3.id]),
@@ -289,7 +274,7 @@ def test_get_spending_transactions_multiple_inputs(db_conn):
         ({'transaction_id': tx3.id, 'output_index': 1}, 0, None),
     ]
     for li, num, match in links:
-        txns = list(query.get_spending_transactions(connection=conn, inputs=[li]))
+        txns = list(query.get_spending_transactions(connection=db_conn, inputs=[li]))
         assert len(txns) == num
         if len(txns):
             assert [tx['id'] for tx in txns] == match
@@ -298,31 +283,26 @@ def test_get_spending_transactions_multiple_inputs(db_conn):
 def test_store_block(db_conn):
     from planetmint.lib import Block
     from planetmint.backend.tarantool import query

-    conn = db_conn.get_connection()

     block = Block(app_hash='random_utxo',
                   height=3,
                   transactions=[])
-    query.store_block(connection=conn, block=block._asdict())
-    # block = query.get_block(connection=conn)
-    blocks = conn.get_space("blocks").select([])
-    assert len(blocks.data) == 1
+    query.store_block(connection=db_conn, block=block._asdict())
+    # block = query.get_block(connection=db_conn)
+    blocks = db_conn.run(db_conn.space("blocks").select([]))
+    assert len(blocks) == 1


 def test_get_block(db_conn):
     from planetmint.lib import Block
     from planetmint.backend.tarantool import query

-    conn = db_conn.get_connection()

     block = Block(app_hash='random_utxo',
                   height=3,
                   transactions=[])

-    query.store_block(connection=conn, block=block._asdict())
+    query.store_block(connection=db_conn, block=block._asdict())

-    block = dict(query.get_block(connection=conn, block_id=3))
+    block = dict(query.get_block(connection=db_conn, block_id=3))
     assert block['height'] == 3


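Both block tests build the block through the `Block` named tuple from `planetmint.lib` and store its `_asdict()` form. A short sketch of that round trip; the field order follows the usage in the diff and the assertion is purely illustrative:

# Illustrative sketch, not part of the diff.
from planetmint.lib import Block

block = Block(app_hash='random_utxo', height=3, transactions=[])
assert block._asdict() == {'app_hash': 'random_utxo', 'height': 3, 'transactions': []}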
@@ -427,12 +407,10 @@ def test_get_block(db_conn):
 def test_store_pre_commit_state(db_conn):
     from planetmint.backend.tarantool import query

-    conn = db_conn.get_connection()

     state = dict(height=3, transactions=[])

-    query.store_pre_commit_state(connection=conn, state=state)
-    commit = query.get_pre_commit_state(connection=conn)
+    query.store_pre_commit_state(connection=db_conn, state=state)
+    commit = query.get_pre_commit_state(connection=db_conn)
     assert len([commit]) == 1

     # cursor = db_context.conn.db.pre_commit.find({'commit_id': 'test'},
@@ -442,41 +420,35 @@ def test_store_pre_commit_state(db_conn):
 def test_get_pre_commit_state(db_conn):
     from planetmint.backend.tarantool import query

-    conn = db_conn.get_connection()

-    space = conn.space("pre_commits")
-    all_pre = space.select([])
-    for pre in all_pre.data:
-        space.delete(pre[0])
+    all_pre = db_conn.run(db_conn.space("pre_commits").select([]))
+    for pre in all_pre:
+        db_conn.run(db_conn.space("pre_commits").delete(pre[0]), only_data=False)
     # TODO First IN, First OUT
     state = dict(height=3, transactions=[])
     # db_context.conn.db.pre_commit.insert_one
-    query.store_pre_commit_state(state=state, connection=conn)
-    resp = query.get_pre_commit_state(connection=conn)
+    query.store_pre_commit_state(state=state, connection=db_conn)
+    resp = query.get_pre_commit_state(connection=db_conn)
     assert resp == state


 def test_validator_update(db_conn):

     from planetmint.backend.tarantool import query

-    conn = db_conn.get_connection()

     def gen_validator_update(height):
         return {'validators': [], 'height': height, 'election_id': f'election_id_at_height_{height}'}
         # return {'data': 'somedata', 'height': height, 'election_id': f'election_id_at_height_{height}'}

     for i in range(1, 100, 10):
         value = gen_validator_update(i)
-        query.store_validator_set(conn=conn, validators_update=value)
+        query.store_validator_set(conn=db_conn, validators_update=value)

-    v1 = query.get_validator_set(connection=conn, height=8)
+    v1 = query.get_validator_set(connection=db_conn, height=8)
     assert v1['height'] == 1

-    v41 = query.get_validator_set(connection=conn, height=50)
+    v41 = query.get_validator_set(connection=db_conn, height=50)
     assert v41['height'] == 41

-    v91 = query.get_validator_set(connection=conn)
+    v91 = query.get_validator_set(connection=db_conn)
     assert v91['height'] == 91


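The assertions in test_validator_update encode the lookup rule for validator sets: with sets stored at heights 1, 11, 21, ..., 91, a query for height 8 returns the set stored at height 1, height 50 returns 41, and no height returns the latest. An illustrative restatement of that rule, not the backend implementation:

# Illustrative sketch only; shows the "latest set at or below the requested
# height" rule that the assertions above rely on.
stored_heights = list(range(1, 100, 10))  # 1, 11, 21, ..., 91

def expected_height(requested=None, heights=stored_heights):
    eligible = heights if requested is None else [h for h in heights if h <= requested]
    return max(eligible)

assert expected_height(8) == 1
assert expected_height(50) == 41
assert expected_height() == 91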
@@ -521,10 +493,8 @@ def test_validator_update(db_conn):
 def test_store_abci_chain(description, stores, expected, db_conn):
     from planetmint.backend.tarantool import query

-    conn = db_conn.get_connection()

     for store in stores:
-        query.store_abci_chain(conn, **store)
+        query.store_abci_chain(db_conn, **store)

-    actual = query.get_latest_abci_chain(conn)
+    actual = query.get_latest_abci_chain(db_conn)
     assert expected == actual, description

@@ -28,6 +28,7 @@ from planetmint.transactions.types.assets.transfer import Transfer

 from tests.utils import generate_election, generate_validators

+
 @pytest.fixture
 def config(request, monkeypatch):
     backend = request.config.getoption('--database-backend')
@@ -55,6 +56,7 @@ def config(request, monkeypatch):
     monkeypatch.setattr('planetmint.config', config)
     return config

+
 def test_bigchain_class_default_initialization(config):
     from planetmint import Planetmint
     from planetmint.validation import BaseValidationRules
@@ -63,6 +65,7 @@ def test_bigchain_class_default_initialization(config):
     assert planet.connection.port == config['database']['port']
     assert planet.validation == BaseValidationRules

+
 def test_bigchain_class_initialization_with_parameters():
     from planetmint import Planetmint
     from planetmint.backend import connect
@@ -88,8 +91,6 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
         [carol.public_key],
         [([carol.public_key], 8)],
     ).sign([carol.private_key])
-    print(f" TX 1 : {tx_1} ")
-    print(f" TX 1 ID : {tx_1.id} ")
     assert tx_1.validate(b)
     b.store_bulk_transactions([tx_1])

@@ -100,8 +101,6 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
                                ([carol.public_key], 4)],
                               asset_id=tx_1.id,
                               ).sign([carol.private_key])
-    print(f" TX 2 : {tx_2} ")
-    print(f" TX 2 ID : {tx_2.id} ")
     assert tx_2.validate(b)
     b.store_bulk_transactions([tx_2])

@@ -111,8 +110,6 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
                                ([carol.public_key], 3)],
                               asset_id=tx_1.id,
                               ).sign([carol.private_key])
-    print(f"TX 3 : {tx_3} ")
-    print(f"TX 3 ID : {tx_3.id} ")
     assert tx_3.validate(b)
     b.store_bulk_transactions([tx_3])

@@ -136,513 +133,3 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
     assert not b.get_spent(tx_5.id, 0)
     assert b.get_outputs_filtered(alice.public_key)
     assert b.get_outputs_filtered(alice.public_key, spent=False)
-
-def encode_tx_to_bytes(transaction):
-    return json.dumps(transaction.to_dict()).encode('utf8')
-
-
-def generate_address():
-    return ''.join(random.choices('1,2,3,4,5,6,7,8,9,A,B,C,D,E,F'.split(','),
-                                  k=40)).encode()
-
-
-def generate_validator():
-    pk, _ = generate_key_pair()
-    pub_key = keys_pb2.PublicKey(ed25519=pk.encode())
-    val = types.ValidatorUpdate(power=10, pub_key=pub_key)
-    return val
-
-
-def generate_init_chain_request(chain_id, vals=None):
-    vals = vals if vals is not None else [generate_validator()]
-    return types.RequestInitChain(validators=vals, chain_id=chain_id)
-
-@pytest.mark.bdb
-def test_init_chain_successfully_registers_chain(b):
-    request = generate_init_chain_request('chain-XYZ')
-    res = App(b).init_chain(request)
-    assert res == types.ResponseInitChain()
-    print(b.connection)
-
-    chain = query.get_latest_abci_chain(b.connection)
-    assert chain == {'height': 0, 'chain_id': 'chain-XYZ', 'is_synced': True}
-    assert query.get_latest_block(b.connection) == {
-        'height': 0,
-        'app_hash': '',
-        'transactions': [],
-    }
-
-@pytest.mark.bdb
-def test_init_chain_ignores_invalid_init_chain_requests(b):
-    validators = [generate_validator()]
-    request = generate_init_chain_request('chain-XYZ', validators)
-    res = App(b).init_chain(request)
-    assert res == types.ResponseInitChain()
-
-    validator_set = query.get_validator_set(b.connection)
-
-    invalid_requests = [
-        request,  # the same request again
-        # different validator set
-        generate_init_chain_request('chain-XYZ'),
-        # different chain ID
-        generate_init_chain_request('chain-ABC', validators),
-    ]
-    for r in invalid_requests:
-        with pytest.raises(SystemExit):
-            App(b).init_chain(r)
-        # assert nothing changed - neither validator set, nor chain ID
-        new_validator_set = query.get_validator_set(b.connection)
-        assert new_validator_set == validator_set
-        new_chain_id = query.get_latest_abci_chain(b.connection)['chain_id']
-        assert new_chain_id == 'chain-XYZ'
-        assert query.get_latest_block(b.connection) == {
-            'height': 0,
-            'app_hash': '',
-            'transactions': [],
-        }
-
-@pytest.mark.bdb
-def test_init_chain_recognizes_new_chain_after_migration(b):
-    validators = [generate_validator()]
-    request = generate_init_chain_request('chain-XYZ', validators)
-    res = App(b).init_chain(request)
-    assert res == types.ResponseInitChain()
-
-    validator_set = query.get_validator_set(b.connection)['validators']
-
-    # simulate a migration
-    query.store_block(b.connection, Block(app_hash='', height=1,
-                                          transactions=[])._asdict())
-    b.migrate_abci_chain()
-
-    # the same or other mismatching requests are ignored
-    invalid_requests = [
-        request,
-        generate_init_chain_request('unknown', validators),
-        generate_init_chain_request('chain-XYZ'),
-        generate_init_chain_request('chain-XYZ-migrated-at-height-1'),
-    ]
-    for r in invalid_requests:
-        with pytest.raises(SystemExit):
-            App(b).init_chain(r)
-        assert query.get_latest_abci_chain(b.connection) == {
-            'chain_id': 'chain-XYZ-migrated-at-height-1',
-            'is_synced': False,
-            'height': 2,
-        }
-        new_validator_set = query.get_validator_set(b.connection)['validators']
-        assert new_validator_set == validator_set
-
-    # a request with the matching chain ID and matching validator set
-    # completes the migration
-    request = generate_init_chain_request('chain-XYZ-migrated-at-height-1',
-                                          validators)
-    res = App(b).init_chain(request)
-    assert res == types.ResponseInitChain()
-    assert query.get_latest_abci_chain(b.connection) == {
-        'chain_id': 'chain-XYZ-migrated-at-height-1',
-        'is_synced': True,
-        'height': 2,
-    }
-    assert query.get_latest_block(b.connection) == {
-        'height': 2,
-        'app_hash': '',
-        'transactions': [],
-    }
-
-    # requests with old chain ID and other requests are ignored
-    invalid_requests = [
-        request,
-        generate_init_chain_request('chain-XYZ', validators),
-        generate_init_chain_request('chain-XYZ-migrated-at-height-1'),
-    ]
-    for r in invalid_requests:
-        with pytest.raises(SystemExit):
-            App(b).init_chain(r)
-        assert query.get_latest_abci_chain(b.connection) == {
-            'chain_id': 'chain-XYZ-migrated-at-height-1',
-            'is_synced': True,
-            'height': 2,
-        }
-        new_validator_set = query.get_validator_set(b.connection)['validators']
-        assert new_validator_set == validator_set
-        assert query.get_latest_block(b.connection) == {
-            'height': 2,
-            'app_hash': '',
-            'transactions': [],
-        }
-
-
-def test_info(b):
-    r = types.RequestInfo(version=__tm_supported_versions__[0])
-    app = App(b)
-
-    res = app.info(r)
-    assert res.last_block_height == 0
-    assert res.last_block_app_hash == b''
-
-    b.store_block(Block(app_hash='1', height=1, transactions=[])._asdict())
-    res = app.info(r)
-    assert res.last_block_height == 1
-    assert res.last_block_app_hash == b'1'
-
-    # simulate a migration and assert the height is shifted
-    b.store_abci_chain(2, 'chain-XYZ')
-    app = App(b)
-    b.store_block(Block(app_hash='2', height=2, transactions=[])._asdict())
-    res = app.info(r)
-    assert res.last_block_height == 0
-    assert res.last_block_app_hash == b'2'
-
-    b.store_block(Block(app_hash='3', height=3, transactions=[])._asdict())
-    res = app.info(r)
-    assert res.last_block_height == 1
-    assert res.last_block_app_hash == b'3'
-
-    # it's always the latest migration that is taken into account
-    b.store_abci_chain(4, 'chain-XYZ-new')
-    app = App(b)
-    b.store_block(Block(app_hash='4', height=4, transactions=[])._asdict())
-    res = app.info(r)
-    assert res.last_block_height == 0
-    assert res.last_block_app_hash == b'4'
-
-
-def test_check_tx__signed_create_is_ok(b):
-    from planetmint import App
-    from planetmint.transactions.common.crypto import generate_key_pair
-
-    alice = generate_key_pair()
-    bob = generate_key_pair()
-
-    tx = Create.generate([alice.public_key],
-                         [([bob.public_key], 1)])\
-        .sign([alice.private_key])
-
-    app = App(b)
-    result = app.check_tx(encode_tx_to_bytes(tx))
-    assert result.code == OkCode
-
-
-def test_check_tx__unsigned_create_is_error(b):
-    from planetmint import App
-    from planetmint.transactions.common.crypto import generate_key_pair
-
-    alice = generate_key_pair()
-    bob = generate_key_pair()
-
-    tx = Create.generate([alice.public_key],
-                         [([bob.public_key], 1)])
-
-    app = App(b)
-    result = app.check_tx(encode_tx_to_bytes(tx))
-    assert result.code == CodeTypeError
-
-
-def test_deliver_tx__valid_create_updates_db_and_emits_event(b, init_chain_request):
-    import multiprocessing as mp
-    from planetmint import App
-    from planetmint.transactions.common.crypto import generate_key_pair
-
-    alice = generate_key_pair()
-    bob = generate_key_pair()
-    events = mp.Queue()
-
-    tx = Create.generate([alice.public_key],
-                         [([bob.public_key], 1)])\
-        .sign([alice.private_key])
-
-    app = App(b, events)
-
-    app.init_chain(init_chain_request)
-
-    begin_block = types.RequestBeginBlock()
-    app.begin_block(begin_block)
-
-    result = app.deliver_tx(encode_tx_to_bytes(tx))
-    assert result.code == OkCode
-
-    app.end_block(types.RequestEndBlock(height=99))
-    app.commit()
-    assert b.get_transaction(tx.id).id == tx.id
-    block_event = events.get()
-    assert block_event.data['transactions'] == [tx]
-
-    # unspent_outputs = b.get_unspent_outputs()
-    # unspent_output = next(unspent_outputs)
-    # expected_unspent_output = next(tx.unspent_outputs)._asdict()
-    # assert unspent_output == expected_unspent_output
-    # with pytest.raises(StopIteration):
-    #     next(unspent_outputs)
-
-
-def test_deliver_tx__double_spend_fails(b, eventqueue_fixture, init_chain_request):
-    from planetmint import App
-    from planetmint.transactions.common.crypto import generate_key_pair
-
-    alice = generate_key_pair()
-    bob = generate_key_pair()
-
-    tx = Create.generate([alice.public_key],
-                         [([bob.public_key], 1)])\
-        .sign([alice.private_key])
-
-    app = App(b, eventqueue_fixture)
-    app.init_chain(init_chain_request)
-
-    begin_block = types.RequestBeginBlock()
-    app.begin_block(begin_block)
-
-    result = app.deliver_tx(encode_tx_to_bytes(tx))
-    assert result.code == OkCode
-
-    app.end_block(types.RequestEndBlock(height=99))
-    app.commit()
-
-    assert b.get_transaction(tx.id).id == tx.id
-    result = app.deliver_tx(encode_tx_to_bytes(tx))
-    assert result.code == CodeTypeError
-
-
-def test_deliver_transfer_tx__double_spend_fails(b, init_chain_request):
-    from planetmint import App
-    from planetmint.transactions.common.crypto import generate_key_pair
-
-    app = App(b)
-    app.init_chain(init_chain_request)
-
-    begin_block = types.RequestBeginBlock()
-    app.begin_block(begin_block)
-
-    alice = generate_key_pair()
-    bob = generate_key_pair()
-    carly = generate_key_pair()
-
-    asset = {
-        'msg': 'live long and prosper'
-    }
-
-    tx = Create.generate([alice.public_key],
-                         [([alice.public_key], 1)],
-                         asset=asset)\
-        .sign([alice.private_key])
-
-    result = app.deliver_tx(encode_tx_to_bytes(tx))
-    assert result.code == OkCode
-
-    tx_transfer = Transfer.generate(tx.to_inputs(),
-                                    [([bob.public_key], 1)],
-                                    asset_id=tx.id)\
-        .sign([alice.private_key])
-
-    result = app.deliver_tx(encode_tx_to_bytes(tx_transfer))
-    assert result.code == OkCode
-
-    double_spend = Transfer.generate(tx.to_inputs(),
-                                     [([carly.public_key], 1)],
-                                     asset_id=tx.id)\
-        .sign([alice.private_key])
-
-    result = app.deliver_tx(encode_tx_to_bytes(double_spend))
-    assert result.code == CodeTypeError
-
-
-def test_end_block_return_validator_updates(b, init_chain_request):
-    app = App(b)
-    app.init_chain(init_chain_request)
-
-    begin_block = types.RequestBeginBlock()
-    app.begin_block(begin_block)
-
-    # generate a block containing a concluded validator election
-    validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v['storage'] for v in validators])
-
-    new_validator = generate_validators([1])[0]
-
-    public_key = validators[0]['public_key']
-    private_key = validators[0]['private_key']
-    voter_keys = [v['private_key'] for v in validators]
-
-    election, votes = generate_election(b,
-                                        ValidatorElection,
-                                        public_key, private_key,
-                                        new_validator['election'],
-                                        voter_keys)
-    b.store_block(Block(height=1, transactions=[election.id],
-                        app_hash='')._asdict())
-    b.store_bulk_transactions([election])
-    Election.process_block(b, 1, [election])
-
-    app.block_transactions = votes
-
-    resp = app.end_block(types.RequestEndBlock(height=2))
-    assert resp.validator_updates[0].power == new_validator['election']['power']
-    expected = bytes.fromhex(new_validator['election']['public_key']['value'])
-    assert expected == resp.validator_updates[0].pub_key.ed25519
-
-
-def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request):
-    from planetmint import App
-    from planetmint.backend import query
-
-    tx = Create.generate([alice.public_key],
-                         [([alice.public_key], 1)],
-                         asset={'msg': 'live long and prosper'})\
-        .sign([alice.private_key])
-
-    app = App(b)
-    app.init_chain(init_chain_request)
-
-    begin_block = types.RequestBeginBlock()
-    app.begin_block(begin_block)
-    app.deliver_tx(encode_tx_to_bytes(tx))
-    app.end_block(types.RequestEndBlock(height=99))
-
-    resp = query.get_pre_commit_state(b.connection)
-    assert resp['height'] == 99
-    assert resp['transactions'] == [tx.id]
-
-    app.begin_block(begin_block)
-    app.deliver_tx(encode_tx_to_bytes(tx))
-    app.end_block(types.RequestEndBlock(height=100))
-    resp = query.get_pre_commit_state(b.connection)
-    assert resp['height'] == 100
-    assert resp['transactions'] == [tx.id]
-
-    # simulate a chain migration and assert the height is shifted
-    b.store_abci_chain(100, 'new-chain')
-    app = App(b)
-    app.begin_block(begin_block)
-    app.deliver_tx(encode_tx_to_bytes(tx))
-    app.end_block(types.RequestEndBlock(height=1))
-    resp = query.get_pre_commit_state(b.connection)
-    assert resp['height'] == 101
-    assert resp['transactions'] == [tx.id]
-
-
-def test_rollback_pre_commit_state_after_crash(b):
-    validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v['storage'] for v in validators])
-    b.store_block(Block(height=1, transactions=[], app_hash='')._asdict())
-
-    public_key = validators[0]['public_key']
-    private_key = validators[0]['private_key']
-    voter_keys = [v['private_key'] for v in validators]
-
-    migration_election, votes = generate_election(b,
-                                                  ChainMigrationElection,
-                                                  public_key, private_key,
-                                                  {},
-                                                  voter_keys)
-
-    total_votes = votes
-    txs = [migration_election, *votes]
-
-    new_validator = generate_validators([1])[0]
-    validator_election, votes = generate_election(b,
-                                                  ValidatorElection,
-                                                  public_key, private_key,
-                                                  new_validator['election'],
-                                                  voter_keys)
-
-    total_votes += votes
-    txs += [validator_election, *votes]
-
-    b.store_bulk_transactions(txs)
-    b.store_abci_chain(2, 'new_chain')
-    b.store_validator_set(2, [v['storage'] for v in validators])
-    # TODO change to `4` when upgrading to Tendermint 0.22.4.
-    b.store_validator_set(3, [new_validator['storage']])
-    b.store_election(migration_election.id, 2, is_concluded=False)
-    b.store_election(validator_election.id, 2, is_concluded=True)
-
-    # no pre-commit state
-    rollback(b)
-
-    for tx in txs:
-        assert b.get_transaction(tx.id)
-    assert b.get_latest_abci_chain()
-    assert len(b.get_validator_change()['validators']) == 1
-    assert b.get_election(migration_election.id)
-    assert b.get_election(validator_election.id)
-
-    b.store_pre_commit_state({'height': 2, 'transactions': [tx.id for tx in txs]})
-
-    rollback(b)
-
-    for tx in txs:
-        assert not b.get_transaction(tx.id)
-    assert not b.get_latest_abci_chain()
-    assert len(b.get_validator_change()['validators']) == 4
-    assert len(b.get_validator_change(2)['validators']) == 4
-    assert not b.get_election(migration_election.id)
-    assert not b.get_election(validator_election.id)
-
-
-def test_new_validator_set(b):
-    node1 = {'public_key': {'type': 'ed25519-base64',
-                            'value': 'FxjS2/8AFYoIUqF6AcePTc87qOT7e4WGgH+sGCpTUDQ='},
-             'voting_power': 10}
-    node1_new_power = {'public_key': {'value': '1718D2DBFF00158A0852A17A01C78F4DCF3BA8E4FB7B8586807FAC182A535034',
-                                      'type': 'ed25519-base16'},
-                       'power': 20}
-    node2 = {'public_key': {'value': '1888A353B181715CA2554701D06C1665BC42C5D936C55EA9C5DBCBDB8B3F02A3',
-                            'type': 'ed25519-base16'},
-             'power': 10}
-
-    validators = [node1]
-    updates = [node1_new_power, node2]
-    b.store_validator_set(1, validators)
-    updated_validator_set = new_validator_set(b.get_validators(1), updates)
-
-    updated_validators = []
-    for u in updates:
-        updated_validators.append({'public_key': {'type': 'ed25519-base64',
-                                                   'value': public_key_to_base64(u['public_key']['value'])},
-                                   'voting_power': u['power']})
-
-    assert updated_validator_set == updated_validators
-
-
-def test_info_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, 'chain-XYZ', False)
-
-    with pytest.raises(SystemExit):
-        App(b).info(types.RequestInfo())
-
-
-def test_check_tx_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, 'chain-XYZ', False)
-
-    with pytest.raises(SystemExit):
-        App(b).check_tx('some bytes')
-
-
-def test_begin_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, 'chain-XYZ', False)
-
-    with pytest.raises(SystemExit):
-        App(b).info(types.RequestBeginBlock())
-
-
-def test_deliver_tx_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, 'chain-XYZ', False)
-
-    with pytest.raises(SystemExit):
-        App(b).deliver_tx('some bytes')
-
-
-def test_end_block_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, 'chain-XYZ', False)
-
-    with pytest.raises(SystemExit):
-        App(b).info(types.RequestEndBlock())
-
-
-def test_commit_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, 'chain-XYZ', False)
-
-    with pytest.raises(SystemExit):
-        App(b).commit()
-
@@ -39,7 +39,7 @@ def flush_tarantool_db(connection, dbname):
         for _id in _all_data:
             if "assets" == s:
                 connection.run(connection.space(s).delete(_id[1]), only_data=False)
-            elif "abci_chains" == s:
+            elif s in ["blocks", "abci_chains"]:
                 connection.run(connection.space(s).delete(_id[2], only_data=False))
            else:
                connection.run(connection.space(s).delete(_id[0], only_data=False))

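The new branch exists because the test spaces key their tuples on different fields, so the cleanup has to pick the right field per space. A hedged restatement of that dispatch as a lookup table; the space names come from the diff, while the key positions are inferred from it rather than taken from a documented schema.

# Hypothetical restatement of the cleanup logic above, for illustration only.
PRIMARY_KEY_FIELD = {"assets": 1, "blocks": 2, "abci_chains": 2}

def flush_space(connection, space_name, rows):
    key_field = PRIMARY_KEY_FIELD.get(space_name, 0)
    for row in rows:
        connection.run(connection.space(space_name).delete(row[key_field]), only_data=False)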