initial singleton usage
Signed-off-by: Jürgen Eckel <juergen@riddleandcode.com>
This commit is contained in:
parent e69742808f
commit 5b601478fa
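The change the title refers to is making DataAccessor a process-wide singleton via the planetmint.utils.singleton.Singleton metaclass used in the hunks below (class DataAccessor(metaclass=Singleton)). The file planetmint/utils/singleton.py itself is not part of this diff; as a rough, hedged sketch, such a metaclass conventionally looks like this:

class Singleton(type):
    """Metaclass that caches the first instance of each class and hands it back on every later call."""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Build the instance only once; later calls such as DataAccessor() receive the cached object.
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]

With a cache like this, only the first construction's arguments ever take effect, which is consistent with the call sites below dropping the async_io argument.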
docker-compose-aio.yml (new file, 40 lines added)
@@ -0,0 +1,40 @@
# Copyright © 2020 Interplanetary Database Association e.V.,
# Planetmint and IPDB software contributors.
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
# Code is Apache-2.0 and docs are CC-BY-4.0

version: '2.2'

services:
  planetmint-all-in-one:
    image: planetmint/planetmint-aio:latest
    expose:
      - "22"
      - "9984"
      - "9985"
      - "26656"
      - "26657"
      - "26658"
    command: ["/usr/src/app/scripts/pre-config-planetmint.sh", "/usr/src/app/scripts/all-in-one.bash"]
    volumes:
      - ./integration/scripts:/usr/src/app/scripts
      - shared:/shared
    scale: ${SCALE:-4}

  test:
    build:
      context: .
      dockerfile: integration/python/Dockerfile
    depends_on:
      - planetmint-all-in-one
    command: ["/scripts/pre-config-test.sh", "/scripts/wait-for-planetmint.sh", "/scripts/test.sh", "pytest", "/src"]
    environment:
      SCALE: ${SCALE:-4}
    volumes:
      - ./integration/python/src:/src
      - ./integration/scripts:/scripts
      - ./integration/cli:/tests
      - shared:/shared

volumes:
  shared:
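Assuming this file is used from the repository root, the integration stack can be started with Docker Compose; the SCALE variable (default 4) controls how many planetmint-all-in-one containers come up, for example:

SCALE=4 docker-compose -f docker-compose-aio.yml up --build

The test service then runs the pre-config and wait-for-planetmint scripts before invoking pytest against /src, as wired up in the command entries above.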
@@ -28,14 +28,13 @@ from planetmint.backend.models.output import Output
from planetmint.model.dataaccessor import DataAccessor
from planetmint.config import Config
from planetmint.config_utils import load_validation_plugin
from planetmint.utils.singleton import Singleton

logger = logging.getLogger(__name__)


class Validator:
    def __init__(self, async_io: bool = False):
        self.async_io = async_io
        self.models = DataAccessor(async_io=async_io)
class Validator():
    def __init__(self):
        self.models = DataAccessor()
        self.validation = Validator._get_validation_method()

    @staticmethod
@@ -260,7 +259,7 @@ class Validator:
            value as the `voting_power`
        """
        validators = {}
        for validator in self.models.get_validators(height):
        for validator in self.models.get_validators(height=height):
            # NOTE: we assume that Tendermint encodes public key in base64
            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
            validators[public_key] = validator["voting_power"]
@@ -64,7 +64,6 @@ class DBConnection(metaclass=DBSingleton):
        backend: str = None,
        connection_timeout: int = None,
        max_tries: int = None,
        async_io: bool = False,
        **kwargs
    ):
        """Create a new :class:`~.Connection` instance.
@@ -420,11 +420,12 @@ def store_validator_set(conn, validators_update: dict):
        conn.connect().select(TARANT_TABLE_VALIDATOR_SETS, validators_update["height"], index="height", limit=1).data
    )
    unique_id = uuid4().hex if _validator is None or len(_validator) == 0 else _validator[0][0]
    conn.connect().upsert(
    result = conn.connect().upsert(
        TARANT_TABLE_VALIDATOR_SETS,
        (unique_id, validators_update["height"], validators_update["validators"]),
        op_list=[("=", 1, validators_update["height"]), ("=", 2, validators_update["validators"])],
    )
    return result


@register_query(TarantoolDBConnection)
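Binding the upsert response to result and returning it lets callers check that the validator-set write actually went through. A small usage sketch, hedged because the exact shape of the response object depends on the Tarantool driver:

response = store_validator_set(conn, {"height": 5, "validators": []})
assert response is not None  # the driver's upsert response, now surfaced to the caller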
@@ -17,12 +17,20 @@ from planetmint.backend.models.output import Output
from planetmint.backend.models.asset import Asset
from planetmint.backend.models.metadata import MetaData
from planetmint.backend.models.dbtransaction import DbTransaction
from planetmint.utils.singleton import Singleton


class DataAccessor:
    def __init__(self, database_connection=None, async_io: bool = False):
class DataAccessor(metaclass=Singleton):
    def __init__(self, database_connection=None):
        config_utils.autoconfigure()
        self.connection = database_connection if database_connection is not None else Connection(async_io=async_io)
        self.connection = database_connection if database_connection is not None else Connection()

    def close_connection(self):
        self.connection.close()

    def connect(self):
        self.connection.connect()

    def store_bulk_transactions(self, transactions):
        txns = []
@@ -131,7 +139,7 @@ class DataAccessor:
            value as the `voting_power`
        """
        validators = {}
        for validator in self.get_validators(height):
        for validator in self.get_validators(height = height):
            # NOTE: we assume that Tendermint encodes public key in base64
            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
            validators[public_key] = validator["voting_power"]
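With metaclass=Singleton, every DataAccessor() call in the process now yields the same object, and therefore the same Connection, which fits the removal of the async_io plumbing in the hunks above. A short sketch of the resulting behaviour, assuming an instance-caching metaclass like the one outlined near the top of this page:

from planetmint.model.dataaccessor import DataAccessor

first = DataAccessor()
second = DataAccessor()
assert first is second                        # one shared instance per process
assert first.connection is second.connection  # and one shared database connection

The test fixtures further down that construct, delete and re-construct DataAccessor (da_reset, the b fixture) exist to work around exactly this caching between tests.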
@@ -1,7 +1,7 @@
[pytest]
testpaths = tests/
norecursedirs = .* *.egg *.egg-info env* devenv* docs
addopts = -m "abci"
addopts = -m "not abci"
looponfailroots = planetmint tests
asyncio_mode = strict
markers =
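The two addopts lines above set opposite defaults: -m "abci" collects only the tests marked abci, while -m "not abci" skips them unless they are requested explicitly. Either default can be overridden from the command line, for example:

pytest -m abci          # run only the abci-marked tests
pytest -m "not abci"    # run everything except them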
@@ -157,14 +157,14 @@ def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk
    )
    tx_create_signed = tx_create.sign([alice.private_key])

    b.models.store_bulk_transactions([tx_create_signed])
    inputs = tx_create.to_inputs()
    # TRANSFER
    tx_transfer = Transfer.generate(
        tx_create.to_inputs(), [([alice.public_key], 50), ([alice.public_key], 50)], asset_ids=[tx_create.id]
        inputs, [([alice.public_key], 50), ([alice.public_key], 50)], asset_ids=[tx_create.id]
    )
    tx_transfer_signed = tx_transfer.sign([user_sk])

    b.models.store_bulk_transactions([tx_create_signed])

    assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
    assert len(tx_transfer_signed.outputs) == 2
    assert tx_transfer_signed.outputs[0].amount == 50
@@ -25,6 +25,96 @@ from planetmint.backend.connection import Connection

from tests.utils import generate_election, generate_validators


def mock_get_validators(height):
    return [
        {
            "public_key": {"value": "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=", "type": "ed25519-base64"},
            "voting_power": 10,
        }
    ]


@patch("planetmint.commands.utils.start")
def test_main_entrypoint(mock_start):
    from planetmint.commands.planetmint import main
    from planetmint.model.dataaccessor import DataAccessor

    da = DataAccessor
    del da
    main()

    assert mock_start.called


#@pytest.mark.bdb
def test_chain_migration_election_show_shows_inconclusive(b, test_abci_rpc ):

    from tests.utils import flush_db
    flush_db(b.models.connection, "dbname")
    validators = generate_validators([1] * 4)
    output = b.models.store_validator_set(1, [v["storage"] for v in validators])

    public_key = validators[0]["public_key"]
    private_key = validators[0]["private_key"]
    voter_keys = [v["private_key"] for v in validators]

    election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys)

    assert not run_election_show(Namespace(election_id=election.id), b)

    b.process_block(1, [election])
    b.models.store_bulk_transactions([election])

    assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

    b.models.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
    b.models.store_validator_set(2, [v["storage"] for v in validators])

    assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

    b.models.store_block(Block(height=2, transactions=[], app_hash="")._asdict())
    # TODO insert yet another block here when upgrading to Tendermint 0.22.4.

    assert run_election_show(Namespace(election_id=election.id), b) == "status=inconclusive"


@pytest.mark.bdb
def test_chain_migration_election_show_shows_concluded(b):
    validators = generate_validators([1] * 4)
    b.models.store_validator_set(1, [v["storage"] for v in validators])

    public_key = validators[0]["public_key"]
    private_key = validators[0]["private_key"]
    voter_keys = [v["private_key"] for v in validators]

    election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys)

    assert not run_election_show(Namespace(election_id=election.id), b)

    b.models.store_bulk_transactions([election])
    b.process_block(1, [election])

    assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

    b.models.store_abci_chain(1, "chain-X")
    b.models.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict())
    b.process_block(2, votes)

    assert (
        run_election_show(Namespace(election_id=election.id), b)
        == f'''status=concluded
chain_id=chain-X-migrated-at-height-1
app_hash=last_app_hash
validators=[{''.join([f"""
    {{
        "pub_key": {{
            "type": "tendermint/PubKeyEd25519",
            "value": "{v['public_key']}"
        }},
        "power": {v['storage']['voting_power']}
    }}{',' if i + 1 != len(validators) else ''}""" for i, v in enumerate(validators)])}
]'''
    )


def test_make_sure_we_dont_remove_any_command():
    # thanks to: http://stackoverflow.com/a/18161115/597097
@@ -59,13 +149,23 @@ def test_make_sure_we_dont_remove_any_command():
    assert parser.parse_args(["tendermint-version"]).command


@patch("planetmint.commands.utils.start")
def test_main_entrypoint(mock_start):
    from planetmint.commands.planetmint import main

    main()

    assert mock_start.called
@pytest.mark.bdb
def test_election_approve_called_with_bad_key(monkeypatch, caplog, b, bad_validator_path, new_validator, node_key, test_abci_rpc):
    from argparse import Namespace

    b, election_id = call_election(monkeypatch, b, new_validator, node_key, test_abci_rpc)

    # call run_upsert_validator_approve with args that point to the election, but a bad signing key
    args = Namespace(action="approve", election_id=election_id, sk=bad_validator_path, config={})

    with caplog.at_level(logging.ERROR):
        assert not run_election_approve(args, b, test_abci_rpc)
        assert (
            caplog.records[0].msg == "The key you provided does not match any of "
            "the eligible voters in this election."
        )


@patch("planetmint.config_utils.setup_logging")
@@ -444,11 +544,11 @@ def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, valid


@pytest.mark.bdb
def test_election_approve_without_tendermint(caplog, b, priv_validator_path, new_validator, node_key, test_abci_rpc):
def test_election_approve_without_tendermint(monkeypatch, caplog, b, priv_validator_path, new_validator, node_key, test_abci_rpc):
    from planetmint.commands.planetmint import run_election_approve
    from argparse import Namespace

    b, election_id = call_election(b, new_validator, node_key, test_abci_rpc)
    b, election_id = call_election(monkeypatch, b, new_validator, node_key, test_abci_rpc)

    # call run_election_approve with args that point to the election
    args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={})
@@ -460,11 +560,13 @@ def test_election_approve_without_tendermint(caplog, b, priv_validator_path, new
    assert b.models.get_transaction(approval_id)


from unittest import mock
@pytest.mark.bdb
def test_election_approve_failure(caplog, b, priv_validator_path, new_validator, node_key, test_abci_rpc):
def test_election_approve_failure(monkeypatch, caplog, b, priv_validator_path, new_validator, node_key, test_abci_rpc):
    from argparse import Namespace

    b, election_id = call_election(b, new_validator, node_key, test_abci_rpc)
    b, election_id = call_election(monkeypatch, b, new_validator, node_key, test_abci_rpc)

    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
        b.models.store_bulk_transactions([transaction])
@@ -480,91 +582,6 @@ def test_election_approve_failure(caplog, b, priv_validator_path, new_validator,
        assert caplog.records[0].msg == "Failed to commit vote"


@pytest.mark.bdb
def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new_validator, node_key, test_abci_rpc):
    from argparse import Namespace

    b, election_id = call_election(b, new_validator, node_key, test_abci_rpc)

    # call run_upsert_validator_approve with args that point to the election, but a bad signing key
    args = Namespace(action="approve", election_id=election_id, sk=bad_validator_path, config={})

    with caplog.at_level(logging.ERROR):
        assert not run_election_approve(args, b, test_abci_rpc)
        assert (
            caplog.records[0].msg == "The key you provided does not match any of "
            "the eligible voters in this election."
        )


@pytest.mark.bdb
def test_chain_migration_election_show_shows_inconclusive(b):
    validators = generate_validators([1] * 4)
    b.models.store_validator_set(1, [v["storage"] for v in validators])

    public_key = validators[0]["public_key"]
    private_key = validators[0]["private_key"]
    voter_keys = [v["private_key"] for v in validators]

    election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys)

    assert not run_election_show(Namespace(election_id=election.id), b)

    b.process_block(1, [election])
    b.models.store_bulk_transactions([election])

    assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

    b.models.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
    b.models.store_validator_set(2, [v["storage"] for v in validators])

    assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

    b.models.store_block(Block(height=2, transactions=[], app_hash="")._asdict())
    # TODO insert yet another block here when upgrading to Tendermint 0.22.4.

    assert run_election_show(Namespace(election_id=election.id), b) == "status=inconclusive"


@pytest.mark.bdb
def test_chain_migration_election_show_shows_concluded(b):
    validators = generate_validators([1] * 4)
    b.models.store_validator_set(1, [v["storage"] for v in validators])

    public_key = validators[0]["public_key"]
    private_key = validators[0]["private_key"]
    voter_keys = [v["private_key"] for v in validators]

    election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys)

    assert not run_election_show(Namespace(election_id=election.id), b)

    b.models.store_bulk_transactions([election])
    b.process_block(1, [election])

    assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

    b.models.store_abci_chain(1, "chain-X")
    b.models.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict())
    b.process_block(2, votes)

    assert (
        run_election_show(Namespace(election_id=election.id), b)
        == f'''status=concluded
chain_id=chain-X-migrated-at-height-1
app_hash=last_app_hash
validators=[{''.join([f"""
    {{
        "pub_key": {{
            "type": "tendermint/PubKeyEd25519",
            "value": "{v['public_key']}"
        }},
        "power": {v['storage']['voting_power']}
    }}{',' if i + 1 != len(validators) else ''}""" for i, v in enumerate(validators)])}
]'''
    )


def test_bigchain_tendermint_version(capsys):
    from planetmint.commands.planetmint import run_tendermint_version
@@ -578,32 +595,29 @@ def test_bigchain_tendermint_version(capsys):
    assert sorted(output_config["tendermint"]) == sorted(__tm_supported_versions__)


def mock_get_validators(height):
    return [
        {
            "public_key": {"value": "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=", "type": "ed25519-base64"},
            "voting_power": 10,
        }
    ]

def call_election(monkeypatch, b, new_validator, node_key, abci_rpc):

def call_election(b, new_validator, node_key, abci_rpc):
    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
    def mock_write(self, modelist, endpoint, mode_commit, transaction, mode):
        b.models.store_bulk_transactions([transaction])
        return (202, "")
    with monkeypatch.context() as m:
        m.setattr("planetmint.model.dataaccessor.DataAccessor.get_validators", mock_get_validators)
        m.setattr("planetmint.abci.rpc.ABCI_RPC.write_transaction", mock_write)

    # patch the validator set. We now have one validator with power 10
    b.models.get_validators = mock_get_validators
    abci_rpc.write_transaction = mock_write
        # patch the validator set. We now have one validator with power 10
        #b.models.get_validators = mock_get_validators
        #abci_rpc.write_transaction = mock_write

    # our voters is a list of length 1, populated from our mocked validator
    voters = b.get_recipients_list()
    # and our voter is the public key from the voter list
    voter = node_key.public_key
    valid_election = ValidatorElection.generate([voter], voters, new_validator, None).sign([node_key.private_key])
        # our voters is a list of length 1, populated from our mocked validator
        voters = b.get_recipients_list()
        # and our voter is the public key from the voter list
        voter = node_key.public_key
        valid_election = ValidatorElection.generate([voter], voters, new_validator, None).sign([node_key.private_key])

    # patch in an election with a vote issued to the user
    election_id = valid_election.id
    b.models.store_bulk_transactions([valid_election])
        # patch in an election with a vote issued to the user
        election_id = valid_election.id
        b.models.store_bulk_transactions([valid_election])

    return b, election_id
        m.undo()
        return b, election_id
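The reworked call_election above scopes its patches with monkeypatch.context(): leaving the with-block restores DataAccessor.get_validators and ABCI_RPC.write_transaction automatically, so the m.undo() before the return is defensive rather than required. A minimal sketch of the pattern with a hypothetical test, not taken from this commit:

def test_with_scoped_patch(monkeypatch):
    with monkeypatch.context() as m:
        # the patch is active only inside this block
        m.setattr(
            "planetmint.model.dataaccessor.DataAccessor.get_validators",
            lambda self, height=None: [],
        )
        # ... exercise the code under test against the patched validator set ...
    # on exit the original method is restored; no explicit m.undo() is needed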
@@ -27,7 +27,10 @@ from transactions.common import crypto
from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
from planetmint.abci.utils import key_from_base64
from planetmint.backend import schema, query
from transactions.common.crypto import key_pair_from_ed25519_key, public_key_from_ed25519_key
from transactions.common.crypto import (
    key_pair_from_ed25519_key,
    public_key_from_ed25519_key,
)
from planetmint.abci.block import Block
from planetmint.abci.rpc import MODE_LIST
from tests.utils import gen_vote
@@ -107,7 +110,10 @@ def _configure_planetmint(request):
    # backend = request.config.getoption('--database-backend')
    backend = "tarantool_db"

    config = {"database": Config().get_db_map(backend), "tendermint": Config()._private_real_config["tendermint"]}
    config = {
        "database": Config().get_db_map(backend),
        "tendermint": Config()._private_real_config["tendermint"],
    }
    config["database"]["name"] = test_db_name
    config = config_utils.env_config(config)
    config_utils.set_config(config)
@@ -133,6 +139,28 @@ def _setup_database(_configure_planetmint): # TODO Here is located setup databa
    print("Finished deleting `{}`".format(dbname))


@pytest.fixture
def da_reset(_setup_database):
    from transactions.common.memoize import to_dict, from_dict
    from transactions.common.transaction import Transaction
    from .utils import flush_db
    from planetmint.model.dataaccessor import DataAccessor

    da = DataAccessor()
    del da
    da = DataAccessor()
    da.close_connection()
    da.connect()

    yield
    dbname = Config().get()["database"]["name"]
    flush_db(da.connection, dbname)

    to_dict.cache_clear()
    from_dict.cache_clear()
    Transaction._input_valid.cache_clear()


@pytest.fixture
def _bdb(_setup_database):
    from transactions.common.memoize import to_dict, from_dict
@@ -140,6 +168,7 @@ def _bdb(_setup_database):
    from .utils import flush_db
    from planetmint.config import Config

    conn = Connection()
    conn.close()
    conn.connect()
@@ -273,6 +302,9 @@ def test_abci_rpc():
def b():
    from planetmint.application import Validator

    old_validator_instance = Validator()
    del old_validator_instance.models
    del old_validator_instance
    validator = Validator()
    validator.models.connection.close()
    validator.models.connection.connect()
@@ -296,7 +328,12 @@ def mock_get_validators(network_validators):
    def validator_set(height):
        validators = []
        for public_key, power in network_validators.items():
            validators.append({"public_key": {"type": "ed25519-base64", "value": public_key}, "voting_power": power})
            validators.append(
                {
                    "public_key": {"type": "ed25519-base64", "value": public_key},
                    "voting_power": power,
                }
            )
        return validators

    return validator_set
@@ -319,7 +356,10 @@ def signed_create_tx(alice, create_tx):
@pytest.fixture
def posted_create_tx(b, signed_create_tx, test_abci_rpc):
    res = test_abci_rpc.post_transaction(
        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT
        MODE_LIST,
        test_abci_rpc.tendermint_rpc_endpoint,
        signed_create_tx,
        BROADCAST_TX_COMMIT,
    )
    assert res.status_code == 200
    return signed_create_tx
@@ -339,7 +379,9 @@ def double_spend_tx(signed_create_tx, carol_pubkey, user_sk):
    from transactions.types.assets.transfer import Transfer

    inputs = signed_create_tx.to_inputs()
    tx = Transfer.generate(inputs, [([carol_pubkey], 1)], asset_ids=[signed_create_tx.id])
    tx = Transfer.generate(
        inputs, [([carol_pubkey], 1)], asset_ids=[signed_create_tx.id]
    )
    return tx.sign([user_sk])
@@ -356,7 +398,9 @@ def inputs(user_pk, b, alice):
    for height in range(1, 4):
        transactions = [
            Create.generate(
                [alice.public_key], [([user_pk], 1)], metadata=multihash(marshal({"data": f"{random.random()}"}))
                [alice.public_key],
                [([user_pk], 1)],
                metadata=multihash(marshal({"data": f"{random.random()}"})),
            ).sign([alice.private_key])
            for _ in range(10)
        ]
@@ -428,7 +472,13 @@ def _abci_http(request):


@pytest.fixture
def abci_http(_setup_database, _configure_planetmint, abci_server, tendermint_host, tendermint_port):
def abci_http(
    _setup_database,
    _configure_planetmint,
    abci_server,
    tendermint_host,
    tendermint_port,
):
    import requests
    import time
@@ -553,144 +603,15 @@ def utxoset(dummy_unspent_outputs, utxo_collection):

    num_rows_before_operation = utxo_collection.select().rowcount
    for utxo in dummy_unspent_outputs:
        res = utxo_collection.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
        res = utxo_collection.insert(
            (uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo)
        )
        assert res
    num_rows_after_operation = utxo_collection.select().rowcount
    assert num_rows_after_operation == num_rows_before_operation + 3
    return dummy_unspent_outputs, utxo_collection


@pytest.fixture
def network_validators(node_keys):
    validator_pub_power = {}
    voting_power = [8, 10, 7, 9]
    for pub, priv in node_keys.items():
        validator_pub_power[pub] = voting_power.pop()

    return validator_pub_power


@pytest.fixture
def network_validators58(network_validators):
    network_validators_base58 = {}
    for p, v in network_validators.items():
        p = public_key_from_ed25519_key(key_from_base64(p))
        network_validators_base58[p] = v

    return network_validators_base58


@pytest.fixture
def node_key(node_keys):
    (pub, priv) = list(node_keys.items())[0]
    return key_pair_from_ed25519_key(key_from_base64(priv))


@pytest.fixture
def ed25519_node_keys(node_keys):
    (pub, priv) = list(node_keys.items())[0]
    node_keys_dict = {}
    for pub, priv in node_keys.items():
        key = key_pair_from_ed25519_key(key_from_base64(priv))
        node_keys_dict[key.public_key] = key

    return node_keys_dict


@pytest.fixture
def node_keys():
    return {
        "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=": "cM5oW4J0zmUSZ/+QRoRlincvgCwR0pEjFoY//ZnnjD3Mv8Nqy8q6VdnOFI0XDHhwtFcqRIz0Y8rtjSdngUTKUw==",
        "GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=": "mdz7IjP6mGXs6+ebgGJkn7kTXByUeeGhV+9aVthLuEAYiKNTsYFxXKJVRwHQbBZlvELF2TbFXqnF28vbiz8Cow==",
        "JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=": "83VINXdj2ynOHuhvSZz5tGuOE5oYzIi0mEximkX1KYMlt/Csu8JUjA4+by2Pz3fqSLshhuYYeM+IpvqcBl6BEA==",
        "PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=": "uz8bYgoL4rHErWT1gjjrnA+W7bgD/uDQWSRKDmC8otc95wnnxJo1GxYlmh0OaqOkJaobpu13BcUcvITjRFiVgw==",
    }


@pytest.fixture
def priv_validator_path(node_keys):
    (public_key, private_key) = list(node_keys.items())[0]
    priv_validator = {
        "address": "84F787D95E196DC5DE5F972666CFECCA36801426",
        "pub_key": {"type": "AC26791624DE60", "value": public_key},
        "last_height": 0,
        "last_round": 0,
        "last_step": 0,
        "priv_key": {"type": "954568A3288910", "value": private_key},
    }
    fd, path = tempfile.mkstemp()
    socket = os.fdopen(fd, "w")
    json.dump(priv_validator, socket)
    socket.close()
    return path


@pytest.fixture
def bad_validator_path(node_keys):
    (public_key, private_key) = list(node_keys.items())[1]
    priv_validator = {
        "address": "84F787D95E196DC5DE5F972666CFECCA36801426",
        "pub_key": {"type": "AC26791624DE60", "value": public_key},
        "last_height": 0,
        "last_round": 0,
        "last_step": 0,
        "priv_key": {"type": "954568A3288910", "value": private_key},
    }
    fd, path = tempfile.mkstemp()
    socket = os.fdopen(fd, "w")
    json.dump(priv_validator, socket)
    socket.close()
    return path


@pytest.fixture
def validators(b, node_keys):
    from planetmint.backend import query
    import time

    def timestamp(): # we need this to force unique election_ids for setup and teardown of fixtures
        return str(time.time())

    height = get_block_height(b)

    original_validators = b.models.get_validators()

    (public_key, private_key) = list(node_keys.items())[0]

    validator_set = [
        {
            "address": "F5426F0980E36E03044F74DD414248D29ABCBDB2",
            "public_key": {"value": public_key, "type": "ed25519-base64"},
            "voting_power": 10,
        }
    ]

    validator_update = {"validators": validator_set, "height": height + 1, "election_id": f"setup_at_{timestamp()}"}

    query.store_validator_set(b.models.connection, validator_update)

    yield

    height = get_block_height(b)

    validator_update = {
        "validators": original_validators,
        "height": height,
        "election_id": f"teardown_at_{timestamp()}",
    }

    query.store_validator_set(b.models.connection, validator_update)


def get_block_height(b):
    if b.models.get_latest_block():
        height = b.models.get_latest_block()["height"]
    else:
        height = 0

    return height


@pytest.fixture
def new_validator():
    public_key = "1718D2DBFF00158A0852A17A01C78F4DCF3BA8E4FB7B8586807FAC182A535034"
@@ -698,20 +619,30 @@ def new_validator():
    node_id = "fake_node_id"

    return [
        {"data": {"public_key": {"value": public_key, "type": "ed25519-base16"}, "power": power, "node_id": node_id}}
        {
            "data": {
                "public_key": {"value": public_key, "type": "ed25519-base16"},
                "power": power,
                "node_id": node_id,
            }
        }
    ]


@pytest.fixture
def valid_upsert_validator_election(b_mock, node_key, new_validator):
    voters = b_mock.get_recipients_list()
    return ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key])
    return ValidatorElection.generate(
        [node_key.public_key], voters, new_validator, None
    ).sign([node_key.private_key])


@pytest.fixture
def valid_upsert_validator_election_2(b_mock, node_key, new_validator):
    voters = b_mock.get_recipients_list()
    return ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key])
    return ValidatorElection.generate(
        [node_key.public_key], voters, new_validator, None
    ).sign([node_key.private_key])


@pytest.fixture
@@ -720,20 +651,28 @@ def ongoing_validator_election(b, valid_upsert_validator_election, ed25519_node_
    genesis_validators = {"validators": validators, "height": 0}
    query.store_validator_set(b.models.connection, genesis_validators)
    b.models.store_bulk_transactions([valid_upsert_validator_election])
    query.store_election(b.models.connection, valid_upsert_validator_election.id, 1, is_concluded=False)
    block_1 = Block(app_hash="hash_1", height=1, transactions=[valid_upsert_validator_election.id])
    query.store_election(
        b.models.connection, valid_upsert_validator_election.id, 1, is_concluded=False
    )
    block_1 = Block(
        app_hash="hash_1", height=1, transactions=[valid_upsert_validator_election.id]
    )
    b.models.store_block(block_1._asdict())
    return valid_upsert_validator_election


@pytest.fixture
def ongoing_validator_election_2(b, valid_upsert_validator_election_2, ed25519_node_keys):
def ongoing_validator_election_2(
    b, valid_upsert_validator_election_2, ed25519_node_keys
):
    validators = b.models.get_validators(height=1)
    genesis_validators = {"validators": validators, "height": 0, "election_id": None}
    query.store_validator_set(b.models.connection, genesis_validators)

    b.models.store_bulk_transactions([valid_upsert_validator_election_2])
    block_1 = Block(app_hash="hash_2", height=1, transactions=[valid_upsert_validator_election_2.id])
    block_1 = Block(
        app_hash="hash_2", height=1, transactions=[valid_upsert_validator_election_2.id]
    )
    b.models.store_block(block_1._asdict())
    return valid_upsert_validator_election_2
@@ -861,7 +800,9 @@ def signed_2_0_transfer_tx():
        ],
        "operation": "TRANSFER",
        "metadata": "QmTjWHzypFxE8uuXJXMJQJxgAEKjoWmQimGiutmPyJ6CAB",
        "asset": {"id": "334014a29d99a488789c711b7dc5fceb534d1a9290b14d0270dbe6b60e2f036e"},
        "asset": {
            "id": "334014a29d99a488789c711b7dc5fceb534d1a9290b14d0270dbe6b60e2f036e"
        },
        "version": "2.0",
        "id": "e577641b0e2eb619e282f802516ce043e9d4af51dd4b6c959e18246e85cae2a6",
    }
@@ -5,10 +5,13 @@ from planetmint.abci.block import Block
from transactions.types.elections.election import Election
from transactions.types.elections.chain_migration_election import ChainMigrationElection
from transactions.types.elections.validator_election import ValidatorElection

from planetmint.model.dataaccessor import DataAccessor

@pytest.mark.bdb
def test_process_block_concludes_all_elections(b):
    del b.models
    b.models = DataAccessor()
    b.models.connect()
    validators = generate_validators([1] * 4)
    b.models.store_validator_set(1, [v["storage"] for v in validators])
@@ -7,6 +7,15 @@ import pytest

BLOCKS_ENDPOINT = "/api/v1/blocks/"

@pytest.mark.bdb
@pytest.mark.usefixtures("inputs")
def test_get_latest_block(client):
    res = client.get(BLOCKS_ENDPOINT + "latest")
    assert res.status_code == 200
    assert len(res.json["transaction_ids"]) == 10
    assert res.json["app_hash"] == "hash3"
    assert res.json["height"] == 3


@pytest.mark.bdb
@pytest.mark.usefixtures("inputs")
@@ -55,14 +64,6 @@ def test_get_blocks_by_txid_endpoint_returns_400_bad_query_params(client):
    assert res.json == {"message": "Unknown arguments: status"}


@pytest.mark.bdb
@pytest.mark.usefixtures("inputs")
def test_get_latest_block(client):
    res = client.get(BLOCKS_ENDPOINT + "latest")
    assert res.status_code == 200
    assert len(res.json["transaction_ids"]) == 10
    assert res.json["app_hash"] == "hash3"
    assert res.json["height"] == 3


@pytest.mark.bdb