Mirror of https://github.com/planetmint/planetmint.git (synced 2025-03-30 15:08:31 +00:00)
Commit 6d166747d1 (parent 7f3977b954): blackified

Signed-off-by: Jürgen Eckel <juergen@riddleandcode.com>
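The changes below are purely mechanical: the code base was reformatted with the Black code formatter (stray spaces inside brackets removed, over-long calls wrapped, dict literals expanded). A minimal sketch of reproducing this kind of rewrite with Black's Python API follows; the line-length value is an assumption, since the project's actual Black configuration is not part of this diff.

# Minimal sketch (not part of the commit): apply a Black-style rewrite to one of
# the lines touched below. The line length of 119 is assumed, not taken from the
# repository's configuration.
import black

SOURCE = "b.models.store_bulk_transactions( [signed_create_tx])\n"

formatted = black.format_str(SOURCE, mode=black.Mode(line_length=119))
print(formatted, end="")  # prints: b.models.store_bulk_transactions([signed_create_tx])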
@@ -51,7 +51,7 @@ def test_load_validation_plugin_raises_with_invalid_subclass(monkeypatch):
monkeypatch.setattr(
config_utils, "iter_entry_points", lambda *args: [type("entry_point", (object,), {"load": lambda: object})]
)my_config
)

with pytest.raises(TypeError):
# Since the function is decorated with `lru_cache`, we need to

@@ -317,7 +317,7 @@ def test_write_config():
def test_database_envs(env_name, env_value, config_key, monkeypatch):
monkeypatch.setattr("os.environ", {env_name: env_value})
planetmint.config_utils.autoconfigure()
Config

expected_config = Config().get()
expected_config["database"][config_key] = env_value

@@ -2,4 +2,3 @@
# Planetmint and IPDB software contributors.
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
# Code is Apache-2.0 and docs are CC-BY-4.0

@@ -169,9 +169,7 @@ class App(BaseApplication):
self.abort_if_abci_chain_is_not_synced()

logger.debug("deliver_tx: %s", raw_transaction)
transaction = self.validator.is_valid_transaction(
decode_transaction(raw_transaction), self.block_transactions
)
transaction = self.validator.is_valid_transaction(decode_transaction(raw_transaction), self.block_transactions)

if not transaction:
logger.debug("deliver_tx: INVALID")

@@ -225,7 +223,7 @@ class App(BaseApplication):

# register a new block only when new transactions are received
if self.block_txn_ids:
self.models.store_bulk_transactions( self.block_transactions)
self.models.store_bulk_transactions(self.block_transactions)

block = Block(app_hash=self.block_txn_hash, height=self.new_height, transactions=self.block_txn_ids)
# NOTE: storing the block should be the last operation during commit

@@ -247,4 +245,3 @@ class App(BaseApplication):
self.events_queue.put(event)

return ResponseCommit(data=data)
@@ -1,2 +1,2 @@
from .validator import Validator
from .basevalidationrules import BaseValidationRules
from .basevalidationrules import BaseValidationRules

@@ -43,7 +43,6 @@ class Validator:
self.models = Models()
self.validation = Validator._get_validationmethod()

@staticmethod
def _get_validationmethod():
validationPlugin = Config().get().get("validation_plugin")

@@ -323,7 +322,7 @@ class Validator:

def count_votes(self, election_pk, transactions):
votes = 0
for txn in transactions :
for txn in transactions:
if txn.operation == Vote.OPERATION:
for output in txn.outputs:
# NOTE: We enforce that a valid vote to election id will have only

@@ -552,4 +551,4 @@ class Validator:
# NOTE: the pre-commit state is always at most 1 block ahead of the commited state
if latest_block["height"] < pre_commit["height"]:
self.rollback_election(pre_commit["height"], pre_commit["transactions"])
self.models.delete_transactions(pre_commit["transactions"])
self.models.delete_transactions(pre_commit["transactions"])

@@ -15,8 +15,6 @@ import sys
import planetmint

from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
from transactions.common.exceptions import DatabaseDoesNotExist, ValidationError
from transactions.types.elections.vote import Vote

@@ -230,7 +228,7 @@ def run_election_approve(args, validator: Validator):
return False

def run_election_show(args, validator:Validator):
def run_election_show(args, validator: Validator):
"""Retrieves information about an election

:param args: dict

@@ -285,14 +283,14 @@ def run_drop(args):
def run_start(args):
"""Start the processes to run the node"""
logger.info("Planetmint Version %s", planetmint.version.__version__)

# Configure Logging
setup_logging()

if not args.skip_initialize_database:
logger.info("Initializing database")
_run_init()

validator = Validator()
validator.rollback()
@@ -21,7 +21,7 @@ from planetmint.backend.models.dbtransaction import DbTransaction

class Models:
def __init__(self, database_connection = None):
def __init__(self, database_connection=None):
config_utils.autoconfigure()
self.connection = database_connection if database_connection is not None else Connection()

@@ -82,8 +82,6 @@ class Models:
elif spent is False:
return self.fastquery.filter_spent_outputs(outputs)

def store_block(self, block):
"""Create a new block."""

@@ -230,7 +228,6 @@ class Models:
# flatten and return all found assets
return list(chain.from_iterable([Asset.list_to_dict(tx.assets) for tx in asset_txs]))

def get_metadata(self, txn_ids) -> list[MetaData]:
"""Return a list of metadata that match the transaction ids (txn_ids)

@@ -277,11 +274,10 @@ class Models:
def store_abci_chain(self, height, chain_id, is_synced=True):
return backend.query.store_abci_chain(self.connection, height, chain_id, is_synced)

def get_asset_tokens_for_public_key(self, transaction_id, election_pk ):
def get_asset_tokens_for_public_key(self, transaction_id, election_pk):
txns = backend.query.get_asset_tokens_for_public_key(self.connection, transaction_id, election_pk)
return txns

@property
def fastquery(self):
return FastQuery(self.connection)

@@ -1,2 +0,0 @@

class
@@ -10,11 +10,12 @@ from transactions.types.assets.transfer import Transfer
from transactions.types.assets.compose import Compose
from transactions.types.assets.decompose import Decompose

def test_asset_transfer(b, signed_create_tx, user_pk, user_sk, _bdb):
tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], [signed_create_tx.id])
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [signed_create_tx])
b.models.store_bulk_transactions([signed_create_tx])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert tx_transfer_signed.assets[0]["id"] == signed_create_tx.id

@@ -32,7 +33,7 @@ def test_validate_transfer_asset_id_mismatch(b, signed_create_tx, user_pk, user_
tx_transfer.assets[0]["id"] = "a" * 64
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [signed_create_tx])
b.models.store_bulk_transactions([signed_create_tx])

with pytest.raises(AssetIdMismatch):
b.validate_transaction(tx_transfer_signed)

@@ -72,7 +73,7 @@ def test_asset_id_mismatch(alice, user_pk):

def test_compose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx, _bdb):
validated = b.validate_transaction(signed_create_tx)
b.models.store_bulk_transactions( [validated])
b.models.store_bulk_transactions([validated])

inputs = signed_create_tx.to_inputs()
assets = [signed_create_tx.id, "QmW5GVMW98D3mktSDfWHS8nX2UiCd8gP1uCiujnFX4yK8n"]

@@ -83,7 +84,7 @@ def test_compose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx

def test_decompose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx, _bdb):
validated = b.validate_transaction(signed_create_tx)
b.models.store_bulk_transactions( [validated])
b.models.store_bulk_transactions([validated])

inputs = signed_create_tx.to_inputs()
assets = [

@@ -101,7 +102,7 @@ def test_decompose_valid_transactions(b, user_pk, user_sk, alice, signed_create_

def test_create_decompose_output(b, user_pk, user_sk, signed_create_tx, _bdb):
validated = b.validate_transaction(signed_create_tx)
b.models.store_bulk_transactions( [validated])
b.models.store_bulk_transactions([validated])

inputs = signed_create_tx.to_inputs()
assets = [

@@ -115,7 +116,7 @@ def test_create_decompose_output(b, user_pk, user_sk, signed_create_tx, _bdb):
)
decompose_transaction.sign([user_sk])
validated_decompose = b.validate_transaction(decompose_transaction)
b.models.store_bulk_transactions( [validated_decompose])
b.models.store_bulk_transactions([validated_decompose])

create_inputs = decompose_transaction.to_inputs([0])
create_tx = Create.generate([user_pk], recipients=[([user_pk], 1)], assets=[assets[0]], inputs=create_inputs)

@@ -140,6 +141,6 @@ def test_v_2_0_validation_create_invalid(b, signed_2_0_create_tx_assets, _bdb):

def test_v_2_0_validation_transfer(b, signed_2_0_create_tx, signed_2_0_transfer_tx, _bdb):
validated = b.validate_transaction(signed_2_0_create_tx)
b.models.store_bulk_transactions( [validated])
b.models.store_bulk_transactions([validated])
assert validated.to_dict() == signed_2_0_create_tx
assert b.validate_transaction(signed_2_0_transfer_tx).to_dict() == signed_2_0_transfer_tx
@@ -137,7 +137,7 @@ def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk,
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1

@@ -163,7 +163,7 @@ def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk
)
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 2

@@ -190,7 +190,7 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
)
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1

@@ -201,7 +201,7 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
assert len(condition["condition"]["details"]["subconditions"]) == 2

assert len(tx_transfer_signed.inputs) == 1
b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -227,7 +227,7 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
)
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 2

@@ -240,7 +240,7 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, u

assert len(tx_transfer_signed.inputs) == 1

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -265,7 +265,7 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1

@@ -276,7 +276,7 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk
assert "subconditions" in ffill
assert len(ffill["subconditions"]) == 2

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -299,14 +299,14 @@ def test_multiple_in_single_own_single_out_single_own_transfer(alice, b, user_pk
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
assert tx_transfer_signed.outputs[0].amount == 100
assert len(tx_transfer_signed.inputs) == 2

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -331,7 +331,7 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1

@@ -345,7 +345,7 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_
assert len(ffill_fid0["subconditions"]) == 2
assert len(ffill_fid1["subconditions"]) == 2

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -371,7 +371,7 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
assert tx_transfer_signed.outputs[0].amount == 100

@@ -383,7 +383,7 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk
assert "subconditions" in ffill_fid1
assert len(ffill_fid1["subconditions"]) == 2

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@@ -410,7 +410,7 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
tx_create.to_inputs(), [([alice.public_key], 50), ([alice.public_key, user_pk], 50)], asset_ids=[tx_create.id]
)
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])
b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 2

@@ -430,7 +430,7 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
assert "subconditions" in ffill_fid1
assert len(ffill_fid1["subconditions"]) == 2

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -466,7 +466,7 @@ def test_multiple_in_different_transactions(alice, b, user_pk, user_sk):
)
tx_transfer2_signed = tx_transfer2.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed, tx_transfer1_signed])
b.models.store_bulk_transactions([tx_create_signed, tx_transfer1_signed])

assert b.validate_transaction(tx_transfer2_signed) == tx_transfer2_signed
assert len(tx_transfer2_signed.outputs) == 1

@@ -491,7 +491,7 @@ def test_amount_error_transfer(alice, b, user_pk, user_sk):
)
tx_create_signed = tx_create.sign([alice.private_key])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

# TRANSFER
# output amount less than input amount

@@ -529,12 +529,12 @@ def test_threshold_same_public_key(alice, b, user_pk, user_sk):
# TRANSFER
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk, user_sk])
b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

# assert tx_transfer_signed.validate(b) == tx_transfer_signed
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
# tx_transfer_signed.validate(b)
b.validate_transaction(tx_transfer_signed)

@@ -554,13 +554,13 @@ def test_sum_amount(alice, b, user_pk, user_sk):
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 3)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
assert tx_transfer_signed.outputs[0].amount == 3

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

@@ -581,13 +581,13 @@ def test_divide(alice, b, user_pk, user_sk):
)
tx_transfer_signed = tx_transfer.sign([user_sk])

b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 3
for output in tx_transfer_signed.outputs:
assert output.amount == 1

b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@@ -282,7 +282,7 @@ def test_run_recover(b, alice, bob, test_models):
).sign([bob.private_key])

# store the transactions
b.models.store_bulk_transactions( [tx1, tx2])
b.models.store_bulk_transactions([tx1, tx2])

# create a random block
block8 = Block(app_hash="random_app_hash1", height=8, transactions=["txid_doesnt_matter"])._asdict()

@@ -337,7 +337,7 @@ def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_valida
# from planetmint.abci.rpc import write_transaction

def mock_write(modelist, endpoint, mode_commit, transaction, mode):
b.models.store_bulk_transactions( [transaction])
b.models.store_bulk_transactions([transaction])
return (202, "")

b.models.get_validators = mock_get_validators

@@ -372,7 +372,7 @@ def test_election_new_chain_migration_with_tendermint(b, priv_validator_path, us
@pytest.mark.skip(reason="mock_write overwrite doesn't work")
def test_election_new_chain_migration_without_tendermint(caplog, b, priv_validator_path, user_sk):
def mock_write(tx, mode):
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])
return (202, "")

b.models.get_validators = mock_get_validators

@@ -527,7 +527,7 @@ def test_chain_migration_election_show_shows_inconclusive(b):
assert not run_election_show(Namespace(election_id=election.id), b)

b.process_block(1, [election])
b.models.store_bulk_transactions( [election])
b.models.store_bulk_transactions([election])

assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

@@ -555,7 +555,7 @@ def test_chain_migration_election_show_shows_concluded(b):

assert not run_election_show(Namespace(election_id=election.id), b)

b.models.store_bulk_transactions( [election])
b.models.store_bulk_transactions([election])
b.process_block(1, [election])

assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"

@@ -605,7 +605,7 @@ def mock_get_validators(height):

def call_election(b, new_validator, node_key):
def mock_write(tx, mode):
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])
return (202, "")

# patch the validator set. We now have one validator with power 10

@@ -620,6 +620,6 @@ def call_election(b, new_validator, node_key):

# patch in an election with a vote issued to the user
election_id = valid_election.id
b.models.store_bulk_transactions( [valid_election])
b.models.store_bulk_transactions([valid_election])

return b, election_id
@@ -255,19 +255,25 @@ def test_models():

return Models()


@pytest.fixture
def test_validator():
from planetmint.application import Validator

return Validator()


@pytest.fixture
def test_abci_rpc():
from planetmint.abci.rpc import ABCI_RPC

return ABCI_RPC()


@pytest.fixture
def b():
from planetmint.application import Validator

return Validator()

@@ -310,7 +316,9 @@ def signed_create_tx(alice, create_tx):

@pytest.fixture
def posted_create_tx(b, signed_create_tx, test_abci_rpc):
res = test_abci_rpc.post_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT)
res = test_abci_rpc.post_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT
)
assert res.status_code == 200
return signed_create_tx

@@ -353,7 +361,7 @@ def inputs(user_pk, b, alice):
tx_ids = [tx.id for tx in transactions]
block = Block(app_hash="hash" + str(height), height=height, transactions=tx_ids)
b.models.store_block(block._asdict())
b.models.store_bulk_transactions( transactions)
b.models.store_bulk_transactions(transactions)


@pytest.fixture

@@ -715,7 +723,7 @@ def ongoing_validator_election(b, valid_upsert_validator_election, ed25519_node_
validators = b.models.get_validators(height=1)
genesis_validators = {"validators": validators, "height": 0}
query.store_validator_set(b.models.connection, genesis_validators)
b.models.store_bulk_transactions( [valid_upsert_validator_election])
b.models.store_bulk_transactions([valid_upsert_validator_election])
query.store_election(b.models.connection, valid_upsert_validator_election.id, 1, is_concluded=False)
block_1 = Block(app_hash="hash_1", height=1, transactions=[valid_upsert_validator_election.id])
b.models.store_block(block_1._asdict())

@@ -728,7 +736,7 @@ def ongoing_validator_election_2(b, valid_upsert_validator_election_2, ed25519_n
genesis_validators = {"validators": validators, "height": 0, "election_id": None}
query.store_validator_set(b.models.connection, genesis_validators)

b.models.store_bulk_transactions( [valid_upsert_validator_election_2])
b.models.store_bulk_transactions([valid_upsert_validator_election_2])
block_1 = Block(app_hash="hash_2", height=1, transactions=[valid_upsert_validator_election_2.id])
b.models.store_block(block_1._asdict())
return valid_upsert_validator_election_2
@@ -29,7 +29,7 @@ class TestBigchainApi(object):
tx = Create.generate([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

transfer_tx = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], asset_ids=[tx.id])
transfer_tx = transfer_tx.sign([alice.private_key])

@@ -39,13 +39,13 @@ class TestBigchainApi(object):
with pytest.raises(DoubleSpend):
b.validate_transaction(transfer_tx2, [transfer_tx])

b.models.store_bulk_transactions( [transfer_tx])
b.models.store_bulk_transactions([transfer_tx])

with pytest.raises(DoubleSpend):
b.validate_transaction(transfer_tx2)

with pytest.raises(CriticalDoubleSpend):
b.models.store_bulk_transactions( [transfer_tx2])
b.models.store_bulk_transactions([transfer_tx2])

def test_double_inclusion(self, b, alice):
from tarantool.error import DatabaseError

@@ -56,13 +56,13 @@ class TestBigchainApi(object):
tx = Create.generate([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])
if isinstance(b.models.connection, TarantoolDBConnection):
with pytest.raises(CriticalDoubleSpend):
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])
else:
with pytest.raises(OperationError):
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

@pytest.mark.usefixtures("inputs")
def test_non_create_input_not_found(self, b, user_pk):

@@ -82,7 +82,7 @@ class TestBigchainApi(object):
asset1 = {"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}

tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=[asset1]).sign([alice.private_key])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

tx_from_db = b.models.get_transaction(tx.id)

@@ -124,7 +124,7 @@ class TestTransactionValidation(object):
def test_non_create_double_spend(self, b, signed_create_tx, signed_transfer_tx, double_spend_tx):
from transactions.common.exceptions import DoubleSpend

b.models.store_bulk_transactions( [signed_create_tx, signed_transfer_tx])
b.models.store_bulk_transactions([signed_create_tx, signed_transfer_tx])

with pytest.raises(DoubleSpend):
b.validate_transaction(double_spend_tx)

@@ -167,7 +167,7 @@ class TestMultipleInputs(object):

tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
tx = tx.sign([alice.private_key])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

owned_input = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
input_tx = b.models.get_transaction(owned_input.txid)
@@ -189,7 +189,7 @@ class TestMultipleInputs(object):

tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
tx = tx.sign([alice.private_key])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

# get input
tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()

@@ -208,7 +208,7 @@ class TestMultipleInputs(object):

tx = Create.generate([alice.public_key], [([user_pk], 1)])
tx = tx.sign([alice.private_key])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)

@@ -217,7 +217,7 @@ class TestMultipleInputs(object):

tx_transfer = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_ids=[tx.id])
tx_transfer = tx_transfer.sign([user_sk])
b.models.store_bulk_transactions( [tx_transfer])
b.models.store_bulk_transactions([tx_transfer])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)

@@ -231,7 +231,7 @@ class TestMultipleInputs(object):
# create divisible asset
tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1)])
tx_create_signed = tx_create.sign([alice.private_key])
b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

# get input
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)

@@ -246,7 +246,7 @@ class TestMultipleInputs(object):
tx_create.to_inputs(), [([user2_pk], 1), ([user2_pk], 1)], asset_ids=[tx_create.id]
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)

@@ -260,7 +260,7 @@ class TestMultipleInputs(object):
tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
tx = tx.sign([alice.private_key])

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user_pk)

@@ -271,7 +271,7 @@ class TestMultipleInputs(object):

tx = Transfer.generate(tx.to_inputs(), [([user3_pk], 1)], asset_ids=[tx.id])
tx = tx.sign([user_sk, user2_sk])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)

@@ -285,7 +285,7 @@ class TestMultipleInputs(object):

tx = Create.generate([alice.public_key], [([user_pk], 1)])
tx = tx.sign([alice.private_key])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()

@@ -297,7 +297,7 @@ class TestMultipleInputs(object):
# create a transaction and send it
tx = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_ids=[tx.id])
tx = tx.sign([user_sk])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

spent_inputs_user1 = b.models.get_spent(input_txid, 0)
assert spent_inputs_user1 == tx.to_dict()

@@ -309,7 +309,7 @@ class TestMultipleInputs(object):
# create a divisible asset with 3 outputs
tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1), ([user_pk], 1)])
tx_create_signed = tx_create.sign([alice.private_key])
b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
@@ -322,7 +322,7 @@ class TestMultipleInputs(object):
tx_create.to_inputs()[:2], [([user2_pk], 1), ([user2_pk], 1)], asset_ids=[tx_create.id]
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.models.store_bulk_transactions( [tx_transfer_signed])
b.models.store_bulk_transactions([tx_transfer_signed])

# check that used inputs are marked as spent
for ffill in tx_create.to_inputs()[:2]:

@@ -344,7 +344,7 @@ class TestMultipleInputs(object):
tx = tx.sign([alice.private_key])
transactions.append(tx)

b.models.store_bulk_transactions( transactions)
b.models.store_bulk_transactions(transactions)

owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
# check spents

@@ -354,7 +354,7 @@ class TestMultipleInputs(object):
# create a transaction
tx = Transfer.generate(transactions[0].to_inputs(), [([user3_pk], 1)], asset_ids=[transactions[0].id])
tx = tx.sign([user_sk, user2_sk])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

# check that used inputs are marked as spent
assert b.models.get_spent(transactions[0].id, 0) == tx.to_dict()

@@ -365,6 +365,7 @@ class TestMultipleInputs(object):

def test_get_outputs_filtered_only_unspent(b):
from transactions.common.transaction import TransactionLink

go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
with patch(go) as get_outputs:
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]

@@ -378,6 +379,7 @@ def test_get_outputs_filtered_only_unspent(b):

def test_get_outputs_filtered_only_spent(b):
from transactions.common.transaction import TransactionLink

go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
with patch(go) as get_outputs:
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]

@@ -389,12 +391,15 @@ def test_get_outputs_filtered_only_spent(b):
assert out == [TransactionLink("b", 2)]


#@patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")
#@patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
def test_get_outputs_filtered(b, mocker,):
# @patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")
# @patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
def test_get_outputs_filtered(
b,
mocker,
):
from transactions.common.transaction import TransactionLink

mock_filter_spent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")

mock_filter_spent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
mock_filter_unspent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")

go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"

@@ -417,7 +422,7 @@ def test_cant_spend_same_input_twice_in_tx(b, alice):
tx_create = Create.generate([alice.public_key], [([alice.public_key], 100)])
tx_create_signed = tx_create.sign([alice.private_key])
assert b.validate_transaction(tx_create_signed) == tx_create_signed
b.models.store_bulk_transactions( [tx_create_signed])
b.models.store_bulk_transactions([tx_create_signed])

# Create a transfer transaction with duplicated fulfillments
dup_inputs = tx_create.to_inputs() + tx_create.to_inputs()

@@ -442,6 +447,6 @@ def test_transaction_unicode(b, alice):
)

tx_1 = copy.deepcopy(tx)
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

assert beer_json["data"] in serialize(tx_1.to_dict())
@@ -32,7 +32,7 @@ def test_process_block_concludes_all_elections(b):
b.models.store_abci_chain(1, "chain-X")
b.process_block(1, txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.models.store_bulk_transactions(txs)

b.process_block(2, total_votes)

@@ -79,7 +79,7 @@ def test_process_block_approves_only_one_validator_update(b):

b.process_block(1, txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.models.store_bulk_transactions(txs)

b.process_block(2, total_votes)

@@ -125,7 +125,7 @@ def test_process_block_approves_after_pending_validator_update(b):
b.models.store_abci_chain(1, "chain-X")
b.process_block(1, txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.models.store_bulk_transactions(txs)

b.process_block(2, total_votes)

@@ -138,7 +138,11 @@ def test_process_block_approves_after_pending_validator_update(b):
assert not b.models.get_election(txs[1].id)["is_concluded"]
assert b.models.get_election(txs[2].id)["is_concluded"]

assert b.models.get_latest_abci_chain() == {"height": 2, "chain_id": "chain-X-migrated-at-height-1", "is_synced": False}
assert b.models.get_latest_abci_chain() == {
"height": 2,
"chain_id": "chain-X-migrated-at-height-1",
"is_synced": False,
}


@pytest.mark.bdb

@@ -160,7 +164,7 @@ def test_process_block_does_not_approve_after_validator_update(b):

b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.process_block(1, txs)
b.models.store_bulk_transactions( txs)
b.models.store_bulk_transactions(txs)

second_election, second_votes = generate_election(
b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys

@@ -168,7 +172,9 @@ def test_process_block_does_not_approve_after_validator_update(b):

b.process_block(2, total_votes + [second_election])

b.models.store_block(Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict())
b.models.store_block(
Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict()
)

b.models.store_abci_chain(1, "chain-X")
b.process_block(3, second_votes)

@@ -198,7 +204,7 @@ def test_process_block_applies_only_one_migration(b):
b.models.store_abci_chain(1, "chain-X")
b.process_block(1, txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.models.store_bulk_transactions(txs)

b.process_block(1, total_votes)
chain = b.models.get_latest_abci_chain()

@@ -398,7 +398,7 @@ def test_rollback_pre_commit_state_after_crash(b, test_models):
total_votes += votes
txs += [validator_election, *votes]

b.models.store_bulk_transactions( txs)
b.models.store_bulk_transactions(txs)
b.models.store_abci_chain(2, "new_chain")
b.models.store_validator_set(2, [v["storage"] for v in validators])
# TODO change to `4` when upgrading to Tendermint 0.22.4.
@@ -23,8 +23,7 @@ from uuid import uuid4

from planetmint.abci.rpc import ABCI_RPC
from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
from tests.utils import delete_unspent_outputs, get_utxoset_merkle_root, store_unspent_outputs, \
update_utxoset
from tests.utils import delete_unspent_outputs, get_utxoset_merkle_root, store_unspent_outputs, update_utxoset


@pytest.mark.bdb

@@ -69,7 +68,7 @@ def test_asset_is_separated_from_transaciton(b):
# https://api.mongodb.com/python/current/faq.html#writes-and-ids
tx_dict = copy.deepcopy(tx.to_dict())

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])
assert "asset" not in backend.query.get_transaction_single(b.models.connection, tx.id)
assert backend.query.get_asset(b.models.connection, tx.id).data == assets[0]
assert b.models.get_transaction(tx.id).to_dict() == tx_dict

@@ -112,7 +111,9 @@ def test_write_and_post_transaction(mock_post, b, test_abci_rpc):
)

tx = b.validate_transaction(tx)
test_abci_rpc.write_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_ASYNC)
test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_ASYNC
)

assert mock_post.called
args, kwargs = mock_post.call_args

@@ -168,19 +169,19 @@ def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn):
@pytest.mark.bdb
def test_store_transaction(mocker, b, signed_create_tx, signed_transfer_tx):
mocked_store_transaction = mocker.patch("planetmint.backend.query.store_transactions")
b.models.store_bulk_transactions( [signed_create_tx])
b.models.store_bulk_transactions([signed_create_tx])
mocked_store_transaction.assert_any_call(b.models.connection, [signed_create_tx.to_dict()], "transactions")
mocked_store_transaction.reset_mock()
b.models.store_bulk_transactions( [signed_transfer_tx])
b.models.store_bulk_transactions([signed_transfer_tx])


@pytest.mark.bdb
def test_store_bulk_transaction(mocker, b, signed_create_tx, signed_transfer_tx):
mocked_store_transactions = mocker.patch("planetmint.backend.query.store_transactions")
b.models.store_bulk_transactions( (signed_create_tx,))
b.models.store_bulk_transactions((signed_create_tx,))
mocked_store_transactions.assert_any_call(b.models.connection, [signed_create_tx.to_dict()], "transactions")
mocked_store_transactions.reset_mock()
b.models.store_bulk_transactions( (signed_transfer_tx,))
b.models.store_bulk_transactions((signed_transfer_tx,))


@pytest.mark.bdb

@@ -303,7 +304,7 @@ def test_get_spent_transaction_double_spend(b, alice, bob, carol):
tx.to_inputs() + tx.to_inputs(), [([bob.public_key], 1)], asset_ids=[tx.id]
).sign([alice.private_key])

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

with pytest.raises(DoubleSpend):
b.validate_transaction(same_input_double_spend)

@@ -313,7 +314,7 @@ def test_get_spent_transaction_double_spend(b, alice, bob, carol):
with pytest.raises(DoubleSpend):
b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer, double_spend])

b.models.store_bulk_transactions( [tx_transfer])
b.models.store_bulk_transactions([tx_transfer])

with pytest.raises(DoubleSpend):
b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [double_spend])

@@ -380,13 +381,13 @@ def test_get_spent_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
bob = generate_key_pair()

tx1 = Create.generate([user_pk], [([alice.public_key], 3), ([user_pk], 2)]).sign([user_sk])
b.models.store_bulk_transactions( [tx1])
b.models.store_bulk_transactions([tx1])

inputs = tx1.to_inputs()
tx2 = Transfer.generate([inputs[1]], [([user2_pk], 2)], [tx1.id]).sign([user_sk])
assert b.validate_transaction(tx2)

b.models.store_bulk_transactions( [tx2])
b.models.store_bulk_transactions([tx2])

tx3 = Transfer.generate([inputs[1]], [([bob.public_key], 2)], [tx1.id]).sign([user_sk])
@@ -54,7 +54,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
[([carol.public_key], 8)],
).sign([carol.private_key])
assert b.validate_transaction(tx_1)
b.models.store_bulk_transactions( [tx_1])
b.models.store_bulk_transactions([tx_1])

tx_2 = Transfer.generate(
tx_1.to_inputs(),

@@ -62,7 +62,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
asset_ids=[tx_1.id],
).sign([carol.private_key])
assert b.validate_transaction(tx_2)
b.models.store_bulk_transactions( [tx_2])
b.models.store_bulk_transactions([tx_2])

tx_3 = Transfer.generate(
tx_2.to_inputs()[2:3],

@@ -70,7 +70,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
asset_ids=[tx_1.id],
).sign([carol.private_key])
assert b.validate_transaction(tx_3)
b.models.store_bulk_transactions( [tx_3])
b.models.store_bulk_transactions([tx_3])

tx_4 = Transfer.generate(
tx_2.to_inputs()[1:2] + tx_3.to_inputs()[0:1],

@@ -78,7 +78,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
asset_ids=[tx_1.id],
).sign([alice.private_key])
assert b.validate_transaction(tx_4)
b.models.store_bulk_transactions( [tx_4])
b.models.store_bulk_transactions([tx_4])

tx_5 = Transfer.generate(
tx_2.to_inputs()[0:1],

@@ -87,7 +87,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
).sign([bob.private_key])
assert b.validate_transaction(tx_5)

b.models.store_bulk_transactions( [tx_5])
b.models.store_bulk_transactions([tx_5])
assert b.models.get_spent(tx_2.id, 0) == tx_5.to_dict()
assert not b.models.get_spent(tx_5.id, 0)
assert b.models.get_outputs_filtered(alice.public_key)
@@ -23,7 +23,7 @@ def txlist(b, user_pk, user2_pk, user_sk, user2_sk):
# Create a TRANSFER transactions
transfer1 = Transfer.generate(create1.to_inputs(), [([user_pk], 8)], [create1.id]).sign([user2_sk])

b.models.store_bulk_transactions( [create1, create2, transfer1])
b.models.store_bulk_transactions([create1, create2, transfer1])

return type(
"",

@@ -61,7 +61,7 @@ def test_upsert_validator_valid_non_election_vote(b_mock, valid_upsert_validator
def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
alice = generate_key_pair()

b_mock.models.store_bulk_transactions( [valid_upsert_validator_election])
b_mock.models.store_bulk_transactions([valid_upsert_validator_election])

input0 = valid_upsert_validator_election.to_inputs()[0]
votes = valid_upsert_validator_election.outputs[0].amount

@@ -114,7 +114,7 @@ def test_upsert_validator_invalid_election_vote(b_mock, valid_upsert_validator_e
@pytest.mark.bdb
def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, ed25519_node_keys):
alice = generate_key_pair()
b_mock.models.store_bulk_transactions( [valid_upsert_validator_election])
b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
assert b_mock.get_commited_votes(valid_upsert_validator_election) == 0

input0 = valid_upsert_validator_election.to_inputs()[0]

@@ -142,7 +142,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election,
).sign([alice.private_key])

assert b_mock.validate_transaction(alice_casted_vote)
b_mock.models.store_bulk_transactions( [alice_casted_vote])
b_mock.models.store_bulk_transactions([alice_casted_vote])

# Check if the delegated vote is count as valid vote
assert b_mock.get_commited_votes(valid_upsert_validator_election) == 2

@@ -174,7 +174,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
assert b_mock.validate_transaction(tx_vote0)
assert not b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote0])

b_mock.models.store_bulk_transactions( [tx_vote0])
b_mock.models.store_bulk_transactions([tx_vote0])
assert not b_mock.has_election_concluded(valid_upsert_validator_election)

# Node 1: cast vote

@@ -202,7 +202,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote2])
assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote2, tx_vote3])

b_mock.models.store_bulk_transactions( [tx_vote2])
b_mock.models.store_bulk_transactions([tx_vote2])

# Once the blockchain records >2/3 of the votes the election is assumed to be.has_concludedd
# so any invocation of `.has_concluded` for that election should return False

@@ -291,7 +291,7 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
voters = b.get_recipients_list()
election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key])
# store election
b.models.store_bulk_transactions( [election])
b.models.store_bulk_transactions([election])

tx_vote0 = gen_vote(election, 0, ed25519_node_keys)
tx_vote1 = gen_vote(election, 1, ed25519_node_keys)

@@ -323,13 +323,13 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
voters = b.get_recipients_list()
election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key])
# store election
b.models.store_bulk_transactions( [election])
b.models.store_bulk_transactions([election])

tx_vote0 = gen_vote(election, 0, ed25519_node_keys)
tx_vote1 = gen_vote(election, 1, ed25519_node_keys)
tx_vote2 = gen_vote(election, 2, ed25519_node_keys)

b.models.store_bulk_transactions( [tx_vote0, tx_vote1])
b.models.store_bulk_transactions([tx_vote0, tx_vote1])

update = b.process_block(9, [tx_vote2])
assert len(update) == 1
@@ -196,4 +196,4 @@ def update_utxoset(connection, transaction):
spent_outputs = [spent_output for spent_output in transaction.spent_outputs]
if spent_outputs:
delete_unspent_outputs(connection, *spent_outputs)
store_unspent_outputs(connection, *[utxo._asdict() for utxo in transaction.unspent_outputs])
store_unspent_outputs(connection, *[utxo._asdict() for utxo in transaction.unspent_outputs])

@@ -18,7 +18,7 @@ def test_get_assets_tendermint(client, b, alice):
assets = [{"data": multihash(marshal({"msg": "abc"}))}]
tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=assets).sign([alice.private_key])

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

res = client.get(ASSETS_ENDPOINT + assets[0]["data"])
assert res.status_code == 200

@@ -33,7 +33,7 @@ def test_get_assets_tendermint_limit(client, b, alice, bob):
tx_1 = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=assets).sign([alice.private_key])
tx_2 = Create.generate([bob.public_key], [([bob.public_key], 1)], assets=assets).sign([bob.private_key])

b.models.store_bulk_transactions( [tx_1, tx_2])
b.models.store_bulk_transactions([tx_1, tx_2])

res = client.get(ASSETS_ENDPOINT + assets[0]["data"] + "?limit=1")
assert res.status_code == 200

@@ -28,7 +28,7 @@ def test_get_block_endpoint(b, client, alice):
# deepcopy, for more info see:
# https://api.mongodb.com/python/current/faq.html#writes-and-ids
tx_dict = copy.deepcopy(tx.to_dict())
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

block = Block(app_hash="random_utxo", height=31, transactions=[tx.id])
b.models.store_block(block._asdict())

@@ -55,7 +55,7 @@ def test_get_block_containing_transaction(b, client, alice):
[alice.public_key], [([alice.public_key], 1)], assets=[{"data": multihash(marshal({"cycle": "hero"}))}]
)
tx = tx.sign([alice.private_key])
b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

block = Block(app_hash="random_utxo", height=13, transactions=[tx.id])
b.models.store_block(block._asdict())
@@ -27,7 +27,7 @@ def test_get_metadata_tendermint(client, b, alice):
[alice.private_key]
)

b.models.store_bulk_transactions( [tx])
b.models.store_bulk_transactions([tx])

# test that metadata is returned
res = client.get(METADATA_ENDPOINT + metadata)

@@ -44,13 +44,13 @@ def test_get_metadata_limit_tendermint(client, b, alice):
tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta, assets=assets1).sign(
[alice.private_key]
)
b.models.store_bulk_transactions( [tx1])
b.models.store_bulk_transactions([tx1])

assets2 = [{"data": multihash(marshal({"msg": "abc 2"}))}]
tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta, assets=assets2).sign(
[alice.private_key]
)
b.models.store_bulk_transactions( [tx2])
b.models.store_bulk_transactions([tx2])

# test that both assets are returned without limit
res = client.get(METADATA_ENDPOINT + meta)

@@ -91,7 +91,7 @@ def test_get_divisble_transactions_returns_500_phase_one(b, client):
TX_ENDPOINT = "/api/v1/transactions"

def mine(tx_list):
b.models.store_bulk_transactions( tx_list)
b.models.store_bulk_transactions(tx_list)

alice_priv, alice_pub = crypto.generate_key_pair()
# bob_priv, bob_pub = crypto.generate_key_pair()

@@ -118,7 +118,7 @@ def test_get_divisble_transactions_returns_500(b, client):
TX_ENDPOINT = "/api/v1/transactions"

def mine(tx_list):
b.models.store_bulk_transactions( tx_list)
b.models.store_bulk_transactions(tx_list)

alice_priv, alice_pub = crypto.generate_key_pair()
bob_priv, bob_pub = crypto.generate_key_pair()

@@ -486,7 +486,7 @@ def test_post_transaction_compose_valid_wo_abci(b, _bdb):
assets=[{"data": "QmW5GVMW98D3mktSDfWHS8nX2UiCd8gP1uCiujnFX4yK97"}],
).sign([alice.private_key])
validated = b.validate_transaction(tx)
b.models.store_bulk_transactions( [validated])
b.models.store_bulk_transactions([validated])

tx_obj = tx
tx = tx.to_dict()

@@ -499,7 +499,7 @@ def test_post_transaction_compose_valid_wo_abci(b, _bdb):
compose_dict = signed_compose_tx.to_dict()
compose_obj = Transaction.from_dict(compose_dict)
validated_compose = b.validate_transaction(compose_obj)
b.models.store_bulk_transactions( [validated_compose])
b.models.store_bulk_transactions([validated_compose])


@pytest.mark.abci