big refactor - all tests passing

Signed-off-by: Jürgen Eckel <juergen@riddleandcode.com>
This commit is contained in:
Jürgen Eckel 2023-02-27 11:56:11 +01:00
parent 0f146814ef
commit efc506cc7c
No known key found for this signature in database
52 changed files with 1138 additions and 1094 deletions

View File

@ -30,7 +30,7 @@ def test_bigchain_instance_is_initialized_when_conf_provided():
def test_load_validation_plugin_loads_default_rules_without_name():
from planetmint import config_utils
from planetmint.validation import BaseValidationRules
from planetmint.application.basevalidationrules import BaseValidationRules
assert config_utils.load_validation_plugin() == BaseValidationRules

View File

@ -19,7 +19,7 @@ The `Planetmint` class is defined here. Most node-level operations and database
`Block`, `Transaction`, and `Asset` classes are defined here. The classes mirror the block and transaction structure from the documentation, but also include methods for validation and signing.
### [`validation.py`](./validation.py)
### [`validation.py`](application/basevalidationrules.py)
Base class for validation methods (verification of votes, blocks, and transactions). The actual logic is mostly found in `transaction` and `block` models, defined in [`models.py`](./models.py).

View File

@ -7,7 +7,8 @@ from transactions.common.transaction import Transaction # noqa
from transactions.types.elections.validator_election import ValidatorElection # noqa
from transactions.types.elections.vote import Vote # noqa
from transactions.types.elections.chain_migration_election import ChainMigrationElection
from planetmint.lib import Planetmint
from planetmint.application.validation import Validator
from planetmint.abci.core import App

View File

@ -21,12 +21,13 @@ from tendermint.abci.types_pb2 import (
ResponseEndBlock,
ResponseCommit,
)
from planetmint import Planetmint
from planetmint.application.validation import Validator
from planetmint.model.models import Models
from planetmint.abci.tendermint_utils import decode_transaction, calculate_hash, decode_validator
from planetmint.abci.block import Block
from planetmint.ipc.events import EventTypes, Event
CodeTypeError = 1
logger = logging.getLogger(__name__)
@ -38,18 +39,19 @@ class App(BaseApplication):
transaction logic to Tendermint Core.
"""
def __init__(self, planetmint_node=None, events_queue=None):
def __init__(self, planetmint_node=None, events_queue=None, models: Models = None, validator: Validator = None):
# super().__init__(abci)
logger.debug("Checking values of types")
logger.debug(dir(types_pb2))
self.events_queue = events_queue
self.planetmint_node = planetmint_node or Planetmint()
self.validator = Validator()
self.models = models or Models()
self.block_txn_ids = []
self.block_txn_hash = ""
self.block_transactions = []
self.validators = None
self.new_height = None
self.chain = self.planetmint_node.get_latest_abci_chain()
self.chain = self.models.get_latest_abci_chain()
def log_abci_migration_error(self, chain_id, validators):
logger.error(
@ -61,7 +63,7 @@ class App(BaseApplication):
def abort_if_abci_chain_is_not_synced(self):
if self.chain is None or self.chain["is_synced"]:
return
validators = self.planetmint_node.get_validators()
validators = self.models.get_validators()
self.log_abci_migration_error(self.chain["chain_id"], validators)
sys.exit(1)
@ -69,7 +71,7 @@ class App(BaseApplication):
"""Initialize chain upon genesis or a migration"""
app_hash = ""
height = 0
known_chain = self.planetmint_node.get_latest_abci_chain()
known_chain = self.models.get_latest_abci_chain()
if known_chain is not None:
chain_id = known_chain["chain_id"]
@ -78,23 +80,23 @@ class App(BaseApplication):
logger.error(msg)
sys.exit(1)
if chain_id != genesis.chain_id:
validators = self.planetmint_node.get_validators()
validators = self.models.get_validators()
self.log_abci_migration_error(chain_id, validators)
sys.exit(1)
# set migration values for app hash and height
block = self.planetmint_node.get_latest_block()
block = self.models.get_latest_block()
app_hash = "" if block is None else block["app_hash"]
height = 0 if block is None else block["height"] + 1
known_validators = self.planetmint_node.get_validators()
known_validators = self.models.get_validators()
validator_set = [decode_validator(v) for v in genesis.validators]
if known_validators and known_validators != validator_set:
self.log_abci_migration_error(known_chain["chain_id"], known_validators)
sys.exit(1)
block = Block(app_hash=app_hash, height=height, transactions=[])
self.planetmint_node.store_block(block._asdict())
self.planetmint_node.store_validator_set(height + 1, validator_set)
self.models.store_block(block._asdict())
self.models.store_validator_set(height + 1, validator_set)
abci_chain_height = 0 if known_chain is None else known_chain["height"]
self.planetmint_node.store_abci_chain(abci_chain_height, genesis.chain_id, True)
self.models.store_abci_chain(abci_chain_height, genesis.chain_id, True)
self.chain = {"height": abci_chain_height, "is_synced": True, "chain_id": genesis.chain_id}
return ResponseInitChain()
@ -112,7 +114,7 @@ class App(BaseApplication):
# logger.info(f"Tendermint version: {request.version}")
r = ResponseInfo()
block = self.planetmint_node.get_latest_block()
block = self.models.get_latest_block()
if block:
chain_shift = 0 if self.chain is None else self.chain["height"]
r.last_block_height = block["height"] - chain_shift
@ -134,7 +136,7 @@ class App(BaseApplication):
logger.debug("check_tx: %s", raw_transaction)
transaction = decode_transaction(raw_transaction)
if self.planetmint_node.is_valid_transaction(transaction):
if self.validator.is_valid_transaction(transaction):
logger.debug("check_tx: VALID")
return ResponseCheckTx(code=OkCode)
else:
@ -167,7 +169,7 @@ class App(BaseApplication):
self.abort_if_abci_chain_is_not_synced()
logger.debug("deliver_tx: %s", raw_transaction)
transaction = self.planetmint_node.is_valid_transaction(
transaction = self.validator.is_valid_transaction(
decode_transaction(raw_transaction), self.block_transactions
)
@ -198,10 +200,10 @@ class App(BaseApplication):
# `end_block` or `commit`
logger.debug(f"Updating pre-commit state: {self.new_height}")
pre_commit_state = dict(height=self.new_height, transactions=self.block_txn_ids)
self.planetmint_node.store_pre_commit_state(pre_commit_state)
self.models.store_pre_commit_state(pre_commit_state)
block_txn_hash = calculate_hash(self.block_txn_ids)
block = self.planetmint_node.get_latest_block()
block = self.models.get_latest_block()
logger.debug("BLOCK: ", block)
@ -210,7 +212,7 @@ class App(BaseApplication):
else:
self.block_txn_hash = block["app_hash"]
validator_update = self.planetmint_node.process_block(self.new_height, self.block_transactions)
validator_update = self.validator.process_block(self.new_height, self.block_transactions)
return ResponseEndBlock(validator_updates=validator_update)
@ -223,12 +225,12 @@ class App(BaseApplication):
# register a new block only when new transactions are received
if self.block_txn_ids:
self.planetmint_node.store_bulk_transactions(self.block_transactions)
self.models.store_bulk_transactions( self.block_transactions)
block = Block(app_hash=self.block_txn_hash, height=self.new_height, transactions=self.block_txn_ids)
# NOTE: storing the block should be the last operation during commit
# this effects crash recovery. Refer BEP#8 for details
self.planetmint_node.store_block(block._asdict())
self.models.store_block(block._asdict())
logger.debug(
"Commit-ing new block with hash: apphash=%s ," "height=%s, txn ids=%s",
@ -246,25 +248,3 @@ class App(BaseApplication):
return ResponseCommit(data=data)
def rollback(planetmint):
pre_commit = None
try:
pre_commit = planetmint.get_pre_commit_state()
except Exception as e:
logger.exception("Unexpected error occurred while executing get_pre_commit_state()", e)
if pre_commit is None or len(pre_commit) == 0:
# the pre_commit record is first stored in the first `end_block`
return
latest_block = planetmint.get_latest_block()
if latest_block is None:
logger.error("Found precommit state but no blocks!")
sys.exit(1)
# NOTE: the pre-commit state is always at most 1 block ahead of the commited state
if latest_block["height"] < pre_commit["height"]:
planetmint.rollback_election(pre_commit["height"], pre_commit["transactions"])
planetmint.delete_transactions(pre_commit["transactions"])

View File

@ -7,7 +7,7 @@ import multiprocessing
from collections import defaultdict
from planetmint import App
from planetmint.lib import Planetmint
from planetmint.application.validation import Validator
from planetmint.abci.tendermint_utils import decode_transaction
from abci.application import OkCode
from tendermint.abci.types_pb2 import (
@ -93,7 +93,7 @@ class ValidationWorker:
def __init__(self, in_queue, results_queue):
self.in_queue = in_queue
self.results_queue = results_queue
self.planetmint = Planetmint()
self.validator = Validator()
self.reset()
def reset(self):
@ -112,7 +112,7 @@ class ValidationWorker:
except TypeError:
asset_id = dict_transaction["id"]
transaction = self.planetmint.is_valid_transaction(dict_transaction, self.validated_transactions[asset_id])
transaction = self.validator.is_valid_transaction(dict_transaction, self.validated_transactions[asset_id])
if transaction:
self.validated_transactions[asset_id].append(transaction)

View File

@ -1,19 +1,28 @@
from uuid import uuid4
import requests
from uuid import uuid4
from transactions.common.exceptions import ValidationError
from planetmint.abci.tendermint_utils import encode_transaction
from planetmint.lib import logger
from transactions.common.transaction_mode_types import (
BROADCAST_TX_COMMIT,
BROADCAST_TX_ASYNC,
BROADCAST_TX_SYNC,
)
from planetmint.abci.tendermint_utils import encode_transaction
from planetmint.application.validation import logger
from planetmint.config_utils import autoconfigure
from planetmint.config import Config
MODE_COMMIT = BROADCAST_TX_COMMIT
MODE_LIST = (BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC, MODE_COMMIT)
class ABCI_RPC:
def __init__(self):
autoconfigure()
self.tendermint_host = Config().get()["tendermint"]["host"]
self.tendermint_port = Config().get()["tendermint"]["port"]
self.tendermint_rpc_endpoint = "http://{}:{}/".format(self.tendermint_host, self.tendermint_port)
@staticmethod
def _process_post_response(mode_commit, response, mode):
logger.debug(response)
@ -42,14 +51,12 @@ class ABCI_RPC:
return (202, "")
def write_transaction(self, mode_list, endpoint, mode_commit, transaction, mode):
# This method offers backward compatibility with the Web API.
"""Submit a valid transaction to the mempool."""
response = self.post_transaction(mode_list, endpoint, transaction, mode)
return ABCI_RPC._process_post_response(mode_commit, response.json(), mode)
def post_transaction(self, mode_list, endpoint, transaction, mode):
"""Submit a valid transaction to the mempool."""
if not mode or mode not in mode_list:

View File

View File

@ -1,51 +1,27 @@
# Copyright © 2020 Interplanetary Database Association e.V.,
# Planetmint and IPDB software contributors.
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
# Code is Apache-2.0 and docs are CC-BY-4.0
"""Module containing main contact points with Tendermint and
MongoDB.
"""
import logging
from planetmint.backend.connection import Connection
import json
import rapidjson
from itertools import chain
from collections import OrderedDict
from hashlib import sha3_256
from transactions import Transaction, Vote
from transactions.common.crypto import public_key_from_ed25519_key
from transactions.common.exceptions import (
DoubleSpend,
AssetIdMismatch,
InvalidSignature,
AmountError,
SchemaValidationError,
ValidationError,
DuplicateTransaction,
InvalidSignature,
DoubleSpend,
InputDoesNotExist,
AssetIdMismatch,
AmountError,
MultipleInputsError,
DuplicateTransaction,
InvalidProposer,
UnequalValidatorSet,
InvalidPowerChange,
)
from transactions.common.transaction import VALIDATOR_ELECTION, CHAIN_MIGRATION_ELECTION
from transactions.common.crypto import public_key_from_ed25519_key
from transactions.common.output import Output as TransactionOutput
from transactions.common.transaction import VALIDATOR_ELECTION, CHAIN_MIGRATION_ELECTION
from transactions.types.elections.election import Election
from transactions.types.elections.validator_utils import election_id_to_public_key
from planetmint.backend.models import Output, DbTransaction, Asset, MetaData
from planetmint.backend.tarantool.const import (
TARANT_TABLE_GOVERNANCE,
TARANT_TABLE_TRANSACTION,
)
from planetmint.config import Config
from planetmint import backend, fastquery, config_utils
from planetmint.abci.tendermint_utils import (
merkleroot,
key_from_base64,
@ -53,243 +29,114 @@ from planetmint.abci.tendermint_utils import (
encode_validator,
new_validator_set,
)
from planetmint.validation import BaseValidationRules
from planetmint.const import GOVERNANCE_TRANSACTION_TYPES
from planetmint.application.basevalidationrules import BaseValidationRules
from planetmint.backend.models.output import Output
from planetmint.model.models import Models
from planetmint.config import Config
from planetmint.config_utils import load_validation_plugin
logger = logging.getLogger(__name__)
class Planetmint(object):
"""Planetmint API
class Validator:
def __init__(self):
self.models = Models()
self.validation = Validator._get_validationmethod()
Create, read, sign, write transactions to the database
"""
def __init__(self, connection=None):
"""Initialize the Planetmint instance
A Planetmint instance has several configuration parameters (e.g. host).
If a parameter value is passed as an argument to the Planetmint
__init__ method, then that is the value it will have.
Otherwise, the parameter value will come from an environment variable.
If that environment variable isn't set, then the value
will come from the local configuration file. And if that variable
isn't in the local configuration file, then the parameter will have
its default value (defined in planetmint.__init__).
Args:
connection (:class:`~planetmint.backend.connection.Connection`):
A connection to the database.
"""
config_utils.autoconfigure()
self.tendermint_host = Config().get()["tendermint"]["host"]
self.tendermint_port = Config().get()["tendermint"]["port"]
self.tendermint_rpc_endpoint = "http://{}:{}/".format(self.tendermint_host, self.tendermint_port)
@staticmethod
def _get_validationmethod():
validationPlugin = Config().get().get("validation_plugin")
if validationPlugin:
self.validation = config_utils.load_validation_plugin(validationPlugin)
validation_method = load_validation_plugin(validationPlugin)
else:
self.validation = BaseValidationRules
self.connection = connection if connection is not None else Connection()
validation_method = BaseValidationRules
return validation_method
def store_bulk_transactions(self, transactions):
txns = []
gov_txns = []
@staticmethod
def validate_inputs_distinct(tx: Transaction):
# Validate that all inputs are distinct
links = [i.fulfills.to_uri() for i in tx.inputs]
if len(links) != len(set(links)):
raise DoubleSpend('tx "{}" spends inputs twice'.format(tx.id))
for t in transactions:
transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict()))
if transaction["operation"] in GOVERNANCE_TRANSACTION_TYPES:
gov_txns.append(transaction)
else:
txns.append(transaction)
@staticmethod
def validate_asset_id(tx: Transaction, input_txs: list):
# validate asset
if tx.operation != Transaction.COMPOSE:
asset_id = tx.get_asset_id(input_txs)
if asset_id != Transaction.read_out_asset_id(tx):
raise AssetIdMismatch(
("The asset id of the input does not" " match the asset id of the" " transaction")
)
else:
asset_ids = Transaction.get_asset_ids(input_txs)
if Transaction.read_out_asset_id(tx) in asset_ids:
raise AssetIdMismatch(("The asset ID of the compose must be different to all of its input asset IDs"))
backend.query.store_transactions(self.connection, txns, TARANT_TABLE_TRANSACTION)
backend.query.store_transactions(self.connection, gov_txns, TARANT_TABLE_GOVERNANCE)
@staticmethod
def validate_input_conditions(tx: Transaction, input_conditions: list[Output]):
# convert planetmint.Output objects to transactions.common.Output objects
input_conditions_dict = Output.list_to_dict(input_conditions)
input_conditions_converted = []
for input_cond in input_conditions_dict:
input_conditions_converted.append(TransactionOutput.from_dict(input_cond))
def delete_transactions(self, txs):
return backend.query.delete_transactions(self.connection, txs)
if not tx.inputs_valid(input_conditions_converted):
raise InvalidSignature("Transaction signature is invalid.")
def update_utxoset(self, transaction):
self.updated__ = """Update the UTXO set given ``transaction``. That is, remove
the outputs that the given ``transaction`` spends, and add the
outputs that the given ``transaction`` creates.
def validate_compose_inputs(self, tx, current_transactions=[]) -> bool:
input_txs, input_conditions = self.models.get_input_txs_and_conditions(tx.inputs, current_transactions)
Args:
transaction (:obj:`~planetmint.models.Transaction`): A new
transaction incoming into the system for which the UTXOF
set needs to be updated.
"""
spent_outputs = [spent_output for spent_output in transaction.spent_outputs]
if spent_outputs:
self.delete_unspent_outputs(*spent_outputs)
self.store_unspent_outputs(*[utxo._asdict() for utxo in transaction.unspent_outputs])
Validator.validate_input_conditions(tx, input_conditions)
def store_unspent_outputs(self, *unspent_outputs):
"""Store the given ``unspent_outputs`` (utxos).
Validator.validate_asset_id(tx, input_txs)
Args:
*unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
length tuple or list of unspent outputs.
"""
if unspent_outputs:
return backend.query.store_unspent_outputs(self.connection, *unspent_outputs)
Validator.validate_inputs_distinct(tx)
def get_utxoset_merkle_root(self):
"""Returns the merkle root of the utxoset. This implies that
the utxoset is first put into a merkle tree.
return True
For now, the merkle tree and its root will be computed each
time. This obviously is not efficient and a better approach
that limits the repetition of the same computation when
unnecesary should be sought. For instance, future optimizations
could simply re-compute the branches of the tree that were
affected by a change.
def validate_transfer_inputs(self, tx, current_transactions=[]) -> bool:
input_txs, input_conditions = self.models.get_input_txs_and_conditions(tx.inputs, current_transactions)
The transaction hash (id) and output index should be sufficient
to uniquely identify a utxo, and consequently only that
information from a utxo record is needed to compute the merkle
root. Hence, each node of the merkle tree should contain the
tuple (txid, output_index).
Validator.validate_input_conditions(tx, input_conditions)
.. important:: The leaves of the tree will need to be sorted in
some kind of lexicographical order.
Validator.validate_asset_id(tx, input_txs)
Returns:
str: Merkle root in hexadecimal form.
"""
utxoset = backend.query.get_unspent_outputs(self.connection)
# TODO Once ready, use the already pre-computed utxo_hash field.
# See common/transactions.py for details.
hashes = [
sha3_256("{}{}".format(utxo["transaction_id"], utxo["output_index"]).encode()).digest() for utxo in utxoset
]
# TODO Notice the sorted call!
return merkleroot(sorted(hashes))
Validator.validate_inputs_distinct(tx)
def get_unspent_outputs(self):
"""Get the utxoset.
input_amount = sum([input_condition.amount for input_condition in input_conditions])
output_amount = sum([output_condition.amount for output_condition in tx.outputs])
Returns:
generator of unspent_outputs.
"""
cursor = backend.query.get_unspent_outputs(self.connection)
return (record for record in cursor)
if output_amount != input_amount:
raise AmountError(
(
"The amount used in the inputs `{}`" " needs to be same as the amount used" " in the outputs `{}`"
).format(input_amount, output_amount)
)
def delete_unspent_outputs(self, *unspent_outputs):
"""Deletes the given ``unspent_outputs`` (utxos).
return True
Args:
*unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
length tuple or list of unspent outputs.
"""
if unspent_outputs:
return backend.query.delete_unspent_outputs(self.connection, *unspent_outputs)
def validate_create_inputs(self, tx, current_transactions=[]) -> bool:
duplicates = any(txn for txn in current_transactions if txn.id == tx.id)
if self.models.is_committed(tx.id) or duplicates:
raise DuplicateTransaction("transaction `{}` already exists".format(tx.id))
def is_committed(self, transaction_id):
transaction = backend.query.get_transaction_single(self.connection, transaction_id)
return bool(transaction)
fulfilling_inputs = [i for i in tx.inputs if i.fulfills is not None and i.fulfills.txid is not None]
def get_transaction(self, transaction_id):
return backend.query.get_transaction_single(self.connection, transaction_id)
if len(fulfilling_inputs) > 0:
input_txs, input_conditions = self.models.get_input_txs_and_conditions(
fulfilling_inputs, current_transactions
)
create_asset = tx.assets[0]
input_asset = input_txs[0].assets[tx.inputs[0].fulfills.output]["data"]
if create_asset != input_asset:
raise ValidationError("CREATE must have matching asset description with input transaction")
if input_txs[0].operation != Transaction.DECOMPOSE:
raise SchemaValidationError("CREATE can only consume DECOMPOSE outputs")
def get_transactions(self, txn_ids):
return backend.query.get_transactions(self.connection, txn_ids)
def get_transactions_filtered(self, asset_ids, operation=None, last_tx=False):
"""Get a list of transactions filtered on some criteria"""
txids = backend.query.get_txids_filtered(self.connection, asset_ids, operation, last_tx)
for txid in txids:
yield self.get_transaction(txid)
def get_outputs_by_tx_id(self, txid):
return backend.query.get_outputs_by_tx_id(self.connection, txid)
def get_outputs_filtered(self, owner, spent=None):
"""Get a list of output links filtered on some criteria
Args:
owner (str): base58 encoded public_key.
spent (bool): If ``True`` return only the spent outputs. If
``False`` return only unspent outputs. If spent is
not specified (``None``) return all outputs.
Returns:
:obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
pointing to another transaction's condition
"""
outputs = self.fastquery.get_outputs_by_public_key(owner)
if spent is None:
return outputs
elif spent is True:
return self.fastquery.filter_unspent_outputs(outputs)
elif spent is False:
return self.fastquery.filter_spent_outputs(outputs)
def get_spent(self, txid, output, current_transactions=[]):
transactions = backend.query.get_spent(self.connection, txid, output)
current_spent_transactions = []
for ctxn in current_transactions:
for ctxn_input in ctxn.inputs:
if ctxn_input.fulfills and ctxn_input.fulfills.txid == txid and ctxn_input.fulfills.output == output:
current_spent_transactions.append(ctxn)
transaction = None
if len(transactions) + len(current_spent_transactions) > 1:
raise DoubleSpend('tx "{}" spends inputs twice'.format(txid))
elif transactions:
tx_id = transactions[0].id
tx = backend.query.get_transaction_single(self.connection, tx_id)
transaction = tx.to_dict()
elif current_spent_transactions:
transaction = current_spent_transactions[0]
return transaction
def store_block(self, block):
"""Create a new block."""
return backend.query.store_block(self.connection, block)
def get_latest_block(self) -> dict:
"""Get the block with largest height."""
return backend.query.get_latest_block(self.connection)
def get_block(self, block_id) -> dict:
"""Get the block with the specified `block_id`.
Returns the block corresponding to `block_id` or None if no match is
found.
Args:
block_id (int): block id of the block to get.
"""
block = backend.query.get_block(self.connection, block_id)
latest_block = self.get_latest_block()
latest_block_height = latest_block["height"] if latest_block else 0
if not block and block_id > latest_block_height:
return
return block
def get_block_containing_tx(self, txid):
"""Retrieve the list of blocks (block ids) containing a
transaction with transaction id `txid`
Args:
txid (str): transaction id of the transaction to query
Returns:
Block id list (list(int))
"""
block = backend.query.get_block_with_transaction(self.connection, txid)
return block
return True
def validate_transaction(self, transaction, current_transactions=[]):
"""Validate a transaction against the current status of the database."""
@ -316,248 +163,6 @@ class Planetmint(object):
return transaction
def validate_create_inputs(self, tx, current_transactions=[]) -> bool:
duplicates = any(txn for txn in current_transactions if txn.id == tx.id)
if self.is_committed(tx.id) or duplicates:
raise DuplicateTransaction("transaction `{}` already exists".format(tx.id))
fulfilling_inputs = [i for i in tx.inputs if i.fulfills is not None and i.fulfills.txid is not None]
if len(fulfilling_inputs) > 0:
input_txs, input_conditions = self.get_input_txs_and_conditions(fulfilling_inputs, current_transactions)
create_asset = tx.assets[0]
input_asset = input_txs[0].assets[tx.inputs[0].fulfills.output]["data"]
if create_asset != input_asset:
raise ValidationError("CREATE must have matching asset description with input transaction")
if input_txs[0].operation != Transaction.DECOMPOSE:
raise SchemaValidationError("CREATE can only consume DECOMPOSE outputs")
return True
def validate_transfer_inputs(self, tx, current_transactions=[]) -> bool:
input_txs, input_conditions = self.get_input_txs_and_conditions(tx.inputs, current_transactions)
self.validate_input_conditions(tx, input_conditions)
self.validate_asset_id(tx, input_txs)
self.validate_inputs_distinct(tx)
input_amount = sum([input_condition.amount for input_condition in input_conditions])
output_amount = sum([output_condition.amount for output_condition in tx.outputs])
if output_amount != input_amount:
raise AmountError(
(
"The amount used in the inputs `{}`" " needs to be same as the amount used" " in the outputs `{}`"
).format(input_amount, output_amount)
)
return True
def validate_compose_inputs(self, tx, current_transactions=[]) -> bool:
input_txs, input_conditions = self.get_input_txs_and_conditions(tx.inputs, current_transactions)
self.validate_input_conditions(tx, input_conditions)
self.validate_asset_id(tx, input_txs)
self.validate_inputs_distinct(tx)
return True
def get_input_txs_and_conditions(self, inputs, current_transactions=[]):
# store the inputs so that we can check if the asset ids match
input_txs = []
input_conditions = []
for input_ in inputs:
input_txid = input_.fulfills.txid
input_tx = self.get_transaction(input_txid)
_output = self.get_outputs_by_tx_id(input_txid)
if input_tx is None:
for ctxn in current_transactions:
if ctxn.id == input_txid:
ctxn_dict = ctxn.to_dict()
input_tx = DbTransaction.from_dict(ctxn_dict)
_output = [
Output.from_dict(output, index, ctxn.id)
for index, output in enumerate(ctxn_dict["outputs"])
]
if input_tx is None:
raise InputDoesNotExist("input `{}` doesn't exist".format(input_txid))
spent = self.get_spent(input_txid, input_.fulfills.output, current_transactions)
if spent:
raise DoubleSpend("input `{}` was already spent".format(input_txid))
output = _output[input_.fulfills.output]
input_conditions.append(output)
tx_dict = input_tx.to_dict()
tx_dict["outputs"] = Output.list_to_dict(_output)
tx_dict = DbTransaction.remove_generated_fields(tx_dict)
pm_transaction = Transaction.from_dict(tx_dict, False)
input_txs.append(pm_transaction)
return (input_txs, input_conditions)
def validate_input_conditions(self, tx, input_conditions):
# convert planetmint.Output objects to transactions.common.Output objects
input_conditions_dict = Output.list_to_dict(input_conditions)
input_conditions_converted = []
for input_cond in input_conditions_dict:
input_conditions_converted.append(TransactionOutput.from_dict(input_cond))
if not tx.inputs_valid(input_conditions_converted):
raise InvalidSignature("Transaction signature is invalid.")
def validate_asset_id(self, tx: Transaction, input_txs: list):
# validate asset
if tx.operation != Transaction.COMPOSE:
asset_id = tx.get_asset_id(input_txs)
if asset_id != Transaction.read_out_asset_id(tx):
raise AssetIdMismatch(
("The asset id of the input does not" " match the asset id of the" " transaction")
)
else:
asset_ids = Transaction.get_asset_ids(input_txs)
if Transaction.read_out_asset_id(tx) in asset_ids:
raise AssetIdMismatch(("The asset ID of the compose must be different to all of its input asset IDs"))
def validate_inputs_distinct(self, tx):
# Validate that all inputs are distinct
links = [i.fulfills.to_uri() for i in tx.inputs]
if len(links) != len(set(links)):
raise DoubleSpend('tx "{}" spends inputs twice'.format(tx.id))
def is_valid_transaction(self, tx, current_transactions=[]):
# NOTE: the function returns the Transaction object in case
# the transaction is valid
try:
return self.validate_transaction(tx, current_transactions)
except ValidationError as e:
logger.warning("Invalid transaction (%s): %s", type(e).__name__, e)
return False
def get_assets(self, asset_ids) -> list[Asset]:
"""Return a list of assets that match the asset_ids
Args:
asset_ids (:obj:`list` of :obj:`str`): A list of asset_ids to
retrieve from the database.
Returns:
list: The list of assets returned from the database.
"""
return backend.query.get_assets(self.connection, asset_ids)
def get_assets_by_cid(self, asset_cid, **kwargs) -> list[dict]:
asset_txs = backend.query.get_transactions_by_asset(self.connection, asset_cid, **kwargs)
# flatten and return all found assets
return list(chain.from_iterable([Asset.list_to_dict(tx.assets) for tx in asset_txs]))
def get_metadata(self, txn_ids) -> list[MetaData]:
"""Return a list of metadata that match the transaction ids (txn_ids)
Args:
txn_ids (:obj:`list` of :obj:`str`): A list of txn_ids to
retrieve from the database.
Returns:
list: The list of metadata returned from the database.
"""
return backend.query.get_metadata(self.connection, txn_ids)
def get_metadata_by_cid(self, metadata_cid, **kwargs) -> list[str]:
metadata_txs = backend.query.get_transactions_by_metadata(self.connection, metadata_cid, **kwargs)
return [tx.metadata.metadata for tx in metadata_txs]
@property
def fastquery(self):
return fastquery.FastQuery(self.connection)
def get_validator_set(self, height=None):
return backend.query.get_validator_set(self.connection, height)
def get_validators(self, height=None):
result = self.get_validator_set(height)
return [] if result is None else result["validators"]
def get_election(self, election_id):
return backend.query.get_election(self.connection, election_id)
def get_pre_commit_state(self):
return backend.query.get_pre_commit_state(self.connection)
def store_pre_commit_state(self, state):
return backend.query.store_pre_commit_state(self.connection, state)
def store_validator_set(self, height, validators):
"""Store validator set at a given `height`.
NOTE: If the validator set already exists at that `height` then an
exception will be raised.
"""
return backend.query.store_validator_set(self.connection, {"height": height, "validators": validators})
def delete_validator_set(self, height):
return backend.query.delete_validator_set(self.connection, height)
def store_abci_chain(self, height, chain_id, is_synced=True):
return backend.query.store_abci_chain(self.connection, height, chain_id, is_synced)
def delete_abci_chain(self, height):
return backend.query.delete_abci_chain(self.connection, height)
def get_latest_abci_chain(self):
return backend.query.get_latest_abci_chain(self.connection)
def migrate_abci_chain(self):
    """Generate and record a new ABCI chain ID.

    New blocks are not accepted until an InitChain ABCI request arrives
    with the matching chain ID and validator set.

    The new ID is derived from the current chain and block height:
    `chain-X` => `chain-X-migrated-at-height-5`, and a previously migrated
    `chain-X-migrated-at-height-5` => `chain-X-migrated-at-height-21`.

    If there is no known chain (we are at genesis), the function returns.
    """
    current_chain = self.get_latest_abci_chain()
    if current_chain is None:
        # Genesis: nothing to migrate.
        return
    marker = "-migrated-at-height-"
    height = self.get_latest_block()["height"]
    # Strip any previous migration marker before appending the new one.
    base_id, _, _ = current_chain["chain_id"].partition(marker)
    self.store_abci_chain(height + 1, f"{base_id}{marker}{height}", False)
def store_election(self, election_id, height, is_concluded):
    """Persist a single election outcome record via the backend query layer."""
    return backend.query.store_election(self.connection, election_id, height, is_concluded)
def store_elections(self, elections):
    """Persist multiple election records via the backend query layer."""
    return backend.query.store_elections(self.connection, elections)
def delete_elections(self, height):
    """Delete all election records stored at `height` via the backend query layer."""
    return backend.query.delete_elections(self.connection, height)
# NOTE: moved here from Election; needs to be placed somewhere else
def get_validators_dict(self, height=None):
    """Return a dictionary of validators with key as `public_key` and
    value as the `voting_power`
    """
    # NOTE: we assume that Tendermint encodes public key in base64
    return {
        public_key_from_ed25519_key(key_from_base64(entry["public_key"]["value"])): entry["voting_power"]
        for entry in self.get_validators(height)
    }
def validate_election(self, transaction, current_transactions=[]): # TODO: move somewhere else
"""Validate election transaction
@ -579,10 +184,10 @@ class Planetmint(object):
"""
duplicates = any(txn for txn in current_transactions if txn.id == transaction.id)
if self.is_committed(transaction.id) or duplicates:
if self.models.is_committed(transaction.id) or duplicates:
raise DuplicateTransaction("transaction `{}` already exists".format(transaction.id))
current_validators = self.get_validators_dict()
current_validators = self.models.get_validators_dict()
# NOTE: Proposer should be a single node
if len(transaction.inputs) != 1 or len(transaction.inputs[0].owners_before) != 1:
@ -602,34 +207,48 @@ class Planetmint(object):
return transaction
def is_same_topology(cls, current_topology, election_topology):
voters = {}
for voter in election_topology:
if len(voter.public_keys) > 1:
return False
[public_key] = voter.public_keys
voting_power = voter.amount
voters[public_key] = voting_power
# Check whether the voters and their votes is same to that of the
# validators and their voting power in the network
return current_topology == voters
def validate_validator_election(self, transaction): # TODO: move somewhere else
"""For more details refer BEP-21: https://github.com/planetmint/BEPs/tree/master/21"""
current_validators = self.get_validators_dict()
current_validators = self.models.get_validators_dict()
# NOTE: change more than 1/3 of the current power is not allowed
if transaction.get_assets()[0]["data"]["power"] >= (1 / 3) * sum(current_validators.values()):
raise InvalidPowerChange("`power` change must be less than 1/3 of total power")
def get_election_status(self, transaction):
election = self.get_election(transaction.id)
election = self.models.get_election(transaction.id)
if election and election["is_concluded"]:
return Election.CONCLUDED
return Election.INCONCLUSIVE if self.has_validator_set_changed(transaction) else Election.ONGOING
def has_validator_set_changed(self, transaction): # TODO: move somewhere else
def has_validator_set_changed(self, transaction):
latest_change = self.get_validator_change()
if latest_change is None:
return False
latest_change_height = latest_change["height"]
election = self.get_election(transaction.id)
election = self.models.get_election(transaction.id)
return latest_change_height > election["height"]
def get_validator_change(self): # TODO: move somewhere else
def get_validator_change(self):
"""Return the validator set from the most recent approved block
:return: {
@ -637,32 +256,24 @@ class Planetmint(object):
'validators': <validator_set>
}
"""
latest_block = self.get_latest_block()
latest_block = self.models.get_latest_block()
if latest_block is None:
return None
return self.get_validator_set(latest_block["height"])
return self.models.get_validator_set(latest_block["height"])
def get_validator_dict(self, height=None):
"""Return a dictionary of validators with key as `public_key` and
value as the `voting_power`
"""
validators = {}
for validator in self.get_validators(height):
for validator in self.models.get_validators(height):
# NOTE: we assume that Tendermint encodes public key in base64
public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
validators[public_key] = validator["voting_power"]
return validators
def get_recipients_list(self):
"""Convert validator dictionary to a recipient list for `Transaction`"""
recipients = []
for public_key, voting_power in self.get_validator_dict().items():
recipients.append(([public_key], voting_power))
return recipients
# TODO to be moved to planetmint.commands.planetmint
def show_election_status(self, transaction):
data = transaction.assets[0]
data = data.to_dict()["data"]
@ -679,13 +290,14 @@ class Planetmint(object):
return response
# TODO to be moved to planetmint.commands.planetmint
def append_chain_migration_status(self, status):
chain = self.get_latest_abci_chain()
chain = self.models.get_latest_abci_chain()
if chain is None or chain["is_synced"]:
return status
status += f'\nchain_id={chain["chain_id"]}'
block = self.get_latest_block()
block = self.models.get_latest_block()
status += f'\napp_hash={block["app_hash"]}'
validators = [
{
@ -700,23 +312,18 @@ class Planetmint(object):
status += f"\nvalidators={json.dumps(validators, indent=4)}"
return status
def is_same_topology(cls, current_topology, election_topology):
voters = {}
for voter in election_topology:
if len(voter.public_keys) > 1:
return False
def get_recipients_list(self):
"""Convert validator dictionary to a recipient list for `Transaction`"""
[public_key] = voter.public_keys
voting_power = voter.amount
voters[public_key] = voting_power
recipients = []
for public_key, voting_power in self.get_validator_dict().items():
recipients.append(([public_key], voting_power))
# Check whether the voters and their votes is same to that of the
# validators and their voting power in the network
return current_topology == voters
return recipients
def count_votes(self, election_pk, transactions):
votes = 0
for txn in transactions:
for txn in transactions :
if txn.operation == Vote.OPERATION:
for output in txn.outputs:
# NOTE: We enforce that a valid vote to election id will have only
@ -729,7 +336,7 @@ class Planetmint(object):
def get_commited_votes(self, transaction, election_pk=None): # TODO: move somewhere else
if election_pk is None:
election_pk = election_id_to_public_key(transaction.id)
txns = backend.query.get_asset_tokens_for_public_key(self.connection, transaction.id, election_pk)
txns = self.models.get_asset_tokens_for_public_key(transaction.id, election_pk)
return self.count_votes(election_pk, txns)
def _get_initiated_elections(self, height, txns): # TODO: move somewhere else
@ -779,14 +386,14 @@ class Planetmint(object):
initiated_elections = self._get_initiated_elections(new_height, txns)
if initiated_elections:
self.store_elections(initiated_elections)
self.models.store_elections(initiated_elections)
# elections voted for in this block and their votes
elections = self._get_votes(txns)
validator_update = None
for election_id, votes in elections.items():
election = self.get_transaction(election_id)
election = self.models.get_transaction(election_id)
if election is None:
continue
@ -794,7 +401,7 @@ class Planetmint(object):
continue
validator_update = self.approve_election(election, new_height)
self.store_election(election.id, new_height, is_concluded=True)
self.models.store_election(election.id, new_height, is_concluded=True)
return [validator_update] if validator_update else []
@ -829,10 +436,10 @@ class Planetmint(object):
return False
def has_validator_election_concluded(self): # TODO: move somewhere else
latest_block = self.get_latest_block()
latest_block = self.models.get_latest_block()
if latest_block is not None:
latest_block_height = latest_block["height"]
latest_validator_change = self.get_validator_set()["height"]
latest_validator_change = self.models.get_validator_set()["height"]
# TODO change to `latest_block_height + 3` when upgrading to Tendermint 0.24.0.
if latest_validator_change == latest_block_height + 2:
@ -842,7 +449,7 @@ class Planetmint(object):
return True
def has_chain_migration_concluded(self): # TODO: move somewhere else
chain = self.get_latest_abci_chain()
chain = self.models.get_latest_abci_chain()
if chain is not None and not chain["is_synced"]:
# do not conclude the migration election if
# there is another migration in progress
@ -859,20 +466,20 @@ class Planetmint(object):
# delete election records for elections initiated at this height and
# elections concluded at this height
self.delete_elections(new_height)
self.models.delete_elections(new_height)
txns = [self.get_transaction(tx_id) for tx_id in txn_ids]
txns = [self.models.get_transaction(tx_id) for tx_id in txn_ids]
txns = [Transaction.from_dict(tx.to_dict()) for tx in txns if tx]
elections = self._get_votes(txns)
for election_id in elections:
election = self.get_transaction(election_id)
election = self.models.get_transaction(election_id)
if election.operation == VALIDATOR_ELECTION:
# TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0.
self.delete_validator_set(new_height + 1)
self.models.delete_validator_set(new_height + 1)
if election.operation == CHAIN_MIGRATION_ELECTION:
self.delete_abci_chain(new_height)
self.models.delete_abci_chain(new_height)
def approve_election(self, election, new_height):
"""Override to update the database state according to the
@ -883,12 +490,66 @@ class Planetmint(object):
self.migrate_abci_chain()
if election.operation == VALIDATOR_ELECTION:
validator_updates = [election.assets[0].data]
curr_validator_set = self.get_validators(new_height)
curr_validator_set = self.models.get_validators(new_height)
updated_validator_set = new_validator_set(curr_validator_set, validator_updates)
updated_validator_set = [v for v in updated_validator_set if v["voting_power"] > 0]
# TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0.
self.store_validator_set(new_height + 1, updated_validator_set)
self.models.store_validator_set(new_height + 1, updated_validator_set)
return encode_validator(election.assets[0].data)
def is_valid_transaction(self, tx, current_transactions=[]):
# NOTE: the function returns the Transaction object in case
# the transaction is valid
try:
return self.validate_transaction(tx, current_transactions)
except ValidationError as e:
logger.warning("Invalid transaction (%s): %s", type(e).__name__, e)
return False
def migrate_abci_chain(self):
"""Generate and record a new ABCI chain ID. New blocks are not
accepted until we receive an InitChain ABCI request with
the matching chain ID and validator set.
Chain ID is generated based on the current chain and height.
`chain-X` => `chain-X-migrated-at-height-5`.
`chain-X-migrated-at-height-5` => `chain-X-migrated-at-height-21`.
If there is no known chain (we are at genesis), the function returns.
"""
latest_chain = self.models.get_latest_abci_chain()
if latest_chain is None:
return
block = self.models.get_latest_block()
suffix = "-migrated-at-height-"
chain_id = latest_chain["chain_id"]
block_height_str = str(block["height"])
new_chain_id = chain_id.split(suffix)[0] + suffix + block_height_str
self.models.store_abci_chain(block["height"] + 1, new_chain_id, False)
def rollback(self):
pre_commit = None
try:
pre_commit = self.models.get_pre_commit_state()
except Exception as e:
logger.exception("Unexpected error occurred while executing get_pre_commit_state()", e)
if pre_commit is None or len(pre_commit) == 0:
# the pre_commit record is first stored in the first `end_block`
return
latest_block = self.models.get_latest_block()
if latest_block is None:
logger.error("Found precommit state but no blocks!")
sys.exit(1)
# NOTE: the pre-commit state is always at most 1 block ahead of the commited state
if latest_block["height"] < pre_commit["height"]:
self.rollback_election(pre_commit["height"], pre_commit["transactions"])
self.models.delete_transactions(pre_commit["transactions"])

View File

@ -46,7 +46,7 @@ class DbTransaction:
)
@staticmethod
def remove_generated_fields(tx_dict: dict):
def remove_generated_fields(tx_dict: dict) -> dict:
tx_dict["outputs"] = [
DbTransaction.remove_generated_or_none_output_keys(output) for output in tx_dict["outputs"]
]
@ -55,13 +55,19 @@ class DbTransaction:
return tx_dict
@staticmethod
def remove_generated_or_none_output_keys(output):
def remove_generated_or_none_output_keys(output: dict) -> dict:
output["condition"]["details"] = {k: v for k, v in output["condition"]["details"].items() if v is not None}
if "id" in output:
output.pop("id")
return output
def to_dict(self) -> dict:
"""
Returns
-------
object
"""
assets = Asset.list_to_dict(self.assets)
tx = {
"inputs": Input.list_to_dict(self.inputs),

View File

@ -14,16 +14,20 @@ import json
import sys
import planetmint
from planetmint.abci.core import rollback
from planetmint.abci.rpc import ABCI_RPC
from planetmint.abci.utils import load_node_key
from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
from transactions.common.exceptions import DatabaseDoesNotExist, ValidationError
from transactions.types.elections.vote import Vote
from transactions.types.elections.chain_migration_election import ChainMigrationElection
from transactions.types.elections.validator_utils import election_id_to_public_key
from transactions.common.transaction import Transaction
from planetmint import ValidatorElection, Planetmint
from planetmint.abci.rpc import ABCI_RPC
from planetmint.abci.utils import load_node_key
from planetmint import ValidatorElection
from planetmint.application.validation import Validator
from planetmint.backend import schema
from planetmint.commands import utils
from planetmint.commands.utils import configure_planetmint, input_on_stderr
@ -33,6 +37,7 @@ from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
from planetmint.commands.election_types import elections
from planetmint.version import __tm_supported_versions__
from planetmint.config import Config
from planetmint.model.models import Models
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@ -133,7 +138,9 @@ def create_new_election(sk, planet, election_class, data):
logger.error(fd_404)
return False
resp = ABCI_RPC().write_transaction(MODE_LIST, planet.tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT)
resp = ABCI_RPC().write_transaction(
MODE_LIST, ABCI_RPC().tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT
)
if resp == (202, ""):
logger.info("[SUCCESS] Submitted proposal with id: {}".format(election.id))
return election.id
@ -183,7 +190,7 @@ def run_election_new_chain_migration(args, planet):
return create_new_election(args.sk, planet, ChainMigrationElection, [{"data": {}}])
def run_election_approve(args, planet):
def run_election_approve(args, validator: Validator):
"""Approve an election
:param args: dict
@ -196,7 +203,7 @@ def run_election_approve(args, planet):
"""
key = load_node_key(args.sk)
tx = planet.get_transaction(args.election_id)
tx = validator.models.get_transaction(args.election_id)
voting_powers = [v.amount for v in tx.outputs if key.public_key in v.public_keys]
if len(voting_powers) > 0:
voting_power = voting_powers[0]
@ -208,9 +215,11 @@ def run_election_approve(args, planet):
inputs = [i for i in tx_converted.to_inputs() if key.public_key in i.owners_before]
election_pub_key = election_id_to_public_key(tx.id)
approval = Vote.generate(inputs, [([election_pub_key], voting_power)], [tx.id]).sign([key.private_key])
planet.validate_transaction(approval)
validator.validate_transaction(approval)
resp = ABCI_RPC().write_transaction(MODE_LIST, planet.tendermint_rpc_endpoint, MODE_COMMIT, approval, BROADCAST_TX_COMMIT)
resp = ABCI_RPC().write_transaction(
MODE_LIST, ABCI_RPC().tendermint_rpc_endpoint, MODE_COMMIT, approval, BROADCAST_TX_COMMIT
)
if resp == (202, ""):
logger.info("[SUCCESS] Your vote has been submitted")
@ -220,7 +229,7 @@ def run_election_approve(args, planet):
return False
def run_election_show(args, planet):
def run_election_show(args, validator:Validator):
"""Retrieves information about an election
:param args: dict
@ -230,12 +239,12 @@ def run_election_show(args, planet):
:param planet: an instance of Planetmint
"""
election = planet.get_transaction(args.election_id)
election = validator.models.get_transaction(args.election_id)
if not election:
logger.error(f"No election found with election_id {args.election_id}")
return
response = planet.show_election_status(election)
response = validator.show_election_status(election)
logger.info(response)
@ -243,8 +252,8 @@ def run_election_show(args, planet):
def _run_init():
bdb = planetmint.Planetmint()
schema.init_database(bdb.connection)
validator = Validator()
schema.init_database(validator.models.connection)
@configure_planetmint
@ -271,23 +280,20 @@ def run_drop(args):
print("Drop was executed, but spaces doesn't exist.", file=sys.stderr)
def run_recover(b):
rollback(b)
@configure_planetmint
def run_start(args):
"""Start the processes to run the node"""
logger.info("Planetmint Version %s", planetmint.version.__version__)
# Configure Logging
setup_logging()
if not args.skip_initialize_database:
logger.info("Initializing database")
_run_init()
logger.info("Planetmint Version %s", planetmint.version.__version__)
run_recover(planetmint.lib.Planetmint())
validator = Validator()
validator.rollback()
logger.info("Starting Planetmint main process.")
from planetmint.start import start

View File

@ -29,7 +29,7 @@ from pkg_resources import iter_entry_points, ResolutionError
from transactions.common.exceptions import ConfigurationError
from planetmint.config import Config, DEFAULT_LOGGING_CONFIG
from planetmint.validation import BaseValidationRules
from planetmint.application.basevalidationrules import BaseValidationRules
from transactions.common import exceptions
# TODO: move this to a proper configuration file for logging

View File

@ -35,6 +35,3 @@ class Event:
self.type = event_type
self.data = event_data

View File

@ -4,6 +4,7 @@ import multiprocessing
from planetmint.ipc.events import EventTypes, POISON_PILL
class Exchange:
"""Dispatch events to subscribers."""
@ -68,4 +69,4 @@ class Exchange:
if event == POISON_PILL:
return
else:
self.dispatch(event)
self.dispatch(event)

View File

359
planetmint/model/models.py Normal file
View File

@ -0,0 +1,359 @@
import rapidjson
from itertools import chain
from transactions import Transaction
from transactions.common.exceptions import DoubleSpend
from transactions.common.crypto import public_key_from_ed25519_key
from transactions.common.exceptions import InputDoesNotExist
from planetmint import config_utils, backend
from planetmint.const import GOVERNANCE_TRANSACTION_TYPES
from planetmint.backend.connection import Connection
from planetmint.backend.tarantool.const import TARANT_TABLE_TRANSACTION, TARANT_TABLE_GOVERNANCE
from planetmint.model.fastquery import FastQuery
from planetmint.abci.tendermint_utils import key_from_base64
from planetmint.abci.tendermint_utils import merkleroot
from hashlib import sha3_256
from planetmint.backend.models.block import Block
from planetmint.backend.models.output import Output
from planetmint.backend.models.asset import Asset
from planetmint.backend.models.input import Input
from planetmint.backend.models.metadata import MetaData
from planetmint.backend.models.dbtransaction import DbTransaction
class Models:
    """Data-access layer of a Planetmint node.

    A thin facade over :mod:`planetmint.backend.query`: each method binds the
    node's database connection to the corresponding backend query call.
    Methods prefixed with ``tests_`` are UTXO-set helpers currently exercised
    by the test suite only.
    """

    def __init__(self, database_connection=None):
        """Load the node configuration and bind a database connection.

        Args:
            database_connection: an existing backend connection to reuse;
                when ``None`` a default ``Connection()`` is created after
                autoconfiguration.
        """
        config_utils.autoconfigure()
        self.connection = database_connection if database_connection is not None else Connection()

    def store_bulk_transactions(self, transactions):
        """Persist a batch of transactions.

        Governance transactions are routed to the governance table, all
        others to the regular transaction table.
        """
        txns = []
        gov_txns = []
        for t in transactions:
            # Prefer the cached dict; otherwise normalize via a JSON round trip.
            transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict()))
            if transaction["operation"] in GOVERNANCE_TRANSACTION_TYPES:
                gov_txns.append(transaction)
            else:
                txns.append(transaction)
        backend.query.store_transactions(self.connection, txns, TARANT_TABLE_TRANSACTION)
        backend.query.store_transactions(self.connection, gov_txns, TARANT_TABLE_GOVERNANCE)

    def delete_transactions(self, txs):
        """Delete the given transactions from the backend."""
        return backend.query.delete_transactions(self.connection, txs)

    def is_committed(self, transaction_id):
        """Return ``True`` iff a transaction with ``transaction_id`` is stored."""
        transaction = backend.query.get_transaction_single(self.connection, transaction_id)
        return bool(transaction)

    def get_transaction(self, transaction_id):
        """Return the stored transaction with ``transaction_id`` or ``None``."""
        return backend.query.get_transaction_single(self.connection, transaction_id)

    def get_transactions(self, txn_ids):
        """Return the stored transactions matching ``txn_ids``."""
        return backend.query.get_transactions(self.connection, txn_ids)

    def get_transactions_filtered(self, asset_ids, operation=None, last_tx=False):
        """Get a list of transactions filtered on some criteria.

        Yields each matching transaction (this is a generator).
        """
        txids = backend.query.get_txids_filtered(self.connection, asset_ids, operation, last_tx)
        for txid in txids:
            yield self.get_transaction(txid)

    def get_outputs_by_tx_id(self, txid):
        """Return the outputs of the transaction with id ``txid``."""
        return backend.query.get_outputs_by_tx_id(self.connection, txid)

    def get_outputs_filtered(self, owner, spent=None):
        """Get a list of output links filtered on some criteria

        Args:
            owner (str): base58 encoded public_key.
            spent (bool): If ``True`` return only the spent outputs. If
                ``False`` return only unspent outputs. If spent is
                not specified (``None``) return all outputs.

        Returns:
            :obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
            pointing to another transaction's condition
        """
        outputs = self.fastquery.get_outputs_by_public_key(owner)
        if spent is None:
            return outputs
        elif spent is True:
            # NOTE(review): FastQuery.filter_* appears to *remove* the named
            # category, so spent=True keeps spent outputs — confirm in FastQuery.
            return self.fastquery.filter_unspent_outputs(outputs)
        elif spent is False:
            return self.fastquery.filter_spent_outputs(outputs)

    def store_block(self, block):
        """Create a new block."""
        return backend.query.store_block(self.connection, block)

    def get_latest_block(self) -> dict:
        """Get the block with largest height."""
        return backend.query.get_latest_block(self.connection)

    def get_block(self, block_id) -> dict:
        """Get the block with the specified `block_id`.

        Returns the block corresponding to `block_id` or None if no match is
        found.

        Args:
            block_id (int): block id of the block to get.
        """
        block = backend.query.get_block(self.connection, block_id)
        latest_block = self.get_latest_block()
        latest_block_height = latest_block["height"] if latest_block else 0
        # Heights beyond the chain tip are "no such block" (None); heights at
        # or below the tip return whatever the backend found (possibly None).
        if not block and block_id > latest_block_height:
            return
        return block

    def delete_abci_chain(self, height):
        """Delete the ABCI chain record stored at ``height``."""
        return backend.query.delete_abci_chain(self.connection, height)

    def get_latest_abci_chain(self):
        """Return the most recently recorded ABCI chain entry (or None)."""
        return backend.query.get_latest_abci_chain(self.connection)

    def store_election(self, election_id, height, is_concluded):
        """Persist a single election outcome record."""
        return backend.query.store_election(self.connection, election_id, height, is_concluded)

    def store_elections(self, elections):
        """Persist multiple election records."""
        return backend.query.store_elections(self.connection, elections)

    def delete_elections(self, height):
        """Delete all election records stored at ``height``."""
        return backend.query.delete_elections(self.connection, height)

    # NOTE: moved here from Election; needs to be placed somewhere else
    def get_validators_dict(self, height=None):
        """Return a dictionary of validators with key as `public_key` and
        value as the `voting_power`
        """
        validators = {}
        for validator in self.get_validators(height):
            # NOTE: we assume that Tendermint encodes public key in base64
            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
            validators[public_key] = validator["voting_power"]
        return validators

    def tests_update_utxoset(self, transaction):
        """Update the UTXO set given ``transaction``. That is, remove
        the outputs that the given ``transaction`` spends, and add the
        outputs that the given ``transaction`` creates.

        Args:
            transaction (:obj:`~planetmint.models.Transaction`): A new
                transaction incoming into the system for which the UTXOF
                set needs to be updated.
        """
        # FIX: this text was previously assigned to a stray `self.updated__`
        # attribute instead of serving as the method docstring.
        spent_outputs = [spent_output for spent_output in transaction.spent_outputs]
        if spent_outputs:
            self.tests_delete_unspent_outputs(*spent_outputs)
        self.tests_store_unspent_outputs(*[utxo._asdict() for utxo in transaction.unspent_outputs])

    def tests_store_unspent_outputs(self, *unspent_outputs):
        """Store the given ``unspent_outputs`` (utxos).

        Args:
            *unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
                length tuple or list of unspent outputs.
        """
        if unspent_outputs:
            return backend.query.store_unspent_outputs(self.connection, *unspent_outputs)

    def tests_get_utxoset_merkle_root(self):
        """Returns the merkle root of the utxoset. This implies that
        the utxoset is first put into a merkle tree.

        For now, the merkle tree and its root will be computed each
        time. This obviously is not efficient and a better approach
        that limits the repetition of the same computation when
        unnecesary should be sought. For instance, future optimizations
        could simply re-compute the branches of the tree that were
        affected by a change.

        The transaction hash (id) and output index should be sufficient
        to uniquely identify a utxo, and consequently only that
        information from a utxo record is needed to compute the merkle
        root. Hence, each node of the merkle tree should contain the
        tuple (txid, output_index).

        .. important:: The leaves of the tree will need to be sorted in
            some kind of lexicographical order.

        Returns:
            str: Merkle root in hexadecimal form.
        """
        utxoset = backend.query.get_unspent_outputs(self.connection)
        # TODO Once ready, use the already pre-computed utxo_hash field.
        # See common/transactions.py for details.
        hashes = [
            sha3_256("{}{}".format(utxo["transaction_id"], utxo["output_index"]).encode()).digest() for utxo in utxoset
        ]
        # TODO Notice the sorted call!
        return merkleroot(sorted(hashes))

    def tests_delete_unspent_outputs(self, *unspent_outputs):
        """Deletes the given ``unspent_outputs`` (utxos).

        Args:
            *unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
                length tuple or list of unspent outputs.
        """
        if unspent_outputs:
            return backend.query.delete_unspent_outputs(self.connection, *unspent_outputs)

    def get_spent(self, txid, output, current_transactions=None) -> DbTransaction:
        """Return the transaction that spends output ``output`` of ``txid``.

        Considers both committed transactions and the in-flight
        ``current_transactions``.

        Raises:
            DoubleSpend: when more than one spender is found.
        """
        # FIX: default was a shared mutable `[]`; use None sentinel instead.
        current_transactions = current_transactions if current_transactions is not None else []
        transactions = backend.query.get_spent(self.connection, txid, output)
        current_spent_transactions = []
        for ctxn in current_transactions:
            for ctxn_input in ctxn.inputs:
                if ctxn_input.fulfills and ctxn_input.fulfills.txid == txid and ctxn_input.fulfills.output == output:
                    current_spent_transactions.append(ctxn)
        transaction = None
        if len(transactions) + len(current_spent_transactions) > 1:
            raise DoubleSpend('tx "{}" spends inputs twice'.format(txid))
        elif transactions:
            tx_id = transactions[0].id
            tx = backend.query.get_transaction_single(self.connection, tx_id)
            transaction = tx.to_dict()
        elif current_spent_transactions:
            transaction = current_spent_transactions[0]
        return transaction

    def get_block_containing_tx(self, txid) -> Block:
        """
        Retrieve the list of blocks (block ids) containing a
        transaction with transaction id `txid`

        Args:
            txid (str): transaction id of the transaction to query

        Returns:
            Block id list (list(int))
        """
        block = backend.query.get_block_with_transaction(self.connection, txid)
        return block

    def get_input_txs_and_conditions(self, inputs, current_transactions=None):
        """Resolve each input's source transaction and spent output condition.

        Args:
            inputs: transaction inputs to resolve.
            current_transactions: in-flight transactions that may provide the
                inputs before they are committed.

        Returns:
            tuple: (list of resolved Transaction objects, list of the Output
            conditions each input fulfills).

        Raises:
            InputDoesNotExist: when an input's source tx cannot be found.
            DoubleSpend: when an input was already spent.
        """
        # FIX: default was a shared mutable `[]`; use None sentinel instead.
        current_transactions = current_transactions if current_transactions is not None else []
        # store the inputs so that we can check if the asset ids match
        input_txs = []
        input_conditions = []
        for input_ in inputs:
            input_txid = input_.fulfills.txid
            input_tx = self.get_transaction(input_txid)
            _output = self.get_outputs_by_tx_id(input_txid)
            if input_tx is None:
                # The source tx may still be uncommitted in this block.
                for ctxn in current_transactions:
                    if ctxn.id == input_txid:
                        ctxn_dict = ctxn.to_dict()
                        input_tx = DbTransaction.from_dict(ctxn_dict)
                        _output = [
                            Output.from_dict(output, index, ctxn.id)
                            for index, output in enumerate(ctxn_dict["outputs"])
                        ]
            if input_tx is None:
                raise InputDoesNotExist("input `{}` doesn't exist".format(input_txid))
            spent = self.get_spent(input_txid, input_.fulfills.output, current_transactions)
            if spent:
                raise DoubleSpend("input `{}` was already spent".format(input_txid))
            output = _output[input_.fulfills.output]
            input_conditions.append(output)
            tx_dict = input_tx.to_dict()
            tx_dict["outputs"] = Output.list_to_dict(_output)
            tx_dict = DbTransaction.remove_generated_fields(tx_dict)
            pm_transaction = Transaction.from_dict(tx_dict, False)
            input_txs.append(pm_transaction)
        return input_txs, input_conditions

    def get_assets(self, asset_ids) -> list[Asset]:
        """Return a list of assets that match the asset_ids

        Args:
            asset_ids (:obj:`list` of :obj:`str`): A list of asset_ids to
                retrieve from the database.

        Returns:
            list: The list of assets returned from the database.
        """
        return backend.query.get_assets(self.connection, asset_ids)

    def get_assets_by_cid(self, asset_cid, **kwargs) -> list[dict]:
        """Return every asset (as dict) of every transaction whose asset CID matches ``asset_cid``."""
        asset_txs = backend.query.get_transactions_by_asset(self.connection, asset_cid, **kwargs)
        # flatten and return all found assets
        return list(chain.from_iterable([Asset.list_to_dict(tx.assets) for tx in asset_txs]))

    def get_metadata(self, txn_ids) -> list[MetaData]:
        """Return a list of metadata that match the transaction ids (txn_ids)

        Args:
            txn_ids (:obj:`list` of :obj:`str`): A list of txn_ids to
                retrieve from the database.

        Returns:
            list: The list of metadata returned from the database.
        """
        return backend.query.get_metadata(self.connection, txn_ids)

    def get_metadata_by_cid(self, metadata_cid, **kwargs) -> list[str]:
        """Return the metadata payload of every transaction whose metadata CID matches ``metadata_cid``."""
        metadata_txs = backend.query.get_transactions_by_metadata(self.connection, metadata_cid, **kwargs)
        return [tx.metadata.metadata for tx in metadata_txs]

    def get_validator_set(self, height=None):
        """Return the stored validator set at ``height`` (latest when None)."""
        return backend.query.get_validator_set(self.connection, height)

    def get_validators(self, height=None):
        """Return the validator list at ``height``; empty list when no set is stored."""
        result = self.get_validator_set(height)
        return [] if result is None else result["validators"]

    def get_election(self, election_id):
        """Return the stored election record for ``election_id``."""
        return backend.query.get_election(self.connection, election_id)

    def get_pre_commit_state(self):
        """Return the stored pre-commit state."""
        return backend.query.get_pre_commit_state(self.connection)

    def store_pre_commit_state(self, state):
        """Persist the given pre-commit ``state``."""
        return backend.query.store_pre_commit_state(self.connection, state)

    def store_validator_set(self, height, validators):
        """
        Store validator set at a given `height`.

        NOTE: If the validator set already exists at that `height` then an
        exception will be raised.
        """
        return backend.query.store_validator_set(self.connection, {"height": height, "validators": validators})

    def delete_validator_set(self, height):
        """Delete the validator set stored at ``height``."""
        return backend.query.delete_validator_set(self.connection, height)

    def store_abci_chain(self, height, chain_id, is_synced=True):
        """Record an ABCI chain entry (``height``, ``chain_id``, sync flag)."""
        return backend.query.store_abci_chain(self.connection, height, chain_id, is_synced)

    def get_asset_tokens_for_public_key(self, transaction_id, election_pk):
        """Return the vote-token transactions of election ``transaction_id`` held by ``election_pk``."""
        txns = backend.query.get_asset_tokens_for_public_key(self.connection, transaction_id, election_pk)
        return txns

    @property
    def fastquery(self):
        """Return a FastQuery helper bound to this connection (new instance per access)."""
        return FastQuery(self.connection)

View File

@ -0,0 +1,2 @@
class

View File

@ -7,7 +7,7 @@ import logging
import setproctitle
from planetmint.config import Config
from planetmint.lib import Planetmint
from planetmint.application.validation import Validator
from planetmint.abci.core import App
from planetmint.abci.parallel_validation import ParallelValidationApp
from planetmint.web import server, websocket_server
@ -41,7 +41,7 @@ def start(args):
exchange = Exchange()
# start the web api
app_server = server.create_server(
settings=Config().get()["server"], log_config=Config().get()["log"], planetmint_factory=Planetmint
settings=Config().get()["server"], log_config=Config().get()["log"], planetmint_factory=Validator
)
p_webapi = Process(name="planetmint_webapi", target=app_server.run, daemon=True)
p_webapi.start()

View File

@ -145,4 +145,3 @@ class Lazy:
self.stack = []
return last

View File

@ -15,7 +15,7 @@ import gunicorn.app.base
from flask import Flask
from flask_cors import CORS
from planetmint import utils
from planetmint import Planetmint
from planetmint.application.validation import Validator
from planetmint.web.routes import add_routes
from planetmint.web.strip_content_type_middleware import StripContentTypeMiddleware
@ -72,7 +72,7 @@ def create_app(*, debug=False, threads=1, planetmint_factory=None):
"""
if not planetmint_factory:
planetmint_factory = Planetmint
planetmint_factory = Validator
app = Flask(__name__)
app.wsgi_app = StripContentTypeMiddleware(app.wsgi_app)

View File

@ -28,8 +28,8 @@ class AssetListApi(Resource):
pool = current_app.config["bigchain_pool"]
with pool() as planet:
assets = planet.get_assets_by_cid(cid, **args)
with pool() as validator:
assets = validator.models.get_assets_by_cid(cid, **args)
try:
# This only works with MongoDB as the backend

View File

@ -22,8 +22,8 @@ class LatestBlock(Resource):
pool = current_app.config["bigchain_pool"]
with pool() as planet:
block = planet.get_latest_block()
with pool() as validator:
block = validator.models.get_latest_block()
if not block:
return make_error(404)
@ -44,8 +44,8 @@ class BlockApi(Resource):
pool = current_app.config["bigchain_pool"]
with pool() as planet:
block = planet.get_block(block_id=block_id)
with pool() as validator:
block = validator.models.get_block(block_id=block_id)
if not block:
return make_error(404)
@ -70,8 +70,8 @@ class BlockListApi(Resource):
pool = current_app.config["bigchain_pool"]
with pool() as planet:
block = planet.get_block_containing_tx(tx_id)
with pool() as validator:
block = validator.models.get_block_containing_tx(tx_id)
if not block:
return make_error(404, "Block containing transaction with id: {} not found.".format(tx_id))

View File

@ -36,8 +36,8 @@ class MetadataApi(Resource):
pool = current_app.config["bigchain_pool"]
with pool() as planet:
metadata = planet.get_metadata_by_cid(cid, **args)
with pool() as validator:
metadata = validator.models.get_metadata_by_cid(cid, **args)
try:
return metadata

View File

@ -23,9 +23,9 @@ class OutputListApi(Resource):
args = parser.parse_args(strict=True)
pool = current_app.config["bigchain_pool"]
with pool() as planet:
with pool() as validator:
try:
outputs = planet.get_outputs_filtered(args["public_key"], args["spent"])
outputs = validator.models.get_outputs_filtered(args["public_key"], args["spent"])
except Exception as e:
return make_error(
500,

View File

@ -38,8 +38,8 @@ class TransactionApi(Resource):
"""
pool = current_app.config["bigchain_pool"]
with pool() as planet:
tx = planet.get_transaction(tx_id)
with pool() as validator:
tx = validator.models.get_transaction(tx_id)
if not tx:
return make_error(404)
@ -54,8 +54,8 @@ class TransactionListApi(Resource):
parser.add_argument("asset_ids", type=parameters.valid_txid_list, required=True)
parser.add_argument("last_tx", type=parameters.valid_bool, required=False)
args = parser.parse_args()
with current_app.config["bigchain_pool"]() as planet:
txs = planet.get_transactions_filtered(**args)
with current_app.config["bigchain_pool"]() as validator:
txs = validator.models.get_transactions_filtered(**args)
return [tx.to_dict() for tx in txs]
@ -106,8 +106,9 @@ class TransactionListApi(Resource):
but this node only accepts transaction with higher \
schema version number.",
)
status_code, message = ABCI_RPC().write_transaction(MODE_LIST, planet.tendermint_rpc_endpoint, MODE_COMMIT, tx_obj,
mode)
status_code, message = ABCI_RPC().write_transaction(
MODE_LIST, ABCI_RPC().tendermint_rpc_endpoint, MODE_COMMIT, tx_obj, mode
)
if status_code == 202:
response = jsonify(tx)

View File

@ -17,7 +17,7 @@ class ValidatorsApi(Resource):
pool = current_app.config["bigchain_pool"]
with pool() as planet:
validators = planet.get_validators()
with pool() as validator:
validators = validator.models.get_validators()
return validators

View File

@ -10,12 +10,11 @@ from transactions.types.assets.transfer import Transfer
from transactions.types.assets.compose import Compose
from transactions.types.assets.decompose import Decompose
def test_asset_transfer(b, signed_create_tx, user_pk, user_sk, _bdb):
tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], [signed_create_tx.id])
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([signed_create_tx])
b.models.store_bulk_transactions( [signed_create_tx])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert tx_transfer_signed.assets[0]["id"] == signed_create_tx.id
@ -33,7 +32,7 @@ def test_validate_transfer_asset_id_mismatch(b, signed_create_tx, user_pk, user_
tx_transfer.assets[0]["id"] = "a" * 64
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([signed_create_tx])
b.models.store_bulk_transactions( [signed_create_tx])
with pytest.raises(AssetIdMismatch):
b.validate_transaction(tx_transfer_signed)
@ -73,7 +72,7 @@ def test_asset_id_mismatch(alice, user_pk):
def test_compose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx, _bdb):
validated = b.validate_transaction(signed_create_tx)
b.store_bulk_transactions([validated])
b.models.store_bulk_transactions( [validated])
inputs = signed_create_tx.to_inputs()
assets = [signed_create_tx.id, "QmW5GVMW98D3mktSDfWHS8nX2UiCd8gP1uCiujnFX4yK8n"]
@ -84,7 +83,7 @@ def test_compose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx
def test_decompose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx, _bdb):
validated = b.validate_transaction(signed_create_tx)
b.store_bulk_transactions([validated])
b.models.store_bulk_transactions( [validated])
inputs = signed_create_tx.to_inputs()
assets = [
@ -102,7 +101,7 @@ def test_decompose_valid_transactions(b, user_pk, user_sk, alice, signed_create_
def test_create_decompose_output(b, user_pk, user_sk, signed_create_tx, _bdb):
validated = b.validate_transaction(signed_create_tx)
b.store_bulk_transactions([validated])
b.models.store_bulk_transactions( [validated])
inputs = signed_create_tx.to_inputs()
assets = [
@ -116,7 +115,7 @@ def test_create_decompose_output(b, user_pk, user_sk, signed_create_tx, _bdb):
)
decompose_transaction.sign([user_sk])
validated_decompose = b.validate_transaction(decompose_transaction)
b.store_bulk_transactions([validated_decompose])
b.models.store_bulk_transactions( [validated_decompose])
create_inputs = decompose_transaction.to_inputs([0])
create_tx = Create.generate([user_pk], recipients=[([user_pk], 1)], assets=[assets[0]], inputs=create_inputs)
@ -141,6 +140,6 @@ def test_v_2_0_validation_create_invalid(b, signed_2_0_create_tx_assets, _bdb):
def test_v_2_0_validation_transfer(b, signed_2_0_create_tx, signed_2_0_transfer_tx, _bdb):
validated = b.validate_transaction(signed_2_0_create_tx)
b.store_bulk_transactions([validated])
b.models.store_bulk_transactions( [validated])
assert validated.to_dict() == signed_2_0_create_tx
assert b.validate_transaction(signed_2_0_transfer_tx).to_dict() == signed_2_0_transfer_tx

View File

@ -137,7 +137,7 @@ def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk,
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
@ -163,7 +163,7 @@ def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 2
@ -190,7 +190,7 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
@ -201,7 +201,7 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
assert len(condition["condition"]["details"]["subconditions"]) == 2
assert len(tx_transfer_signed.inputs) == 1
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -227,7 +227,7 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 2
@ -240,7 +240,7 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
assert len(tx_transfer_signed.inputs) == 1
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -265,7 +265,7 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
@ -276,7 +276,7 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk
assert "subconditions" in ffill
assert len(ffill["subconditions"]) == 2
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -299,14 +299,14 @@ def test_multiple_in_single_own_single_out_single_own_transfer(alice, b, user_pk
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
assert tx_transfer_signed.outputs[0].amount == 100
assert len(tx_transfer_signed.inputs) == 2
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -331,7 +331,7 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
@ -345,7 +345,7 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_
assert len(ffill_fid0["subconditions"]) == 2
assert len(ffill_fid1["subconditions"]) == 2
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -371,7 +371,7 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
assert tx_transfer_signed.outputs[0].amount == 100
@ -383,7 +383,7 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk
assert "subconditions" in ffill_fid1
assert len(ffill_fid1["subconditions"]) == 2
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -410,7 +410,7 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
tx_create.to_inputs(), [([alice.public_key], 50), ([alice.public_key, user_pk], 50)], asset_ids=[tx_create.id]
)
tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 2
@ -430,7 +430,7 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
assert "subconditions" in ffill_fid1
assert len(ffill_fid1["subconditions"]) == 2
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -466,7 +466,7 @@ def test_multiple_in_different_transactions(alice, b, user_pk, user_sk):
)
tx_transfer2_signed = tx_transfer2.sign([user_sk])
b.store_bulk_transactions([tx_create_signed, tx_transfer1_signed])
b.models.store_bulk_transactions( [tx_create_signed, tx_transfer1_signed])
assert b.validate_transaction(tx_transfer2_signed) == tx_transfer2_signed
assert len(tx_transfer2_signed.outputs) == 1
@ -491,7 +491,7 @@ def test_amount_error_transfer(alice, b, user_pk, user_sk):
)
tx_create_signed = tx_create.sign([alice.private_key])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
# TRANSFER
# output amount less than input amount
@ -529,12 +529,12 @@ def test_threshold_same_public_key(alice, b, user_pk, user_sk):
# TRANSFER
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk, user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
# assert tx_transfer_signed.validate(b) == tx_transfer_signed
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
# tx_transfer_signed.validate(b)
b.validate_transaction(tx_transfer_signed)
@ -554,13 +554,13 @@ def test_sum_amount(alice, b, user_pk, user_sk):
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 3)], asset_ids=[tx_create.id])
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 1
assert tx_transfer_signed.outputs[0].amount == 3
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)
@ -581,13 +581,13 @@ def test_divide(alice, b, user_pk, user_sk):
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
assert len(tx_transfer_signed.outputs) == 3
for output in tx_transfer_signed.outputs:
assert output.amount == 1
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
with pytest.raises(DoubleSpend):
b.validate_transaction(tx_transfer_signed)

View File

@ -51,7 +51,7 @@ SCRIPT_INPUT = {
metadata = {"units": 300, "type": "KG"}
def test_zenroom_signing():
def test_zenroom_signing(b):
biolabs = generate_key_pair()
version = "3.0"
@ -149,7 +149,6 @@ def test_zenroom_signing():
tx["id"] = shared_creation_txid
from transactions.common.transaction import Transaction
from planetmint.lib import Planetmint
from transactions.common.exceptions import (
SchemaValidationError,
ValidationError,
@ -164,9 +163,9 @@ def test_zenroom_signing():
except ValidationError as e:
print(e)
assert ()
planet = Planetmint()
try:
planet.validate_transaction(tx_obj)
b.validate_transaction(tx_obj)
except ValidationError as e:
print("Invalid transaction ({}): {}".format(type(e).__name__, e))
assert ()

View File

@ -11,12 +11,14 @@ from unittest.mock import Mock, patch
from argparse import Namespace
from planetmint.abci.rpc import ABCI_RPC
from planetmint.abci.block import Block
from planetmint.config import Config
from planetmint import ValidatorElection
from planetmint.commands.planetmint import run_election_show
from planetmint.commands.planetmint import run_election_new_chain_migration
from planetmint.commands.planetmint import run_election_approve
from planetmint.backend.connection import Connection
from planetmint.abci.block import Block
from transactions.types.elections.chain_migration_election import ChainMigrationElection
from tests.utils import generate_election, generate_validators
@ -247,21 +249,20 @@ def test_calling_main(start_mock, monkeypatch):
assert start_mock.called is True
@patch("planetmint.commands.planetmint.run_recover")
@patch("planetmint.application.validation.Validator.rollback")
@patch("planetmint.start.start")
def test_recover_db_on_start(mock_run_recover, mock_start, mocked_setup_logging):
def test_recover_db_on_start(mock_rollback, mock_start, mocked_setup_logging):
from planetmint.commands.planetmint import run_start
args = Namespace(config=None, yes=True, skip_initialize_database=False)
run_start(args)
assert mock_run_recover.called
assert mock_rollback.called
assert mock_start.called
@pytest.mark.bdb
def test_run_recover(b, alice, bob):
from planetmint.commands.planetmint import run_recover
def test_run_recover(b, alice, bob, test_models):
from transactions.types.assets.create import Create
from planetmint.abci.block import Block
from planetmint.backend import query
@ -280,23 +281,23 @@ def test_run_recover(b, alice, bob):
).sign([bob.private_key])
# store the transactions
b.store_bulk_transactions([tx1, tx2])
b.models.store_bulk_transactions( [tx1, tx2])
# create a random block
block8 = Block(app_hash="random_app_hash1", height=8, transactions=["txid_doesnt_matter"])._asdict()
b.store_block(block8)
b.models.store_block(block8)
# create the next block
block9 = Block(app_hash="random_app_hash1", height=9, transactions=[tx1.id])._asdict()
b.store_block(block9)
b.models.store_block(block9)
# create a pre_commit state which is ahead of the commit state
pre_commit_state = dict(height=10, transactions=[tx2.id])
b.store_pre_commit_state(pre_commit_state)
b.models.store_pre_commit_state(pre_commit_state)
run_recover(b)
b.rollback()
assert not query.get_transaction(b.connection, tx2.id)
assert not query.get_transaction(b.models.connection, tx2.id)
# Helper
@ -324,20 +325,21 @@ def test_election_new_upsert_validator_with_tendermint(b, priv_validator_path, u
election_id = run_election_new_upsert_validator(new_args, b)
assert b.get_transaction(election_id)
assert b.models.get_transaction(election_id)
@pytest.mark.bdb
@pytest.mark.skip(reason="mock_write overwrite doesn't work")
def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_validator_path, user_sk):
from planetmint.commands.planetmint import run_election_new_upsert_validator
#from planetmint.abci.rpc import write_transaction
# from planetmint.abci.rpc import write_transaction
def mock_write(modelist, endpoint, mode_commit, transaction, mode):
b.store_bulk_transactions([transaction])
b.models.store_bulk_transactions( [transaction])
return (202, "")
b.get_validators = mock_get_validators
b.models.get_validators = mock_get_validators
ABCI_RPC().write_transaction = mock_write
args = Namespace(
@ -353,7 +355,7 @@ def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_valida
with caplog.at_level(logging.INFO):
election_id = run_election_new_upsert_validator(args, b)
assert caplog.records[0].msg == "[SUCCESS] Submitted proposal with id: " + election_id
assert b.get_transaction(election_id)
assert b.models.get_transaction(election_id)
@pytest.mark.abci
@ -362,25 +364,25 @@ def test_election_new_chain_migration_with_tendermint(b, priv_validator_path, us
election_id = run_election_new_chain_migration(new_args, b)
assert b.get_transaction(election_id)
assert b.models.get_transaction(election_id)
@pytest.mark.bdb
@pytest.mark.skip(reason="mock_write overwrite doesn't work")
def test_election_new_chain_migration_without_tendermint(caplog, b, priv_validator_path, user_sk):
def mock_write(tx, mode):
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
return (202, "")
b.get_validators = mock_get_validators
b.write_transaction = mock_write
b.models.get_validators = mock_get_validators
ABCI_RPC().write_transaction = mock_write
args = Namespace(action="new", election_type="migration", sk=priv_validator_path, config={})
with caplog.at_level(logging.INFO):
election_id = run_election_new_chain_migration(args, b)
assert caplog.records[0].msg == "[SUCCESS] Submitted proposal with id: " + election_id
assert b.get_transaction(election_id)
assert b.models.get_transaction(election_id)
@pytest.mark.bdb
@ -408,11 +410,11 @@ def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_p
from transactions.common.exceptions import InvalidPowerChange
def mock_write(tx, mode):
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions([tx])
return (400, "")
b.write_transaction = mock_write
b.get_validators = mock_get_validators
ABCI_RPC().write_transaction = mock_write
b.models.get_validators = mock_get_validators
args = Namespace(
action="new",
election_type="upsert-validator",
@ -425,12 +427,12 @@ def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_p
with caplog.at_level(logging.ERROR):
assert not run_election_new_upsert_validator(args, b)
assert caplog.records[0].msg.__class__ == InvalidPowerChange
assert caplog.records[1].msg.__class__ == InvalidPowerChange
@pytest.mark.abci
def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, validators):
from planetmint.commands.planetmint import run_election_new_upsert_validator, run_election_approve
from planetmint.commands.planetmint import run_election_new_upsert_validator
public_key = "CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg="
new_args = Namespace(
@ -449,7 +451,7 @@ def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, valid
args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={})
approve = run_election_approve(args, b)
assert b.get_transaction(approve)
assert b.models.get_transaction(approve)
@pytest.mark.bdb
@ -467,7 +469,7 @@ def test_election_approve_without_tendermint(caplog, b, priv_validator_path, new
with caplog.at_level(logging.INFO):
approval_id = run_election_approve(args, b)
assert caplog.records[0].msg == "[SUCCESS] Your vote has been submitted"
assert b.get_transaction(approval_id)
assert b.models.get_transaction(approval_id)
@pytest.mark.bdb
@ -479,10 +481,10 @@ def test_election_approve_failure(caplog, b, priv_validator_path, new_validator,
b, election_id = call_election(b, new_validator, node_key)
def mock_write(tx, mode):
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions([tx])
return (400, "")
b.write_transaction = mock_write
ABCI_RPC().write_transaction = mock_write
# call run_upsert_validator_approve with args that point to the election
args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={})
@ -505,7 +507,7 @@ def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new
with caplog.at_level(logging.ERROR):
assert not run_election_approve(args, b)
assert (
caplog.records[0].msg == "The key you provided does not match any of "
caplog.records[1].msg == "The key you provided does not match any of "
"the eligible voters in this election."
)
@ -513,7 +515,7 @@ def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new
@pytest.mark.bdb
def test_chain_migration_election_show_shows_inconclusive(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
public_key = validators[0]["public_key"]
private_key = validators[0]["private_key"]
@ -524,16 +526,16 @@ def test_chain_migration_election_show_shows_inconclusive(b):
assert not run_election_show(Namespace(election_id=election.id), b)
b.process_block(1, [election])
b.store_bulk_transactions([election])
b.models.store_bulk_transactions( [election])
assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"
b.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
b.store_validator_set(2, [v["storage"] for v in validators])
b.models.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
b.models.store_validator_set(2, [v["storage"] for v in validators])
assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"
b.store_block(Block(height=2, transactions=[], app_hash="")._asdict())
b.models.store_block(Block(height=2, transactions=[], app_hash="")._asdict())
# TODO insert yet another block here when upgrading to Tendermint 0.22.4.
assert run_election_show(Namespace(election_id=election.id), b) == "status=inconclusive"
@ -542,7 +544,7 @@ def test_chain_migration_election_show_shows_inconclusive(b):
@pytest.mark.bdb
def test_chain_migration_election_show_shows_concluded(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
public_key = validators[0]["public_key"]
private_key = validators[0]["private_key"]
@ -552,13 +554,13 @@ def test_chain_migration_election_show_shows_concluded(b):
assert not run_election_show(Namespace(election_id=election.id), b)
b.store_bulk_transactions([election])
b.models.store_bulk_transactions( [election])
b.process_block(1, [election])
assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"
b.store_abci_chain(1, "chain-X")
b.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict())
b.models.store_abci_chain(1, "chain-X")
b.models.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict())
b.process_block(2, votes)
assert (
@ -602,12 +604,12 @@ def mock_get_validators(height):
def call_election(b, new_validator, node_key):
def mock_write(tx, mode):
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
return (202, "")
# patch the validator set. We now have one validator with power 10
b.get_validators = mock_get_validators
b.write_transaction = mock_write
b.models.get_validators = mock_get_validators
ABCI_RPC().write_transaction = mock_write
# our voters is a list of length 1, populated from our mocked validator
voters = b.get_recipients_list()
@ -617,6 +619,6 @@ def call_election(b, new_validator, node_key):
# patch in an election with a vote issued to the user
election_id = valid_election.id
b.store_bulk_transactions([valid_election])
b.models.store_bulk_transactions( [valid_election])
return b, election_id

View File

@ -31,6 +31,7 @@ from planetmint.backend import schema, query
from transactions.common.crypto import key_pair_from_ed25519_key, public_key_from_ed25519_key
from planetmint.abci.block import Block
from planetmint.abci.rpc import MODE_LIST
from planetmint.model.models import Models
from tests.utils import gen_vote
from planetmint.config import Config
from transactions.types.elections.validator_election import ValidatorElection # noqa
@ -249,10 +250,25 @@ def abci_fixture():
@pytest.fixture
def b():
from planetmint import Planetmint
def test_models():
from planetmint.model.models import Models
return Planetmint()
return Models()
@pytest.fixture
def test_validator():
from planetmint.application.validation import Validator
return Validator()
@pytest.fixture
def test_abci_rpc():
from planetmint.abci.rpc import ABCI_RPC
return ABCI_RPC()
@pytest.fixture
def b():
from planetmint.application.validation import Validator
return Validator()
@pytest.fixture
@ -264,7 +280,7 @@ def eventqueue_fixture():
@pytest.fixture
def b_mock(b, network_validators):
b.get_validators = mock_get_validators(network_validators)
b.models.get_validators = mock_get_validators(network_validators)
return b
@ -293,8 +309,8 @@ def signed_create_tx(alice, create_tx):
@pytest.fixture
def posted_create_tx(b, signed_create_tx):
res = ABCI_RPC().post_transaction(MODE_LIST, b.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT)
def posted_create_tx(b, signed_create_tx, test_abci_rpc):
res = test_abci_rpc.post_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT)
assert res.status_code == 200
return signed_create_tx
@ -318,7 +334,7 @@ def double_spend_tx(signed_create_tx, carol_pubkey, user_sk):
def _get_height(b):
maybe_block = b.get_latest_block()
maybe_block = b.models.get_latest_block()
return 0 if maybe_block is None else maybe_block["height"]
@ -336,8 +352,8 @@ def inputs(user_pk, b, alice):
]
tx_ids = [tx.id for tx in transactions]
block = Block(app_hash="hash" + str(height), height=height, transactions=tx_ids)
b.store_block(block._asdict())
b.store_bulk_transactions(transactions)
b.models.store_block(block._asdict())
b.models.store_bulk_transactions( transactions)
@pytest.fixture
@ -633,7 +649,7 @@ def validators(b, node_keys):
height = get_block_height(b)
original_validators = b.get_validators()
original_validators = b.models.get_validators()
(public_key, private_key) = list(node_keys.items())[0]
@ -647,7 +663,7 @@ def validators(b, node_keys):
validator_update = {"validators": validator_set, "height": height + 1, "election_id": f"setup_at_{timestamp()}"}
query.store_validator_set(b.connection, validator_update)
query.store_validator_set(b.models.connection, validator_update)
yield
@ -659,12 +675,12 @@ def validators(b, node_keys):
"election_id": f"teardown_at_{timestamp()}",
}
query.store_validator_set(b.connection, validator_update)
query.store_validator_set(b.models.connection, validator_update)
def get_block_height(b):
if b.get_latest_block():
height = b.get_latest_block()["height"]
if b.models.get_latest_block():
height = b.models.get_latest_block()["height"]
else:
height = 0
@ -696,25 +712,25 @@ def valid_upsert_validator_election_2(b_mock, node_key, new_validator):
@pytest.fixture
def ongoing_validator_election(b, valid_upsert_validator_election, ed25519_node_keys):
validators = b.get_validators(height=1)
validators = b.models.get_validators(height=1)
genesis_validators = {"validators": validators, "height": 0}
query.store_validator_set(b.connection, genesis_validators)
b.store_bulk_transactions([valid_upsert_validator_election])
query.store_election(b.connection, valid_upsert_validator_election.id, 1, is_concluded=False)
query.store_validator_set(b.models.connection, genesis_validators)
b.models.store_bulk_transactions( [valid_upsert_validator_election])
query.store_election(b.models.connection, valid_upsert_validator_election.id, 1, is_concluded=False)
block_1 = Block(app_hash="hash_1", height=1, transactions=[valid_upsert_validator_election.id])
b.store_block(block_1._asdict())
b.models.store_block(block_1._asdict())
return valid_upsert_validator_election
@pytest.fixture
def ongoing_validator_election_2(b, valid_upsert_validator_election_2, ed25519_node_keys):
validators = b.get_validators(height=1)
validators = b.models.get_validators(height=1)
genesis_validators = {"validators": validators, "height": 0, "election_id": None}
query.store_validator_set(b.connection, genesis_validators)
query.store_validator_set(b.models.connection, genesis_validators)
b.store_bulk_transactions([valid_upsert_validator_election_2])
b.models.store_bulk_transactions( [valid_upsert_validator_election_2])
block_1 = Block(app_hash="hash_2", height=1, transactions=[valid_upsert_validator_election_2.id])
b.store_block(block_1._asdict())
b.models.store_block(block_1._asdict())
return valid_upsert_validator_election_2

View File

@ -5,19 +5,16 @@
import random
import warnings
from unittest.mock import patch
import pytest
from base58 import b58decode
from ipld import marshal, multihash
from transactions.common import crypto
from transactions.common.output import Output as TransactionOutput
from transactions.common.transaction import TransactionLink
from transactions.common.transaction import Transaction
from transactions.types.assets.create import Create
from transactions.types.assets.transfer import Transfer
from planetmint.backend.models import Output
from planetmint.model.fastquery import FastQuery
from planetmint.exceptions import CriticalDoubleSpend
pytestmark = pytest.mark.bdb
@ -32,7 +29,7 @@ class TestBigchainApi(object):
tx = Create.generate([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
transfer_tx = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], asset_ids=[tx.id])
transfer_tx = transfer_tx.sign([alice.private_key])
@ -42,13 +39,13 @@ class TestBigchainApi(object):
with pytest.raises(DoubleSpend):
b.validate_transaction(transfer_tx2, [transfer_tx])
b.store_bulk_transactions([transfer_tx])
b.models.store_bulk_transactions( [transfer_tx])
with pytest.raises(DoubleSpend):
b.validate_transaction(transfer_tx2)
with pytest.raises(CriticalDoubleSpend):
b.store_bulk_transactions([transfer_tx2])
b.models.store_bulk_transactions( [transfer_tx2])
def test_double_inclusion(self, b, alice):
from tarantool.error import DatabaseError
@ -59,13 +56,13 @@ class TestBigchainApi(object):
tx = Create.generate([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
if isinstance(b.connection, TarantoolDBConnection):
b.models.store_bulk_transactions( [tx])
if isinstance(b.models.connection, TarantoolDBConnection):
with pytest.raises(CriticalDoubleSpend):
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
else:
with pytest.raises(OperationError):
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
@pytest.mark.usefixtures("inputs")
def test_non_create_input_not_found(self, b, user_pk):
@ -85,9 +82,9 @@ class TestBigchainApi(object):
asset1 = {"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}
tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=[asset1]).sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
tx_from_db = b.get_transaction(tx.id)
tx_from_db = b.models.get_transaction(tx.id)
before = tx.to_dict()
after = tx_from_db.to_dict()
@ -112,8 +109,8 @@ class TestTransactionValidation(object):
from transactions.common.crypto import generate_key_pair
from transactions.common.exceptions import InvalidSignature
input_tx = b.fastquery.get_outputs_by_public_key(user_pk).pop()
input_transaction = b.get_transaction(input_tx.txid)
input_tx = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
input_transaction = b.models.get_transaction(input_tx.txid)
sk, pk = generate_key_pair()
tx = Create.generate([pk], [([user_pk], 1)])
tx.operation = "TRANSFER"
@ -127,7 +124,7 @@ class TestTransactionValidation(object):
def test_non_create_double_spend(self, b, signed_create_tx, signed_transfer_tx, double_spend_tx):
from transactions.common.exceptions import DoubleSpend
b.store_bulk_transactions([signed_create_tx, signed_transfer_tx])
b.models.store_bulk_transactions( [signed_create_tx, signed_transfer_tx])
with pytest.raises(DoubleSpend):
b.validate_transaction(double_spend_tx)
@ -136,8 +133,8 @@ class TestTransactionValidation(object):
class TestMultipleInputs(object):
def test_transfer_single_owner_single_input(self, b, inputs, user_pk, user_sk):
user2_sk, user2_pk = crypto.generate_key_pair()
tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop()
input_tx = b.get_transaction(tx_link.txid)
tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
input_tx = b.models.get_transaction(tx_link.txid)
tx_converted = Transaction.from_dict(input_tx.to_dict(), True)
tx = Transfer.generate(tx_converted.to_inputs(), [([user2_pk], 1)], asset_ids=[input_tx.id])
@ -151,9 +148,9 @@ class TestMultipleInputs(object):
def test_single_owner_before_multiple_owners_after_single_input(self, b, user_sk, user_pk, inputs):
user2_sk, user2_pk = crypto.generate_key_pair()
user3_sk, user3_pk = crypto.generate_key_pair()
tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop()
tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
input_tx = b.get_transaction(tx_link.txid)
input_tx = b.models.get_transaction(tx_link.txid)
tx_converted = Transaction.from_dict(input_tx.to_dict(), True)
tx = Transfer.generate(tx_converted.to_inputs(), [([user2_pk, user3_pk], 1)], asset_ids=[input_tx.id])
@ -170,10 +167,10 @@ class TestMultipleInputs(object):
tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
owned_input = b.fastquery.get_outputs_by_public_key(user_pk).pop()
input_tx = b.get_transaction(owned_input.txid)
owned_input = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
input_tx = b.models.get_transaction(owned_input.txid)
input_tx_converted = Transaction.from_dict(input_tx.to_dict(), True)
transfer_tx = Transfer.generate(input_tx_converted.to_inputs(), [([user3_pk], 1)], asset_ids=[input_tx.id])
@ -192,11 +189,11 @@ class TestMultipleInputs(object):
tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
# get input
tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop()
tx_input = b.get_transaction(tx_link.txid)
tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
tx_input = b.models.get_transaction(tx_link.txid)
input_tx_converted = Transaction.from_dict(tx_input.to_dict(), True)
tx = Transfer.generate(input_tx_converted.to_inputs(), [([user3_pk, user4_pk], 1)], asset_ids=[tx_input.id])
@ -211,19 +208,19 @@ class TestMultipleInputs(object):
tx = Create.generate([alice.public_key], [([user_pk], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
assert owned_inputs_user1 == [TransactionLink(tx.id, 0)]
assert owned_inputs_user2 == []
tx_transfer = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_ids=[tx.id])
tx_transfer = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_transfer])
b.models.store_bulk_transactions( [tx_transfer])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
assert owned_inputs_user1 == [TransactionLink(tx.id, 0)]
assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0)]
@ -234,11 +231,11 @@ class TestMultipleInputs(object):
# create divisible asset
tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1)])
tx_create_signed = tx_create.sign([alice.private_key])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
# get input
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
expected_owned_inputs_user1 = [TransactionLink(tx_create.id, 0), TransactionLink(tx_create.id, 1)]
assert owned_inputs_user1 == expected_owned_inputs_user1
@ -249,10 +246,10 @@ class TestMultipleInputs(object):
tx_create.to_inputs(), [([user2_pk], 1), ([user2_pk], 1)], asset_ids=[tx_create.id]
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
assert owned_inputs_user1 == expected_owned_inputs_user1
assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0), TransactionLink(tx_transfer.id, 1)]
@ -263,10 +260,10 @@ class TestMultipleInputs(object):
tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user_pk)
expected_owned_inputs_user1 = [TransactionLink(tx.id, 0)]
assert owned_inputs_user1 == owned_inputs_user2
@ -274,11 +271,11 @@ class TestMultipleInputs(object):
tx = Transfer.generate(tx.to_inputs(), [([user3_pk], 1)], asset_ids=[tx.id])
tx = tx.sign([user_sk, user2_sk])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
spent_user1 = b.get_spent(tx.id, 0)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
spent_user1 = b.models.get_spent(tx.id, 0)
assert owned_inputs_user1 == owned_inputs_user2
assert not spent_user1
@ -288,21 +285,21 @@ class TestMultipleInputs(object):
tx = Create.generate([alice.public_key], [([user_pk], 1)])
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk).pop()
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
# check spents
input_txid = owned_inputs_user1.txid
spent_inputs_user1 = b.get_spent(input_txid, 0)
spent_inputs_user1 = b.models.get_spent(input_txid, 0)
assert spent_inputs_user1 is None
# create a transaction and send it
tx = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_ids=[tx.id])
tx = tx.sign([user_sk])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
spent_inputs_user1 = b.get_spent(input_txid, 0)
spent_inputs_user1 = b.models.get_spent(input_txid, 0)
assert spent_inputs_user1 == tx.to_dict()
def test_get_spent_single_tx_multiple_outputs(self, b, user_sk, user_pk, alice):
@ -312,29 +309,29 @@ class TestMultipleInputs(object):
# create a divisible asset with 3 outputs
tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1), ([user_pk], 1)])
tx_create_signed = tx_create.sign([alice.private_key])
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
# check spents
for input_tx in owned_inputs_user1:
assert b.get_spent(input_tx.txid, input_tx.output) is None
assert b.models.get_spent(input_tx.txid, input_tx.output) is None
# transfer the first 2 inputs
tx_transfer = Transfer.generate(
tx_create.to_inputs()[:2], [([user2_pk], 1), ([user2_pk], 1)], asset_ids=[tx_create.id]
)
tx_transfer_signed = tx_transfer.sign([user_sk])
b.store_bulk_transactions([tx_transfer_signed])
b.models.store_bulk_transactions( [tx_transfer_signed])
# check that used inputs are marked as spent
for ffill in tx_create.to_inputs()[:2]:
spent_tx = b.get_spent(ffill.fulfills.txid, ffill.fulfills.output)
spent_tx = b.models.get_spent(ffill.fulfills.txid, ffill.fulfills.output)
assert spent_tx == tx_transfer_signed.to_dict()
# check if remaining transaction that was unspent is also perceived
# spendable by Planetmint
assert b.get_spent(tx_create.to_inputs()[2].fulfills.txid, 2) is None
assert b.models.get_spent(tx_create.to_inputs()[2].fulfills.txid, 2) is None
def test_get_spent_multiple_owners(self, b, user_sk, user_pk, alice):
user2_sk, user2_pk = crypto.generate_key_pair()
@ -347,71 +344,66 @@ class TestMultipleInputs(object):
tx = tx.sign([alice.private_key])
transactions.append(tx)
b.store_bulk_transactions(transactions)
b.models.store_bulk_transactions( transactions)
owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
# check spents
for input_tx in owned_inputs_user1:
assert b.get_spent(input_tx.txid, input_tx.output) is None
assert b.models.get_spent(input_tx.txid, input_tx.output) is None
# create a transaction
tx = Transfer.generate(transactions[0].to_inputs(), [([user3_pk], 1)], asset_ids=[transactions[0].id])
tx = tx.sign([user_sk, user2_sk])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
# check that used inputs are marked as spent
assert b.get_spent(transactions[0].id, 0) == tx.to_dict()
assert b.models.get_spent(transactions[0].id, 0) == tx.to_dict()
# check that the other remain marked as unspent
for unspent in transactions[1:]:
assert b.get_spent(unspent.id, 0) is None
assert b.models.get_spent(unspent.id, 0) is None
def test_get_outputs_filtered_only_unspent():
def test_get_outputs_filtered_only_unspent(b):
from transactions.common.transaction import TransactionLink
from planetmint.lib import Planetmint
go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key"
go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
with patch(go) as get_outputs:
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
fs = "planetmint.fastquery.FastQuery.filter_spent_outputs"
fs = "planetmint.model.fastquery.FastQuery.filter_spent_outputs"
with patch(fs) as filter_spent:
filter_spent.return_value = [TransactionLink("b", 2)]
out = Planetmint().get_outputs_filtered("abc", spent=False)
out = b.models.get_outputs_filtered("abc", spent=False)
get_outputs.assert_called_once_with("abc")
assert out == [TransactionLink("b", 2)]
def test_get_outputs_filtered_only_spent():
def test_get_outputs_filtered_only_spent(b):
from transactions.common.transaction import TransactionLink
from planetmint.lib import Planetmint
go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key"
go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
with patch(go) as get_outputs:
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
fs = "planetmint.fastquery.FastQuery.filter_unspent_outputs"
fs = "planetmint.model.fastquery.FastQuery.filter_unspent_outputs"
with patch(fs) as filter_spent:
filter_spent.return_value = [TransactionLink("b", 2)]
out = Planetmint().get_outputs_filtered("abc", spent=True)
out = b.models.get_outputs_filtered("abc", spent=True)
get_outputs.assert_called_once_with("abc")
assert out == [TransactionLink("b", 2)]
@patch("planetmint.fastquery.FastQuery.filter_unspent_outputs")
@patch("planetmint.fastquery.FastQuery.filter_spent_outputs")
def test_get_outputs_filtered(filter_spent, filter_unspent):
#@patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")
#@patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
def test_get_outputs_filtered(b, mocker,):
from transactions.common.transaction import TransactionLink
mock_filter_spent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
mock_filter_unspent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")
from planetmint.lib import Planetmint
go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key"
go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
with patch(go) as get_outputs:
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
out = Planetmint().get_outputs_filtered("abc")
out = b.models.get_outputs_filtered("abc")
get_outputs.assert_called_once_with("abc")
filter_spent.assert_not_called()
filter_unspent.assert_not_called()
mock_filter_spent_outputs.assert_not_called()
mock_filter_unspent_outputs.assert_not_called()
assert out == get_outputs.return_value
@ -425,7 +417,7 @@ def test_cant_spend_same_input_twice_in_tx(b, alice):
tx_create = Create.generate([alice.public_key], [([alice.public_key], 100)])
tx_create_signed = tx_create.sign([alice.private_key])
assert b.validate_transaction(tx_create_signed) == tx_create_signed
b.store_bulk_transactions([tx_create_signed])
b.models.store_bulk_transactions( [tx_create_signed])
# Create a transfer transaction with duplicated fulfillments
dup_inputs = tx_create.to_inputs() + tx_create.to_inputs()
@ -450,6 +442,6 @@ def test_transaction_unicode(b, alice):
)
tx_1 = copy.deepcopy(tx)
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
assert beer_json["data"] in serialize(tx_1.to_dict())

View File

@ -10,7 +10,7 @@ from transactions.types.elections.validator_election import ValidatorElection
@pytest.mark.bdb
def test_process_block_concludes_all_elections(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
new_validator = generate_validators([1])[0]
@ -29,18 +29,18 @@ def test_process_block_concludes_all_elections(b):
txs += [election]
total_votes += votes
b.store_abci_chain(1, "chain-X")
b.models.store_abci_chain(1, "chain-X")
b.process_block(1, txs)
b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.store_bulk_transactions(txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.process_block(2, total_votes)
validators = b.get_validators()
validators = b.models.get_validators()
assert len(validators) == 5
assert new_validator["storage"] in validators
chain = b.get_latest_abci_chain()
chain = b.models.get_latest_abci_chain()
assert chain
assert chain == {
"height": 2,
@ -49,13 +49,13 @@ def test_process_block_concludes_all_elections(b):
}
for tx in txs:
assert b.get_election(tx.id)["is_concluded"]
assert b.models.get_election(tx.id)["is_concluded"]
@pytest.mark.bdb
def test_process_block_approves_only_one_validator_update(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
new_validator = generate_validators([1])[0]
@ -78,24 +78,24 @@ def test_process_block_approves_only_one_validator_update(b):
total_votes += votes
b.process_block(1, txs)
b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.store_bulk_transactions(txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.process_block(2, total_votes)
validators = b.get_validators()
validators = b.models.get_validators()
assert len(validators) == 5
assert new_validator["storage"] in validators
assert another_validator["storage"] not in validators
assert b.get_election(txs[0].id)["is_concluded"]
assert not b.get_election(txs[1].id)["is_concluded"]
assert b.models.get_election(txs[0].id)["is_concluded"]
assert not b.models.get_election(txs[1].id)["is_concluded"]
@pytest.mark.bdb
def test_process_block_approves_after_pending_validator_update(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
new_validator = generate_validators([1])[0]
@ -122,29 +122,29 @@ def test_process_block_approves_after_pending_validator_update(b):
txs += [election]
total_votes += votes
b.store_abci_chain(1, "chain-X")
b.models.store_abci_chain(1, "chain-X")
b.process_block(1, txs)
b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.store_bulk_transactions(txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.process_block(2, total_votes)
validators = b.get_validators()
validators = b.models.get_validators()
assert len(validators) == 5
assert new_validator["storage"] in validators
assert another_validator["storage"] not in validators
assert b.get_election(txs[0].id)["is_concluded"]
assert not b.get_election(txs[1].id)["is_concluded"]
assert b.get_election(txs[2].id)["is_concluded"]
assert b.models.get_election(txs[0].id)["is_concluded"]
assert not b.models.get_election(txs[1].id)["is_concluded"]
assert b.models.get_election(txs[2].id)["is_concluded"]
assert b.get_latest_abci_chain() == {"height": 2, "chain_id": "chain-X-migrated-at-height-1", "is_synced": False}
assert b.models.get_latest_abci_chain() == {"height": 2, "chain_id": "chain-X-migrated-at-height-1", "is_synced": False}
@pytest.mark.bdb
def test_process_block_does_not_approve_after_validator_update(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
new_validator = generate_validators([1])[0]
@ -158,9 +158,9 @@ def test_process_block_does_not_approve_after_validator_update(b):
txs = [election]
total_votes = votes
b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.process_block(1, txs)
b.store_bulk_transactions(txs)
b.models.store_bulk_transactions( txs)
second_election, second_votes = generate_election(
b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys
@ -168,19 +168,19 @@ def test_process_block_does_not_approve_after_validator_update(b):
b.process_block(2, total_votes + [second_election])
b.store_block(Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict())
b.models.store_block(Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict())
b.store_abci_chain(1, "chain-X")
b.models.store_abci_chain(1, "chain-X")
b.process_block(3, second_votes)
assert not b.get_election(second_election.id)["is_concluded"]
assert b.get_latest_abci_chain() == {"height": 1, "chain_id": "chain-X", "is_synced": True}
assert not b.models.get_election(second_election.id)["is_concluded"]
assert b.models.get_latest_abci_chain() == {"height": 1, "chain_id": "chain-X", "is_synced": True}
@pytest.mark.bdb
def test_process_block_applies_only_one_migration(b):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
public_key = validators[0]["public_key"]
private_key = validators[0]["private_key"]
@ -195,13 +195,13 @@ def test_process_block_applies_only_one_migration(b):
txs += [election]
total_votes += votes
b.store_abci_chain(1, "chain-X")
b.models.store_abci_chain(1, "chain-X")
b.process_block(1, txs)
b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.store_bulk_transactions(txs)
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions( txs)
b.process_block(1, total_votes)
chain = b.get_latest_abci_chain()
chain = b.models.get_latest_abci_chain()
assert chain
assert chain == {
"height": 2,
@ -209,8 +209,8 @@ def test_process_block_applies_only_one_migration(b):
"chain_id": "chain-X-migrated-at-height-1",
}
assert b.get_election(txs[0].id)["is_concluded"]
assert not b.get_election(txs[1].id)["is_concluded"]
assert b.models.get_election(txs[0].id)["is_concluded"]
assert not b.models.get_election(txs[1].id)["is_concluded"]
def test_process_block_gracefully_handles_empty_block(b):

View File

@ -17,7 +17,7 @@ from transactions.types.assets.create import Create
from transactions.types.assets.transfer import Transfer
from planetmint import App
from planetmint.backend import query
from planetmint.abci.core import OkCode, CodeTypeError, rollback
from planetmint.abci.core import OkCode, CodeTypeError
from planetmint.abci.block import Block
from planetmint.abci.tendermint_utils import new_validator_set
from planetmint.abci.tendermint_utils import public_key_to_base64
@ -51,9 +51,9 @@ def test_init_chain_successfully_registers_chain(b):
request = generate_init_chain_request("chain-XYZ")
res = App(b).init_chain(request)
assert res == types.ResponseInitChain()
chain = query.get_latest_abci_chain(b.connection)
chain = query.get_latest_abci_chain(b.models.connection)
assert chain == {"height": 0, "chain_id": "chain-XYZ", "is_synced": True}
assert query.get_latest_block(b.connection) == {
assert query.get_latest_block(b.models.connection) == {
"height": 0,
"app_hash": "",
"transaction_ids": [],
@ -66,7 +66,7 @@ def test_init_chain_ignores_invalid_init_chain_requests(b):
res = App(b).init_chain(request)
assert res == types.ResponseInitChain()
validator_set = query.get_validator_set(b.connection)
validator_set = query.get_validator_set(b.models.connection)
invalid_requests = [
request, # the same request again
@ -79,11 +79,11 @@ def test_init_chain_ignores_invalid_init_chain_requests(b):
with pytest.raises(SystemExit):
App(b).init_chain(r)
# assert nothing changed - neither validator set, nor chain ID
new_validator_set = query.get_validator_set(b.connection)
new_validator_set = query.get_validator_set(b.models.connection)
assert new_validator_set == validator_set
new_chain_id = query.get_latest_abci_chain(b.connection)["chain_id"]
new_chain_id = query.get_latest_abci_chain(b.models.connection)["chain_id"]
assert new_chain_id == "chain-XYZ"
assert query.get_latest_block(b.connection) == {
assert query.get_latest_block(b.models.connection) == {
"height": 0,
"app_hash": "",
"transaction_ids": [],
@ -96,10 +96,10 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
res = App(b).init_chain(request)
assert res == types.ResponseInitChain()
validator_set = query.get_validator_set(b.connection)["validators"]
validator_set = query.get_validator_set(b.models.connection)["validators"]
# simulate a migration
query.store_block(b.connection, Block(app_hash="", height=1, transactions=[])._asdict())
query.store_block(b.models.connection, Block(app_hash="", height=1, transactions=[])._asdict())
b.migrate_abci_chain()
# the same or other mismatching requests are ignored
@ -112,12 +112,12 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
for r in invalid_requests:
with pytest.raises(SystemExit):
App(b).init_chain(r)
assert query.get_latest_abci_chain(b.connection) == {
assert query.get_latest_abci_chain(b.models.connection) == {
"chain_id": "chain-XYZ-migrated-at-height-1",
"is_synced": False,
"height": 2,
}
new_validator_set = query.get_validator_set(b.connection)["validators"]
new_validator_set = query.get_validator_set(b.models.connection)["validators"]
assert new_validator_set == validator_set
# a request with the matching chain ID and matching validator set
@ -125,12 +125,12 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
request = generate_init_chain_request("chain-XYZ-migrated-at-height-1", validators)
res = App(b).init_chain(request)
assert res == types.ResponseInitChain()
assert query.get_latest_abci_chain(b.connection) == {
assert query.get_latest_abci_chain(b.models.connection) == {
"chain_id": "chain-XYZ-migrated-at-height-1",
"is_synced": True,
"height": 2,
}
assert query.get_latest_block(b.connection) == {
assert query.get_latest_block(b.models.connection) == {
"height": 2,
"app_hash": "",
"transaction_ids": [],
@ -145,14 +145,14 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
for r in invalid_requests:
with pytest.raises(SystemExit):
App(b).init_chain(r)
assert query.get_latest_abci_chain(b.connection) == {
assert query.get_latest_abci_chain(b.models.connection) == {
"chain_id": "chain-XYZ-migrated-at-height-1",
"is_synced": True,
"height": 2,
}
new_validator_set = query.get_validator_set(b.connection)["validators"]
new_validator_set = query.get_validator_set(b.models.connection)["validators"]
assert new_validator_set == validator_set
assert query.get_latest_block(b.connection) == {
assert query.get_latest_block(b.models.connection) == {
"height": 2,
"app_hash": "",
"transaction_ids": [],
@ -167,28 +167,28 @@ def test_info(b):
assert res.last_block_height == 0
assert res.last_block_app_hash == b""
b.store_block(Block(app_hash="1", height=1, transactions=[])._asdict())
b.models.store_block(Block(app_hash="1", height=1, transactions=[])._asdict())
res = app.info(r)
assert res.last_block_height == 1
assert res.last_block_app_hash == b"1"
# simulate a migration and assert the height is shifted
b.store_abci_chain(2, "chain-XYZ")
b.models.store_abci_chain(2, "chain-XYZ")
app = App(b)
b.store_block(Block(app_hash="2", height=2, transactions=[])._asdict())
b.models.store_block(Block(app_hash="2", height=2, transactions=[])._asdict())
res = app.info(r)
assert res.last_block_height == 0
assert res.last_block_app_hash == b"2"
b.store_block(Block(app_hash="3", height=3, transactions=[])._asdict())
b.models.store_block(Block(app_hash="3", height=3, transactions=[])._asdict())
res = app.info(r)
assert res.last_block_height == 1
assert res.last_block_app_hash == b"3"
# it's always the latest migration that is taken into account
b.store_abci_chain(4, "chain-XYZ-new")
b.models.store_abci_chain(4, "chain-XYZ-new")
app = App(b)
b.store_block(Block(app_hash="4", height=4, transactions=[])._asdict())
b.models.store_block(Block(app_hash="4", height=4, transactions=[])._asdict())
res = app.info(r)
assert res.last_block_height == 0
assert res.last_block_app_hash == b"4"
@ -235,7 +235,7 @@ def test_deliver_tx__valid_create_updates_db_and_emits_event(b, init_chain_reque
app.end_block(types.RequestEndBlock(height=99))
app.commit()
assert b.get_transaction(tx.id).id == tx.id
assert b.models.get_transaction(tx.id).id == tx.id
block_event = events.get()
assert block_event.data["transactions"] == [tx]
@ -264,7 +264,7 @@ def test_deliver_tx__double_spend_fails(b, init_chain_request):
app.end_block(types.RequestEndBlock(height=99))
app.commit()
assert b.get_transaction(tx.id).id == tx.id
assert b.models.get_transaction(tx.id).id == tx.id
result = app.deliver_tx(encode_tx_to_bytes(tx))
assert result.code == CodeTypeError
@ -311,7 +311,7 @@ def test_end_block_return_validator_updates(b, init_chain_request):
# generate a block containing a concluded validator election
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_validator_set(1, [v["storage"] for v in validators])
new_validator = generate_validators([1])[0]
@ -322,8 +322,8 @@ def test_end_block_return_validator_updates(b, init_chain_request):
election, votes = generate_election(
b, ValidatorElection, public_key, private_key, [{"data": new_validator["election"]}], voter_keys
)
b.store_block(Block(height=1, transactions=[election.id], app_hash="")._asdict())
b.store_bulk_transactions([election])
b.models.store_block(Block(height=1, transactions=[election.id], app_hash="")._asdict())
b.models.store_bulk_transactions([election])
b.process_block(1, [election])
app.block_transactions = votes
@ -352,32 +352,32 @@ def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request):
app.deliver_tx(encode_tx_to_bytes(tx))
app.end_block(types.RequestEndBlock(height=99))
resp = query.get_pre_commit_state(b.connection)
resp = query.get_pre_commit_state(b.models.connection)
assert resp["height"] == 99
assert resp["transactions"] == [tx.id]
app.begin_block(begin_block)
app.deliver_tx(encode_tx_to_bytes(tx))
app.end_block(types.RequestEndBlock(height=100))
resp = query.get_pre_commit_state(b.connection)
resp = query.get_pre_commit_state(b.models.connection)
assert resp["height"] == 100
assert resp["transactions"] == [tx.id]
# simulate a chain migration and assert the height is shifted
b.store_abci_chain(100, "new-chain")
b.models.store_abci_chain(100, "new-chain")
app = App(b)
app.begin_block(begin_block)
app.deliver_tx(encode_tx_to_bytes(tx))
app.end_block(types.RequestEndBlock(height=1))
resp = query.get_pre_commit_state(b.connection)
resp = query.get_pre_commit_state(b.models.connection)
assert resp["height"] == 101
assert resp["transactions"] == [tx.id]
def test_rollback_pre_commit_state_after_crash(b):
def test_rollback_pre_commit_state_after_crash(b, test_models):
validators = generate_validators([1] * 4)
b.store_validator_set(1, [v["storage"] for v in validators])
b.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
b.models.store_validator_set(1, [v["storage"] for v in validators])
b.models.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
public_key = validators[0]["public_key"]
private_key = validators[0]["private_key"]
@ -398,35 +398,35 @@ def test_rollback_pre_commit_state_after_crash(b):
total_votes += votes
txs += [validator_election, *votes]
b.store_bulk_transactions(txs)
b.store_abci_chain(2, "new_chain")
b.store_validator_set(2, [v["storage"] for v in validators])
b.models.store_bulk_transactions( txs)
b.models.store_abci_chain(2, "new_chain")
b.models.store_validator_set(2, [v["storage"] for v in validators])
# TODO change to `4` when upgrading to Tendermint 0.22.4.
b.store_validator_set(3, [new_validator["storage"]])
b.store_election(migration_election.id, 2, is_concluded=False)
b.store_election(validator_election.id, 2, is_concluded=True)
b.models.store_validator_set(3, [new_validator["storage"]])
b.models.store_election(migration_election.id, 2, is_concluded=False)
b.models.store_election(validator_election.id, 2, is_concluded=True)
# no pre-commit state
rollback(b)
b.rollback()
for tx in txs:
assert b.get_transaction(tx.id)
assert b.get_latest_abci_chain()
assert len(b.get_validator_set()["validators"]) == 1
assert b.get_election(migration_election.id)
assert b.get_election(validator_election.id)
assert b.models.get_transaction(tx.id)
assert b.models.get_latest_abci_chain()
assert len(b.models.get_validator_set()["validators"]) == 1
assert b.models.get_election(migration_election.id)
assert b.models.get_election(validator_election.id)
b.store_pre_commit_state({"height": 2, "transactions": [tx.id for tx in txs]})
b.models.store_pre_commit_state({"height": 2, "transactions": [tx.id for tx in txs]})
rollback(b)
b.rollback()
for tx in txs:
assert not b.get_transaction(tx.id)
assert not b.get_latest_abci_chain()
assert len(b.get_validator_set()["validators"]) == 4
assert len(b.get_validator_set(2)["validators"]) == 4
assert not b.get_election(migration_election.id)
assert not b.get_election(validator_election.id)
assert not b.models.get_transaction(tx.id)
assert not b.models.get_latest_abci_chain()
assert len(b.models.get_validator_set()["validators"]) == 4
assert len(b.models.get_validator_set(2)["validators"]) == 4
assert not b.models.get_election(migration_election.id)
assert not b.models.get_election(validator_election.id)
def test_new_validator_set(b):
@ -451,8 +451,8 @@ def test_new_validator_set(b):
validators = [node1]
updates = [node1_new_power, node2]
b.store_validator_set(1, validators)
updated_validator_set = new_validator_set(b.get_validators(1), updates)
b.models.store_validator_set(1, validators)
updated_validator_set = new_validator_set(b.models.get_validators(1), updates)
updated_validators = []
for u in updates:
@ -467,42 +467,42 @@ def test_new_validator_set(b):
def test_info_aborts_if_chain_is_not_synced(b):
b.store_abci_chain(0, "chain-XYZ", False)
b.models.store_abci_chain(0, "chain-XYZ", False)
with pytest.raises(SystemExit):
App(b).info(types.RequestInfo())
def test_check_tx_aborts_if_chain_is_not_synced(b):
b.store_abci_chain(0, "chain-XYZ", False)
b.models.store_abci_chain(0, "chain-XYZ", False)
with pytest.raises(SystemExit):
App(b).check_tx("some bytes")
def test_begin_aborts_if_chain_is_not_synced(b):
b.store_abci_chain(0, "chain-XYZ", False)
b.models.store_abci_chain(0, "chain-XYZ", False)
with pytest.raises(SystemExit):
App(b).info(types.RequestBeginBlock())
def test_deliver_tx_aborts_if_chain_is_not_synced(b):
b.store_abci_chain(0, "chain-XYZ", False)
b.models.store_abci_chain(0, "chain-XYZ", False)
with pytest.raises(SystemExit):
App(b).deliver_tx("some bytes")
def test_end_block_aborts_if_chain_is_not_synced(b):
b.store_abci_chain(0, "chain-XYZ", False)
b.models.store_abci_chain(0, "chain-XYZ", False)
with pytest.raises(SystemExit):
App(b).info(types.RequestEndBlock())
def test_commit_aborts_if_chain_is_not_synced(b):
b.store_abci_chain(0, "chain-XYZ", False)
b.models.store_abci_chain(0, "chain-XYZ", False)
with pytest.raises(SystemExit):
App(b).commit()

View File

@ -13,40 +13,40 @@ pytestmark = pytest.mark.bdb
@pytest.fixture
def txns(b, user_pk, user_sk, user2_pk, user2_sk):
def txns(b, user_pk, user_sk, user2_pk, user2_sk, test_models):
txs = [
Create.generate([user_pk], [([user2_pk], 1)]).sign([user_sk]),
Create.generate([user2_pk], [([user_pk], 1)]).sign([user2_sk]),
Create.generate([user_pk], [([user_pk], 1), ([user2_pk], 1)]).sign([user_sk]),
]
b.store_bulk_transactions(txs)
b.models.store_bulk_transactions(txs)
return txs
def test_get_outputs_by_public_key(b, user_pk, user2_pk, txns):
def test_get_outputs_by_public_key(b, user_pk, user2_pk, txns, test_models):
expected = [TransactionLink(txns[1].id, 0), TransactionLink(txns[2].id, 0)]
actual = b.fastquery.get_outputs_by_public_key(user_pk)
actual = test_models.fastquery.get_outputs_by_public_key(user_pk)
_all_txs = set([tx.txid for tx in expected + actual])
assert len(_all_txs) == 2
# assert b.fastquery.get_outputs_by_public_key(user_pk) == [ # OLD VERIFICATION
# assert b.models.fastquery.get_outputs_by_public_key(user_pk) == [ # OLD VERIFICATION
# TransactionLink(txns[1].id, 0),
# TransactionLink(txns[2].id, 0)
# ]
actual_1 = b.fastquery.get_outputs_by_public_key(user2_pk)
actual_1 = test_models.fastquery.get_outputs_by_public_key(user2_pk)
expected_1 = [
TransactionLink(txns[0].id, 0),
TransactionLink(txns[2].id, 1),
]
_all_tx_1 = set([tx.txid for tx in actual_1 + expected_1])
assert len(_all_tx_1) == 2
# assert b.fastquery.get_outputs_by_public_key(user2_pk) == [ # OLD VERIFICATION
# assert b.models.fastquery.get_outputs_by_public_key(user2_pk) == [ # OLD VERIFICATION
# TransactionLink(txns[0].id, 0),
# TransactionLink(txns[2].id, 1),
# ]
def test_filter_spent_outputs(b, user_pk, user_sk):
def test_filter_spent_outputs(b, user_pk, user_sk, test_models):
out = [([user_pk], 1)]
tx1 = Create.generate([user_pk], out * 2)
tx1.sign([user_sk])
@ -57,10 +57,10 @@ def test_filter_spent_outputs(b, user_pk, user_sk):
tx2.sign([user_sk])
# tx2 produces a new unspent. inputs[1] remains unspent.
b.store_bulk_transactions([tx1, tx2])
b.models.store_bulk_transactions([tx1, tx2])
outputs = b.fastquery.get_outputs_by_public_key(user_pk)
unspents = b.fastquery.filter_spent_outputs(outputs)
outputs = test_models.fastquery.get_outputs_by_public_key(user_pk)
unspents = test_models.fastquery.filter_spent_outputs(outputs)
assert set(unsp for unsp in unspents) == {
inputs[1].fulfills,
@ -68,7 +68,7 @@ def test_filter_spent_outputs(b, user_pk, user_sk):
}
def test_filter_unspent_outputs(b, user_pk, user_sk):
def test_filter_unspent_outputs(b, user_pk, user_sk, test_models):
out = [([user_pk], 1)]
tx1 = Create.generate([user_pk], out * 2)
tx1.sign([user_sk])
@ -79,27 +79,27 @@ def test_filter_unspent_outputs(b, user_pk, user_sk):
tx2.sign([user_sk])
# tx2 produces a new unspent. input[1] remains unspent.
b.store_bulk_transactions([tx1, tx2])
b.models.store_bulk_transactions([tx1, tx2])
outputs = b.fastquery.get_outputs_by_public_key(user_pk)
spents = b.fastquery.filter_unspent_outputs(outputs)
outputs = test_models.fastquery.get_outputs_by_public_key(user_pk)
spents = test_models.fastquery.filter_unspent_outputs(outputs)
assert set(sp for sp in spents) == {
inputs[0].fulfills,
}
def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk, test_models, test_validator):
from planetmint import backend
from planetmint.backend.connection import Connection
from planetmint.backend import query
tx1 = Create.generate([user_pk], [([user_pk], 3), ([user_pk], 2), ([user_pk], 1)]).sign([user_sk])
b.store_bulk_transactions([tx1])
b.models.store_bulk_transactions([tx1])
inputs = tx1.to_inputs()
tx2 = Transfer.generate([inputs[1]], [([user2_pk], 2)], [tx1.id]).sign([user_sk])
assert b.validate_transaction(tx2)
assert test_validator.validate_transaction(tx2)
tx2_dict = tx2.to_dict()
fulfills = tx2_dict["inputs"][0]["fulfills"]
@ -107,28 +107,28 @@ def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
"transaction_id": fulfills["transaction_id"],
"output_index": fulfills["output_index"],
}
backend.query.store_transactions(b.connection, [tx2_dict])
backend.query.store_transactions(test_models.connection, [tx2_dict])
outputs = b.get_outputs_filtered(user_pk, spent=False)
outputs = test_models.get_outputs_filtered(user_pk, spent=False)
assert len(outputs) == 2
outputs = b.get_outputs_filtered(user2_pk, spent=False)
outputs = test_models.get_outputs_filtered(user2_pk, spent=False)
assert len(outputs) == 1
# clean the transaction, metdata and asset collection
connection = Connection()
query.delete_transactions(connection, txn_ids=[tx1.id, tx2.id])
query.delete_transactions(test_models.connection, txn_ids=[tx1.id, tx2.id])
b.store_bulk_transactions([tx1])
b.models.store_bulk_transactions([tx1])
tx2_dict = tx2.to_dict()
tx2_dict["inputs"][0]["fulfills"] = {
"output_index": fulfills["output_index"],
"transaction_id": fulfills["transaction_id"],
}
backend.query.store_transactions(b.connection, [tx2_dict])
outputs = b.get_outputs_filtered(user_pk, spent=False)
backend.query.store_transactions(test_models.connection, [tx2_dict])
outputs = test_models.get_outputs_filtered(user_pk, spent=False)
assert len(outputs) == 2
outputs = b.get_outputs_filtered(user2_pk, spent=False)
outputs = test_models.get_outputs_filtered(user2_pk, spent=False)
assert len(outputs) == 1

View File

@ -34,16 +34,16 @@ def test_app(b, eventqueue_fixture, init_chain_request):
assert res
assert res.info.last_block_app_hash == b""
assert res.info.last_block_height == 0
assert not b.get_latest_block()
assert not b.models.get_latest_block()
p.process("init_chain", types.Request(init_chain=init_chain_request))
block0 = b.get_latest_block()
block0 = b.models.get_latest_block()
assert block0
assert block0["height"] == 0
assert block0["app_hash"] == ""
pk = codecs.encode(init_chain_request.validators[0].pub_key.ed25519, "base64").decode().strip("\n")
[validator] = b.get_validators(height=1)
[validator] = b.models.get_validators(height=1)
assert validator["public_key"]["value"] == pk
assert validator["voting_power"] == 10
@ -81,9 +81,9 @@ def test_app(b, eventqueue_fixture, init_chain_request):
data = p.process("commit", None)
res = next(read_messages(BytesIO(data), types.Response))
assert res.commit.data == new_block_hash.encode("utf-8")
assert b.get_transaction(tx.id).id == tx.id
assert b.models.get_transaction(tx.id).id == tx.id
block0 = b.get_latest_block()
block0 = b.models.get_latest_block()
assert block0
assert block0["height"] == 1
assert block0["app_hash"] == new_block_hash
@ -101,7 +101,7 @@ def test_app(b, eventqueue_fixture, init_chain_request):
res = next(read_messages(BytesIO(data), types.Response))
assert res.commit.data == new_block_hash.encode("utf-8")
block0 = b.get_latest_block()
block0 = b.models.get_latest_block()
assert block0
assert block0["height"] == 2
@ -110,7 +110,7 @@ def test_app(b, eventqueue_fixture, init_chain_request):
@pytest.mark.abci
def test_post_transaction_responses(tendermint_ws_url, b):
def test_post_transaction_responses(tendermint_ws_url, b, test_abci_rpc):
from transactions.common.crypto import generate_key_pair
alice = generate_key_pair()
@ -121,14 +121,18 @@ def test_post_transaction_responses(tendermint_ws_url, b):
assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}],
).sign([alice.private_key])
code, message = ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT)
code, message = test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT
)
assert code == 202
tx_transfer = Transfer.generate(tx.to_inputs(), [([bob.public_key], 1)], asset_ids=[tx.id]).sign(
[alice.private_key]
)
code, message = ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, tx_transfer, BROADCAST_TX_COMMIT)
code, message = test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx_transfer, BROADCAST_TX_COMMIT
)
assert code == 202
carly = generate_key_pair()
@ -138,6 +142,8 @@ def test_post_transaction_responses(tendermint_ws_url, b):
asset_ids=[tx.id],
).sign([alice.private_key])
for mode in (BROADCAST_TX_SYNC, BROADCAST_TX_COMMIT):
code, message = ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, double_spend, mode)
code, message = test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, double_spend, mode
)
assert code == 500
assert message == "Transaction validation failed"

View File

@ -24,14 +24,13 @@ from uuid import uuid4
from planetmint.abci.rpc import ABCI_RPC
from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
@pytest.mark.bdb
def test_asset_is_separated_from_transaciton(b):
import copy
from transactions.common.crypto import generate_key_pair
from planetmint.backend.tarantool.connection import TarantoolDBConnection
if isinstance(b.connection, TarantoolDBConnection):
if isinstance(b.models.connection, TarantoolDBConnection):
pytest.skip("This specific function is skipped because, assets are stored differently if using Tarantool")
alice = generate_key_pair()
@ -61,16 +60,16 @@ def test_asset_is_separated_from_transaciton(b):
[alice.private_key]
)
# with store_bulk_transactions we use `insert_many` where PyMongo
# with b.models.store_bulk_transactions we use `insert_many` where PyMongo
# automatically adds an `_id` field to the tx, therefore we need the
# deepcopy, for more info see:
# https://api.mongodb.com/python/current/faq.html#writes-and-ids
tx_dict = copy.deepcopy(tx.to_dict())
b.store_bulk_transactions([tx])
assert "asset" not in backend.query.get_transaction_single(b.connection, tx.id)
assert backend.query.get_asset(b.connection, tx.id).data == assets[0]
assert b.get_transaction(tx.id).to_dict() == tx_dict
b.models.store_bulk_transactions( [tx])
assert "asset" not in backend.query.get_transaction_single(b.models.connection, tx.id)
assert backend.query.get_asset(b.models.connection, tx.id).data == assets[0]
assert b.models.get_transaction(tx.id).to_dict() == tx_dict
@pytest.mark.bdb
@ -81,9 +80,9 @@ def test_get_latest_block(b):
app_hash = os.urandom(16).hex()
txn_id = os.urandom(16).hex()
block = Block(app_hash=app_hash, height=i, transactions=[txn_id])._asdict()
b.store_block(block)
b.models.store_block(block)
block = b.get_latest_block()
block = b.models.get_latest_block()
assert block["height"] == 9
@ -100,7 +99,7 @@ def test_validation_error(b):
@patch("requests.post")
def test_write_and_post_transaction(mock_post, b):
def test_write_and_post_transaction(mock_post, b, test_abci_rpc):
from transactions.common.crypto import generate_key_pair
from planetmint.abci.tendermint_utils import encode_transaction
@ -110,7 +109,7 @@ def test_write_and_post_transaction(mock_post, b):
)
tx = b.validate_transaction(tx)
ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_ASYNC)
test_abci_rpc.write_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_ASYNC)
assert mock_post.called
args, kwargs = mock_post.call_args
@ -121,7 +120,7 @@ def test_write_and_post_transaction(mock_post, b):
@patch("requests.post")
@pytest.mark.parametrize("mode", [BROADCAST_TX_SYNC, BROADCAST_TX_ASYNC, BROADCAST_TX_COMMIT])
def test_post_transaction_valid_modes(mock_post, b, mode):
def test_post_transaction_valid_modes(mock_post, b, mode, test_abci_rpc):
from transactions.common.crypto import generate_key_pair
alice = generate_key_pair()
@ -129,13 +128,13 @@ def test_post_transaction_valid_modes(mock_post, b, mode):
Create.generate([alice.public_key], [([alice.public_key], 1)], assets=None).sign([alice.private_key]).to_dict()
)
tx = b.validate_transaction(tx)
ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, tx, mode)
test_abci_rpc.write_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, mode)
args, kwargs = mock_post.call_args
assert mode == kwargs["json"]["method"]
def test_post_transaction_invalid_mode(b):
def test_post_transaction_invalid_mode(b, test_abci_rpc):
from transactions.common.crypto import generate_key_pair
from transactions.common.exceptions import ValidationError
@ -145,18 +144,18 @@ def test_post_transaction_invalid_mode(b):
)
tx = b.validate_transaction(tx)
with pytest.raises(ValidationError):
ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, tx, "nope")
test_abci_rpc.write_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, "nope")
@pytest.mark.bdb
def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn):
b.update_utxoset(signed_create_tx)
b.models.tests_update_utxoset(signed_create_tx)
utxoset = db_conn.get_space("utxos")
assert utxoset.select().rowcount == 1
utxo = utxoset.select().data
assert utxo[0][1] == signed_create_tx.id
assert utxo[0][2] == 0
b.update_utxoset(signed_transfer_tx)
b.models.tests_update_utxoset(signed_transfer_tx)
assert utxoset.select().rowcount == 1
utxo = utxoset.select().data
assert utxo[0][1] == signed_transfer_tx.id
@ -166,26 +165,26 @@ def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn):
@pytest.mark.bdb
def test_store_transaction(mocker, b, signed_create_tx, signed_transfer_tx):
mocked_store_transaction = mocker.patch("planetmint.backend.query.store_transactions")
b.store_bulk_transactions([signed_create_tx])
mocked_store_transaction.assert_any_call(b.connection, [signed_create_tx.to_dict()], "transactions")
b.models.store_bulk_transactions( [signed_create_tx])
mocked_store_transaction.assert_any_call(b.models.connection, [signed_create_tx.to_dict()], "transactions")
mocked_store_transaction.reset_mock()
b.store_bulk_transactions([signed_transfer_tx])
b.models.store_bulk_transactions( [signed_transfer_tx])
@pytest.mark.bdb
def test_store_bulk_transaction(mocker, b, signed_create_tx, signed_transfer_tx):
mocked_store_transactions = mocker.patch("planetmint.backend.query.store_transactions")
b.store_bulk_transactions((signed_create_tx,))
mocked_store_transactions.assert_any_call(b.connection, [signed_create_tx.to_dict()], "transactions")
b.models.store_bulk_transactions( (signed_create_tx,))
mocked_store_transactions.assert_any_call(b.models.connection, [signed_create_tx.to_dict()], "transactions")
mocked_store_transactions.reset_mock()
b.store_bulk_transactions((signed_transfer_tx,))
b.models.store_bulk_transactions( (signed_transfer_tx,))
@pytest.mark.bdb
def test_delete_zero_unspent_outputs(b, utxoset):
unspent_outputs, utxo_collection = utxoset
num_rows_before_operation = utxo_collection.select().rowcount
delete_res = b.delete_unspent_outputs() # noqa: F841
delete_res = b.models.tests_delete_unspent_outputs() # noqa: F841
num_rows_after_operation = utxo_collection.select().rowcount
# assert delete_res is None
assert num_rows_before_operation == num_rows_after_operation
@ -193,12 +192,12 @@ def test_delete_zero_unspent_outputs(b, utxoset):
@pytest.mark.bdb
def test_delete_one_unspent_outputs(b, dummy_unspent_outputs):
utxo_space = b.connection.get_space("utxos")
utxo_space = b.models.connection.get_space("utxos")
for utxo in dummy_unspent_outputs:
res = utxo_space.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
assert res
b.delete_unspent_outputs(dummy_unspent_outputs[0])
b.models.tests_delete_unspent_outputs(dummy_unspent_outputs[0])
res1 = utxo_space.select(["a", 1], index="utxo_by_transaction_id_and_output_index").data
res2 = utxo_space.select(["b", 0], index="utxo_by_transaction_id_and_output_index").data
assert len(res1) + len(res2) == 2
@ -208,12 +207,12 @@ def test_delete_one_unspent_outputs(b, dummy_unspent_outputs):
@pytest.mark.bdb
def test_delete_many_unspent_outputs(b, dummy_unspent_outputs):
utxo_space = b.connection.get_space("utxos")
utxo_space = b.models.connection.get_space("utxos")
for utxo in dummy_unspent_outputs:
res = utxo_space.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
assert res
b.delete_unspent_outputs(*dummy_unspent_outputs[::2])
b.models.tests_delete_unspent_outputs(*dummy_unspent_outputs[::2])
res1 = utxo_space.select(["a", 0], index="utxo_by_transaction_id_and_output_index").data
res2 = utxo_space.select(["b", 0], index="utxo_by_transaction_id_and_output_index").data
assert len(res1) + len(res2) == 0
@ -223,9 +222,9 @@ def test_delete_many_unspent_outputs(b, dummy_unspent_outputs):
@pytest.mark.bdb
def test_store_zero_unspent_output(b):
utxos = b.connection.get_space("utxos")
utxos = b.models.connection.get_space("utxos")
num_rows_before_operation = utxos.select().rowcount
res = b.store_unspent_outputs()
res = b.models.tests_store_unspent_outputs()
num_rows_after_operation = utxos.select().rowcount
assert res is None
assert num_rows_before_operation == num_rows_after_operation
@ -235,8 +234,8 @@ def test_store_zero_unspent_output(b):
def test_store_one_unspent_output(b, unspent_output_1, utxo_collection):
from planetmint.backend.tarantool.connection import TarantoolDBConnection
res = b.store_unspent_outputs(unspent_output_1)
if not isinstance(b.connection, TarantoolDBConnection):
res = b.models.tests_store_unspent_outputs(unspent_output_1)
if not isinstance(b.models.connection, TarantoolDBConnection):
assert res.acknowledged
assert len(list(res)) == 1
assert (
@ -249,7 +248,7 @@ def test_store_one_unspent_output(b, unspent_output_1, utxo_collection):
== 1
)
else:
utx_space = b.connection.get_space("utxos")
utx_space = b.models.connection.get_space("utxos")
res = utx_space.select(
[unspent_output_1["transaction_id"], unspent_output_1["output_index"]],
index="utxo_by_transaction_id_and_output_index",
@ -259,25 +258,25 @@ def test_store_one_unspent_output(b, unspent_output_1, utxo_collection):
@pytest.mark.bdb
def test_store_many_unspent_outputs(b, unspent_outputs):
b.store_unspent_outputs(*unspent_outputs)
utxo_space = b.connection.get_space("utxos")
b.models.tests_store_unspent_outputs(*unspent_outputs)
utxo_space = b.models.connection.get_space("utxos")
res = utxo_space.select([unspent_outputs[0]["transaction_id"]], index="utxos_by_transaction_id")
assert len(res.data) == 3
def test_get_utxoset_merkle_root_when_no_utxo(b):
assert b.get_utxoset_merkle_root() == sha3_256(b"").hexdigest()
assert b.models.tests_get_utxoset_merkle_root() == sha3_256(b"").hexdigest()
@pytest.mark.bdb
def test_get_utxoset_merkle_root(b, dummy_unspent_outputs):
utxo_space = b.connection.get_space("utxos")
utxo_space = b.models.connection.get_space("utxos")
for utxo in dummy_unspent_outputs:
res = utxo_space.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
assert res
expected_merkle_root = "86d311c03115bf4d287f8449ca5828505432d69b82762d47077b1c00fe426eac"
merkle_root = b.get_utxoset_merkle_root()
merkle_root = b.models.tests_get_utxoset_merkle_root()
assert merkle_root == expected_merkle_root
@ -301,20 +300,20 @@ def test_get_spent_transaction_double_spend(b, alice, bob, carol):
tx.to_inputs() + tx.to_inputs(), [([bob.public_key], 1)], asset_ids=[tx.id]
).sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
with pytest.raises(DoubleSpend):
b.validate_transaction(same_input_double_spend)
assert b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer])
assert b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer])
with pytest.raises(DoubleSpend):
b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer, double_spend])
b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer, double_spend])
b.store_bulk_transactions([tx_transfer])
b.models.store_bulk_transactions( [tx_transfer])
with pytest.raises(DoubleSpend):
b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [double_spend])
b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [double_spend])
def test_validation_with_transaction_buffer(b):
@ -341,7 +340,7 @@ def test_validation_with_transaction_buffer(b):
@pytest.mark.bdb
def test_migrate_abci_chain_yields_on_genesis(b):
b.migrate_abci_chain()
latest_chain = b.get_latest_abci_chain()
latest_chain = b.models.get_latest_abci_chain()
assert latest_chain is None
@ -362,10 +361,10 @@ def test_migrate_abci_chain_yields_on_genesis(b):
],
)
def test_migrate_abci_chain_generates_new_chains(b, chain, block_height, expected):
b.store_abci_chain(*chain)
b.store_block(Block(app_hash="", height=block_height, transactions=[])._asdict())
b.models.store_abci_chain(*chain)
b.models.store_block(Block(app_hash="", height=block_height, transactions=[])._asdict())
b.migrate_abci_chain()
latest_chain = b.get_latest_abci_chain()
latest_chain = b.models.get_latest_abci_chain()
assert latest_chain == expected
@ -378,13 +377,13 @@ def test_get_spent_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
bob = generate_key_pair()
tx1 = Create.generate([user_pk], [([alice.public_key], 3), ([user_pk], 2)]).sign([user_sk])
b.store_bulk_transactions([tx1])
b.models.store_bulk_transactions( [tx1])
inputs = tx1.to_inputs()
tx2 = Transfer.generate([inputs[1]], [([user2_pk], 2)], [tx1.id]).sign([user_sk])
assert b.validate_transaction(tx2)
b.store_bulk_transactions([tx2])
b.models.store_bulk_transactions( [tx2])
tx3 = Transfer.generate([inputs[1]], [([bob.public_key], 2)], [tx1.id]).sign([user_sk])

View File

@ -5,7 +5,6 @@
import pytest
from planetmint.version import __tm_supported_versions__
from transactions.types.assets.create import Create
from transactions.types.assets.transfer import Transfer
@ -39,13 +38,13 @@ def config(request, monkeypatch):
def test_bigchain_class_default_initialization(config):
from planetmint import Planetmint
from planetmint.validation import BaseValidationRules
from planetmint.application.validation import Validator
from planetmint.application.basevalidationrules import BaseValidationRules
planet = Planetmint()
assert planet.connection.host == config["database"]["host"]
assert planet.connection.port == config["database"]["port"]
assert planet.validation == BaseValidationRules
validator = Validator()
assert validator.models.connection.host == config["database"]["host"]
assert validator.models.connection.port == config["database"]["port"]
assert validator.validation == BaseValidationRules
@pytest.mark.bdb
@ -55,7 +54,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
[([carol.public_key], 8)],
).sign([carol.private_key])
assert b.validate_transaction(tx_1)
b.store_bulk_transactions([tx_1])
b.models.store_bulk_transactions( [tx_1])
tx_2 = Transfer.generate(
tx_1.to_inputs(),
@ -63,7 +62,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
asset_ids=[tx_1.id],
).sign([carol.private_key])
assert b.validate_transaction(tx_2)
b.store_bulk_transactions([tx_2])
b.models.store_bulk_transactions( [tx_2])
tx_3 = Transfer.generate(
tx_2.to_inputs()[2:3],
@ -71,7 +70,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
asset_ids=[tx_1.id],
).sign([carol.private_key])
assert b.validate_transaction(tx_3)
b.store_bulk_transactions([tx_3])
b.models.store_bulk_transactions( [tx_3])
tx_4 = Transfer.generate(
tx_2.to_inputs()[1:2] + tx_3.to_inputs()[0:1],
@ -79,7 +78,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
asset_ids=[tx_1.id],
).sign([alice.private_key])
assert b.validate_transaction(tx_4)
b.store_bulk_transactions([tx_4])
b.models.store_bulk_transactions( [tx_4])
tx_5 = Transfer.generate(
tx_2.to_inputs()[0:1],
@ -88,8 +87,8 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
).sign([bob.private_key])
assert b.validate_transaction(tx_5)
b.store_bulk_transactions([tx_5])
assert b.get_spent(tx_2.id, 0) == tx_5.to_dict()
assert not b.get_spent(tx_5.id, 0)
assert b.get_outputs_filtered(alice.public_key)
assert b.get_outputs_filtered(alice.public_key, spent=False)
b.models.store_bulk_transactions( [tx_5])
assert b.models.get_spent(tx_2.id, 0) == tx_5.to_dict()
assert not b.models.get_spent(tx_5.id, 0)
assert b.models.get_outputs_filtered(alice.public_key)
assert b.models.get_outputs_filtered(alice.public_key, spent=False)

View File

@ -7,6 +7,8 @@ import pytest
from planetmint.ipc.events import EventTypes, Event, POISON_PILL
from planetmint.ipc.exchange import Exchange
def test_event_handler():
# create and event
event_data = {"msg": "some data"}

View File

@ -23,7 +23,7 @@ def txlist(b, user_pk, user2_pk, user_sk, user2_sk):
# Create a TRANSFER transactions
transfer1 = Transfer.generate(create1.to_inputs(), [([user_pk], 8)], [create1.id]).sign([user2_sk])
b.store_bulk_transactions([create1, create2, transfer1])
b.models.store_bulk_transactions( [create1, create2, transfer1])
return type(
"",
@ -37,11 +37,11 @@ def txlist(b, user_pk, user2_pk, user_sk, user2_sk):
@pytest.mark.bdb
def test_get_txlist_by_asset(b, txlist):
res = b.get_transactions_filtered([txlist.create1.id])
res = b.models.get_transactions_filtered([txlist.create1.id])
assert sorted(set(tx.id for tx in res)) == sorted(set([txlist.transfer1.id, txlist.create1.id]))
@pytest.mark.bdb
def test_get_txlist_by_operation(b, txlist):
res = b.get_transactions_filtered([txlist.create1.id], operation="CREATE")
res = b.models.get_transactions_filtered([txlist.create1.id], operation="CREATE")
assert set(tx.id for tx in res) == {txlist.create1.id}

View File

@ -25,15 +25,15 @@ def fixed_seed_election(b_mock, node_key, new_validator):
@pytest.fixture
def concluded_election(b, ongoing_validator_election, ed25519_node_keys):
query.store_election(b.connection, ongoing_validator_election.id, 2, is_concluded=True)
query.store_election(b.models.connection, ongoing_validator_election.id, 2, is_concluded=True)
return ongoing_validator_election
@pytest.fixture
def inconclusive_election(b, ongoing_validator_election, new_validator):
validators = b.get_validators(height=1)
validators = b.models.get_validators(height=1)
validators[0]["voting_power"] = 15
validator_update = {"validators": validators, "height": 2, "election_id": "some_other_election"}
query.store_validator_set(b.connection, validator_update)
query.store_validator_set(b.models.connection, validator_update)
return ongoing_validator_election

View File

@ -16,6 +16,7 @@ from transactions.common.exceptions import ValidationError
from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
from transactions.types.elections.vote import Vote
from transactions.types.elections.validator_utils import election_id_to_public_key
from tests.utils import generate_block, gen_vote
pytestmark = [pytest.mark.execute]
@ -23,7 +24,7 @@ pytestmark = [pytest.mark.execute]
@pytest.mark.bdb
def test_upsert_validator_valid_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
b_mock.store_bulk_transactions([valid_upsert_validator_election])
b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
input0 = valid_upsert_validator_election.to_inputs()[0]
votes = valid_upsert_validator_election.outputs[0].amount
@ -40,7 +41,7 @@ def test_upsert_validator_valid_election_vote(b_mock, valid_upsert_validator_ele
@pytest.mark.bdb
def test_upsert_validator_valid_non_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
b_mock.store_bulk_transactions([valid_upsert_validator_election])
b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
input0 = valid_upsert_validator_election.to_inputs()[0]
votes = valid_upsert_validator_election.outputs[0].amount
@ -60,7 +61,7 @@ def test_upsert_validator_valid_non_election_vote(b_mock, valid_upsert_validator
def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
alice = generate_key_pair()
b_mock.store_bulk_transactions([valid_upsert_validator_election])
b_mock.models.store_bulk_transactions( [valid_upsert_validator_election])
input0 = valid_upsert_validator_election.to_inputs()[0]
votes = valid_upsert_validator_election.outputs[0].amount
@ -75,7 +76,7 @@ def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_
assert b_mock.validate_transaction(delegate_vote)
b_mock.store_bulk_transactions([delegate_vote])
b_mock.models.store_bulk_transactions([delegate_vote])
election_pub_key = election_id_to_public_key(valid_upsert_validator_election.id)
alice_votes = delegate_vote.to_inputs()[0]
@ -93,7 +94,7 @@ def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_
@pytest.mark.bdb
def test_upsert_validator_invalid_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
b_mock.store_bulk_transactions([valid_upsert_validator_election])
b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
input0 = valid_upsert_validator_election.to_inputs()[0]
votes = valid_upsert_validator_election.outputs[0].amount
@ -113,7 +114,7 @@ def test_upsert_validator_invalid_election_vote(b_mock, valid_upsert_validator_e
@pytest.mark.bdb
def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, ed25519_node_keys):
alice = generate_key_pair()
b_mock.store_bulk_transactions([valid_upsert_validator_election])
b_mock.models.store_bulk_transactions( [valid_upsert_validator_election])
assert b_mock.get_commited_votes(valid_upsert_validator_election) == 0
input0 = valid_upsert_validator_election.to_inputs()[0]
@ -127,7 +128,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election,
[([alice.public_key], 4), ([key0.public_key], votes - 4)],
election_ids=[valid_upsert_validator_election.id],
).sign([key0.private_key])
b_mock.store_bulk_transactions([delegate_vote])
b_mock.models.store_bulk_transactions([delegate_vote])
assert b_mock.get_commited_votes(valid_upsert_validator_election) == 0
election_public_key = election_id_to_public_key(valid_upsert_validator_election.id)
@ -141,7 +142,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election,
).sign([alice.private_key])
assert b_mock.validate_transaction(alice_casted_vote)
b_mock.store_bulk_transactions([alice_casted_vote])
b_mock.models.store_bulk_transactions( [alice_casted_vote])
# Check if the delegated vote is counted as a valid vote
assert b_mock.get_commited_votes(valid_upsert_validator_election) == 2
@ -151,7 +152,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election,
).sign([key0.private_key])
assert b_mock.validate_transaction(key0_casted_vote)
b_mock.store_bulk_transactions([key0_casted_vote])
b_mock.models.store_bulk_transactions([key0_casted_vote])
assert b_mock.get_commited_votes(valid_upsert_validator_election) == votes - 2
@ -165,7 +166,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
assert b_mock.validate_transaction(tx_vote0)
# store election
b_mock.store_bulk_transactions([valid_upsert_validator_election])
b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
# cannot conclude election as no votes exist
assert not b_mock.has_election_concluded(valid_upsert_validator_election)
@ -173,7 +174,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
assert b_mock.validate_transaction(tx_vote0)
assert not b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote0])
b_mock.store_bulk_transactions([tx_vote0])
b_mock.models.store_bulk_transactions( [tx_vote0])
assert not b_mock.has_election_concluded(valid_upsert_validator_election)
# Node 1: cast vote
@ -191,7 +192,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
# 2/3 is achieved in the same block so the election can be concluded
assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote1, tx_vote2])
b_mock.store_bulk_transactions([tx_vote1])
b_mock.models.store_bulk_transactions([tx_vote1])
assert not b_mock.has_election_concluded(valid_upsert_validator_election)
assert b_mock.validate_transaction(tx_vote2)
@ -201,7 +202,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote2])
assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote2, tx_vote3])
b_mock.store_bulk_transactions([tx_vote2])
b_mock.models.store_bulk_transactions( [tx_vote2])
# Once the blockchain records >2/3 of the votes the election is assumed to be concluded
# so any invocation of `.has_concluded` for that election should return False
@ -214,18 +215,18 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
@pytest.mark.abci
def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys):
if b.get_latest_block()["height"] == 0:
generate_block(b)
def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys, test_abci_rpc):
if b.models.get_latest_block()["height"] == 0:
generate_block(b, test_abci_rpc)
(node_pub, _) = list(node_keys.items())[0]
validators = [{"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10}]
latest_block = b.get_latest_block()
latest_block = b.models.get_latest_block()
# reset the validator set
b.store_validator_set(latest_block["height"], validators)
generate_block(b)
b.models.store_validator_set(latest_block["height"], validators)
generate_block(b, test_abci_rpc)
power = 1
public_key = "9B3119650DF82B9A5D8A12E38953EA47475C09F0C48A4E6A0ECE182944B24403"
@ -244,22 +245,26 @@ def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys):
election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign(
[node_key.private_key]
)
code, message = ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT)
code, message = test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT
)
assert code == 202
assert b.get_transaction(election.id)
assert b.models.get_transaction(election.id)
tx_vote = gen_vote(election, 0, ed25519_node_keys)
assert b.validate_transaction(tx_vote)
code, message = ABCI_RPC().write_transaction(MODE_LIST, b.tendermint_rpc_endpoint, MODE_COMMIT, tx_vote, BROADCAST_TX_COMMIT)
code, message = test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx_vote, BROADCAST_TX_COMMIT
)
assert code == 202
resp = b.get_validators()
resp = b.models.get_validators()
validator_pub_keys = []
for v in resp:
validator_pub_keys.append(v["public_key"]["value"])
assert public_key64 in validator_pub_keys
new_validator_set = b.get_validators()
new_validator_set = b.models.get_validators()
validator_pub_keys = []
for v in new_validator_set:
validator_pub_keys.append(v["public_key"]["value"])
@ -286,7 +291,7 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
voters = b.get_recipients_list()
election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key])
# store election
b.store_bulk_transactions([election])
b.models.store_bulk_transactions( [election])
tx_vote0 = gen_vote(election, 0, ed25519_node_keys)
tx_vote1 = gen_vote(election, 1, ed25519_node_keys)
@ -318,13 +323,13 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
voters = b.get_recipients_list()
election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key])
# store election
b.store_bulk_transactions([election])
b.models.store_bulk_transactions( [election])
tx_vote0 = gen_vote(election, 0, ed25519_node_keys)
tx_vote1 = gen_vote(election, 1, ed25519_node_keys)
tx_vote2 = gen_vote(election, 2, ed25519_node_keys)
b.store_bulk_transactions([tx_vote0, tx_vote1])
b.models.store_bulk_transactions( [tx_vote0, tx_vote1])
update = b.process_block(9, [tx_vote2])
assert len(update) == 1
@ -332,7 +337,7 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
assert update_public_key == public_key64
# assert that the public key is not a part of the current validator set
for v in b.get_validators(10):
for v in b.models.get_validators(10):
assert not v["public_key"]["value"] == public_key64
@ -345,4 +350,4 @@ def reset_validator_set(b, node_keys, height):
validators = []
for node_pub, _ in node_keys.items():
validators.append({"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10})
b.store_validator_set(height, validators)
b.models.store_validator_set(height, validators)

View File

@ -17,6 +17,7 @@ from transactions.common.exceptions import (
InvalidPowerChange,
)
pytestmark = pytest.mark.bdb
@ -82,7 +83,7 @@ def test_upsert_validator_invalid_election(b_mock, new_validator, node_key, fixe
with pytest.raises(DuplicateTransaction):
b_mock.validate_election(fixed_seed_election, [duplicate_election])
b_mock.store_bulk_transactions([fixed_seed_election])
b_mock.models.store_bulk_transactions([fixed_seed_election])
with pytest.raises(DuplicateTransaction):
b_mock.validate_election(duplicate_election)
@ -166,8 +167,8 @@ def test_get_status_inconclusive(b, inconclusive_election, new_validator):
},
]
b.get_validators = custom_mock_get_validators
b.get_latest_block = set_block_height_to_3
b.models.get_validators = custom_mock_get_validators
b.models.get_latest_block = set_block_height_to_3
status = ValidatorElection.INCONCLUSIVE
resp = b.get_election_status(inconclusive_election)
assert resp == status

View File

@ -21,6 +21,7 @@ from transactions.types.elections.validator_utils import election_id_to_public_k
from planetmint.abci.tendermint_utils import key_to_base64
from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
@singledispatch
def flush_db(connection, dbname):
raise NotImplementedError
@ -38,7 +39,7 @@ def flush_tarantool_db(connection, dbname):
connection.connect().call("init")
def generate_block(planet):
def generate_block(planet, test_abci_rpc):
from transactions.common.crypto import generate_key_pair
alice = generate_key_pair()
@ -46,7 +47,9 @@ def generate_block(planet):
[alice.private_key]
)
code, message = ABCI_RPC().write_transaction(MODE_LIST, planet.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT)
code, message = test_abci_rpc.write_transaction(
MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT
)
assert code == 202

View File

@ -9,10 +9,10 @@ import pytest
@pytest.fixture
def app(request):
from planetmint.web import server
from planetmint.lib import Planetmint
from planetmint.application.validation import Validator
if request.config.getoption("--database-backend") == "localmongodb":
app = server.create_app(debug=True, planetmint_factory=Planetmint)
app = server.create_app(debug=True, planetmint_factory=Validator)
else:
app = server.create_app(debug=True)

View File

@ -8,6 +8,7 @@ import pytest
from transactions.types.assets.create import Create
from ipld import marshal, multihash
ASSETS_ENDPOINT = "/api/v1/assets/"
@ -17,7 +18,7 @@ def test_get_assets_tendermint(client, b, alice):
assets = [{"data": multihash(marshal({"msg": "abc"}))}]
tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=assets).sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
res = client.get(ASSETS_ENDPOINT + assets[0]["data"])
assert res.status_code == 200
@ -32,7 +33,7 @@ def test_get_assets_tendermint_limit(client, b, alice, bob):
tx_1 = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=assets).sign([alice.private_key])
tx_2 = Create.generate([bob.public_key], [([bob.public_key], 1)], assets=assets).sign([bob.private_key])
b.store_bulk_transactions([tx_1, tx_2])
b.models.store_bulk_transactions( [tx_1, tx_2])
res = client.get(ASSETS_ENDPOINT + assets[0]["data"] + "?limit=1")
assert res.status_code == 200

View File

@ -9,6 +9,7 @@ from transactions.types.assets.create import Create
from planetmint.abci.block import Block
from ipld import marshal, multihash
BLOCKS_ENDPOINT = "/api/v1/blocks/"
@ -22,15 +23,15 @@ def test_get_block_endpoint(b, client, alice):
)
tx = tx.sign([alice.private_key])
# with store_bulk_transactions we use `insert_many` where PyMongo
# with b.models.store_bulk_transactions we use `insert_many` where PyMongo
# automatically adds an `_id` field to the tx, therefore we need the
# deepcopy, for more info see:
# https://api.mongodb.com/python/current/faq.html#writes-and-ids
tx_dict = copy.deepcopy(tx.to_dict())
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
block = Block(app_hash="random_utxo", height=31, transactions=[tx.id])
b.store_block(block._asdict())
b.models.store_block(block._asdict())
res = client.get(BLOCKS_ENDPOINT + str(block.height))
expected_response = {"app_hash": "random_utxo", "height": block.height, "transaction_ids": [tx.id]}
@ -54,10 +55,10 @@ def test_get_block_containing_transaction(b, client, alice):
[alice.public_key], [([alice.public_key], 1)], assets=[{"data": multihash(marshal({"cycle": "hero"}))}]
)
tx = tx.sign([alice.private_key])
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
block = Block(app_hash="random_utxo", height=13, transactions=[tx.id])
b.store_block(block._asdict())
b.models.store_block(block._asdict())
res = client.get("{}?transaction_id={}".format(BLOCKS_ENDPOINT, tx.id))
expected_height = block.height
assert res.json["height"] == expected_height

View File

@ -8,6 +8,7 @@ import pytest
from transactions.types.assets.create import Create
from ipld import marshal, multihash
METADATA_ENDPOINT = "/api/v1/metadata/"
@ -26,7 +27,7 @@ def test_get_metadata_tendermint(client, b, alice):
[alice.private_key]
)
b.store_bulk_transactions([tx])
b.models.store_bulk_transactions( [tx])
# test that metadata is returned
res = client.get(METADATA_ENDPOINT + metadata)
@ -43,13 +44,13 @@ def test_get_metadata_limit_tendermint(client, b, alice):
tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta, assets=assets1).sign(
[alice.private_key]
)
b.store_bulk_transactions([tx1])
b.models.store_bulk_transactions( [tx1])
assets2 = [{"data": multihash(marshal({"msg": "abc 2"}))}]
tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta, assets=assets2).sign(
[alice.private_key]
)
b.store_bulk_transactions([tx2])
b.models.store_bulk_transactions( [tx2])
# test that both assets are returned without limit
res = client.get(METADATA_ENDPOINT + meta)

View File

@ -9,7 +9,6 @@ from transactions.types.assets.create import Create
from transactions.types.assets.transfer import Transfer
from unittest.mock import MagicMock, patch
OUTPUTS_ENDPOINT = "/api/v1/outputs/"
@ -19,7 +18,7 @@ def test_get_outputs_endpoint(client, user_pk):
m = MagicMock()
m.txid = "a"
m.output = 0
with patch("planetmint.Planetmint.get_outputs_filtered") as gof:
with patch("planetmint.model.models.Models.get_outputs_filtered") as gof:
gof.return_value = [m, m]
res = client.get(OUTPUTS_ENDPOINT + "?public_key={}".format(user_pk))
assert res.json == [{"transaction_id": "a", "output_index": 0}, {"transaction_id": "a", "output_index": 0}]
@ -31,7 +30,7 @@ def test_get_outputs_endpoint_unspent(client, user_pk):
m = MagicMock()
m.txid = "a"
m.output = 0
with patch("planetmint.Planetmint.get_outputs_filtered") as gof:
with patch("planetmint.model.models.Models.get_outputs_filtered") as gof:
gof.return_value = [m]
params = "?spent=False&public_key={}".format(user_pk)
res = client.get(OUTPUTS_ENDPOINT + params)
@ -46,7 +45,7 @@ def test_get_outputs_endpoint_spent(client, user_pk):
m = MagicMock()
m.txid = "a"
m.output = 0
with patch("planetmint.Planetmint.get_outputs_filtered") as gof:
with patch("planetmint.model.models.Models.get_outputs_filtered") as gof:
gof.return_value = [m]
params = "?spent=true&public_key={}".format(user_pk)
res = client.get(OUTPUTS_ENDPOINT + params)
@ -92,7 +91,7 @@ def test_get_divisble_transactions_returns_500_phase_one(b, client):
TX_ENDPOINT = "/api/v1/transactions"
def mine(tx_list):
b.store_bulk_transactions(tx_list)
b.models.store_bulk_transactions( tx_list)
alice_priv, alice_pub = crypto.generate_key_pair()
# bob_priv, bob_pub = crypto.generate_key_pair()
@ -119,7 +118,7 @@ def test_get_divisble_transactions_returns_500(b, client):
TX_ENDPOINT = "/api/v1/transactions"
def mine(tx_list):
b.store_bulk_transactions(tx_list)
b.models.store_bulk_transactions( tx_list)
alice_priv, alice_pub = crypto.generate_key_pair()
bob_priv, bob_pub = crypto.generate_key_pair()

View File

@ -30,7 +30,6 @@ from transactions.common.transaction import (
from transactions.common.utils import _fulfillment_from_details
from transactions.common.crypto import generate_key_pair
TX_ENDPOINT = "/api/v1/transactions/"
@ -107,7 +106,7 @@ def test_post_create_transaction_endpoint(b, client):
def test_post_create_transaction_with_language(b, client, nested, language, expected_status_code):
from planetmint.backend.localmongodb.connection import LocalMongoDBConnection
if isinstance(b.connection, LocalMongoDBConnection):
if isinstance(b.models.connection, LocalMongoDBConnection):
user_priv, user_pub = crypto.generate_key_pair()
lang_obj = {"language": language}
@ -148,7 +147,7 @@ def test_post_create_transaction_with_invalid_key(b, client, field, value, err_k
user_priv, user_pub = crypto.generate_key_pair()
if isinstance(b.connection, LocalMongoDBConnection):
if isinstance(b.models.connection, LocalMongoDBConnection):
if field == "asset":
tx = Create.generate([user_pub], [([user_pub], 1)], assets=value)
elif field == "metadata":
@ -405,7 +404,7 @@ def test_transactions_get_list_good(client):
asset_ids = ["1" * 64]
with patch("planetmint.Planetmint.get_transactions_filtered", get_txs_patched):
with patch("planetmint.model.models.Models.get_transactions_filtered", get_txs_patched):
url = TX_ENDPOINT + "?asset_ids=" + ",".join(asset_ids)
assert client.get(url).json == [
["asset_ids", asset_ids],
@ -431,7 +430,7 @@ def test_transactions_get_list_bad(client):
assert False
with patch(
"planetmint.Planetmint.get_transactions_filtered",
"planetmint.model.models.Models.get_transactions_filtered",
lambda *_, **__: should_not_be_called(),
):
# Test asset id validated
@ -487,7 +486,7 @@ def test_post_transaction_compose_valid_wo_abci(b, _bdb):
assets=[{"data": "QmW5GVMW98D3mktSDfWHS8nX2UiCd8gP1uCiujnFX4yK97"}],
).sign([alice.private_key])
validated = b.validate_transaction(tx)
b.store_bulk_transactions([validated])
b.models.store_bulk_transactions( [validated])
tx_obj = tx
tx = tx.to_dict()
@ -500,7 +499,7 @@ def test_post_transaction_compose_valid_wo_abci(b, _bdb):
compose_dict = signed_compose_tx.to_dict()
compose_obj = Transaction.from_dict(compose_dict)
validated_compose = b.validate_transaction(compose_obj)
b.store_bulk_transactions([validated_compose])
b.models.store_bulk_transactions( [validated_compose])
@pytest.mark.abci

View File

@ -14,7 +14,7 @@ def test_get_validators_endpoint(b, client):
"voting_power": 10,
}
]
b.store_validator_set(23, validator_set)
b.models.store_validator_set(23, validator_set)
res = client.get(VALIDATORS_ENDPOINT)
assert is_validator(res.json[0])