Mirror of https://github.com/planetmint/planetmint.git
331 Refactor: a certain module gets a specific driver type (Flask: sync driver, ABCI server: async driver); first we stick to the current Tarantool driver (#337)
* created ABCI_RPC class to separate RPC interaction from the other ABCI interactions
* renamed validation.py to validator.py
* simplified planetmint/__init__.py
* moved methods used by testing to tests/utils.py
* making planetmint/__init__.py lean
* moved ProcessGroup object to tests as it is only used there
* reintegrated disabled tests

Signed-off-by: Jürgen Eckel <juergen@riddleandcode.com>
This commit is contained in:
parent 77ab922eed
commit 0b0c954d34
@@ -30,7 +30,7 @@ def test_bigchain_instance_is_initialized_when_conf_provided():
 
 def test_load_validation_plugin_loads_default_rules_without_name():
     from planetmint import config_utils
-    from planetmint.validation import BaseValidationRules
+    from planetmint.application.basevalidationrules import BaseValidationRules
 
     assert config_utils.load_validation_plugin() == BaseValidationRules
 
@@ -321,4 +321,4 @@ def test_database_envs(env_name, env_value, config_key, monkeypatch):
     expected_config = Config().get()
     expected_config["database"][config_key] = env_value
 
-    assert planetmint.config == expected_config
+    assert planetmint.config.Config().get() == expected_config
@@ -11,7 +11,9 @@ import os.path
 
 from transactions.common.input import Input
 from transactions.common.transaction_link import TransactionLink
-from planetmint import lib
+
+import planetmint.abci.block
+
 from transactions.types.assets.create import Create
 from transactions.types.assets.transfer import Transfer
 from planetmint.web import server
@@ -210,7 +212,7 @@ def main():
     signature = "53wxrEQDYk1dXzmvNSytbCfmNVnPqPkDQaTnAe8Jf43s6ssejPxezkCvUnGTnduNUmaLjhaan1iRLi3peu6s5DzA"
 
     app_hash = "f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056"
-    block = lib.Block(height=1, transactions=[tx.to_dict()], app_hash=app_hash)
+    block = planetmint.abci.block.Block(height=1, transactions=[tx.to_dict()], app_hash=app_hash)
     block_dict = block._asdict()
     block_dict.pop("app_hash")
     ctx["block"] = pretty_json(block_dict)
@@ -19,7 +19,7 @@ The `Planetmint` class is defined here. Most node-level operations and database
 
 `Block`, `Transaction`, and `Asset` classes are defined here. The classes mirror the block and transaction structure from the documentation, but also include methods for validation and signing.
 
-### [`validation.py`](./validation.py)
+### [`validation.py`](application/basevalidationrules.py)
 
 Base class for validation methods (verification of votes, blocks, and transactions). The actual logic is mostly found in `transaction` and `block` models, defined in [`models.py`](./models.py).
 
@@ -27,7 +27,7 @@ Base class for validation methods (verification of votes, blocks, and transactions).
 
 Entry point for the Planetmint process, after initialization. All subprocesses are started here: processes to handle new blocks, votes, etc.
 
-### [`config_utils.py`](./config_utils.py)
+### [`config_utils.py`](config_utils.py)
 
 Methods for managing the configuration, including loading configuration files, automatically generating the configuration, and keeping the configuration consistent across Planetmint instances.
 
@@ -2,17 +2,3 @@
 # Planetmint and IPDB software contributors.
 # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
 # Code is Apache-2.0 and docs are CC-BY-4.0
-
-from transactions.common.transaction import Transaction  # noqa
-from transactions.types.elections.validator_election import ValidatorElection  # noqa
-from transactions.types.elections.vote import Vote  # noqa
-from transactions.types.elections.chain_migration_election import ChainMigrationElection
-from planetmint.lib import Planetmint
-from planetmint.core import App
-
-
-Transaction.register_type(Transaction.CREATE, Transaction)
-Transaction.register_type(Transaction.TRANSFER, Transaction)
-Transaction.register_type(ValidatorElection.OPERATION, ValidatorElection)
-Transaction.register_type(ChainMigrationElection.OPERATION, ChainMigrationElection)
-Transaction.register_type(Vote.OPERATION, Vote)
-
planetmint/abci/__init__.py (new file, +0)
planetmint/abci/block.py (new file, +3)
@@ -0,0 +1,3 @@
+from collections import namedtuple
+
+Block = namedtuple("Block", ("app_hash", "height", "transactions"))
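Since `Block` is now a plain namedtuple, constructing one and serializing it for storage stays trivial. A minimal sketch (field values are illustrative):

    from planetmint.abci.block import Block

    # Illustrative values; in core.py the app hash comes from Tendermint and
    # the transaction ids from the current block.
    block = Block(app_hash="f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056",
                  height=1, transactions=["tx-id-1"])

    # core.py persists the dict form via block._asdict(), as in the hunks below.
    block_dict = block._asdict()
    assert block_dict["height"] == 1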
@@ -21,11 +21,12 @@ from tendermint.abci.types_pb2 import (
     ResponseEndBlock,
     ResponseCommit,
 )
-from planetmint import Planetmint
-from planetmint.tendermint_utils import decode_transaction, calculate_hash, decode_validator
-from planetmint.lib import Block
-from planetmint.events import EventTypes, Event
 
+from planetmint.application.validator import Validator
+from planetmint.model.models import Models
+from planetmint.abci.tendermint_utils import decode_transaction, calculate_hash, decode_validator
+from planetmint.abci.block import Block
+from planetmint.ipc.events import EventTypes, Event
 
 CodeTypeError = 1
 logger = logging.getLogger(__name__)
@@ -38,18 +39,19 @@ class App(BaseApplication):
     transaction logic to Tendermint Core.
     """
 
-    def __init__(self, planetmint_node=None, events_queue=None):
+    def __init__(self, planetmint_node=None, events_queue=None, models: Models = None, validator: Validator = None):
         # super().__init__(abci)
         logger.debug("Checking values of types")
         logger.debug(dir(types_pb2))
         self.events_queue = events_queue
-        self.planetmint_node = planetmint_node or Planetmint()
+        self.validator = Validator()
+        self.models = models or Models()
         self.block_txn_ids = []
         self.block_txn_hash = ""
         self.block_transactions = []
         self.validators = None
         self.new_height = None
-        self.chain = self.planetmint_node.get_latest_abci_chain()
+        self.chain = self.models.get_latest_abci_chain()
 
     def log_abci_migration_error(self, chain_id, validators):
         logger.error(
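The new constructor turns `App` into a dependency-injection point: callers can hand in prebuilt `Models` and `Validator` instances instead of the old all-in-one `Planetmint` node object. A sketch of the wiring (the queue setup is illustrative):

    from multiprocessing import Queue

    from planetmint.abci.core import App
    from planetmint.application.validator import Validator
    from planetmint.model.models import Models

    events_queue = Queue()  # illustrative; normally supplied by the events subsystem
    app = App(events_queue=events_queue, models=Models(), validator=Validator())

Note that, as committed, `__init__` assigns `self.validator = Validator()` unconditionally, so the `validator` argument is accepted but never used; only `models` is honored via `models or Models()`.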
@@ -61,7 +63,7 @@ class App(BaseApplication):
     def abort_if_abci_chain_is_not_synced(self):
         if self.chain is None or self.chain["is_synced"]:
             return
-        validators = self.planetmint_node.get_validators()
+        validators = self.models.get_validators()
         self.log_abci_migration_error(self.chain["chain_id"], validators)
         sys.exit(1)
 
@@ -69,7 +71,7 @@ class App(BaseApplication):
         """Initialize chain upon genesis or a migration"""
         app_hash = ""
         height = 0
-        known_chain = self.planetmint_node.get_latest_abci_chain()
+        known_chain = self.models.get_latest_abci_chain()
         if known_chain is not None:
             chain_id = known_chain["chain_id"]
 
@@ -78,23 +80,23 @@ class App(BaseApplication):
                 logger.error(msg)
                 sys.exit(1)
             if chain_id != genesis.chain_id:
-                validators = self.planetmint_node.get_validators()
+                validators = self.models.get_validators()
                 self.log_abci_migration_error(chain_id, validators)
                 sys.exit(1)
             # set migration values for app hash and height
-            block = self.planetmint_node.get_latest_block()
+            block = self.models.get_latest_block()
             app_hash = "" if block is None else block["app_hash"]
             height = 0 if block is None else block["height"] + 1
-        known_validators = self.planetmint_node.get_validators()
+        known_validators = self.models.get_validators()
         validator_set = [decode_validator(v) for v in genesis.validators]
         if known_validators and known_validators != validator_set:
             self.log_abci_migration_error(known_chain["chain_id"], known_validators)
             sys.exit(1)
         block = Block(app_hash=app_hash, height=height, transactions=[])
-        self.planetmint_node.store_block(block._asdict())
-        self.planetmint_node.store_validator_set(height + 1, validator_set)
+        self.models.store_block(block._asdict())
+        self.models.store_validator_set(height + 1, validator_set)
         abci_chain_height = 0 if known_chain is None else known_chain["height"]
-        self.planetmint_node.store_abci_chain(abci_chain_height, genesis.chain_id, True)
+        self.models.store_abci_chain(abci_chain_height, genesis.chain_id, True)
         self.chain = {"height": abci_chain_height, "is_synced": True, "chain_id": genesis.chain_id}
         return ResponseInitChain()
 
@@ -112,7 +114,7 @@ class App(BaseApplication):
         # logger.info(f"Tendermint version: {request.version}")
 
         r = ResponseInfo()
-        block = self.planetmint_node.get_latest_block()
+        block = self.models.get_latest_block()
         if block:
             chain_shift = 0 if self.chain is None else self.chain["height"]
             r.last_block_height = block["height"] - chain_shift
@@ -134,7 +136,7 @@ class App(BaseApplication):
 
         logger.debug("check_tx: %s", raw_transaction)
         transaction = decode_transaction(raw_transaction)
-        if self.planetmint_node.is_valid_transaction(transaction):
+        if self.validator.is_valid_transaction(transaction):
             logger.debug("check_tx: VALID")
             return ResponseCheckTx(code=OkCode)
         else:
@@ -167,9 +169,7 @@ class App(BaseApplication):
         self.abort_if_abci_chain_is_not_synced()
 
         logger.debug("deliver_tx: %s", raw_transaction)
-        transaction = self.planetmint_node.is_valid_transaction(
-            decode_transaction(raw_transaction), self.block_transactions
-        )
+        transaction = self.validator.is_valid_transaction(decode_transaction(raw_transaction), self.block_transactions)
 
         if not transaction:
             logger.debug("deliver_tx: INVALID")
@@ -198,10 +198,10 @@ class App(BaseApplication):
         # `end_block` or `commit`
         logger.debug(f"Updating pre-commit state: {self.new_height}")
         pre_commit_state = dict(height=self.new_height, transactions=self.block_txn_ids)
-        self.planetmint_node.store_pre_commit_state(pre_commit_state)
+        self.models.store_pre_commit_state(pre_commit_state)
 
         block_txn_hash = calculate_hash(self.block_txn_ids)
-        block = self.planetmint_node.get_latest_block()
+        block = self.models.get_latest_block()
 
         logger.debug("BLOCK: ", block)
 
@@ -210,7 +210,7 @@ class App(BaseApplication):
         else:
             self.block_txn_hash = block["app_hash"]
 
-        validator_update = self.planetmint_node.process_block(self.new_height, self.block_transactions)
+        validator_update = self.validator.process_block(self.new_height, self.block_transactions)
 
         return ResponseEndBlock(validator_updates=validator_update)
 
@@ -223,12 +223,12 @@ class App(BaseApplication):
 
         # register a new block only when new transactions are received
         if self.block_txn_ids:
-            self.planetmint_node.store_bulk_transactions(self.block_transactions)
+            self.models.store_bulk_transactions(self.block_transactions)
 
             block = Block(app_hash=self.block_txn_hash, height=self.new_height, transactions=self.block_txn_ids)
             # NOTE: storing the block should be the last operation during commit
             # this effects crash recovery. Refer BEP#8 for details
-            self.planetmint_node.store_block(block._asdict())
+            self.models.store_block(block._asdict())
 
         logger.debug(
             "Commit-ing new block with hash: apphash=%s ," "height=%s, txn ids=%s",
@@ -245,26 +245,3 @@ class App(BaseApplication):
             self.events_queue.put(event)
 
         return ResponseCommit(data=data)
-
-
-def rollback(planetmint):
-    pre_commit = None
-
-    try:
-        pre_commit = planetmint.get_pre_commit_state()
-    except Exception as e:
-        logger.exception("Unexpected error occurred while executing get_pre_commit_state()", e)
-
-    if pre_commit is None or len(pre_commit) == 0:
-        # the pre_commit record is first stored in the first `end_block`
-        return
-
-    latest_block = planetmint.get_latest_block()
-    if latest_block is None:
-        logger.error("Found precommit state but no blocks!")
-        sys.exit(1)
-
-    # NOTE: the pre-commit state is always at most 1 block ahead of the commited state
-    if latest_block["height"] < pre_commit["height"]:
-        planetmint.rollback_election(pre_commit["height"], pre_commit["transactions"])
-        planetmint.delete_transactions(pre_commit["transactions"])
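The module-level `rollback(planetmint)` helper deleted here is not gone: it reappears below as `Validator.rollback()`, backed by `Models`. A crash-recovery call site would now look roughly like this (a sketch; the actual startup wiring is not part of this diff):

    from planetmint.application.validator import Validator

    # On node start, undo any state left by a crash between `end_block` and `commit`.
    Validator().rollback()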
@@ -6,9 +6,9 @@
 import multiprocessing
 
 from collections import defaultdict
-from planetmint import App
-from planetmint.lib import Planetmint
-from planetmint.tendermint_utils import decode_transaction
+from planetmint.abci.core import App
+from planetmint.application.validator import Validator
+from planetmint.abci.tendermint_utils import decode_transaction
 from abci.application import OkCode
 from tendermint.abci.types_pb2 import (
     ResponseCheckTx,
@@ -93,7 +93,7 @@ class ValidationWorker:
     def __init__(self, in_queue, results_queue):
         self.in_queue = in_queue
         self.results_queue = results_queue
-        self.planetmint = Planetmint()
+        self.validator = Validator()
         self.reset()
 
     def reset(self):
@@ -112,7 +112,7 @@ class ValidationWorker:
         except TypeError:
             asset_id = dict_transaction["id"]
 
-        transaction = self.planetmint.is_valid_transaction(dict_transaction, self.validated_transactions[asset_id])
+        transaction = self.validator.is_valid_transaction(dict_transaction, self.validated_transactions[asset_id])
 
         if transaction:
             self.validated_transactions[asset_id].append(transaction)
planetmint/abci/rpc.py (new file, +74)
@@ -0,0 +1,74 @@
+import requests
+from uuid import uuid4
+from transactions.common.exceptions import ValidationError
+from transactions.common.transaction_mode_types import (
+    BROADCAST_TX_COMMIT,
+    BROADCAST_TX_ASYNC,
+    BROADCAST_TX_SYNC,
+)
+
+from planetmint.utils import Singleton
+from planetmint.abci.tendermint_utils import encode_transaction
+from planetmint.application.validator import logger
+from planetmint.config_utils import autoconfigure
+from planetmint.config import Config
+
+MODE_COMMIT = BROADCAST_TX_COMMIT
+MODE_LIST = (BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC, MODE_COMMIT)
+
+
+class ABCI_RPC:
+    def __init__(self):
+        autoconfigure()
+        self.tendermint_host = Config().get()["tendermint"]["host"]
+        self.tendermint_port = Config().get()["tendermint"]["port"]
+        self.tendermint_rpc_endpoint = "http://{}:{}/".format(self.tendermint_host, self.tendermint_port)
+
+    @staticmethod
+    def _process_post_response(mode_commit, response, mode):
+        logger.debug(response)
+
+        error = response.get("error")
+        if error:
+            status_code = 500
+            message = error.get("message", "Internal Error")
+            data = error.get("data", "")
+
+            if "Tx already exists in cache" in data:
+                status_code = 400
+
+            return (status_code, message + " - " + data)
+
+        result = response["result"]
+        if mode == mode_commit:
+            check_tx_code = result.get("check_tx", {}).get("code", 0)
+            deliver_tx_code = result.get("deliver_tx", {}).get("code", 0)
+            error_code = check_tx_code or deliver_tx_code
+        else:
+            error_code = result.get("code", 0)
+
+        if error_code:
+            return (500, "Transaction validation failed")
+
+        return (202, "")
+
+    def write_transaction(self, mode_list, endpoint, mode_commit, transaction, mode):
+        # This method offers backward compatibility with the Web API.
+        """Submit a valid transaction to the mempool."""
+        response = self.post_transaction(mode_list, endpoint, transaction, mode)
+        return ABCI_RPC._process_post_response(mode_commit, response.json(), mode)
+
+    def post_transaction(self, mode_list, endpoint, transaction, mode):
+        """Submit a valid transaction to the mempool."""
+        if not mode or mode not in mode_list:
+            raise ValidationError("Mode must be one of the following {}.".format(", ".join(mode_list)))
+
+        tx_dict = transaction.tx_dict if transaction.tx_dict else transaction.to_dict()
+        payload = {
+            "method": mode,
+            "jsonrpc": "2.0",
+            "params": [encode_transaction(tx_dict)],
+            "id": str(uuid4()),
+        }
+        # TODO: handle connection errors!
+        return requests.post(endpoint, json=payload)
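Typical use of the new class mirrors the call site in `commands/planetmint.py` further down; `signed_tx` is assumed to be a prepared and signed transaction object from the `transactions` package:

    from planetmint.abci.rpc import ABCI_RPC, MODE_COMMIT, MODE_LIST
    from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT

    abci_rpc = ABCI_RPC()  # reads the Tendermint host/port from the node config
    status_code, message = abci_rpc.write_transaction(
        MODE_LIST, abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, signed_tx, BROADCAST_TX_COMMIT
    )
    if (status_code, message) == (202, ""):
        print("transaction accepted into the mempool")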
planetmint/abci/utils.py (new file, +35)
@@ -0,0 +1,35 @@
+import json
+
+from packaging import version
+from transactions.common.crypto import key_pair_from_ed25519_key
+
+from planetmint.abci.tendermint_utils import key_from_base64
+from planetmint.version import __tm_supported_versions__
+
+
+def load_node_key(path):
+    with open(path) as json_data:
+        priv_validator = json.load(json_data)
+        priv_key = priv_validator["priv_key"]["value"]
+        hex_private_key = key_from_base64(priv_key)
+        return key_pair_from_ed25519_key(hex_private_key)
+
+
+def tendermint_version_is_compatible(running_tm_ver):
+    """
+    Check Tendermint compatability with Planetmint server
+
+    :param running_tm_ver: Version number of the connected Tendermint instance
+    :type running_tm_ver: str
+    :return: True/False depending on the compatability with Planetmint server
+    :rtype: bool
+    """
+
+    # Splitting because version can look like this e.g. 0.22.8-40d6dc2e
+    tm_ver = running_tm_ver.split("-")
+    if not tm_ver:
+        return False
+    for ver in __tm_supported_versions__:
+        if version.parse(ver) == version.parse(tm_ver[0]):
+            return True
+    return False
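The check compares only the release part before the first dash against `__tm_supported_versions__`. A worked example, assuming `0.34.15` is in the supported list (the list itself is not shown in this diff):

    from planetmint.abci.utils import tendermint_version_is_compatible

    # "0.34.15-40d6dc2e" splits into ["0.34.15", "40d6dc2e"]; the build suffix
    # is ignored and only "0.34.15" is compared against the supported versions.
    print(tendermint_version_is_compatible("0.34.15-40d6dc2e"))  # True under the assumption above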
planetmint/application/__init__.py (new file, +2)
@@ -0,0 +1,2 @@
+from .validator import Validator
+from .basevalidationrules import BaseValidationRules
planetmint/application/validator.py (new file, +554)
@@ -0,0 +1,554 @@
+import logging
+import json
+
+from collections import OrderedDict
+from transactions import Transaction, Vote
+from transactions.common.exceptions import (
+    DoubleSpend,
+    AssetIdMismatch,
+    InvalidSignature,
+    AmountError,
+    SchemaValidationError,
+    ValidationError,
+    MultipleInputsError,
+    DuplicateTransaction,
+    InvalidProposer,
+    UnequalValidatorSet,
+    InvalidPowerChange,
+)
+from transactions.common.crypto import public_key_from_ed25519_key
+from transactions.common.output import Output as TransactionOutput
+from transactions.common.transaction import VALIDATOR_ELECTION, CHAIN_MIGRATION_ELECTION
+from transactions.types.elections.election import Election
+from transactions.types.elections.validator_utils import election_id_to_public_key
+
+from planetmint.abci.tendermint_utils import (
+    merkleroot,
+    key_from_base64,
+    public_key_to_base64,
+    encode_validator,
+    new_validator_set,
+)
+from planetmint.application.basevalidationrules import BaseValidationRules
+from planetmint.backend.models.output import Output
+from planetmint.model.models import Models
+from planetmint.config import Config
+from planetmint.config_utils import load_validation_plugin
+
+logger = logging.getLogger(__name__)
+
+
+class Validator:
+    def __init__(self):
+        self.models = Models()
+        self.validation = Validator._get_validationmethod()
+
+    @staticmethod
+    def _get_validationmethod():
+        validationPlugin = Config().get().get("validation_plugin")
+
+        if validationPlugin:
+            validation_method = load_validation_plugin(validationPlugin)
+        else:
+            validation_method = BaseValidationRules
+        return validation_method
+
+    @staticmethod
+    def validate_inputs_distinct(tx: Transaction):
+        # Validate that all inputs are distinct
+        links = [i.fulfills.to_uri() for i in tx.inputs]
+        if len(links) != len(set(links)):
+            raise DoubleSpend('tx "{}" spends inputs twice'.format(tx.id))
+
+    @staticmethod
+    def validate_asset_id(tx: Transaction, input_txs: list):
+        # validate asset
+        if tx.operation != Transaction.COMPOSE:
+            asset_id = tx.get_asset_id(input_txs)
+            if asset_id != Transaction.read_out_asset_id(tx):
+                raise AssetIdMismatch(
+                    ("The asset id of the input does not" " match the asset id of the" " transaction")
+                )
+        else:
+            asset_ids = Transaction.get_asset_ids(input_txs)
+            if Transaction.read_out_asset_id(tx) in asset_ids:
+                raise AssetIdMismatch(("The asset ID of the compose must be different to all of its input asset IDs"))
+
+    @staticmethod
+    def validate_input_conditions(tx: Transaction, input_conditions: list[Output]):
+        # convert planetmint.Output objects to transactions.common.Output objects
+        input_conditions_dict = Output.list_to_dict(input_conditions)
+        input_conditions_converted = []
+        for input_cond in input_conditions_dict:
+            input_conditions_converted.append(TransactionOutput.from_dict(input_cond))
+
+        if not tx.inputs_valid(input_conditions_converted):
+            raise InvalidSignature("Transaction signature is invalid.")
+
+    def validate_compose_inputs(self, tx, current_transactions=[]) -> bool:
+        input_txs, input_conditions = self.models.get_input_txs_and_conditions(tx.inputs, current_transactions)
+
+        Validator.validate_input_conditions(tx, input_conditions)
+
+        Validator.validate_asset_id(tx, input_txs)
+
+        Validator.validate_inputs_distinct(tx)
+
+        return True
+
+    def validate_transfer_inputs(self, tx, current_transactions=[]) -> bool:
+        input_txs, input_conditions = self.models.get_input_txs_and_conditions(tx.inputs, current_transactions)
+
+        Validator.validate_input_conditions(tx, input_conditions)
+
+        Validator.validate_asset_id(tx, input_txs)
+
+        Validator.validate_inputs_distinct(tx)
+
+        input_amount = sum([input_condition.amount for input_condition in input_conditions])
+        output_amount = sum([output_condition.amount for output_condition in tx.outputs])
+
+        if output_amount != input_amount:
+            raise AmountError(
+                (
+                    "The amount used in the inputs `{}`" " needs to be same as the amount used" " in the outputs `{}`"
+                ).format(input_amount, output_amount)
+            )
+
+        return True
+
+    def validate_create_inputs(self, tx, current_transactions=[]) -> bool:
+        duplicates = any(txn for txn in current_transactions if txn.id == tx.id)
+        if self.models.is_committed(tx.id) or duplicates:
+            raise DuplicateTransaction("transaction `{}` already exists".format(tx.id))
+
+        fulfilling_inputs = [i for i in tx.inputs if i.fulfills is not None and i.fulfills.txid is not None]
+
+        if len(fulfilling_inputs) > 0:
+            input_txs, input_conditions = self.models.get_input_txs_and_conditions(
+                fulfilling_inputs, current_transactions
+            )
+            create_asset = tx.assets[0]
+            input_asset = input_txs[0].assets[tx.inputs[0].fulfills.output]["data"]
+            if create_asset != input_asset:
+                raise ValidationError("CREATE must have matching asset description with input transaction")
+            if input_txs[0].operation != Transaction.DECOMPOSE:
+                raise SchemaValidationError("CREATE can only consume DECOMPOSE outputs")
+
+        return True
+
+    def validate_transaction(self, transaction, current_transactions=[]):
+        """Validate a transaction against the current status of the database."""
+
+        # CLEANUP: The conditional below checks for transaction in dict format.
+        # It would be better to only have a single format for the transaction
+        # throught the code base.
+        if isinstance(transaction, dict):
+            try:
+                transaction = Transaction.from_dict(transaction, False)
+            except SchemaValidationError as e:
+                logger.warning("Invalid transaction schema: %s", e.__cause__.message)
+                return False
+            except ValidationError as e:
+                logger.warning("Invalid transaction (%s): %s", type(e).__name__, e)
+                return False
+
+        if transaction.operation == Transaction.CREATE:
+            self.validate_create_inputs(transaction, current_transactions)
+        elif transaction.operation in [Transaction.TRANSFER, Transaction.VOTE]:
+            self.validate_transfer_inputs(transaction, current_transactions)
+        elif transaction.operation in [Transaction.COMPOSE]:
+            self.validate_compose_inputs(transaction, current_transactions)
+
+        return transaction
+
+    def validate_election(self, transaction, current_transactions=[]):  # TODO: move somewhere else
+        """Validate election transaction
+
+        NOTE:
+        * A valid election is initiated by an existing validator.
+
+        * A valid election is one where voters are validators and votes are
+          allocated according to the voting power of each validator node.
+
+        Args:
+            :param planet: (Planetmint) an instantiated planetmint.lib.Planetmint object.
+            :param current_transactions: (list) A list of transactions to be validated along with the election
+
+        Returns:
+            Election: a Election object or an object of the derived Election subclass.
+
+        Raises:
+            ValidationError: If the election is invalid
+        """
+
+        duplicates = any(txn for txn in current_transactions if txn.id == transaction.id)
+        if self.models.is_committed(transaction.id) or duplicates:
+            raise DuplicateTransaction("transaction `{}` already exists".format(transaction.id))
+
+        current_validators = self.models.get_validators_dict()
+
+        # NOTE: Proposer should be a single node
+        if len(transaction.inputs) != 1 or len(transaction.inputs[0].owners_before) != 1:
+            raise MultipleInputsError("`tx_signers` must be a list instance of length one")
+
+        # NOTE: Check if the proposer is a validator.
+        [election_initiator_node_pub_key] = transaction.inputs[0].owners_before
+        if election_initiator_node_pub_key not in current_validators.keys():
+            raise InvalidProposer("Public key is not a part of the validator set")
+
+        # NOTE: Check if all validators have been assigned votes equal to their voting power
+        if not self.is_same_topology(current_validators, transaction.outputs):
+            raise UnequalValidatorSet("Validator set much be exactly same to the outputs of election")
+
+        if transaction.operation == VALIDATOR_ELECTION:
+            self.validate_validator_election(transaction)
+
+        return transaction
+
+    def is_same_topology(cls, current_topology, election_topology):
+        voters = {}
+        for voter in election_topology:
+            if len(voter.public_keys) > 1:
+                return False
+
+            [public_key] = voter.public_keys
+            voting_power = voter.amount
+            voters[public_key] = voting_power
+
+        # Check whether the voters and their votes is same to that of the
+        # validators and their voting power in the network
+        return current_topology == voters
+
+    def validate_validator_election(self, transaction):  # TODO: move somewhere else
+        """For more details refer BEP-21: https://github.com/planetmint/BEPs/tree/master/21"""
+
+        current_validators = self.models.get_validators_dict()
+
+        # NOTE: change more than 1/3 of the current power is not allowed
+        if transaction.get_assets()[0]["data"]["power"] >= (1 / 3) * sum(current_validators.values()):
+            raise InvalidPowerChange("`power` change must be less than 1/3 of total power")
+
+    def get_election_status(self, transaction):
+        election = self.models.get_election(transaction.id)
+        if election and election["is_concluded"]:
+            return Election.CONCLUDED
+
+        return Election.INCONCLUSIVE if self.has_validator_set_changed(transaction) else Election.ONGOING
+
+    def has_validator_set_changed(self, transaction):
+        latest_change = self.get_validator_change()
+        if latest_change is None:
+            return False
+
+        latest_change_height = latest_change["height"]
+
+        election = self.models.get_election(transaction.id)
+
+        return latest_change_height > election["height"]
+
+    def get_validator_change(self):
+        """Return the validator set from the most recent approved block
+
+        :return: {
+            'height': <block_height>,
+            'validators': <validator_set>
+        }
+        """
+        latest_block = self.models.get_latest_block()
+        if latest_block is None:
+            return None
+        return self.models.get_validator_set(latest_block["height"])
+
+    def get_validator_dict(self, height=None):
+        """Return a dictionary of validators with key as `public_key` and
+        value as the `voting_power`
+        """
+        validators = {}
+        for validator in self.models.get_validators(height):
+            # NOTE: we assume that Tendermint encodes public key in base64
+            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
+            validators[public_key] = validator["voting_power"]
+
+        return validators
+
+    # TODO to be moved to planetmint.commands.planetmint
+    def show_election_status(self, transaction):
+        data = transaction.assets[0]
+        data = data.to_dict()["data"]
+        if "public_key" in data.keys():
+            data["public_key"] = public_key_to_base64(data["public_key"]["value"])
+        response = ""
+        for k, v in data.items():
+            if k != "seed":
+                response += f"{k}={v}\n"
+        response += f"status={self.get_election_status(transaction)}"
+
+        if transaction.operation == CHAIN_MIGRATION_ELECTION:
+            response = self.append_chain_migration_status(response)
+
+        return response
+
+    # TODO to be moved to planetmint.commands.planetmint
+    def append_chain_migration_status(self, status):
+        chain = self.models.get_latest_abci_chain()
+        if chain is None or chain["is_synced"]:
+            return status
+
+        status += f'\nchain_id={chain["chain_id"]}'
+        block = self.models.get_latest_block()
+        status += f'\napp_hash={block["app_hash"]}'
+        validators = [
+            {
+                "pub_key": {
+                    "type": "tendermint/PubKeyEd25519",
+                    "value": k,
+                },
+                "power": v,
+            }
+            for k, v in self.get_validator_dict().items()
+        ]
+        status += f"\nvalidators={json.dumps(validators, indent=4)}"
+        return status
+
+    def get_recipients_list(self):
+        """Convert validator dictionary to a recipient list for `Transaction`"""
+
+        recipients = []
+        for public_key, voting_power in self.get_validator_dict().items():
+            recipients.append(([public_key], voting_power))
+
+        return recipients
+
+    def count_votes(self, election_pk, transactions):
+        votes = 0
+        for txn in transactions:
+            if txn.operation == Vote.OPERATION:
+                for output in txn.outputs:
+                    # NOTE: We enforce that a valid vote to election id will have only
+                    # election_pk in the output public keys, including any other public key
+                    # along with election_pk will lead to vote being not considered valid.
+                    if len(output.public_keys) == 1 and [election_pk] == output.public_keys:
+                        votes = votes + output.amount
+        return votes
+
+    def get_commited_votes(self, transaction, election_pk=None):  # TODO: move somewhere else
+        if election_pk is None:
+            election_pk = election_id_to_public_key(transaction.id)
+        txns = self.models.get_asset_tokens_for_public_key(transaction.id, election_pk)
+        return self.count_votes(election_pk, txns)
+
+    def _get_initiated_elections(self, height, txns):  # TODO: move somewhere else
+        elections = []
+        for tx in txns:
+            if not isinstance(tx, Election):
+                continue
+
+            elections.append({"election_id": tx.id, "height": height, "is_concluded": False})
+        return elections
+
+    def _get_votes(self, txns):  # TODO: move somewhere else
+        elections = OrderedDict()
+        for tx in txns:
+            if not isinstance(tx, Vote):
+                continue
+            election_id = Transaction.read_out_asset_id(tx)
+            if election_id not in elections:
+                elections[election_id] = []
+            elections[election_id].append(tx)
+        return elections
+
+    def process_block(self, new_height, txns):  # TODO: move somewhere else
+        """Looks for election and vote transactions inside the block, records
+        and processes elections.
+
+        Every election is recorded in the database.
+
+        Every vote has a chance to conclude the corresponding election. When
+        an election is concluded, the corresponding database record is
+        marked as such.
+
+        Elections and votes are processed in the order in which they
+        appear in the block. Elections are concluded in the order of
+        appearance of their first votes in the block.
+
+        For every election concluded in the block, calls its `on_approval`
+        method. The returned value of the last `on_approval`, if any,
+        is a validator set update to be applied in one of the following blocks.
+
+        `on_approval` methods are implemented by elections of particular type.
+        The method may contain side effects but should be idempotent. To account
+        for other concluded elections, if it requires so, the method should
+        rely on the database state.
+        """
+        # elections initiated in this block
+        initiated_elections = self._get_initiated_elections(new_height, txns)
+
+        if initiated_elections:
+            self.models.store_elections(initiated_elections)
+
+        # elections voted for in this block and their votes
+        elections = self._get_votes(txns)
+
+        validator_update = None
+        for election_id, votes in elections.items():
+            election = self.models.get_transaction(election_id)
+            if election is None:
+                continue
+
+            if not self.has_election_concluded(election, votes):
+                continue
+
+            validator_update = self.approve_election(election, new_height)
+            self.models.store_election(election.id, new_height, is_concluded=True)
+
+        return [validator_update] if validator_update else []
+
+    def has_election_concluded(self, transaction, current_votes=[]):  # TODO: move somewhere else
+        """Check if the election can be concluded or not.
+
+        * Elections can only be concluded if the validator set has not changed
+          since the election was initiated.
+        * Elections can be concluded only if the current votes form a supermajority.
+
+        Custom elections may override this function and introduce additional checks.
+        """
+        if self.has_validator_set_changed(transaction):
+            return False
+
+        if transaction.operation == VALIDATOR_ELECTION:
+            if not self.has_validator_election_concluded():
+                return False
+
+        if transaction.operation == CHAIN_MIGRATION_ELECTION:
+            if not self.has_chain_migration_concluded():
+                return False
+
+        election_pk = election_id_to_public_key(transaction.id)
+        votes_committed = self.get_commited_votes(transaction, election_pk)
+        votes_current = self.count_votes(election_pk, current_votes)
+
+        total_votes = sum(int(output.amount) for output in transaction.outputs)
+        if (votes_committed < (2 / 3) * total_votes) and (votes_committed + votes_current >= (2 / 3) * total_votes):
+            return True
+
+        return False
+
+    def has_validator_election_concluded(self):  # TODO: move somewhere else
+        latest_block = self.models.get_latest_block()
+        if latest_block is not None:
+            latest_block_height = latest_block["height"]
+            latest_validator_change = self.models.get_validator_set()["height"]
+
+            # TODO change to `latest_block_height + 3` when upgrading to Tendermint 0.24.0.
+            if latest_validator_change == latest_block_height + 2:
+                # do not conclude the election if there is a change assigned already
+                return False
+
+        return True
+
+    def has_chain_migration_concluded(self):  # TODO: move somewhere else
+        chain = self.models.get_latest_abci_chain()
+        if chain is not None and not chain["is_synced"]:
+            # do not conclude the migration election if
+            # there is another migration in progress
+            return False
+
+        return True
+
+    def rollback_election(self, new_height, txn_ids):  # TODO: move somewhere else
+        """Looks for election and vote transactions inside the block and
+        cleans up the database artifacts possibly created in `process_blocks`.
+
+        Part of the `end_block`/`commit` crash recovery.
+        """
+
+        # delete election records for elections initiated at this height and
+        # elections concluded at this height
+        self.models.delete_elections(new_height)
+
+        txns = [self.models.get_transaction(tx_id) for tx_id in txn_ids]
+
+        txns = [Transaction.from_dict(tx.to_dict()) for tx in txns if tx]
+
+        elections = self._get_votes(txns)
+        for election_id in elections:
+            election = self.models.get_transaction(election_id)
+            if election.operation == VALIDATOR_ELECTION:
+                # TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0.
+                self.models.delete_validator_set(new_height + 1)
+            if election.operation == CHAIN_MIGRATION_ELECTION:
+                self.models.delete_abci_chain(new_height)
+
+    def approve_election(self, election, new_height):
+        """Override to update the database state according to the
+        election rules. Consider the current database state to account for
+        other concluded elections, if required.
+        """
+        if election.operation == CHAIN_MIGRATION_ELECTION:
+            self.migrate_abci_chain()
+        if election.operation == VALIDATOR_ELECTION:
+            validator_updates = [election.assets[0].data]
+            curr_validator_set = self.models.get_validators(new_height)
+            updated_validator_set = new_validator_set(curr_validator_set, validator_updates)
+
+            updated_validator_set = [v for v in updated_validator_set if v["voting_power"] > 0]
+
+            # TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0.
+            self.models.store_validator_set(new_height + 1, updated_validator_set)
+            return encode_validator(election.assets[0].data)
+
+    def is_valid_transaction(self, tx, current_transactions=[]):
+        # NOTE: the function returns the Transaction object in case
+        # the transaction is valid
+        try:
+            return self.validate_transaction(tx, current_transactions)
+        except ValidationError as e:
+            logger.warning("Invalid transaction (%s): %s", type(e).__name__, e)
+            return False
+
+    def migrate_abci_chain(self):
+        """Generate and record a new ABCI chain ID. New blocks are not
+        accepted until we receive an InitChain ABCI request with
+        the matching chain ID and validator set.
+
+        Chain ID is generated based on the current chain and height.
+        `chain-X` => `chain-X-migrated-at-height-5`.
+        `chain-X-migrated-at-height-5` => `chain-X-migrated-at-height-21`.
+
+        If there is no known chain (we are at genesis), the function returns.
+        """
+        latest_chain = self.models.get_latest_abci_chain()
+        if latest_chain is None:
+            return
+
+        block = self.models.get_latest_block()
+
+        suffix = "-migrated-at-height-"
+        chain_id = latest_chain["chain_id"]
+        block_height_str = str(block["height"])
+        new_chain_id = chain_id.split(suffix)[0] + suffix + block_height_str
+
+        self.models.store_abci_chain(block["height"] + 1, new_chain_id, False)
+
+    def rollback(self):
+        pre_commit = None
+
+        try:
+            pre_commit = self.models.get_pre_commit_state()
+        except Exception as e:
+            logger.exception("Unexpected error occurred while executing get_pre_commit_state()", e)
+
+        if pre_commit is None or len(pre_commit) == 0:
+            # the pre_commit record is first stored in the first `end_block`
+            return
+
+        latest_block = self.models.get_latest_block()
+        if latest_block is None:
+            logger.error("Found precommit state but no blocks!")
+            sys.exit(1)
+
+        # NOTE: the pre-commit state is always at most 1 block ahead of the commited state
+        if latest_block["height"] < pre_commit["height"]:
+            self.rollback_election(pre_commit["height"], pre_commit["transactions"])
+            self.models.delete_transactions(pre_commit["transactions"])
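The conclusion condition in `has_election_concluded` fires exactly once: on the block whose fresh votes push the committed tally across the 2/3 supermajority. A small worked example of that arithmetic:

    # An election that minted 10 vote tokens needs 2/3 * 10 ≈ 6.67 votes.
    total_votes = 10
    threshold = (2 / 3) * total_votes

    votes_committed = 5  # votes already stored from earlier blocks
    votes_current = 3    # votes arriving in the current block

    # Not concluded before this block (5 < 6.67); concluded with it (8 >= 6.67).
    concluded = (votes_committed < threshold) and (votes_committed + votes_current >= threshold)
    print(concluded)  # True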
@@ -46,7 +46,7 @@ class DbTransaction:
     )
 
     @staticmethod
-    def remove_generated_fields(tx_dict: dict):
+    def remove_generated_fields(tx_dict: dict) -> dict:
        tx_dict["outputs"] = [
            DbTransaction.remove_generated_or_none_output_keys(output) for output in tx_dict["outputs"]
        ]
@@ -55,13 +55,19 @@ class DbTransaction:
         return tx_dict
 
     @staticmethod
-    def remove_generated_or_none_output_keys(output):
+    def remove_generated_or_none_output_keys(output: dict) -> dict:
         output["condition"]["details"] = {k: v for k, v in output["condition"]["details"].items() if v is not None}
         if "id" in output:
             output.pop("id")
         return output
 
     def to_dict(self) -> dict:
+        """
+
+        Returns
+        -------
+        object
+        """
         assets = Asset.list_to_dict(self.assets)
         tx = {
             "inputs": Input.list_to_dict(self.inputs),
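`remove_generated_or_none_output_keys` strips DB-generated artifacts before an output dict leaves the backend layer. For example (the dict shape and the import path are assumed for illustration):

    from planetmint.backend.models.dbtransaction import DbTransaction  # path assumed

    output = {
        "id": "stored-row-id",  # illustrative DB-generated key
        "condition": {"details": {"type": "ed25519-sha-256", "public_key": "abc", "subconditions": None}},
    }
    cleaned = DbTransaction.remove_generated_or_none_output_keys(output)
    # The None-valued "subconditions" entry and the generated "id" are gone.
    assert cleaned == {"condition": {"details": {"type": "ed25519-sha-256", "public_key": "abc"}}}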
@@ -70,9 +70,9 @@ class Output:
     def outputs_dict(output: dict, transaction_id: str = "") -> Output:
         out_dict: Output
         if output["condition"]["details"].get("subconditions") is None:
-            out_dict = output_with_public_key(output, transaction_id)
+            out_dict = Output.output_with_public_key(output, transaction_id)
         else:
-            out_dict = output_with_sub_conditions(output, transaction_id)
+            out_dict = Output.output_with_sub_conditions(output, transaction_id)
         return out_dict
 
     @staticmethod
@@ -111,7 +111,7 @@ class Output:
     def list_to_dict(output_list: list[Output]) -> list[dict]:
         return [output.to_dict() for output in output_list or []]
 
+    @staticmethod
     def output_with_public_key(output, transaction_id) -> Output:
         return Output(
             transaction_id=transaction_id,
@@ -122,7 +122,7 @@ def output_with_public_key(output, transaction_id) -> Output:
             ),
         )
 
+    @staticmethod
     def output_with_sub_conditions(output, transaction_id) -> Output:
         return Output(
             transaction_id=transaction_id,
@@ -14,23 +14,29 @@ import json
 import sys
 import planetmint
 
-from planetmint.core import rollback
-from planetmint.utils import load_node_key
 from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
 from transactions.common.exceptions import DatabaseDoesNotExist, ValidationError
 from transactions.types.elections.vote import Vote
 from transactions.types.elections.chain_migration_election import ChainMigrationElection
 from transactions.types.elections.validator_utils import election_id_to_public_key
+from transactions.types.elections.validator_election import ValidatorElection
 from transactions.common.transaction import Transaction
-from planetmint import ValidatorElection, Planetmint
+
+from planetmint.abci.rpc import ABCI_RPC
+from planetmint.abci.utils import load_node_key
+from planetmint.application.validator import Validator
 from planetmint.backend import schema
 from planetmint.commands import utils
 from planetmint.commands.utils import configure_planetmint, input_on_stderr
-from planetmint.log import setup_logging
-from planetmint.tendermint_utils import public_key_from_base64
+from planetmint.config_utils import setup_logging
+from planetmint.abci.tendermint_utils import public_key_from_base64
+from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
 from planetmint.commands.election_types import elections
 from planetmint.version import __tm_supported_versions__
 from planetmint.config import Config
+from planetmint.model.models import Models
 
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
@@ -118,7 +124,7 @@ def run_election_new(args, planet):
     globals()[f"run_election_new_{election_type}"](args, planet)
 
 
-def create_new_election(sk, planet, election_class, data):
+def create_new_election(sk, planet, election_class, data, abci_rpc):
     try:
         key = load_node_key(sk)
         voters = planet.get_recipients_list()
@ -131,7 +137,9 @@ def create_new_election(sk, planet, election_class, data):
|
|||||||
logger.error(fd_404)
|
logger.error(fd_404)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
resp = planet.write_transaction(election, BROADCAST_TX_COMMIT)
|
resp = abci_rpc.write_transaction(
|
||||||
|
MODE_LIST, abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT
|
||||||
|
)
|
||||||
if resp == (202, ""):
|
if resp == (202, ""):
|
||||||
logger.info("[SUCCESS] Submitted proposal with id: {}".format(election.id))
|
logger.info("[SUCCESS] Submitted proposal with id: {}".format(election.id))
|
||||||
return election.id
|
return election.id
|
||||||
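Note: this hunk is the heart of the refactor — broadcasting no longer goes through the node object but through an injected ABCI_RPC instance. A minimal sketch of the new calling convention, assuming only the signature shown above; FakeAbciRpc and its endpoint URL are hypothetical stand-ins, and the mode constants mirror Tendermint's broadcast method names:

    BROADCAST_TX_COMMIT = "broadcast_tx_commit"
    MODE_COMMIT = BROADCAST_TX_COMMIT
    MODE_LIST = ("broadcast_tx_async", "broadcast_tx_sync", BROADCAST_TX_COMMIT)


    class FakeAbciRpc:
        """Hypothetical stand-in for planetmint.abci.rpc.ABCI_RPC."""

        tendermint_rpc_endpoint = "http://localhost:26657/"  # illustrative endpoint

        def write_transaction(self, mode_list, endpoint, mode, transaction, requested_mode):
            # The real class validates the mode and POSTs the encoded
            # transaction to Tendermint; here we only echo the contract.
            assert requested_mode in mode_list
            return (202, "")


    abci_rpc = FakeAbciRpc()
    resp = abci_rpc.write_transaction(
        MODE_LIST, abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, {"id": "tx1"}, BROADCAST_TX_COMMIT
    )
    assert resp == (202, "")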
@@ -140,7 +148,7 @@ def create_new_election(sk, planet, election_class, data):
         return False


-def run_election_new_upsert_validator(args, planet):
+def run_election_new_upsert_validator(args, planet, abci_rpc):
     """Initiates an election to add/update/remove a validator to an existing Planetmint network

     :param args: dict
@@ -164,10 +172,10 @@ def run_election_new_upsert_validator(args, planet):
         }
     ]

-    return create_new_election(args.sk, planet, ValidatorElection, new_validator)
+    return create_new_election(args.sk, planet, ValidatorElection, new_validator, abci_rpc)


-def run_election_new_chain_migration(args, planet):
+def run_election_new_chain_migration(args, planet, abci_rpc):
     """Initiates an election to halt block production

     :param args: dict
@@ -178,10 +186,10 @@ def run_election_new_chain_migration(args, planet):
     :return: election_id or `False` in case of failure
     """

-    return create_new_election(args.sk, planet, ChainMigrationElection, [{"data": {}}])
+    return create_new_election(args.sk, planet, ChainMigrationElection, [{"data": {}}], abci_rpc)


-def run_election_approve(args, planet):
+def run_election_approve(args, validator: Validator, abci_rpc):
     """Approve an election

     :param args: dict
@@ -194,7 +202,7 @@ def run_election_approve(args, planet):
     """

     key = load_node_key(args.sk)
-    tx = planet.get_transaction(args.election_id)
+    tx = validator.models.get_transaction(args.election_id)
     voting_powers = [v.amount for v in tx.outputs if key.public_key in v.public_keys]
     if len(voting_powers) > 0:
         voting_power = voting_powers[0]
@@ -206,9 +214,11 @@ def run_election_approve(args, planet):
     inputs = [i for i in tx_converted.to_inputs() if key.public_key in i.owners_before]
     election_pub_key = election_id_to_public_key(tx.id)
     approval = Vote.generate(inputs, [([election_pub_key], voting_power)], [tx.id]).sign([key.private_key])
-    planet.validate_transaction(approval)
+    validator.validate_transaction(approval)

-    resp = planet.write_transaction(approval, BROADCAST_TX_COMMIT)
+    resp = abci_rpc.write_transaction(
+        MODE_LIST, abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, approval, BROADCAST_TX_COMMIT
+    )

     if resp == (202, ""):
         logger.info("[SUCCESS] Your vote has been submitted")
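Note: before generating the Vote, the command looks up how many vote tokens the approving node holds on the election's outputs. The same selection in isolation, with simplified output shapes (the namedtuple and key strings below are illustrative only):

    from collections import namedtuple

    Output = namedtuple("Output", ("amount", "public_keys"))

    outputs = [Output(3, ["validator-A"]), Output(5, ["validator-B"])]
    my_public_key = "validator-B"

    # Same filter as `voting_powers = [v.amount for v in tx.outputs ...]` above:
    voting_powers = [o.amount for o in outputs if my_public_key in o.public_keys]
    if voting_powers:
        voting_power = voting_powers[0]  # this node may cast up to 5 vote tokens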
@@ -218,7 +228,7 @@ def run_election_approve(args, planet):
     return False


-def run_election_show(args, planet):
+def run_election_show(args, validator: Validator):
     """Retrieves information about an election

     :param args: dict
@@ -228,12 +238,12 @@ def run_election_show(args, planet):
     :param planet: an instance of Planetmint
     """

-    election = planet.get_transaction(args.election_id)
+    election = validator.models.get_transaction(args.election_id)
     if not election:
         logger.error(f"No election found with election_id {args.election_id}")
         return

-    response = planet.show_election_status(election)
+    response = validator.show_election_status(election)

     logger.info(response)

@@ -241,8 +251,8 @@ def run_election_show(args, planet):


 def _run_init():
-    bdb = planetmint.Planetmint()
-    schema.init_database(bdb.connection)
+    validator = Validator()
+    schema.init_database(validator.models.connection)


 @configure_planetmint
@@ -269,13 +279,10 @@ def run_drop(args):
         print("Drop was executed, but spaces doesn't exist.", file=sys.stderr)


-def run_recover(b):
-    rollback(b)
-
-
 @configure_planetmint
 def run_start(args):
     """Start the processes to run the node"""
+    logger.info("Planetmint Version %s", planetmint.version.__version__)

     # Configure Logging
     setup_logging()
@@ -284,8 +291,8 @@ def run_start(args):
     logger.info("Initializing database")
     _run_init()

-    logger.info("Planetmint Version %s", planetmint.version.__version__)
-    run_recover(planetmint.lib.Planetmint())
+    validator = Validator()
+    validator.rollback()

     logger.info("Starting Planetmint main process.")
     from planetmint.start import start
@@ -1,19 +1,10 @@
 import copy
 import logging
 import os

-# from planetmint.log import DEFAULT_LOGGING_CONFIG as log_config
-from planetmint.version import __version__ # noqa
 from decouple import config

-class Singleton(type):
-    _instances = {}
-
-    def __call__(cls, *args, **kwargs):
-        if cls not in cls._instances:
-            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
-        return cls._instances[cls]
-
-
+from planetmint.utils import Singleton
+from planetmint.version import __version__
+
 class Config(metaclass=Singleton):
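Note: the Singleton metaclass deleted here now comes from planetmint.utils (see the new import above); it caches one instance per class, which is what makes Config() return the same object everywhere. A self-contained illustration of the removed code's behaviour:

    class Singleton(type):
        _instances = {}

        def __call__(cls, *args, **kwargs):
            if cls not in cls._instances:
                cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
            return cls._instances[cls]


    class Config(metaclass=Singleton):
        def __init__(self):
            self.settings = {}  # illustrative payload


    assert Config() is Config()  # every call yields the same cached instance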
@@ -23,10 +23,14 @@ import logging
 import collections.abc

 from functools import lru_cache
+from logging.config import dictConfig as set_logging_config

 from pkg_resources import iter_entry_points, ResolutionError
-from planetmint.config import Config
+from transactions.common.exceptions import ConfigurationError

+from planetmint.config import Config, DEFAULT_LOGGING_CONFIG
+from planetmint.application.basevalidationrules import BaseValidationRules
 from transactions.common import exceptions
-from planetmint.validation import BaseValidationRules

 # TODO: move this to a proper configuration file for logging
 logging.getLogger("requests").setLevel(logging.WARNING)
@@ -306,3 +310,69 @@ def load_events_plugins(names=None):
         plugins.append((name, entry_point.load()))

     return plugins
+
+
+def _normalize_log_level(level):
+    try:
+        return level.upper()
+    except AttributeError as exc:
+        raise ConfigurationError("Log level must be a string!") from exc
+
+
+def setup_logging():
+    """Function to configure log handlers.
+
+    .. important::
+
+        Configuration, if needed, should be applied before invoking this
+        decorator, as starting the subscriber process for logging will
+        configure the root logger for the child process based on the
+        state of :obj:`planetmint.config` at the moment this decorator
+        is invoked.
+
+    """
+
+    logging_configs = DEFAULT_LOGGING_CONFIG
+    new_logging_configs = Config().get()["log"]
+
+    if "file" in new_logging_configs:
+        filename = new_logging_configs["file"]
+        logging_configs["handlers"]["file"]["filename"] = filename
+
+    if "error_file" in new_logging_configs:
+        error_filename = new_logging_configs["error_file"]
+        logging_configs["handlers"]["errors"]["filename"] = error_filename
+
+    if "level_console" in new_logging_configs:
+        level = _normalize_log_level(new_logging_configs["level_console"])
+        logging_configs["handlers"]["console"]["level"] = level
+
+    if "level_logfile" in new_logging_configs:
+        level = _normalize_log_level(new_logging_configs["level_logfile"])
+        logging_configs["handlers"]["file"]["level"] = level
+
+    if "fmt_console" in new_logging_configs:
+        fmt = new_logging_configs["fmt_console"]
+        logging_configs["formatters"]["console"]["format"] = fmt
+
+    if "fmt_logfile" in new_logging_configs:
+        fmt = new_logging_configs["fmt_logfile"]
+        logging_configs["formatters"]["file"]["format"] = fmt
+
+    if "datefmt_console" in new_logging_configs:
+        fmt = new_logging_configs["datefmt_console"]
+        logging_configs["formatters"]["console"]["datefmt"] = fmt
+
+    if "datefmt_logfile" in new_logging_configs:
+        fmt = new_logging_configs["datefmt_logfile"]
+        logging_configs["formatters"]["file"]["datefmt"] = fmt
+
+    log_levels = new_logging_configs.get("granular_levels", {})
+
+    for logger_name, level in log_levels.items():
+        level = _normalize_log_level(level)
+        try:
+            logging_configs["loggers"][logger_name]["level"] = level
+        except KeyError:
+            logging_configs["loggers"][logger_name] = {"level": level}
+
+    set_logging_config(logging_configs)
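Note: setup_logging moves into config_utils and patches DEFAULT_LOGGING_CONFIG in place before handing it to dictConfig. A runnable sketch of the granular_levels branch, with a minimal stand-in for DEFAULT_LOGGING_CONFIG (the real one lives in planetmint.config and also defines file handlers and formatters):

    import logging
    from logging.config import dictConfig as set_logging_config

    logging_configs = {
        "version": 1,
        "handlers": {"console": {"class": "logging.StreamHandler", "level": "DEBUG"}},
        "loggers": {},
        "root": {"handlers": ["console"], "level": "INFO"},
    }

    # e.g. the "granular_levels" section of the "log" config consumed above:
    granular_levels = {"planetmint.core": "debug"}

    for logger_name, level in granular_levels.items():
        level = level.upper()  # what _normalize_log_level does for string input
        try:
            logging_configs["loggers"][logger_name]["level"] = level
        except KeyError:
            logging_configs["loggers"][logger_name] = {"level": level}

    set_logging_config(logging_configs)
    logging.getLogger("planetmint.core").debug("now visible at DEBUG level")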
planetmint/ipc/__init__.py (new file, 0 lines)
planetmint/ipc/events.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+# Copyright © 2020 Interplanetary Database Association e.V.,
+# Planetmint and IPDB software contributors.
+# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
+# Code is Apache-2.0 and docs are CC-BY-4.0
+
+POISON_PILL = "POISON_PILL"
+
+
+class EventTypes:
+    """Container class that holds all the possible
+    events Planetmint manages.
+    """
+
+    # If you add a new Event Type, make sure to add it
+    # to the docs in docs/server/source/event-plugin-api.rst
+    ALL = ~0
+    BLOCK_VALID = 1
+    BLOCK_INVALID = 2
+    # NEW_EVENT = 4
+    # NEW_EVENT = 8
+    # NEW_EVENT = 16...
+
+
+class Event:
+    """An Event."""
+
+    def __init__(self, event_type, event_data):
+        """Creates a new event.
+
+        Args:
+            event_type (int): the type of the event, see
+                :class:`~planetmint.events.EventTypes`
+            event_data (obj): the data of the event.
+        """
+
+        self.type = event_type
+        self.data = event_data
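Note: the event types are powers of two, so a subscriber's interest mask can combine them, and ALL (~0, all bits set) matches every type. The masking in isolation:

    ALL = ~0
    BLOCK_VALID = 1
    BLOCK_INVALID = 2

    subscribed = BLOCK_VALID | BLOCK_INVALID
    assert subscribed & BLOCK_VALID   # this mask receives valid-block events
    assert ALL & BLOCK_INVALID        # ALL matches every event type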
@@ -1,45 +1,8 @@
-# Copyright © 2020 Interplanetary Database Association e.V.,
-# Planetmint and IPDB software contributors.
-# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
-# Code is Apache-2.0 and docs are CC-BY-4.0
-
 from queue import Empty
 from collections import defaultdict
 import multiprocessing

+from planetmint.ipc.events import EventTypes, POISON_PILL
-POISON_PILL = "POISON_PILL"
-
-
-class EventTypes:
-    """Container class that holds all the possible
-    events Planetmint manages.
-    """
-
-    # If you add a new Event Type, make sure to add it
-    # to the docs in docs/server/source/event-plugin-api.rst
-    ALL = ~0
-    BLOCK_VALID = 1
-    BLOCK_INVALID = 2
-    # NEW_EVENT = 4
-    # NEW_EVENT = 8
-    # NEW_EVENT = 16...
-
-
-class Event:
-    """An Event."""
-
-    def __init__(self, event_type, event_data):
-        """Creates a new event.
-
-        Args:
-            event_type (int): the type of the event, see
-                :class:`~planetmint.events.EventTypes`
-            event_data (obj): the data of the event.
-        """
-
-        self.type = event_type
-        self.data = event_data


 class Exchange:
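Note: POISON_PILL is the shutdown sentinel used with the subscriber queues; a consumer drains its queue until it sees the pill. A minimal sketch of the pattern, assuming nothing about the Exchange internals beyond the sentinel itself:

    import multiprocessing

    POISON_PILL = "POISON_PILL"

    queue = multiprocessing.Queue()
    queue.put("event-1")
    queue.put(POISON_PILL)

    while True:
        item = queue.get()
        if item == POISON_PILL:
            break  # orderly shutdown of the consumer
        print("handling", item)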
@@ -1,952 +0,0 @@
-# Copyright © 2020 Interplanetary Database Association e.V.,
-# Planetmint and IPDB software contributors.
-# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
-# Code is Apache-2.0 and docs are CC-BY-4.0
-
-"""Module containing main contact points with Tendermint and
-MongoDB.
-
-"""
-import logging
-from planetmint.backend.connection import Connection
-
-import json
-import rapidjson
-import requests
-
-from itertools import chain
-from collections import namedtuple, OrderedDict
-from uuid import uuid4
-from hashlib import sha3_256
-from transactions import Transaction, Vote
-from transactions.common.crypto import public_key_from_ed25519_key
-from transactions.common.exceptions import (
-    SchemaValidationError,
-    ValidationError,
-    DuplicateTransaction,
-    InvalidSignature,
-    DoubleSpend,
-    InputDoesNotExist,
-    AssetIdMismatch,
-    AmountError,
-    MultipleInputsError,
-    InvalidProposer,
-    UnequalValidatorSet,
-    InvalidPowerChange,
-)
-from transactions.common.transaction import VALIDATOR_ELECTION, CHAIN_MIGRATION_ELECTION
-from transactions.common.transaction_mode_types import (
-    BROADCAST_TX_COMMIT,
-    BROADCAST_TX_ASYNC,
-    BROADCAST_TX_SYNC,
-)
-from transactions.common.output import Output as TransactionOutput
-from transactions.types.elections.election import Election
-from transactions.types.elections.validator_utils import election_id_to_public_key
-
-from planetmint.backend.models import Output, DbTransaction, Asset, MetaData
-from planetmint.backend.tarantool.const import (
-    TARANT_TABLE_GOVERNANCE,
-    TARANT_TABLE_TRANSACTION,
-)
-from planetmint.config import Config
-from planetmint import backend, config_utils, fastquery
-from planetmint.tendermint_utils import (
-    encode_transaction,
-    merkleroot,
-    key_from_base64,
-    public_key_to_base64,
-    encode_validator,
-    new_validator_set,
-)
-from planetmint.validation import BaseValidationRules
-from planetmint.const import GOVERNANCE_TRANSACTION_TYPES
-
-logger = logging.getLogger(__name__)
-
-
-class Planetmint(object):
-    """Planetmint API
-
-    Create, read, sign, write transactions to the database
-    """
-
-    def __init__(self, connection=None):
-        """Initialize the Planetmint instance
-
-        A Planetmint instance has several configuration parameters (e.g. host).
-        If a parameter value is passed as an argument to the Planetmint
-        __init__ method, then that is the value it will have.
-        Otherwise, the parameter value will come from an environment variable.
-        If that environment variable isn't set, then the value
-        will come from the local configuration file. And if that variable
-        isn't in the local configuration file, then the parameter will have
-        its default value (defined in planetmint.__init__).
-
-        Args:
-            connection (:class:`~planetmint.backend.connection.Connection`):
-                A connection to the database.
-        """
-        config_utils.autoconfigure()
-        self.mode_commit = BROADCAST_TX_COMMIT
-        self.mode_list = (BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC, self.mode_commit)
-        self.tendermint_host = Config().get()["tendermint"]["host"]
-        self.tendermint_port = Config().get()["tendermint"]["port"]
-        self.endpoint = "http://{}:{}/".format(self.tendermint_host, self.tendermint_port)
-
-        validationPlugin = Config().get().get("validation_plugin")
-
-        if validationPlugin:
-            self.validation = config_utils.load_validation_plugin(validationPlugin)
-        else:
-            self.validation = BaseValidationRules
-        self.connection = connection if connection is not None else Connection()
-
-    def post_transaction(self, transaction, mode):
-        """Submit a valid transaction to the mempool."""
-        if not mode or mode not in self.mode_list:
-            raise ValidationError("Mode must be one of the following {}.".format(", ".join(self.mode_list)))
-
-        tx_dict = transaction.tx_dict if transaction.tx_dict else transaction.to_dict()
-        payload = {
-            "method": mode,
-            "jsonrpc": "2.0",
-            "params": [encode_transaction(tx_dict)],
-            "id": str(uuid4()),
-        }
-        # TODO: handle connection errors!
-        return requests.post(self.endpoint, json=payload)
-
-    def write_transaction(self, transaction, mode):
-        # This method offers backward compatibility with the Web API.
-        """Submit a valid transaction to the mempool."""
-        response = self.post_transaction(transaction, mode)
-        return self._process_post_response(response.json(), mode)
-
-    def _process_post_response(self, response, mode):
-        logger.debug(response)
-
-        error = response.get("error")
-        if error:
-            status_code = 500
-            message = error.get("message", "Internal Error")
-            data = error.get("data", "")
-
-            if "Tx already exists in cache" in data:
-                status_code = 400
-
-            return (status_code, message + " - " + data)
-
-        result = response["result"]
-        if mode == self.mode_commit:
-            check_tx_code = result.get("check_tx", {}).get("code", 0)
-            deliver_tx_code = result.get("deliver_tx", {}).get("code", 0)
-            error_code = check_tx_code or deliver_tx_code
-        else:
-            error_code = result.get("code", 0)
-
-        if error_code:
-            return (500, "Transaction validation failed")
-
-        return (202, "")
-
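Note: the (status, message) tuples produced above are the contract the CLI checks against (202, ""). In commit mode a broadcast only succeeds if neither CheckTx nor DeliverTx reports an error code; the same logic in isolation:

    def process_commit_result(result):
        # Mirrors _process_post_response for mode == broadcast_tx_commit.
        check_tx_code = result.get("check_tx", {}).get("code", 0)
        deliver_tx_code = result.get("deliver_tx", {}).get("code", 0)
        if check_tx_code or deliver_tx_code:
            return (500, "Transaction validation failed")
        return (202, "")

    assert process_commit_result({"check_tx": {"code": 0}, "deliver_tx": {"code": 0}}) == (202, "")
    assert process_commit_result({"deliver_tx": {"code": 1}}) == (500, "Transaction validation failed")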
-    def store_bulk_transactions(self, transactions):
-        txns = []
-        gov_txns = []
-
-        for t in transactions:
-            transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict()))
-            if transaction["operation"] in GOVERNANCE_TRANSACTION_TYPES:
-                gov_txns.append(transaction)
-            else:
-                txns.append(transaction)
-
-        backend.query.store_transactions(self.connection, txns, TARANT_TABLE_TRANSACTION)
-        backend.query.store_transactions(self.connection, gov_txns, TARANT_TABLE_GOVERNANCE)
-
-    def delete_transactions(self, txs):
-        return backend.query.delete_transactions(self.connection, txs)
-
-    def update_utxoset(self, transaction):
-        self.updated__ = """Update the UTXO set given ``transaction``. That is, remove
-        the outputs that the given ``transaction`` spends, and add the
-        outputs that the given ``transaction`` creates.
-
-        Args:
-            transaction (:obj:`~planetmint.models.Transaction`): A new
-                transaction incoming into the system for which the UTXO
-                set needs to be updated.
-        """
-        spent_outputs = [spent_output for spent_output in transaction.spent_outputs]
-        if spent_outputs:
-            self.delete_unspent_outputs(*spent_outputs)
-        self.store_unspent_outputs(*[utxo._asdict() for utxo in transaction.unspent_outputs])
-
-    def store_unspent_outputs(self, *unspent_outputs):
-        """Store the given ``unspent_outputs`` (utxos).
-
-        Args:
-            *unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
-                length tuple or list of unspent outputs.
-        """
-        if unspent_outputs:
-            return backend.query.store_unspent_outputs(self.connection, *unspent_outputs)
-
-    def get_utxoset_merkle_root(self):
-        """Returns the merkle root of the utxoset. This implies that
-        the utxoset is first put into a merkle tree.
-
-        For now, the merkle tree and its root will be computed each
-        time. This obviously is not efficient and a better approach
-        that limits the repetition of the same computation when
-        unnecessary should be sought. For instance, future optimizations
-        could simply re-compute the branches of the tree that were
-        affected by a change.
-
-        The transaction hash (id) and output index should be sufficient
-        to uniquely identify a utxo, and consequently only that
-        information from a utxo record is needed to compute the merkle
-        root. Hence, each node of the merkle tree should contain the
-        tuple (txid, output_index).
-
-        .. important:: The leaves of the tree will need to be sorted in
-            some kind of lexicographical order.
-
-        Returns:
-            str: Merkle root in hexadecimal form.
-        """
-        utxoset = backend.query.get_unspent_outputs(self.connection)
-        # TODO Once ready, use the already pre-computed utxo_hash field.
-        # See common/transactions.py for details.
-        hashes = [
-            sha3_256("{}{}".format(utxo["transaction_id"], utxo["output_index"]).encode()).digest() for utxo in utxoset
-        ]
-        # TODO Notice the sorted call!
-        return merkleroot(sorted(hashes))
-
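Note: the merkle leaves are built from (txid, output_index) alone, hashed with SHA3-256 and sorted before the tree is assembled (merkleroot itself comes from planetmint.tendermint_utils). Leaf construction in isolation, with made-up utxo records:

    from hashlib import sha3_256

    utxoset = [
        {"transaction_id": "aa" * 32, "output_index": 0},  # illustrative records
        {"transaction_id": "bb" * 32, "output_index": 1},
    ]
    hashes = [
        sha3_256("{}{}".format(u["transaction_id"], u["output_index"]).encode()).digest()
        for u in utxoset
    ]
    leaves = sorted(hashes)  # lexicographic order, as the docstring requires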
-    def get_unspent_outputs(self):
-        """Get the utxoset.
-
-        Returns:
-            generator of unspent_outputs.
-        """
-        cursor = backend.query.get_unspent_outputs(self.connection)
-        return (record for record in cursor)
-
-    def delete_unspent_outputs(self, *unspent_outputs):
-        """Deletes the given ``unspent_outputs`` (utxos).
-
-        Args:
-            *unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
-                length tuple or list of unspent outputs.
-        """
-        if unspent_outputs:
-            return backend.query.delete_unspent_outputs(self.connection, *unspent_outputs)
-
-    def is_committed(self, transaction_id):
-        transaction = backend.query.get_transaction_single(self.connection, transaction_id)
-        return bool(transaction)
-
-    def get_transaction(self, transaction_id):
-        return backend.query.get_transaction_single(self.connection, transaction_id)
-
-    def get_transactions(self, txn_ids):
-        return backend.query.get_transactions(self.connection, txn_ids)
-
-    def get_transactions_filtered(self, asset_ids, operation=None, last_tx=False):
-        """Get a list of transactions filtered on some criteria"""
-        txids = backend.query.get_txids_filtered(self.connection, asset_ids, operation, last_tx)
-        for txid in txids:
-            yield self.get_transaction(txid)
-
-    def get_outputs_by_tx_id(self, txid):
-        return backend.query.get_outputs_by_tx_id(self.connection, txid)
-
-    def get_outputs_filtered(self, owner, spent=None):
-        """Get a list of output links filtered on some criteria
-
-        Args:
-            owner (str): base58 encoded public_key.
-            spent (bool): If ``True`` return only the spent outputs. If
-                ``False`` return only unspent outputs. If spent is
-                not specified (``None``) return all outputs.
-
-        Returns:
-            :obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
-                pointing to another transaction's condition
-        """
-        outputs = self.fastquery.get_outputs_by_public_key(owner)
-        if spent is None:
-            return outputs
-        elif spent is True:
-            return self.fastquery.filter_unspent_outputs(outputs)
-        elif spent is False:
-            return self.fastquery.filter_spent_outputs(outputs)
-
-    def get_spent(self, txid, output, current_transactions=[]):
-        transactions = backend.query.get_spent(self.connection, txid, output)
-
-        current_spent_transactions = []
-        for ctxn in current_transactions:
-            for ctxn_input in ctxn.inputs:
-                if ctxn_input.fulfills and ctxn_input.fulfills.txid == txid and ctxn_input.fulfills.output == output:
-                    current_spent_transactions.append(ctxn)
-
-        transaction = None
-        if len(transactions) + len(current_spent_transactions) > 1:
-            raise DoubleSpend('tx "{}" spends inputs twice'.format(txid))
-        elif transactions:
-            tx_id = transactions[0].id
-            tx = backend.query.get_transaction_single(self.connection, tx_id)
-            transaction = tx.to_dict()
-        elif current_spent_transactions:
-            transaction = current_spent_transactions[0]
-
-        return transaction
-
-    def store_block(self, block):
-        """Create a new block."""
-
-        return backend.query.store_block(self.connection, block)
-
-    def get_latest_block(self) -> dict:
-        """Get the block with largest height."""
-
-        return backend.query.get_latest_block(self.connection)
-
-    def get_block(self, block_id) -> dict:
-        """Get the block with the specified `block_id`.
-
-        Returns the block corresponding to `block_id` or None if no match is
-        found.
-
-        Args:
-            block_id (int): block id of the block to get.
-        """
-
-        block = backend.query.get_block(self.connection, block_id)
-        latest_block = self.get_latest_block()
-        latest_block_height = latest_block["height"] if latest_block else 0
-
-        if not block and block_id > latest_block_height:
-            return
-
-        return block
-
-    def get_block_containing_tx(self, txid):
-        """Retrieve the list of blocks (block ids) containing a
-        transaction with transaction id `txid`
-
-        Args:
-            txid (str): transaction id of the transaction to query
-
-        Returns:
-            Block id list (list(int))
-        """
-        block = backend.query.get_block_with_transaction(self.connection, txid)
-
-        return block
-
-    def validate_transaction(self, transaction, current_transactions=[]):
-        """Validate a transaction against the current status of the database."""
-
-        # CLEANUP: The conditional below checks for transaction in dict format.
-        # It would be better to only have a single format for the transaction
-        # throughout the code base.
-        if isinstance(transaction, dict):
-            try:
-                transaction = Transaction.from_dict(transaction, False)
-            except SchemaValidationError as e:
-                logger.warning("Invalid transaction schema: %s", e.__cause__.message)
-                return False
-            except ValidationError as e:
-                logger.warning("Invalid transaction (%s): %s", type(e).__name__, e)
-                return False
-
-        if transaction.operation == Transaction.CREATE:
-            self.validate_create_inputs(transaction, current_transactions)
-        elif transaction.operation in [Transaction.TRANSFER, Transaction.VOTE]:
-            self.validate_transfer_inputs(transaction, current_transactions)
-        elif transaction.operation in [Transaction.COMPOSE]:
-            self.validate_compose_inputs(transaction, current_transactions)
-
-        return transaction
-
-    def validate_create_inputs(self, tx, current_transactions=[]) -> bool:
-        duplicates = any(txn for txn in current_transactions if txn.id == tx.id)
-        if self.is_committed(tx.id) or duplicates:
-            raise DuplicateTransaction("transaction `{}` already exists".format(tx.id))
-
-        fulfilling_inputs = [i for i in tx.inputs if i.fulfills is not None and i.fulfills.txid is not None]
-
-        if len(fulfilling_inputs) > 0:
-            input_txs, input_conditions = self.get_input_txs_and_conditions(fulfilling_inputs, current_transactions)
-            create_asset = tx.assets[0]
-            input_asset = input_txs[0].assets[tx.inputs[0].fulfills.output]["data"]
-            if create_asset != input_asset:
-                raise ValidationError("CREATE must have matching asset description with input transaction")
-            if input_txs[0].operation != Transaction.DECOMPOSE:
-                raise SchemaValidationError("CREATE can only consume DECOMPOSE outputs")
-
-        return True
-
-    def validate_transfer_inputs(self, tx, current_transactions=[]) -> bool:
-        input_txs, input_conditions = self.get_input_txs_and_conditions(tx.inputs, current_transactions)
-
-        self.validate_input_conditions(tx, input_conditions)
-
-        self.validate_asset_id(tx, input_txs)
-
-        self.validate_inputs_distinct(tx)
-
-        input_amount = sum([input_condition.amount for input_condition in input_conditions])
-        output_amount = sum([output_condition.amount for output_condition in tx.outputs])
-
-        if output_amount != input_amount:
-            raise AmountError(
-                (
-                    "The amount used in the inputs `{}`" " needs to be same as the amount used" " in the outputs `{}`"
-                ).format(input_amount, output_amount)
-            )
-
-        return True
-
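Note: the TRANSFER check above enforces strict amount conservation; the rule in isolation, with illustrative amounts:

    input_amounts = [3, 2]   # amounts on the consumed output conditions
    output_amounts = [4, 1]  # amounts on the newly created outputs; conserved: 5 in, 5 out

    if sum(input_amounts) != sum(output_amounts):
        raise ValueError(
            "The amount used in the inputs `{}` needs to be same as the amount used in the outputs `{}`".format(
                sum(input_amounts), sum(output_amounts)
            )
        )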
-    def validate_compose_inputs(self, tx, current_transactions=[]) -> bool:
-        input_txs, input_conditions = self.get_input_txs_and_conditions(tx.inputs, current_transactions)
-
-        self.validate_input_conditions(tx, input_conditions)
-
-        self.validate_asset_id(tx, input_txs)
-
-        self.validate_inputs_distinct(tx)
-
-        return True
-
-    def get_input_txs_and_conditions(self, inputs, current_transactions=[]):
-        # store the inputs so that we can check if the asset ids match
-        input_txs = []
-        input_conditions = []
-
-        for input_ in inputs:
-            input_txid = input_.fulfills.txid
-            input_tx = self.get_transaction(input_txid)
-            _output = self.get_outputs_by_tx_id(input_txid)
-            if input_tx is None:
-                for ctxn in current_transactions:
-                    if ctxn.id == input_txid:
-                        ctxn_dict = ctxn.to_dict()
-                        input_tx = DbTransaction.from_dict(ctxn_dict)
-                        _output = [
-                            Output.from_dict(output, index, ctxn.id)
-                            for index, output in enumerate(ctxn_dict["outputs"])
-                        ]
-
-            if input_tx is None:
-                raise InputDoesNotExist("input `{}` doesn't exist".format(input_txid))
-
-            spent = self.get_spent(input_txid, input_.fulfills.output, current_transactions)
-            if spent:
-                raise DoubleSpend("input `{}` was already spent".format(input_txid))
-
-            output = _output[input_.fulfills.output]
-            input_conditions.append(output)
-            tx_dict = input_tx.to_dict()
-            tx_dict["outputs"] = Output.list_to_dict(_output)
-            tx_dict = DbTransaction.remove_generated_fields(tx_dict)
-            pm_transaction = Transaction.from_dict(tx_dict, False)
-            input_txs.append(pm_transaction)
-
-        return (input_txs, input_conditions)
-
-    def validate_input_conditions(self, tx, input_conditions):
-        # convert planetmint.Output objects to transactions.common.Output objects
-        input_conditions_dict = Output.list_to_dict(input_conditions)
-        input_conditions_converted = []
-        for input_cond in input_conditions_dict:
-            input_conditions_converted.append(TransactionOutput.from_dict(input_cond))
-
-        if not tx.inputs_valid(input_conditions_converted):
-            raise InvalidSignature("Transaction signature is invalid.")
-
-    def validate_asset_id(self, tx: Transaction, input_txs: list):
-        # validate asset
-        if tx.operation != Transaction.COMPOSE:
-            asset_id = tx.get_asset_id(input_txs)
-            if asset_id != Transaction.read_out_asset_id(tx):
-                raise AssetIdMismatch(
-                    ("The asset id of the input does not" " match the asset id of the" " transaction")
-                )
-        else:
-            asset_ids = Transaction.get_asset_ids(input_txs)
-            if Transaction.read_out_asset_id(tx) in asset_ids:
-                raise AssetIdMismatch(("The asset ID of the compose must be different to all of its input asset IDs"))
-
-    def validate_inputs_distinct(self, tx):
-        # Validate that all inputs are distinct
-        links = [i.fulfills.to_uri() for i in tx.inputs]
-        if len(links) != len(set(links)):
-            raise DoubleSpend('tx "{}" spends inputs twice'.format(tx.id))
-
-    def is_valid_transaction(self, tx, current_transactions=[]):
-        # NOTE: the function returns the Transaction object in case
-        # the transaction is valid
-        try:
-            return self.validate_transaction(tx, current_transactions)
-        except ValidationError as e:
-            logger.warning("Invalid transaction (%s): %s", type(e).__name__, e)
-            return False
-
-    def get_assets(self, asset_ids) -> list[Asset]:
-        """Return a list of assets that match the asset_ids
-
-        Args:
-            asset_ids (:obj:`list` of :obj:`str`): A list of asset_ids to
-                retrieve from the database.
-
-        Returns:
-            list: The list of assets returned from the database.
-        """
-        return backend.query.get_assets(self.connection, asset_ids)
-
-    def get_assets_by_cid(self, asset_cid, **kwargs) -> list[dict]:
-        asset_txs = backend.query.get_transactions_by_asset(self.connection, asset_cid, **kwargs)
-        # flatten and return all found assets
-        return list(chain.from_iterable([Asset.list_to_dict(tx.assets) for tx in asset_txs]))
-
-    def get_metadata(self, txn_ids) -> list[MetaData]:
-        """Return a list of metadata that match the transaction ids (txn_ids)
-
-        Args:
-            txn_ids (:obj:`list` of :obj:`str`): A list of txn_ids to
-                retrieve from the database.
-
-        Returns:
-            list: The list of metadata returned from the database.
-        """
-        return backend.query.get_metadata(self.connection, txn_ids)
-
-    def get_metadata_by_cid(self, metadata_cid, **kwargs) -> list[str]:
-        metadata_txs = backend.query.get_transactions_by_metadata(self.connection, metadata_cid, **kwargs)
-        return [tx.metadata.metadata for tx in metadata_txs]
-
-    @property
-    def fastquery(self):
-        return fastquery.FastQuery(self.connection)
-
-    def get_validator_set(self, height=None):
-        return backend.query.get_validator_set(self.connection, height)
-
-    def get_validators(self, height=None):
-        result = self.get_validator_set(height)
-        return [] if result is None else result["validators"]
-
-    def get_election(self, election_id):
-        return backend.query.get_election(self.connection, election_id)
-
-    def get_pre_commit_state(self):
-        return backend.query.get_pre_commit_state(self.connection)
-
-    def store_pre_commit_state(self, state):
-        return backend.query.store_pre_commit_state(self.connection, state)
-
-    def store_validator_set(self, height, validators):
-        """Store validator set at a given `height`.
-        NOTE: If the validator set already exists at that `height` then an
-        exception will be raised.
-        """
-        return backend.query.store_validator_set(self.connection, {"height": height, "validators": validators})
-
-    def delete_validator_set(self, height):
-        return backend.query.delete_validator_set(self.connection, height)
-
-    def store_abci_chain(self, height, chain_id, is_synced=True):
-        return backend.query.store_abci_chain(self.connection, height, chain_id, is_synced)
-
-    def delete_abci_chain(self, height):
-        return backend.query.delete_abci_chain(self.connection, height)
-
-    def get_latest_abci_chain(self):
-        return backend.query.get_latest_abci_chain(self.connection)
-
-    def migrate_abci_chain(self):
-        """Generate and record a new ABCI chain ID. New blocks are not
-        accepted until we receive an InitChain ABCI request with
-        the matching chain ID and validator set.
-
-        Chain ID is generated based on the current chain and height.
-        `chain-X` => `chain-X-migrated-at-height-5`.
-        `chain-X-migrated-at-height-5` => `chain-X-migrated-at-height-21`.
-
-        If there is no known chain (we are at genesis), the function returns.
-        """
-        latest_chain = self.get_latest_abci_chain()
-        if latest_chain is None:
-            return
-
-        block = self.get_latest_block()
-
-        suffix = "-migrated-at-height-"
-        chain_id = latest_chain["chain_id"]
-        block_height_str = str(block["height"])
-        new_chain_id = chain_id.split(suffix)[0] + suffix + block_height_str
-
-        self.store_abci_chain(block["height"] + 1, new_chain_id, False)
-
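Note: the migration chain-ID derivation above is pure string arithmetic — strip any previous migration suffix, then append the latest block height. In isolation:

    suffix = "-migrated-at-height-"

    def migrated_chain_id(chain_id, height):
        # `chain-X` => `chain-X-migrated-at-height-5`; a second migration
        # replaces the old suffix rather than stacking a new one.
        return chain_id.split(suffix)[0] + suffix + str(height)

    assert migrated_chain_id("chain-X", 5) == "chain-X-migrated-at-height-5"
    assert migrated_chain_id("chain-X-migrated-at-height-5", 21) == "chain-X-migrated-at-height-21"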
-    def store_election(self, election_id, height, is_concluded):
-        return backend.query.store_election(self.connection, election_id, height, is_concluded)
-
-    def store_elections(self, elections):
-        return backend.query.store_elections(self.connection, elections)
-
-    def delete_elections(self, height):
-        return backend.query.delete_elections(self.connection, height)
-
-    # NOTE: moved here from Election; needs to be placed somewhere else
-    def get_validators_dict(self, height=None):
-        """Return a dictionary of validators with key as `public_key` and
-        value as the `voting_power`
-        """
-        validators = {}
-        for validator in self.get_validators(height):
-            # NOTE: we assume that Tendermint encodes public key in base64
-            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
-            validators[public_key] = validator["voting_power"]
-
-        return validators
-
-    def validate_election(self, transaction, current_transactions=[]):  # TODO: move somewhere else
-        """Validate election transaction
-
-        NOTE:
-        * A valid election is initiated by an existing validator.
-
-        * A valid election is one where voters are validators and votes are
-          allocated according to the voting power of each validator node.
-
-        Args:
-            :param planet: (Planetmint) an instantiated planetmint.lib.Planetmint object.
-            :param current_transactions: (list) A list of transactions to be validated along with the election
-
-        Returns:
-            Election: an Election object or an object of the derived Election subclass.
-
-        Raises:
-            ValidationError: If the election is invalid
-        """
-
-        duplicates = any(txn for txn in current_transactions if txn.id == transaction.id)
-        if self.is_committed(transaction.id) or duplicates:
-            raise DuplicateTransaction("transaction `{}` already exists".format(transaction.id))
-
-        current_validators = self.get_validators_dict()
-
-        # NOTE: Proposer should be a single node
-        if len(transaction.inputs) != 1 or len(transaction.inputs[0].owners_before) != 1:
-            raise MultipleInputsError("`tx_signers` must be a list instance of length one")
-
-        # NOTE: Check if the proposer is a validator.
-        [election_initiator_node_pub_key] = transaction.inputs[0].owners_before
-        if election_initiator_node_pub_key not in current_validators.keys():
-            raise InvalidProposer("Public key is not a part of the validator set")
-
-        # NOTE: Check if all validators have been assigned votes equal to their voting power
-        if not self.is_same_topology(current_validators, transaction.outputs):
-            raise UnequalValidatorSet("Validator set must be exactly the same as the outputs of the election")
-
-        if transaction.operation == VALIDATOR_ELECTION:
-            self.validate_validator_election(transaction)
-
-        return transaction
-
-    def validate_validator_election(self, transaction):  # TODO: move somewhere else
-        """For more details refer BEP-21: https://github.com/planetmint/BEPs/tree/master/21"""
-
-        current_validators = self.get_validators_dict()
-
-        # NOTE: changing more than 1/3 of the current power is not allowed
-        if transaction.get_assets()[0]["data"]["power"] >= (1 / 3) * sum(current_validators.values()):
-            raise InvalidPowerChange("`power` change must be less than 1/3 of total power")
-
-    def get_election_status(self, transaction):
-        election = self.get_election(transaction.id)
-        if election and election["is_concluded"]:
-            return Election.CONCLUDED
-
-        return Election.INCONCLUSIVE if self.has_validator_set_changed(transaction) else Election.ONGOING
-
-    def has_validator_set_changed(self, transaction):  # TODO: move somewhere else
-        latest_change = self.get_validator_change()
-        if latest_change is None:
-            return False
-
-        latest_change_height = latest_change["height"]
-
-        election = self.get_election(transaction.id)
-
-        return latest_change_height > election["height"]
-
-    def get_validator_change(self):  # TODO: move somewhere else
-        """Return the validator set from the most recent approved block
-
-        :return: {
-            'height': <block_height>,
-            'validators': <validator_set>
-        }
-        """
-        latest_block = self.get_latest_block()
-        if latest_block is None:
-            return None
-        return self.get_validator_set(latest_block["height"])
-
-    def get_validator_dict(self, height=None):
-        """Return a dictionary of validators with key as `public_key` and
-        value as the `voting_power`
-        """
-        validators = {}
-        for validator in self.get_validators(height):
-            # NOTE: we assume that Tendermint encodes public key in base64
-            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
-            validators[public_key] = validator["voting_power"]
-
-        return validators
-
-    def get_recipients_list(self):
-        """Convert validator dictionary to a recipient list for `Transaction`"""
-
-        recipients = []
-        for public_key, voting_power in self.get_validator_dict().items():
-            recipients.append(([public_key], voting_power))
-
-        return recipients
-
-    def show_election_status(self, transaction):
-        data = transaction.assets[0]
-        data = data.to_dict()["data"]
-        if "public_key" in data.keys():
-            data["public_key"] = public_key_to_base64(data["public_key"]["value"])
-        response = ""
-        for k, v in data.items():
-            if k != "seed":
-                response += f"{k}={v}\n"
-        response += f"status={self.get_election_status(transaction)}"
-
-        if transaction.operation == CHAIN_MIGRATION_ELECTION:
-            response = self.append_chain_migration_status(response)
-
-        return response
-
-    def append_chain_migration_status(self, status):
-        chain = self.get_latest_abci_chain()
-        if chain is None or chain["is_synced"]:
-            return status
-
-        status += f'\nchain_id={chain["chain_id"]}'
-        block = self.get_latest_block()
-        status += f'\napp_hash={block["app_hash"]}'
-        validators = [
-            {
-                "pub_key": {
-                    "type": "tendermint/PubKeyEd25519",
-                    "value": k,
-                },
-                "power": v,
-            }
-            for k, v in self.get_validator_dict().items()
-        ]
-        status += f"\nvalidators={json.dumps(validators, indent=4)}"
-        return status
-
-    def is_same_topology(cls, current_topology, election_topology):
-        voters = {}
-        for voter in election_topology:
-            if len(voter.public_keys) > 1:
-                return False
-
-            [public_key] = voter.public_keys
-            voting_power = voter.amount
-            voters[public_key] = voting_power
-
-        # Check whether the voters and their votes are the same as the
-        # validators and their voting power in the network
-        return current_topology == voters
-
-    def count_votes(self, election_pk, transactions):
-        votes = 0
-        for txn in transactions:
-            if txn.operation == Vote.OPERATION:
-                for output in txn.outputs:
-                    # NOTE: We enforce that a valid vote for an election id has only
-                    # election_pk in the output public keys; including any other public
-                    # key along with election_pk means the vote is not considered valid.
-                    if len(output.public_keys) == 1 and [election_pk] == output.public_keys:
-                        votes = votes + output.amount
-        return votes
-
-    def get_commited_votes(self, transaction, election_pk=None):  # TODO: move somewhere else
-        if election_pk is None:
-            election_pk = election_id_to_public_key(transaction.id)
-        txns = backend.query.get_asset_tokens_for_public_key(self.connection, transaction.id, election_pk)
-        return self.count_votes(election_pk, txns)
-
-    def _get_initiated_elections(self, height, txns):  # TODO: move somewhere else
-        elections = []
-        for tx in txns:
-            if not isinstance(tx, Election):
-                continue
-
-            elections.append({"election_id": tx.id, "height": height, "is_concluded": False})
-        return elections
-
-    def _get_votes(self, txns):  # TODO: move somewhere else
-        elections = OrderedDict()
-        for tx in txns:
-            if not isinstance(tx, Vote):
-                continue
-            election_id = Transaction.read_out_asset_id(tx)
-            if election_id not in elections:
-                elections[election_id] = []
-            elections[election_id].append(tx)
-        return elections
-
-    def process_block(self, new_height, txns):  # TODO: move somewhere else
-        """Looks for election and vote transactions inside the block, records
-        and processes elections.
-
-        Every election is recorded in the database.
-
-        Every vote has a chance to conclude the corresponding election. When
-        an election is concluded, the corresponding database record is
-        marked as such.
-
-        Elections and votes are processed in the order in which they
-        appear in the block. Elections are concluded in the order of
-        appearance of their first votes in the block.
-
-        For every election concluded in the block, calls its `on_approval`
-        method. The returned value of the last `on_approval`, if any,
-        is a validator set update to be applied in one of the following blocks.
-
-        `on_approval` methods are implemented by elections of particular type.
-        The method may contain side effects but should be idempotent. To account
-        for other concluded elections, if it requires so, the method should
-        rely on the database state.
-        """
-        # elections initiated in this block
-        initiated_elections = self._get_initiated_elections(new_height, txns)
-
-        if initiated_elections:
-            self.store_elections(initiated_elections)
-
-        # elections voted for in this block and their votes
-        elections = self._get_votes(txns)
-
-        validator_update = None
-        for election_id, votes in elections.items():
-            election = self.get_transaction(election_id)
-            if election is None:
-                continue
-
-            if not self.has_election_concluded(election, votes):
-                continue
-
-            validator_update = self.approve_election(election, new_height)
-            self.store_election(election.id, new_height, is_concluded=True)
-
-        return [validator_update] if validator_update else []
-
-    def has_election_concluded(self, transaction, current_votes=[]):  # TODO: move somewhere else
-        """Check if the election can be concluded or not.
-
-        * Elections can only be concluded if the validator set has not changed
-          since the election was initiated.
-        * Elections can be concluded only if the current votes form a supermajority.
-
-        Custom elections may override this function and introduce additional checks.
-        """
-        if self.has_validator_set_changed(transaction):
-            return False
-
-        if transaction.operation == VALIDATOR_ELECTION:
-            if not self.has_validator_election_concluded():
-                return False
-
-        if transaction.operation == CHAIN_MIGRATION_ELECTION:
-            if not self.has_chain_migration_concluded():
-                return False
-
-        election_pk = election_id_to_public_key(transaction.id)
-        votes_committed = self.get_commited_votes(transaction, election_pk)
-        votes_current = self.count_votes(election_pk, current_votes)
-
-        total_votes = sum(int(output.amount) for output in transaction.outputs)
-        if (votes_committed < (2 / 3) * total_votes) and (votes_committed + votes_current >= (2 / 3) * total_votes):
-            return True
-
-        return False
-
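Note: the conclusion test above fires exactly once — on the block where the committed votes first cross two thirds of all vote tokens. The arithmetic in isolation, with made-up counts:

    total_votes = 90           # all vote tokens minted for the election
    votes_committed = 50       # counted from earlier blocks
    votes_current = 15         # counted from the block being processed

    threshold = (2 / 3) * total_votes
    concluded = votes_committed < threshold and votes_committed + votes_current >= threshold
    assert concluded           # 50 < 60 and 65 >= 60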
def has_validator_election_concluded(self): # TODO: move somewhere else
|
|
||||||
latest_block = self.get_latest_block()
|
|
||||||
if latest_block is not None:
|
|
||||||
latest_block_height = latest_block["height"]
|
|
||||||
latest_validator_change = self.get_validator_set()["height"]
|
|
||||||
|
|
||||||
# TODO change to `latest_block_height + 3` when upgrading to Tendermint 0.24.0.
|
|
||||||
if latest_validator_change == latest_block_height + 2:
|
|
||||||
# do not conclude the election if there is a change assigned already
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def has_chain_migration_concluded(self): # TODO: move somewhere else
|
|
||||||
chain = self.get_latest_abci_chain()
|
|
||||||
if chain is not None and not chain["is_synced"]:
|
|
||||||
# do not conclude the migration election if
|
|
||||||
# there is another migration in progress
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
def rollback_election(self, new_height, txn_ids): # TODO: move somewhere else
|
|
||||||
"""Looks for election and vote transactions inside the block and
|
|
||||||
cleans up the database artifacts possibly created in `process_blocks`.
|
|
||||||
|
|
||||||
Part of the `end_block`/`commit` crash recovery.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# delete election records for elections initiated at this height and
|
|
||||||
# elections concluded at this height
|
|
||||||
self.delete_elections(new_height)
|
|
||||||
|
|
||||||
txns = [self.get_transaction(tx_id) for tx_id in txn_ids]
|
|
||||||
|
|
||||||
txns = [Transaction.from_dict(tx.to_dict()) for tx in txns if tx]
|
|
||||||
|
|
||||||
elections = self._get_votes(txns)
|
|
||||||
for election_id in elections:
|
|
||||||
election = self.get_transaction(election_id)
|
|
||||||
if election.operation == VALIDATOR_ELECTION:
|
|
||||||
# TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0.
|
|
||||||
self.delete_validator_set(new_height + 1)
|
|
||||||
if election.operation == CHAIN_MIGRATION_ELECTION:
|
|
||||||
self.delete_abci_chain(new_height)
|
|
||||||
|
|
||||||
def approve_election(self, election, new_height):
|
|
||||||
"""Override to update the database state according to the
|
|
||||||
election rules. Consider the current database state to account for
|
|
||||||
other concluded elections, if required.
|
|
||||||
"""
|
|
||||||
if election.operation == CHAIN_MIGRATION_ELECTION:
|
|
||||||
self.migrate_abci_chain()
|
|
||||||
if election.operation == VALIDATOR_ELECTION:
|
|
||||||
validator_updates = [election.assets[0].data]
|
|
||||||
curr_validator_set = self.get_validators(new_height)
|
|
||||||
updated_validator_set = new_validator_set(curr_validator_set, validator_updates)
|
|
||||||
|
|
||||||
updated_validator_set = [v for v in updated_validator_set if v["voting_power"] > 0]
|
|
||||||
|
|
||||||
# TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0.
|
|
||||||
self.store_validator_set(new_height + 1, updated_validator_set)
|
|
||||||
return encode_validator(election.assets[0].data)
|
|
||||||
|
|
||||||
|
|
||||||
Block = namedtuple("Block", ("app_hash", "height", "transactions"))
|
|
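The conclusion predicate fires exactly once per election: on the first block where committed votes plus the votes arriving in the current block reach two thirds of the election's vote tokens. A minimal sketch of that threshold arithmetic, with invented numbers:

# Hypothetical illustration of the supermajority check in has_election_concluded;
# the vote counts below are made up for the example.
def crosses_two_thirds(votes_committed: int, votes_current: int, total_votes: int) -> bool:
    threshold = (2 / 3) * total_votes
    # conclude only on the block where the threshold is first crossed
    return votes_committed < threshold and votes_committed + votes_current >= threshold

assert crosses_two_thirds(votes_committed=50, votes_current=20, total_votes=100)     # 70 >= 66.7
assert not crosses_two_thirds(votes_committed=70, votes_current=5, total_votes=100)  # concluded earlier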
@@ -1,75 +0,0 @@
-# Copyright © 2020 Interplanetary Database Association e.V.,
-# Planetmint and IPDB software contributors.
-# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
-# Code is Apache-2.0 and docs are CC-BY-4.0
-
-from transactions.common.exceptions import ConfigurationError
-from logging.config import dictConfig as set_logging_config
-from planetmint.config import Config, DEFAULT_LOGGING_CONFIG
-
-
-def _normalize_log_level(level):
-    try:
-        return level.upper()
-    except AttributeError as exc:
-        raise ConfigurationError("Log level must be a string!") from exc
-
-
-def setup_logging():
-    """Function to configure log handlers.
-
-    .. important::
-
-        Configuration, if needed, should be applied before invoking this
-        function, as starting the subscriber process for logging will
-        configure the root logger for the child process based on the
-        state of :obj:`planetmint.config` at the moment this function
-        is invoked.
-    """
-
-    logging_configs = DEFAULT_LOGGING_CONFIG
-    new_logging_configs = Config().get()["log"]
-
-    if "file" in new_logging_configs:
-        filename = new_logging_configs["file"]
-        logging_configs["handlers"]["file"]["filename"] = filename
-
-    if "error_file" in new_logging_configs:
-        error_filename = new_logging_configs["error_file"]
-        logging_configs["handlers"]["errors"]["filename"] = error_filename
-
-    if "level_console" in new_logging_configs:
-        level = _normalize_log_level(new_logging_configs["level_console"])
-        logging_configs["handlers"]["console"]["level"] = level
-
-    if "level_logfile" in new_logging_configs:
-        level = _normalize_log_level(new_logging_configs["level_logfile"])
-        logging_configs["handlers"]["file"]["level"] = level
-
-    if "fmt_console" in new_logging_configs:
-        fmt = new_logging_configs["fmt_console"]
-        logging_configs["formatters"]["console"]["format"] = fmt
-
-    if "fmt_logfile" in new_logging_configs:
-        fmt = new_logging_configs["fmt_logfile"]
-        logging_configs["formatters"]["file"]["format"] = fmt
-
-    if "datefmt_console" in new_logging_configs:
-        fmt = new_logging_configs["datefmt_console"]
-        logging_configs["formatters"]["console"]["datefmt"] = fmt
-
-    if "datefmt_logfile" in new_logging_configs:
-        fmt = new_logging_configs["datefmt_logfile"]
-        logging_configs["formatters"]["file"]["datefmt"] = fmt
-
-    log_levels = new_logging_configs.get("granular_levels", {})
-
-    for logger_name, level in log_levels.items():
-        level = _normalize_log_level(level)
-        try:
-            logging_configs["loggers"][logger_name]["level"] = level
-        except KeyError:
-            logging_configs["loggers"][logger_name] = {"level": level}
-
-    set_logging_config(logging_configs)
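For reference, the deleted module consumed the `log` section of the node configuration; a sketch of a config fragment that would exercise the file, level, and granular-level branches above (paths and values are illustrative):

# Illustrative `log` config fragment; keys mirror the branches in setup_logging,
# the concrete values are invented for the example.
log_config = {
    "file": "/var/log/planetmint/planetmint.log",
    "error_file": "/var/log/planetmint/errors.log",
    "level_console": "info",   # normalized to "INFO" by _normalize_log_level
    "level_logfile": "debug",
    "granular_levels": {"planetmint.core": "warning"},  # per-logger override
}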
0  planetmint/model/__init__.py  Normal file
@@ -3,10 +3,11 @@
 # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
 # Code is Apache-2.0 and docs are CC-BY-4.0

-from planetmint.utils import condition_details_has_owner
 from planetmint.backend import query
 from transactions.common.transaction import TransactionLink
+from planetmint.backend.models.output import ConditionDetails


 class FastQuery:
     """Database queries that join on block results from a single node."""
@@ -45,3 +46,31 @@ class FastQuery:
         txs = query.get_spending_transactions(self.connection, links)
         spends = {TransactionLink.from_dict(input.fulfills.to_dict()) for tx in txs for input in tx.inputs}
         return [ff for ff in outputs if ff in spends]
+
+
+# TODO: Rename this function, it's handling fulfillments not conditions
+def condition_details_has_owner(condition_details, owner):
+    """Check if the public_key of owner is in the condition details
+    as an Ed25519Fulfillment.public_key
+
+    Args:
+        condition_details (dict): dict with condition details
+        owner (str): base58 public key of owner
+
+    Returns:
+        bool: True if the public key is found in the condition details, False otherwise
+
+    """
+    if isinstance(condition_details, ConditionDetails) and condition_details.sub_conditions is not None:
+        result = condition_details_has_owner(condition_details.sub_conditions, owner)
+        if result:
+            return True
+    elif isinstance(condition_details, list):
+        for subcondition in condition_details:
+            result = condition_details_has_owner(subcondition, owner)
+            if result:
+                return True
+    else:
+        if condition_details.public_key is not None and owner == condition_details.public_key:
+            return True
+    return False
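condition_details_has_owner walks threshold conditions recursively: a ConditionDetails node may carry a sub_conditions list whose items are nested nodes or leaves exposing a public_key. A hedged sketch of the list and leaf branches using a duck-typed stand-in (the real entry point receives a ConditionDetails instance; keys are hypothetical):

class Leaf:
    # Duck-typed stand-in for a fulfillment leaf; only .public_key is read by
    # the list and leaf branches above.
    def __init__(self, public_key):
        self.public_key = public_key

subconditions = [Leaf("alice-base58-key"), Leaf("bob-base58-key")]  # e.g. a 2-of-2 threshold
assert condition_details_has_owner(subconditions, "bob-base58-key") is True
assert condition_details_has_owner(subconditions, "carol-base58-key") is False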
283  planetmint/model/models.py  Normal file
@@ -0,0 +1,283 @@
+import rapidjson
+from itertools import chain
+
+from transactions import Transaction
+from transactions.common.exceptions import DoubleSpend
+from transactions.common.crypto import public_key_from_ed25519_key
+from transactions.common.exceptions import InputDoesNotExist
+
+from planetmint import config_utils, backend
+from planetmint.const import GOVERNANCE_TRANSACTION_TYPES
+from planetmint.backend.connection import Connection
+from planetmint.backend.tarantool.const import TARANT_TABLE_TRANSACTION, TARANT_TABLE_GOVERNANCE
+from planetmint.model.fastquery import FastQuery
+from planetmint.abci.tendermint_utils import key_from_base64
+
+from planetmint.backend.models.block import Block
+from planetmint.backend.models.output import Output
+from planetmint.backend.models.asset import Asset
+from planetmint.backend.models.metadata import MetaData
+from planetmint.backend.models.dbtransaction import DbTransaction
+
+
+class Models:
+    def __init__(self, database_connection=None):
+        config_utils.autoconfigure()
+        self.connection = database_connection if database_connection is not None else Connection()
+
+    def store_bulk_transactions(self, transactions):
+        txns = []
+        gov_txns = []
+
+        for t in transactions:
+            transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict()))
+            if transaction["operation"] in GOVERNANCE_TRANSACTION_TYPES:
+                gov_txns.append(transaction)
+            else:
+                txns.append(transaction)
+
+        backend.query.store_transactions(self.connection, txns, TARANT_TABLE_TRANSACTION)
+        backend.query.store_transactions(self.connection, gov_txns, TARANT_TABLE_GOVERNANCE)
+
+    def delete_transactions(self, txs):
+        return backend.query.delete_transactions(self.connection, txs)
+
+    def is_committed(self, transaction_id):
+        transaction = backend.query.get_transaction_single(self.connection, transaction_id)
+        return bool(transaction)
+
+    def get_transaction(self, transaction_id):
+        return backend.query.get_transaction_single(self.connection, transaction_id)
+
+    def get_transactions(self, txn_ids):
+        return backend.query.get_transactions(self.connection, txn_ids)
+
+    def get_transactions_filtered(self, asset_ids, operation=None, last_tx=False):
+        """Get a list of transactions filtered on some criteria"""
+        txids = backend.query.get_txids_filtered(self.connection, asset_ids, operation, last_tx)
+        for txid in txids:
+            yield self.get_transaction(txid)
+
+    def get_outputs_by_tx_id(self, txid):
+        return backend.query.get_outputs_by_tx_id(self.connection, txid)
+
+    def get_outputs_filtered(self, owner, spent=None):
+        """Get a list of output links filtered on some criteria
+
+        Args:
+            owner (str): base58 encoded public_key.
+            spent (bool): If ``True`` return only the spent outputs. If
+                    ``False`` return only unspent outputs. If spent is
+                    not specified (``None``) return all outputs.
+
+        Returns:
+            :obj:`list` of TransactionLink: list of ``txid`` s and ``output`` s
+            pointing to another transaction's condition
+        """
+        outputs = self.fastquery.get_outputs_by_public_key(owner)
+        if spent is None:
+            return outputs
+        elif spent is True:
+            return self.fastquery.filter_unspent_outputs(outputs)
+        elif spent is False:
+            return self.fastquery.filter_spent_outputs(outputs)
+
+    def store_block(self, block):
+        """Create a new block."""
+        return backend.query.store_block(self.connection, block)
+
+    def get_latest_block(self) -> dict:
+        """Get the block with largest height."""
+        return backend.query.get_latest_block(self.connection)
+
+    def get_block(self, block_id) -> dict:
+        """Get the block with the specified `block_id`.
+
+        Returns the block corresponding to `block_id` or None if no match is
+        found.
+
+        Args:
+            block_id (int): block id of the block to get.
+        """
+        block = backend.query.get_block(self.connection, block_id)
+        latest_block = self.get_latest_block()
+        latest_block_height = latest_block["height"] if latest_block else 0
+
+        if not block and block_id > latest_block_height:
+            return
+
+        return block
+
+    def delete_abci_chain(self, height):
+        return backend.query.delete_abci_chain(self.connection, height)
+
+    def get_latest_abci_chain(self):
+        return backend.query.get_latest_abci_chain(self.connection)
+
+    def store_election(self, election_id, height, is_concluded):
+        return backend.query.store_election(self.connection, election_id, height, is_concluded)
+
+    def store_elections(self, elections):
+        return backend.query.store_elections(self.connection, elections)
+
+    def delete_elections(self, height):
+        return backend.query.delete_elections(self.connection, height)
+
+    # NOTE: moved here from Election needs to be placed somewhere else
+    def get_validators_dict(self, height=None):
+        """Return a dictionary of validators with key as `public_key` and
+        value as the `voting_power`
+        """
+        validators = {}
+        for validator in self.get_validators(height):
+            # NOTE: we assume that Tendermint encodes public key in base64
+            public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"]))
+            validators[public_key] = validator["voting_power"]
+
+        return validators
+
+    def get_spent(self, txid, output, current_transactions=[]) -> DbTransaction:
+        transactions = backend.query.get_spent(self.connection, txid, output)
+
+        current_spent_transactions = []
+        for ctxn in current_transactions:
+            for ctxn_input in ctxn.inputs:
+                if ctxn_input.fulfills and ctxn_input.fulfills.txid == txid and ctxn_input.fulfills.output == output:
+                    current_spent_transactions.append(ctxn)
+
+        transaction = None
+        if len(transactions) + len(current_spent_transactions) > 1:
+            raise DoubleSpend('tx "{}" spends inputs twice'.format(txid))
+        elif transactions:
+            tx_id = transactions[0].id
+            tx = backend.query.get_transaction_single(self.connection, tx_id)
+            transaction = tx.to_dict()
+        elif current_spent_transactions:
+            transaction = current_spent_transactions[0]
+
+        return transaction
+
+    def get_block_containing_tx(self, txid) -> Block:
+        """Retrieve the list of blocks (block ids) containing a
+        transaction with transaction id `txid`
+
+        Args:
+            txid (str): transaction id of the transaction to query
+
+        Returns:
+            Block id list (list(int))
+        """
+        block = backend.query.get_block_with_transaction(self.connection, txid)
+
+        return block
+
+    def get_input_txs_and_conditions(self, inputs, current_transactions=[]):
+        # store the inputs so that we can check if the asset ids match
+        input_txs = []
+        input_conditions = []
+
+        for input_ in inputs:
+            input_txid = input_.fulfills.txid
+            input_tx = self.get_transaction(input_txid)
+            _output = self.get_outputs_by_tx_id(input_txid)
+            if input_tx is None:
+                for ctxn in current_transactions:
+                    if ctxn.id == input_txid:
+                        ctxn_dict = ctxn.to_dict()
+                        input_tx = DbTransaction.from_dict(ctxn_dict)
+                        _output = [
+                            Output.from_dict(output, index, ctxn.id)
+                            for index, output in enumerate(ctxn_dict["outputs"])
+                        ]
+
+            if input_tx is None:
+                raise InputDoesNotExist("input `{}` doesn't exist".format(input_txid))
+
+            spent = self.get_spent(input_txid, input_.fulfills.output, current_transactions)
+            if spent:
+                raise DoubleSpend("input `{}` was already spent".format(input_txid))
+
+            output = _output[input_.fulfills.output]
+            input_conditions.append(output)
+            tx_dict = input_tx.to_dict()
+            tx_dict["outputs"] = Output.list_to_dict(_output)
+            tx_dict = DbTransaction.remove_generated_fields(tx_dict)
+            pm_transaction = Transaction.from_dict(tx_dict, False)
+            input_txs.append(pm_transaction)
+
+        return input_txs, input_conditions
+
+    def get_assets(self, asset_ids) -> list[Asset]:
+        """Return a list of assets that match the asset_ids
+
+        Args:
+            asset_ids (:obj:`list` of :obj:`str`): A list of asset_ids to
+                retrieve from the database.
+
+        Returns:
+            list: The list of assets returned from the database.
+        """
+        return backend.query.get_assets(self.connection, asset_ids)
+
+    def get_assets_by_cid(self, asset_cid, **kwargs) -> list[dict]:
+        asset_txs = backend.query.get_transactions_by_asset(self.connection, asset_cid, **kwargs)
+        # flatten and return all found assets
+        return list(chain.from_iterable([Asset.list_to_dict(tx.assets) for tx in asset_txs]))
+
+    def get_metadata(self, txn_ids) -> list[MetaData]:
+        """Return a list of metadata that match the transaction ids (txn_ids)
+
+        Args:
+            txn_ids (:obj:`list` of :obj:`str`): A list of txn_ids to
+                retrieve from the database.
+
+        Returns:
+            list: The list of metadata returned from the database.
+        """
+        return backend.query.get_metadata(self.connection, txn_ids)
+
+    def get_metadata_by_cid(self, metadata_cid, **kwargs) -> list[str]:
+        metadata_txs = backend.query.get_transactions_by_metadata(self.connection, metadata_cid, **kwargs)
+        return [tx.metadata.metadata for tx in metadata_txs]
+
+    def get_validator_set(self, height=None):
+        return backend.query.get_validator_set(self.connection, height)
+
+    def get_validators(self, height=None):
+        result = self.get_validator_set(height)
+        return [] if result is None else result["validators"]
+
+    def get_election(self, election_id):
+        return backend.query.get_election(self.connection, election_id)
+
+    def get_pre_commit_state(self):
+        return backend.query.get_pre_commit_state(self.connection)
+
+    def store_pre_commit_state(self, state):
+        return backend.query.store_pre_commit_state(self.connection, state)
+
+    def store_validator_set(self, height, validators):
+        """Store validator set at a given `height`.
+
+        NOTE: If the validator set already exists at that `height` then an
+        exception will be raised.
+        """
+        return backend.query.store_validator_set(self.connection, {"height": height, "validators": validators})
+
+    def delete_validator_set(self, height):
+        return backend.query.delete_validator_set(self.connection, height)
+
+    def store_abci_chain(self, height, chain_id, is_synced=True):
+        return backend.query.store_abci_chain(self.connection, height, chain_id, is_synced)
+
+    def get_asset_tokens_for_public_key(self, transaction_id, election_pk):
+        txns = backend.query.get_asset_tokens_for_public_key(self.connection, transaction_id, election_pk)
+        return txns
+
+    @property
+    def fastquery(self):
+        return FastQuery(self.connection)
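Taken together, Models is a thin façade over backend.query plus the FastQuery property. A hedged usage sketch, assuming a reachable Tarantool backend configured via autoconfigure (the public key is a placeholder):

from planetmint.model.models import Models

models = Models()  # opens the default Connection()
unspent = models.get_outputs_filtered("ED25519_PUBKEY_PLACEHOLDER", spent=False)
latest = models.get_latest_block()  # block dict with the largest height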
@@ -7,11 +7,12 @@ import logging
 import setproctitle

 from planetmint.config import Config
-from planetmint.lib import Planetmint
-from planetmint.core import App
-from planetmint.parallel_validation import ParallelValidationApp
+from planetmint.application.validator import Validator
+from planetmint.abci.core import App
+from planetmint.abci.parallel_validation import ParallelValidationApp
 from planetmint.web import server, websocket_server
-from planetmint.events import Exchange, EventTypes
+from planetmint.ipc.events import EventTypes
+from planetmint.ipc.exchange import Exchange
 from planetmint.utils import Process
 from planetmint.version import __version__

@@ -40,7 +41,7 @@ def start(args):
     exchange = Exchange()
     # start the web api
     app_server = server.create_server(
-        settings=Config().get()["server"], log_config=Config().get()["log"], planetmint_factory=Planetmint
+        settings=Config().get()["server"], log_config=Config().get()["log"], planetmint_factory=Validator
     )
     p_webapi = Process(name="planetmint_webapi", target=app_server.run, daemon=True)
     p_webapi.start()
@@ -7,39 +7,16 @@ import contextlib
 import threading
 import queue
 import multiprocessing
-import json
 import setproctitle

-from packaging import version
-from planetmint.version import __tm_supported_versions__
-from planetmint.tendermint_utils import key_from_base64
-from planetmint.backend.models.output import ConditionDetails
-from transactions.common.crypto import key_pair_from_ed25519_key

-class ProcessGroup(object):
-    def __init__(self, concurrency=None, group=None, target=None, name=None, args=None, kwargs=None, daemon=None):
-        self.concurrency = concurrency or multiprocessing.cpu_count()
-        self.group = group
-        self.target = target
-        self.name = name
-        self.args = args or ()
-        self.kwargs = kwargs or {}
-        self.daemon = daemon
-        self.processes = []
-
-    def start(self):
-        for i in range(self.concurrency):
-            proc = multiprocessing.Process(
-                group=self.group,
-                target=self.target,
-                name=self.name,
-                args=self.args,
-                kwargs=self.kwargs,
-                daemon=self.daemon,
-            )
-            proc.start()
-            self.processes.append(proc)
+class Singleton(type):
+    _instances = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+        return cls._instances[cls]


 class Process(multiprocessing.Process):
@@ -108,34 +85,6 @@ def pool(builder, size, timeout=None):
     return pooled


-# TODO: Rename this function, it's handling fulfillments not conditions
-def condition_details_has_owner(condition_details, owner):
-    """Check if the public_key of owner is in the condition details
-    as an Ed25519Fulfillment.public_key
-
-    Args:
-        condition_details (dict): dict with condition details
-        owner (str): base58 public key of owner
-
-    Returns:
-        bool: True if the public key is found in the condition details, False otherwise
-    """
-    if isinstance(condition_details, ConditionDetails) and condition_details.sub_conditions is not None:
-        result = condition_details_has_owner(condition_details.sub_conditions, owner)
-        if result:
-            return True
-    elif isinstance(condition_details, list):
-        for subcondition in condition_details:
-            result = condition_details_has_owner(subcondition, owner)
-            if result:
-                return True
-    else:
-        if condition_details.public_key is not None and owner == condition_details.public_key:
-            return True
-    return False
-
-
 class Lazy:
     """Lazy objects are useful to create chains of methods to
     execute later.
@@ -180,32 +129,3 @@ class Lazy:

         self.stack = []
         return last
-
-
-# Load Tendermint's public and private key from the file path
-def load_node_key(path):
-    with open(path) as json_data:
-        priv_validator = json.load(json_data)
-        priv_key = priv_validator["priv_key"]["value"]
-        hex_private_key = key_from_base64(priv_key)
-        return key_pair_from_ed25519_key(hex_private_key)
-
-
-def tendermint_version_is_compatible(running_tm_ver):
-    """
-    Check Tendermint compatability with Planetmint server
-
-    :param running_tm_ver: Version number of the connected Tendermint instance
-    :type running_tm_ver: str
-    :return: True/False depending on the compatability with Planetmint server
-    :rtype: bool
-    """
-
-    # Splitting because version can look like this e.g. 0.22.8-40d6dc2e
-    tm_ver = running_tm_ver.split("-")
-    if not tm_ver:
-        return False
-    for ver in __tm_supported_versions__:
-        if version.parse(ver) == version.parse(tm_ver[0]):
-            return True
-    return False
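The diff drops ProcessGroup from this module and introduces a classic metaclass singleton in its place. A small sketch of the Singleton behaviour (the Registry class is invented for the example):

class Registry(metaclass=Singleton):  # hypothetical consumer of utils.Singleton
    def __init__(self):
        self.items = []

a = Registry()
b = Registry()
assert a is b  # __call__ returns the cached first instance per class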
@@ -15,7 +15,7 @@ import gunicorn.app.base
 from flask import Flask
 from flask_cors import CORS
 from planetmint import utils
-from planetmint import Planetmint
+from planetmint.application.validator import Validator
 from planetmint.web.routes import add_routes
 from planetmint.web.strip_content_type_middleware import StripContentTypeMiddleware

@@ -72,7 +72,7 @@ def create_app(*, debug=False, threads=1, planetmint_factory=None):
     """

     if not planetmint_factory:
-        planetmint_factory = Planetmint
+        planetmint_factory = Validator

     app = Flask(__name__)
     app.wsgi_app = StripContentTypeMiddleware(app.wsgi_app)
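The one-line factory swap works because create_app never names the node class directly; it only calls whatever zero-argument factory is injected. A sketch of that seam with a made-up stand-in:

class DummyValidator:  # invented stand-in; anything the views can call fits here
    pass

app = create_app(debug=True, planetmint_factory=DummyValidator)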
@@ -28,8 +28,8 @@ class AssetListApi(Resource):
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            assets = planet.get_assets_by_cid(cid, **args)
+        with pool() as validator:
+            assets = validator.models.get_assets_by_cid(cid, **args)

         try:
             # This only works with MongoDB as the backend
@@ -22,8 +22,8 @@ class LatestBlock(Resource):
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            block = planet.get_latest_block()
+        with pool() as validator:
+            block = validator.models.get_latest_block()

         if not block:
             return make_error(404)
@@ -44,8 +44,8 @@ class BlockApi(Resource):
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            block = planet.get_block(block_id=block_id)
+        with pool() as validator:
+            block = validator.models.get_block(block_id=block_id)

         if not block:
             return make_error(404)
@@ -70,8 +70,8 @@ class BlockListApi(Resource):
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            block = planet.get_block_containing_tx(tx_id)
+        with pool() as validator:
+            block = validator.models.get_block_containing_tx(tx_id)

         if not block:
             return make_error(404, "Block containing transaction with id: {} not found.".format(tx_id))
@@ -36,8 +36,8 @@ class MetadataApi(Resource):
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            metadata = planet.get_metadata_by_cid(cid, **args)
+        with pool() as validator:
+            metadata = validator.models.get_metadata_by_cid(cid, **args)

         try:
             return metadata
@@ -23,9 +23,9 @@ class OutputListApi(Resource):
         args = parser.parse_args(strict=True)

         pool = current_app.config["bigchain_pool"]
-        with pool() as planet:
+        with pool() as validator:
             try:
-                outputs = planet.get_outputs_filtered(args["public_key"], args["spent"])
+                outputs = validator.models.get_outputs_filtered(args["public_key"], args["spent"])
             except Exception as e:
                 return make_error(
                     500,
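The spent tri-state of Models.get_outputs_filtered surfaces here as a query parameter. A hedged HTTP sketch against a local node (host, port, route, and key are assumptions):

import requests

# spent omitted -> all outputs; spent=true -> spent only; spent=false -> unspent only
r = requests.get(
    "http://localhost:9984/api/v1/outputs",  # assumed default API endpoint
    params={"public_key": "ED25519_PUBKEY_PLACEHOLDER", "spent": "false"},
)
outputs = r.json()  # list of transaction-id/output-index links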
@@ -16,10 +16,12 @@ from transactions.common.exceptions import (
     SchemaValidationError,
     ValidationError,
 )

+from planetmint.abci.rpc import ABCI_RPC
 from planetmint.web.views.base import make_error
 from planetmint.web.views import parameters
 from transactions.common.transaction import Transaction
+from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST

 logger = logging.getLogger(__name__)
@@ -36,8 +38,8 @@ class TransactionApi(Resource):
         """
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            tx = planet.get_transaction(tx_id)
+        with pool() as validator:
+            tx = validator.models.get_transaction(tx_id)

         if not tx:
             return make_error(404)
@@ -52,8 +54,8 @@ class TransactionListApi(Resource):
         parser.add_argument("asset_ids", type=parameters.valid_txid_list, required=True)
         parser.add_argument("last_tx", type=parameters.valid_bool, required=False)
         args = parser.parse_args()
-        with current_app.config["bigchain_pool"]() as planet:
-            txs = planet.get_transactions_filtered(**args)
+        with current_app.config["bigchain_pool"]() as validator:
+            txs = validator.models.get_transactions_filtered(**args)

         return [tx.to_dict() for tx in txs]
@@ -104,7 +106,9 @@ class TransactionListApi(Resource):
                 but this node only accepts transaction with higher \
                 schema version number.",
             )
-        status_code, message = planet.write_transaction(tx_obj, mode)
+        status_code, message = ABCI_RPC().write_transaction(
+            MODE_LIST, ABCI_RPC().tendermint_rpc_endpoint, MODE_COMMIT, tx_obj, mode
+        )

         if status_code == 202:
             response = jsonify(tx)
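Transaction submission now goes through a standalone ABCI_RPC object rather than the pooled node instance. A sketch of the same call reusing a single instance (tx_obj and mode as in the handler above):

from planetmint.abci.rpc import ABCI_RPC, MODE_COMMIT, MODE_LIST

rpc = ABCI_RPC()  # one instance instead of the two constructed inline above
status_code, message = rpc.write_transaction(
    MODE_LIST, rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx_obj, mode
)

Reusing one instance avoids resolving the Tendermint RPC endpoint twice; the view above constructs ABCI_RPC inline both for the call and for the endpoint argument.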
@@ -17,7 +17,7 @@ class ValidatorsApi(Resource):
         pool = current_app.config["bigchain_pool"]

-        with pool() as planet:
-            validators = planet.get_validators()
+        with pool() as validator:
+            validators = validator.models.get_validators()

         return validators
@@ -6,8 +6,8 @@

 import json

-from planetmint.events import EventTypes
-from planetmint.events import POISON_PILL
+from planetmint.ipc.events import EventTypes
+from planetmint.ipc.events import POISON_PILL


 class Dispatcher:
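POISON_PILL is the sentinel the dispatcher machinery uses to stop queue consumers. A minimal sketch of the pattern (the consume loop below is illustrative, not the Dispatcher's actual code):

import queue

from planetmint.ipc.events import POISON_PILL  # shutdown sentinel

handle = print  # stand-in event handler for the sketch

def consume(q: queue.Queue) -> None:
    while True:
        event = q.get()
        if event == POISON_PILL:  # producer signals shutdown
            break
        handle(event)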
@@ -15,7 +15,7 @@ def test_asset_transfer(b, signed_create_tx, user_pk, user_sk, _bdb):
     tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], [signed_create_tx.id])
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([signed_create_tx])
+    b.models.store_bulk_transactions([signed_create_tx])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert tx_transfer_signed.assets[0]["id"] == signed_create_tx.id
@@ -33,7 +33,7 @@ def test_validate_transfer_asset_id_mismatch(b, signed_create_tx, user_pk, user_
     tx_transfer.assets[0]["id"] = "a" * 64
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([signed_create_tx])
+    b.models.store_bulk_transactions([signed_create_tx])

     with pytest.raises(AssetIdMismatch):
         b.validate_transaction(tx_transfer_signed)
@@ -73,7 +73,7 @@ def test_asset_id_mismatch(alice, user_pk):

 def test_compose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx, _bdb):
     validated = b.validate_transaction(signed_create_tx)
-    b.store_bulk_transactions([validated])
+    b.models.store_bulk_transactions([validated])

     inputs = signed_create_tx.to_inputs()
     assets = [signed_create_tx.id, "QmW5GVMW98D3mktSDfWHS8nX2UiCd8gP1uCiujnFX4yK8n"]
@@ -84,7 +84,7 @@ def test_compose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx

 def test_decompose_valid_transactions(b, user_pk, user_sk, alice, signed_create_tx, _bdb):
     validated = b.validate_transaction(signed_create_tx)
-    b.store_bulk_transactions([validated])
+    b.models.store_bulk_transactions([validated])

     inputs = signed_create_tx.to_inputs()
     assets = [
@@ -102,7 +102,7 @@ def test_decompose_valid_transactions(b, user_pk, user_sk, alice, signed_create_

 def test_create_decompose_output(b, user_pk, user_sk, signed_create_tx, _bdb):
     validated = b.validate_transaction(signed_create_tx)
-    b.store_bulk_transactions([validated])
+    b.models.store_bulk_transactions([validated])

     inputs = signed_create_tx.to_inputs()
     assets = [
@@ -116,7 +116,7 @@ def test_create_decompose_output(b, user_pk, user_sk, signed_create_tx, _bdb):
     )
     decompose_transaction.sign([user_sk])
     validated_decompose = b.validate_transaction(decompose_transaction)
-    b.store_bulk_transactions([validated_decompose])
+    b.models.store_bulk_transactions([validated_decompose])

     create_inputs = decompose_transaction.to_inputs([0])
     create_tx = Create.generate([user_pk], recipients=[([user_pk], 1)], assets=[assets[0]], inputs=create_inputs)
@@ -141,6 +141,6 @@ def test_v_2_0_validation_create_invalid(b, signed_2_0_create_tx_assets, _bdb):

 def test_v_2_0_validation_transfer(b, signed_2_0_create_tx, signed_2_0_transfer_tx, _bdb):
     validated = b.validate_transaction(signed_2_0_create_tx)
-    b.store_bulk_transactions([validated])
+    b.models.store_bulk_transactions([validated])
     assert validated.to_dict() == signed_2_0_create_tx
     assert b.validate_transaction(signed_2_0_transfer_tx).to_dict() == signed_2_0_transfer_tx
@@ -137,7 +137,7 @@ def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk,
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
@@ -163,7 +163,7 @@ def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk
     )
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 2
@@ -190,7 +190,7 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
     )
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
@@ -201,7 +201,7 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
     assert len(condition["condition"]["details"]["subconditions"]) == 2

     assert len(tx_transfer_signed.inputs) == 1
-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -227,7 +227,7 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
     )
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 2
@@ -240,7 +240,7 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, u

     assert len(tx_transfer_signed.inputs) == 1

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -265,7 +265,7 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
@@ -276,7 +276,7 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk
     assert "subconditions" in ffill
     assert len(ffill["subconditions"]) == 2

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -299,14 +299,14 @@ def test_multiple_in_single_own_single_out_single_own_transfer(alice, b, user_pk
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
     assert tx_transfer_signed.outputs[0].amount == 100
     assert len(tx_transfer_signed.inputs) == 2

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -331,7 +331,7 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
@@ -345,7 +345,7 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_
     assert len(ffill_fid0["subconditions"]) == 2
     assert len(ffill_fid1["subconditions"]) == 2

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -371,7 +371,7 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])
     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
     assert tx_transfer_signed.outputs[0].amount == 100
@@ -383,7 +383,7 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk
     assert "subconditions" in ffill_fid1
     assert len(ffill_fid1["subconditions"]) == 2

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -410,7 +410,7 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
         tx_create.to_inputs(), [([alice.public_key], 50), ([alice.public_key, user_pk], 50)], asset_ids=[tx_create.id]
     )
     tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk])
-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 2
@@ -430,7 +430,7 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
     assert "subconditions" in ffill_fid1
     assert len(ffill_fid1["subconditions"]) == 2

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -466,7 +466,7 @@ def test_multiple_in_different_transactions(alice, b, user_pk, user_sk):
     )
     tx_transfer2_signed = tx_transfer2.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed, tx_transfer1_signed])
+    b.models.store_bulk_transactions([tx_create_signed, tx_transfer1_signed])

     assert b.validate_transaction(tx_transfer2_signed) == tx_transfer2_signed
     assert len(tx_transfer2_signed.outputs) == 1
@@ -491,7 +491,7 @@ def test_amount_error_transfer(alice, b, user_pk, user_sk):
     )
     tx_create_signed = tx_create.sign([alice.private_key])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     # TRANSFER
     # output amount less than input amount
@@ -529,12 +529,12 @@ def test_threshold_same_public_key(alice, b, user_pk, user_sk):
     # TRANSFER
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([user_sk, user_sk])
-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     # assert tx_transfer_signed.validate(b) == tx_transfer_signed
     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         # tx_transfer_signed.validate(b)
         b.validate_transaction(tx_transfer_signed)
@@ -554,13 +554,13 @@ def test_sum_amount(alice, b, user_pk, user_sk):
     tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 3)], asset_ids=[tx_create.id])
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 1
     assert tx_transfer_signed.outputs[0].amount == 3

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
@@ -581,13 +581,13 @@ def test_divide(alice, b, user_pk, user_sk):
     )
     tx_transfer_signed = tx_transfer.sign([user_sk])

-    b.store_bulk_transactions([tx_create_signed])
+    b.models.store_bulk_transactions([tx_create_signed])

     assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed
     assert len(tx_transfer_signed.outputs) == 3
     for output in tx_transfer_signed.outputs:
         assert output.amount == 1

-    b.store_bulk_transactions([tx_transfer_signed])
+    b.models.store_bulk_transactions([tx_transfer_signed])
     with pytest.raises(DoubleSpend):
         b.validate_transaction(tx_transfer_signed)
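Every transfer test above closes with the same double-spend probe: persist the transfer, then validate it again and expect DoubleSpend, because get_spent now finds the stored transfer consuming the same input. The recurring skeleton, condensed from the hunks above:

# Condensed shape of the recurring assertion pattern; fixtures
# (b, tx_create_signed, tx_transfer_signed) as in the suite.
b.models.store_bulk_transactions([tx_create_signed])
assert b.validate_transaction(tx_transfer_signed) == tx_transfer_signed

b.models.store_bulk_transactions([tx_transfer_signed])
with pytest.raises(DoubleSpend):
    b.validate_transaction(tx_transfer_signed)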
@@ -51,7 +51,7 @@ SCRIPT_INPUT = {
 metadata = {"units": 300, "type": "KG"}


-def test_zenroom_signing():
+def test_zenroom_signing(b):
     biolabs = generate_key_pair()
     version = "3.0"

@@ -149,7 +149,6 @@ def test_zenroom_signing():
     tx["id"] = shared_creation_txid

     from transactions.common.transaction import Transaction
-    from planetmint.lib import Planetmint
     from transactions.common.exceptions import (
         SchemaValidationError,
         ValidationError,
@@ -164,9 +163,9 @@ def test_zenroom_signing():
     except ValidationError as e:
         print(e)
         assert ()
-    planet = Planetmint()
     try:
-        planet.validate_transaction(tx_obj)
+        b.validate_transaction(tx_obj)
     except ValidationError as e:
         print("Invalid transaction ({}): {}".format(type(e).__name__, e))
         assert ()
@@ -48,7 +48,7 @@ def test_get_owned_ids(signed_create_tx, user_pk, db_conn):
 
 
 def test_store_block(db_conn):
-    from planetmint.lib import Block
+    from planetmint.abci.block import Block
     from planetmint.backend.tarantool import query
 
     block = Block(app_hash="random_utxo", height=3, transactions=[])
@@ -59,7 +59,7 @@ def test_store_block(db_conn):
 
 
 def test_get_block(db_conn):
-    from planetmint.lib import Block
+    from planetmint.abci.block import Block
     from planetmint.backend.tarantool import query
 
     block = Block(app_hash="random_utxo", height=3, transactions=[])
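
The two Tarantool query tests above change only their import: the `Block` namedtuple now lives in `planetmint.abci.block`. A minimal sketch of the round trip these tests exercise; the exact `query.store_block`/`query.get_block` signatures are assumptions inferred from this diff:

from planetmint.abci.block import Block
from planetmint.backend.tarantool import query


def store_and_fetch(db_conn):
    # Block is a namedtuple, so _asdict() produces the record to persist
    block = Block(app_hash="random_utxo", height=3, transactions=[])
    query.store_block(db_conn, block._asdict())  # assumed signature
    return query.get_block(db_conn, 3)  # assumed signature
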
@@ -63,7 +63,7 @@ def run_start_args(request):
 @pytest.fixture
 def mocked_setup_logging(mocker):
     return mocker.patch(
-        "planetmint.log.setup_logging",
+        "planetmint.config_utils.setup_logging",
         autospec=True,
         spec_set=True,
     )
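
The only change above is the patch target: `setup_logging` is now resolved through `planetmint.config_utils`, and `unittest.mock` patches a name where it is looked up, not where it is defined, so every mock must follow the move. A short illustration of that rule; `run_command_that_configures_logging` is a hypothetical callee:

from unittest.mock import patch


def test_logging_is_configured():
    # patch at the new lookup location; autospec keeps the signature honest
    with patch("planetmint.config_utils.setup_logging", autospec=True) as mock_setup:
        run_command_that_configures_logging()  # hypothetical
        assert mock_setup.called
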
@@ -9,13 +9,19 @@ import pytest
 
 from unittest.mock import Mock, patch
 from argparse import Namespace
 
+from transactions.types.elections.validator_election import ValidatorElection
+from transactions.types.elections.chain_migration_election import ChainMigrationElection
 
+from planetmint.abci.rpc import ABCI_RPC
+from planetmint.abci.block import Block
 from planetmint.config import Config
-from planetmint import ValidatorElection
 from planetmint.commands.planetmint import run_election_show
 from planetmint.commands.planetmint import run_election_new_chain_migration
+from planetmint.commands.planetmint import run_election_approve
+from planetmint.commands.planetmint import run_election_new_upsert_validator
 from planetmint.backend.connection import Connection
-from planetmint.lib import Block
-from transactions.types.elections.chain_migration_election import ChainMigrationElection
 
 from tests.utils import generate_election, generate_validators
 
@@ -62,7 +68,7 @@ def test_main_entrypoint(mock_start):
     assert mock_start.called
 
 
-@patch("planetmint.log.setup_logging")
+@patch("planetmint.config_utils.setup_logging")
 @patch("planetmint.commands.planetmint._run_init")
 @patch("planetmint.config_utils.autoconfigure")
 def test_bigchain_run_start(mock_setup_logging, mock_run_init, mock_autoconfigure, mock_processes_start):
@@ -245,23 +251,22 @@ def test_calling_main(start_mock, monkeypatch):
     assert start_mock.called is True
 
 
-@patch("planetmint.commands.planetmint.run_recover")
+@patch("planetmint.application.validator.Validator.rollback")
 @patch("planetmint.start.start")
-def test_recover_db_on_start(mock_run_recover, mock_start, mocked_setup_logging):
+def test_recover_db_on_start(mock_rollback, mock_start, mocked_setup_logging):
     from planetmint.commands.planetmint import run_start
 
     args = Namespace(config=None, yes=True, skip_initialize_database=False)
     run_start(args)
 
-    assert mock_run_recover.called
+    assert mock_rollback.called
     assert mock_start.called
 
 
 @pytest.mark.bdb
-def test_run_recover(b, alice, bob):
-    from planetmint.commands.planetmint import run_recover
+def test_run_recover(b, alice, bob, test_models):
     from transactions.types.assets.create import Create
-    from planetmint.lib import Block
+    from planetmint.abci.block import Block
     from planetmint.backend import query
 
     tx1 = Create.generate(
@@ -278,23 +283,23 @@ def test_run_recover(b, alice, bob):
     ).sign([bob.private_key])
 
     # store the transactions
-    b.store_bulk_transactions([tx1, tx2])
+    b.models.store_bulk_transactions([tx1, tx2])
 
     # create a random block
     block8 = Block(app_hash="random_app_hash1", height=8, transactions=["txid_doesnt_matter"])._asdict()
-    b.store_block(block8)
+    b.models.store_block(block8)
 
     # create the next block
     block9 = Block(app_hash="random_app_hash1", height=9, transactions=[tx1.id])._asdict()
-    b.store_block(block9)
+    b.models.store_block(block9)
 
     # create a pre_commit state which is ahead of the commit state
     pre_commit_state = dict(height=10, transactions=[tx2.id])
-    b.store_pre_commit_state(pre_commit_state)
+    b.models.store_pre_commit_state(pre_commit_state)
 
-    run_recover(b)
+    b.rollback()
 
-    assert not query.get_transaction(b.connection, tx2.id)
+    assert not query.get_transaction(b.models.connection, tx2.id)
 
 
 # Helper
@@ -307,9 +312,7 @@ class MockResponse:
 
 
 @pytest.mark.abci
-def test_election_new_upsert_validator_with_tendermint(b, priv_validator_path, user_sk, validators):
-    from planetmint.commands.planetmint import run_election_new_upsert_validator
+def test_election_new_upsert_validator_with_tendermint(b, priv_validator_path, user_sk, validators, test_abci_rpc):
 
     new_args = Namespace(
         action="new",
         election_type="upsert-validator",
@@ -320,21 +323,19 @@ def test_election_new_upsert_validator_with_tendermint(b, priv_validator_path, u
         config={},
     )
 
-    election_id = run_election_new_upsert_validator(new_args, b)
+    election_id = run_election_new_upsert_validator(new_args, b, test_abci_rpc)
 
-    assert b.get_transaction(election_id)
+    assert b.models.get_transaction(election_id)
 
 
 @pytest.mark.bdb
-def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_validator_path, user_sk):
-    from planetmint.commands.planetmint import run_election_new_upsert_validator
-    def mock_write(tx, mode):
-        b.store_bulk_transactions([tx])
+def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_validator_path, user_sk, test_abci_rpc):
+    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
+        b.models.store_bulk_transactions([transaction])
         return (202, "")
 
-    b.get_validators = mock_get_validators
-    b.write_transaction = mock_write
+    b.models.get_validators = mock_get_validators
+    test_abci_rpc.write_transaction = mock_write
 
     args = Namespace(
         action="new",
@@ -347,41 +348,39 @@ def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_valida
     )
 
     with caplog.at_level(logging.INFO):
-        election_id = run_election_new_upsert_validator(args, b)
+        election_id = run_election_new_upsert_validator(args, b, test_abci_rpc)
         assert caplog.records[0].msg == "[SUCCESS] Submitted proposal with id: " + election_id
-        assert b.get_transaction(election_id)
+        assert b.models.get_transaction(election_id)
 
 
 @pytest.mark.abci
-def test_election_new_chain_migration_with_tendermint(b, priv_validator_path, user_sk, validators):
+def test_election_new_chain_migration_with_tendermint(b, priv_validator_path, user_sk, validators, test_abci_rpc):
     new_args = Namespace(action="new", election_type="migration", sk=priv_validator_path, config={})
 
-    election_id = run_election_new_chain_migration(new_args, b)
+    election_id = run_election_new_chain_migration(new_args, b, test_abci_rpc)
 
-    assert b.get_transaction(election_id)
+    assert b.models.get_transaction(election_id)
 
 
 @pytest.mark.bdb
-def test_election_new_chain_migration_without_tendermint(caplog, b, priv_validator_path, user_sk):
-    def mock_write(tx, mode):
-        b.store_bulk_transactions([tx])
+def test_election_new_chain_migration_without_tendermint(caplog, b, priv_validator_path, user_sk, test_abci_rpc):
+    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
+        b.models.store_bulk_transactions([transaction])
         return (202, "")
 
-    b.get_validators = mock_get_validators
-    b.write_transaction = mock_write
+    b.models.get_validators = mock_get_validators
+    test_abci_rpc.write_transaction = mock_write
 
     args = Namespace(action="new", election_type="migration", sk=priv_validator_path, config={})
 
     with caplog.at_level(logging.INFO):
-        election_id = run_election_new_chain_migration(args, b)
+        election_id = run_election_new_chain_migration(args, b, test_abci_rpc)
         assert caplog.records[0].msg == "[SUCCESS] Submitted proposal with id: " + election_id
-        assert b.get_transaction(election_id)
+        assert b.models.get_transaction(election_id)
 
 
 @pytest.mark.bdb
-def test_election_new_upsert_validator_invalid_election(caplog, b, priv_validator_path, user_sk):
-    from planetmint.commands.planetmint import run_election_new_upsert_validator
+def test_election_new_upsert_validator_invalid_election(caplog, b, priv_validator_path, user_sk, test_abci_rpc):
 
     args = Namespace(
         action="new",
         election_type="upsert-validator",
@@ -393,21 +392,20 @@ def test_election_new_upsert_validator_invalid_election(caplog, b, priv_validato
     )
 
     with caplog.at_level(logging.ERROR):
-        assert not run_election_new_upsert_validator(args, b)
+        assert not run_election_new_upsert_validator(args, b, test_abci_rpc)
         assert caplog.records[0].msg.__class__ == FileNotFoundError
 
 
 @pytest.mark.bdb
-def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_path, user_sk):
-    from planetmint.commands.planetmint import run_election_new_upsert_validator
+def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_path, user_sk, test_abci_rpc):
     from transactions.common.exceptions import InvalidPowerChange
 
-    def mock_write(tx, mode):
-        b.store_bulk_transactions([tx])
+    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
+        b.models.store_bulk_transactions([transaction])
         return (400, "")
 
-    b.write_transaction = mock_write
-    b.get_validators = mock_get_validators
+    test_abci_rpc.write_transaction = mock_write
+    b.models.get_validators = mock_get_validators
     args = Namespace(
         action="new",
         election_type="upsert-validator",
@@ -419,14 +417,12 @@ def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_p
     )
 
     with caplog.at_level(logging.ERROR):
-        assert not run_election_new_upsert_validator(args, b)
+        assert not run_election_new_upsert_validator(args, b, test_abci_rpc)
         assert caplog.records[0].msg.__class__ == InvalidPowerChange
 
 
 @pytest.mark.abci
-def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, validators):
-    from planetmint.commands.planetmint import run_election_new_upsert_validator, run_election_approve
+def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, validators, test_abci_rpc):
 
     public_key = "CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg="
     new_args = Namespace(
         action="new",
@@ -438,65 +434,63 @@ def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, valid
         config={},
     )
 
-    election_id = run_election_new_upsert_validator(new_args, b)
+    election_id = run_election_new_upsert_validator(new_args, b, test_abci_rpc)
     assert election_id
 
     args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={})
-    approve = run_election_approve(args, b)
+    approve = run_election_approve(args, b, test_abci_rpc)
 
-    assert b.get_transaction(approve)
+    assert b.models.get_transaction(approve)
 
 
 @pytest.mark.bdb
-def test_election_approve_without_tendermint(caplog, b, priv_validator_path, new_validator, node_key):
+def test_election_approve_without_tendermint(caplog, b, priv_validator_path, new_validator, node_key, test_abci_rpc):
     from planetmint.commands.planetmint import run_election_approve
     from argparse import Namespace
 
-    b, election_id = call_election(b, new_validator, node_key)
+    b, election_id = call_election(b, new_validator, node_key, test_abci_rpc)
 
     # call run_election_approve with args that point to the election
     args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={})
 
     # assert returned id is in the db
     with caplog.at_level(logging.INFO):
-        approval_id = run_election_approve(args, b)
+        approval_id = run_election_approve(args, b, test_abci_rpc)
         assert caplog.records[0].msg == "[SUCCESS] Your vote has been submitted"
-        assert b.get_transaction(approval_id)
+        assert b.models.get_transaction(approval_id)
 
 
 @pytest.mark.bdb
-def test_election_approve_failure(caplog, b, priv_validator_path, new_validator, node_key):
-    from planetmint.commands.planetmint import run_election_approve
+def test_election_approve_failure(caplog, b, priv_validator_path, new_validator, node_key, test_abci_rpc):
     from argparse import Namespace
 
-    b, election_id = call_election(b, new_validator, node_key)
+    b, election_id = call_election(b, new_validator, node_key, test_abci_rpc)
 
-    def mock_write(tx, mode):
-        b.store_bulk_transactions([tx])
+    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
+        b.models.store_bulk_transactions([transaction])
         return (400, "")
 
-    b.write_transaction = mock_write
+    test_abci_rpc.write_transaction = mock_write
 
     # call run_upsert_validator_approve with args that point to the election
     args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={})
 
     with caplog.at_level(logging.ERROR):
-        assert not run_election_approve(args, b)
+        assert not run_election_approve(args, b, test_abci_rpc)
         assert caplog.records[0].msg == "Failed to commit vote"
 
 
 @pytest.mark.bdb
-def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new_validator, node_key):
-    from planetmint.commands.planetmint import run_election_approve
+def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new_validator, node_key, test_abci_rpc):
     from argparse import Namespace
 
-    b, election_id = call_election(b, new_validator, node_key)
+    b, election_id = call_election(b, new_validator, node_key, test_abci_rpc)
 
     # call run_upsert_validator_approve with args that point to the election, but a bad signing key
     args = Namespace(action="approve", election_id=election_id, sk=bad_validator_path, config={})
 
     with caplog.at_level(logging.ERROR):
-        assert not run_election_approve(args, b)
+        assert not run_election_approve(args, b, test_abci_rpc)
         assert (
             caplog.records[0].msg == "The key you provided does not match any of "
             "the eligible voters in this election."
@@ -506,7 +500,7 @@ def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new
 @pytest.mark.bdb
 def test_chain_migration_election_show_shows_inconclusive(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])
 
     public_key = validators[0]["public_key"]
     private_key = validators[0]["private_key"]
@@ -517,16 +511,16 @@ def test_chain_migration_election_show_shows_inconclusive(b):
     assert not run_election_show(Namespace(election_id=election.id), b)
 
     b.process_block(1, [election])
-    b.store_bulk_transactions([election])
+    b.models.store_bulk_transactions([election])
 
     assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"
 
-    b.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
-    b.store_validator_set(2, [v["storage"] for v in validators])
+    b.models.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
+    b.models.store_validator_set(2, [v["storage"] for v in validators])
 
     assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"
 
-    b.store_block(Block(height=2, transactions=[], app_hash="")._asdict())
+    b.models.store_block(Block(height=2, transactions=[], app_hash="")._asdict())
     # TODO insert yet another block here when upgrading to Tendermint 0.22.4.
 
     assert run_election_show(Namespace(election_id=election.id), b) == "status=inconclusive"
@@ -535,7 +529,7 @@ def test_chain_migration_election_show_shows_inconclusive(b):
 @pytest.mark.bdb
 def test_chain_migration_election_show_shows_concluded(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])
 
     public_key = validators[0]["public_key"]
     private_key = validators[0]["private_key"]
@@ -545,13 +539,13 @@ def test_chain_migration_election_show_shows_concluded(b):
 
     assert not run_election_show(Namespace(election_id=election.id), b)
 
-    b.store_bulk_transactions([election])
+    b.models.store_bulk_transactions([election])
     b.process_block(1, [election])
 
     assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing"
 
-    b.store_abci_chain(1, "chain-X")
-    b.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict())
+    b.models.store_abci_chain(1, "chain-X")
+    b.models.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict())
     b.process_block(2, votes)
 
     assert (
@@ -593,14 +587,14 @@ def mock_get_validators(height):
     ]
 
 
-def call_election(b, new_validator, node_key):
-    def mock_write(tx, mode):
-        b.store_bulk_transactions([tx])
+def call_election(b, new_validator, node_key, abci_rpc):
+    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
+        b.models.store_bulk_transactions([transaction])
         return (202, "")
 
     # patch the validator set. We now have one validator with power 10
-    b.get_validators = mock_get_validators
-    b.write_transaction = mock_write
+    b.models.get_validators = mock_get_validators
+    abci_rpc.write_transaction = mock_write
 
     # our voters is a list of length 1, populated from our mocked validator
     voters = b.get_recipients_list()
@@ -610,6 +604,6 @@ def call_election(b, new_validator, node_key):
 
     # patch in an election with a vote issued to the user
     election_id = valid_election.id
-    b.store_bulk_transactions([valid_election])
+    b.models.store_bulk_transactions([valid_election])
 
     return b, election_id
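
Across the election-command tests above, the broadcast stub moves off the node object and onto the RPC object, and its signature widens from `(tx, mode)` to match `ABCI_RPC.write_transaction(modelist, endpoint, mode_commit, transaction, mode)`. A minimal sketch of the stubbing pattern the tests repeat, assuming the `b` and `test_abci_rpc` fixtures introduced in this commit; `make_mock_write` is a hypothetical factory:

def make_mock_write(b, status=202):
    # the stub mirrors the new write_transaction parameter list and stores
    # the transaction through the models layer, as the tests above do
    def mock_write(modelist, endpoint, mode_commit, transaction, mode):
        b.models.store_bulk_transactions([transaction])
        return (status, "")

    return mock_write


# usage inside a test:
#   test_abci_rpc.write_transaction = make_mock_write(b)       # accepted, 202
#   test_abci_rpc.write_transaction = make_mock_write(b, 400)  # rejected, 400
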
@@ -20,14 +20,17 @@ from ipld import marshal, multihash
 from collections import namedtuple
 from logging import getLogger
 from logging.config import dictConfig
 
 from planetmint.backend.connection import Connection
 from planetmint.backend.tarantool.connection import TarantoolDBConnection
 from transactions.common import crypto
 from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
-from planetmint.tendermint_utils import key_from_base64
+from planetmint.abci.tendermint_utils import key_from_base64
 from planetmint.backend import schema, query
 from transactions.common.crypto import key_pair_from_ed25519_key, public_key_from_ed25519_key
-from planetmint.lib import Block
+from planetmint.abci.block import Block
+from planetmint.abci.rpc import MODE_LIST
+from planetmint.model.models import Models
 from tests.utils import gen_vote
 from planetmint.config import Config
 from transactions.types.elections.validator_election import ValidatorElection  # noqa
@@ -246,10 +249,31 @@ def abci_fixture():
 
 
 @pytest.fixture
-def b():
-    from planetmint import Planetmint
+def test_models():
+    from planetmint.model.models import Models
 
-    return Planetmint()
+    return Models()
 
 
+@pytest.fixture
+def test_validator():
+    from planetmint.application import Validator
+
+    return Validator()
+
+
+@pytest.fixture
+def test_abci_rpc():
+    from planetmint.abci.rpc import ABCI_RPC
+
+    return ABCI_RPC()
+
+
+@pytest.fixture
+def b():
+    from planetmint.application import Validator
+
+    return Validator()
+
+
 @pytest.fixture
@@ -261,7 +285,7 @@ def eventqueue_fixture():
 
 @pytest.fixture
 def b_mock(b, network_validators):
-    b.get_validators = mock_get_validators(network_validators)
+    b.models.get_validators = mock_get_validators(network_validators)
     return b
 
 
@@ -290,8 +314,10 @@ def signed_create_tx(alice, create_tx):
 
 
 @pytest.fixture
-def posted_create_tx(b, signed_create_tx):
-    res = b.post_transaction(signed_create_tx, BROADCAST_TX_COMMIT)
+def posted_create_tx(b, signed_create_tx, test_abci_rpc):
+    res = test_abci_rpc.post_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT
+    )
     assert res.status_code == 200
     return signed_create_tx
 
@@ -315,7 +341,7 @@ def double_spend_tx(signed_create_tx, carol_pubkey, user_sk):
 
 
 def _get_height(b):
-    maybe_block = b.get_latest_block()
+    maybe_block = b.models.get_latest_block()
     return 0 if maybe_block is None else maybe_block["height"]
 
 
@@ -333,8 +359,8 @@ def inputs(user_pk, b, alice):
     ]
     tx_ids = [tx.id for tx in transactions]
     block = Block(app_hash="hash" + str(height), height=height, transactions=tx_ids)
-    b.store_block(block._asdict())
-    b.store_bulk_transactions(transactions)
+    b.models.store_block(block._asdict())
+    b.models.store_bulk_transactions(transactions)
 
 
 @pytest.fixture
@@ -427,7 +453,7 @@ def abci_server():
     from abci.server import ABCIServer
 
     # from tendermint.abci import types_pb2 as types_v0_34_11
-    from planetmint.core import App
+    from planetmint.abci.core import App
     from planetmint.utils import Process
 
     app = ABCIServer(app=App())
@@ -630,7 +656,7 @@ def validators(b, node_keys):
 
     height = get_block_height(b)
 
-    original_validators = b.get_validators()
+    original_validators = b.models.get_validators()
 
     (public_key, private_key) = list(node_keys.items())[0]
 
@@ -644,7 +670,7 @@ def validators(b, node_keys):
 
     validator_update = {"validators": validator_set, "height": height + 1, "election_id": f"setup_at_{timestamp()}"}
 
-    query.store_validator_set(b.connection, validator_update)
+    query.store_validator_set(b.models.connection, validator_update)
 
     yield
 
@@ -656,12 +682,12 @@ def validators(b, node_keys):
         "election_id": f"teardown_at_{timestamp()}",
     }
 
-    query.store_validator_set(b.connection, validator_update)
+    query.store_validator_set(b.models.connection, validator_update)
 
 
 def get_block_height(b):
-    if b.get_latest_block():
-        height = b.get_latest_block()["height"]
+    if b.models.get_latest_block():
+        height = b.models.get_latest_block()["height"]
     else:
         height = 0
 
@@ -693,25 +719,25 @@ def valid_upsert_validator_election_2(b_mock, node_key, new_validator):
 
 @pytest.fixture
 def ongoing_validator_election(b, valid_upsert_validator_election, ed25519_node_keys):
-    validators = b.get_validators(height=1)
+    validators = b.models.get_validators(height=1)
     genesis_validators = {"validators": validators, "height": 0}
-    query.store_validator_set(b.connection, genesis_validators)
-    b.store_bulk_transactions([valid_upsert_validator_election])
-    query.store_election(b.connection, valid_upsert_validator_election.id, 1, is_concluded=False)
+    query.store_validator_set(b.models.connection, genesis_validators)
+    b.models.store_bulk_transactions([valid_upsert_validator_election])
+    query.store_election(b.models.connection, valid_upsert_validator_election.id, 1, is_concluded=False)
     block_1 = Block(app_hash="hash_1", height=1, transactions=[valid_upsert_validator_election.id])
-    b.store_block(block_1._asdict())
+    b.models.store_block(block_1._asdict())
     return valid_upsert_validator_election
 
 
 @pytest.fixture
 def ongoing_validator_election_2(b, valid_upsert_validator_election_2, ed25519_node_keys):
-    validators = b.get_validators(height=1)
+    validators = b.models.get_validators(height=1)
     genesis_validators = {"validators": validators, "height": 0, "election_id": None}
-    query.store_validator_set(b.connection, genesis_validators)
+    query.store_validator_set(b.models.connection, genesis_validators)
 
-    b.store_bulk_transactions([valid_upsert_validator_election_2])
+    b.models.store_bulk_transactions([valid_upsert_validator_election_2])
     block_1 = Block(app_hash="hash_2", height=1, transactions=[valid_upsert_validator_election_2.id])
-    b.store_block(block_1._asdict())
+    b.models.store_block(block_1._asdict())
     return valid_upsert_validator_election_2
 
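
The conftest hunks above split the old all-in-one `Planetmint()` fixture three ways: `b` (and `test_validator`) return the application-level `Validator`, `test_models` returns the storage layer, and `test_abci_rpc` returns the RPC client used for broadcasting. A minimal sketch of a test wiring them together, assuming the fixture shapes shown in this diff:

from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
from planetmint.abci.rpc import MODE_LIST


def test_post_and_read_back(b, signed_create_tx, test_abci_rpc):
    # broadcast through the RPC object instead of the node object
    res = test_abci_rpc.post_transaction(
        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, signed_create_tx, BROADCAST_TX_COMMIT
    )
    assert res.status_code == 200
    # read back through the models layer
    assert b.models.get_transaction(signed_create_tx.id)
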
@@ -5,19 +5,16 @@
 import random
 import warnings
 from unittest.mock import patch
 
 import pytest
 from base58 import b58decode
 from ipld import marshal, multihash
 
 from transactions.common import crypto
-from transactions.common.output import Output as TransactionOutput
 from transactions.common.transaction import TransactionLink
 from transactions.common.transaction import Transaction
 from transactions.types.assets.create import Create
 from transactions.types.assets.transfer import Transfer
+from planetmint.model.fastquery import FastQuery
 
-from planetmint.backend.models import Output
 from planetmint.exceptions import CriticalDoubleSpend
 
 pytestmark = pytest.mark.bdb
@@ -32,7 +29,7 @@ class TestBigchainApi(object):
         tx = Create.generate([alice.public_key], [([alice.public_key], 1)])
         tx = tx.sign([alice.private_key])
 
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
         transfer_tx = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], asset_ids=[tx.id])
         transfer_tx = transfer_tx.sign([alice.private_key])
@@ -42,13 +39,13 @@ class TestBigchainApi(object):
         with pytest.raises(DoubleSpend):
             b.validate_transaction(transfer_tx2, [transfer_tx])
 
-        b.store_bulk_transactions([transfer_tx])
+        b.models.store_bulk_transactions([transfer_tx])
 
         with pytest.raises(DoubleSpend):
             b.validate_transaction(transfer_tx2)
 
         with pytest.raises(CriticalDoubleSpend):
-            b.store_bulk_transactions([transfer_tx2])
+            b.models.store_bulk_transactions([transfer_tx2])
 
     def test_double_inclusion(self, b, alice):
         from tarantool.error import DatabaseError
@@ -59,13 +56,13 @@ class TestBigchainApi(object):
         tx = Create.generate([alice.public_key], [([alice.public_key], 1)])
         tx = tx.sign([alice.private_key])
 
-        b.store_bulk_transactions([tx])
-        if isinstance(b.connection, TarantoolDBConnection):
+        b.models.store_bulk_transactions([tx])
+        if isinstance(b.models.connection, TarantoolDBConnection):
             with pytest.raises(CriticalDoubleSpend):
-                b.store_bulk_transactions([tx])
+                b.models.store_bulk_transactions([tx])
         else:
             with pytest.raises(OperationError):
-                b.store_bulk_transactions([tx])
+                b.models.store_bulk_transactions([tx])
 
     @pytest.mark.usefixtures("inputs")
     def test_non_create_input_not_found(self, b, user_pk):
@@ -85,9 +82,9 @@ class TestBigchainApi(object):
         asset1 = {"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}
 
         tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=[asset1]).sign([alice.private_key])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        tx_from_db = b.get_transaction(tx.id)
+        tx_from_db = b.models.get_transaction(tx.id)
 
         before = tx.to_dict()
         after = tx_from_db.to_dict()
@@ -112,8 +109,8 @@ class TestTransactionValidation(object):
         from transactions.common.crypto import generate_key_pair
         from transactions.common.exceptions import InvalidSignature
 
-        input_tx = b.fastquery.get_outputs_by_public_key(user_pk).pop()
-        input_transaction = b.get_transaction(input_tx.txid)
+        input_tx = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
+        input_transaction = b.models.get_transaction(input_tx.txid)
         sk, pk = generate_key_pair()
         tx = Create.generate([pk], [([user_pk], 1)])
         tx.operation = "TRANSFER"
@@ -127,7 +124,7 @@ class TestTransactionValidation(object):
     def test_non_create_double_spend(self, b, signed_create_tx, signed_transfer_tx, double_spend_tx):
         from transactions.common.exceptions import DoubleSpend
 
-        b.store_bulk_transactions([signed_create_tx, signed_transfer_tx])
+        b.models.store_bulk_transactions([signed_create_tx, signed_transfer_tx])
 
         with pytest.raises(DoubleSpend):
             b.validate_transaction(double_spend_tx)
@@ -136,8 +133,8 @@ class TestTransactionValidation(object):
 class TestMultipleInputs(object):
     def test_transfer_single_owner_single_input(self, b, inputs, user_pk, user_sk):
         user2_sk, user2_pk = crypto.generate_key_pair()
-        tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop()
-        input_tx = b.get_transaction(tx_link.txid)
+        tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
+        input_tx = b.models.get_transaction(tx_link.txid)
         tx_converted = Transaction.from_dict(input_tx.to_dict(), True)
 
         tx = Transfer.generate(tx_converted.to_inputs(), [([user2_pk], 1)], asset_ids=[input_tx.id])
@@ -151,9 +148,9 @@ class TestMultipleInputs(object):
     def test_single_owner_before_multiple_owners_after_single_input(self, b, user_sk, user_pk, inputs):
         user2_sk, user2_pk = crypto.generate_key_pair()
         user3_sk, user3_pk = crypto.generate_key_pair()
-        tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop()
+        tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
 
-        input_tx = b.get_transaction(tx_link.txid)
+        input_tx = b.models.get_transaction(tx_link.txid)
         tx_converted = Transaction.from_dict(input_tx.to_dict(), True)
 
         tx = Transfer.generate(tx_converted.to_inputs(), [([user2_pk, user3_pk], 1)], asset_ids=[input_tx.id])
@@ -170,10 +167,10 @@ class TestMultipleInputs(object):
 
         tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
         tx = tx.sign([alice.private_key])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        owned_input = b.fastquery.get_outputs_by_public_key(user_pk).pop()
-        input_tx = b.get_transaction(owned_input.txid)
+        owned_input = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
+        input_tx = b.models.get_transaction(owned_input.txid)
         input_tx_converted = Transaction.from_dict(input_tx.to_dict(), True)
 
         transfer_tx = Transfer.generate(input_tx_converted.to_inputs(), [([user3_pk], 1)], asset_ids=[input_tx.id])
@@ -192,11 +189,11 @@ class TestMultipleInputs(object):
 
         tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
         tx = tx.sign([alice.private_key])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
         # get input
-        tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop()
-        tx_input = b.get_transaction(tx_link.txid)
+        tx_link = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
+        tx_input = b.models.get_transaction(tx_link.txid)
         input_tx_converted = Transaction.from_dict(tx_input.to_dict(), True)
 
         tx = Transfer.generate(input_tx_converted.to_inputs(), [([user3_pk, user4_pk], 1)], asset_ids=[tx_input.id])
@@ -211,19 +208,19 @@ class TestMultipleInputs(object):
 
         tx = Create.generate([alice.public_key], [([user_pk], 1)])
         tx = tx.sign([alice.private_key])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
-        owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
         assert owned_inputs_user1 == [TransactionLink(tx.id, 0)]
         assert owned_inputs_user2 == []
 
         tx_transfer = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_ids=[tx.id])
         tx_transfer = tx_transfer.sign([user_sk])
-        b.store_bulk_transactions([tx_transfer])
+        b.models.store_bulk_transactions([tx_transfer])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
-        owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
 
         assert owned_inputs_user1 == [TransactionLink(tx.id, 0)]
         assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0)]
@@ -234,11 +231,11 @@ class TestMultipleInputs(object):
         # create divisible asset
         tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1)])
         tx_create_signed = tx_create.sign([alice.private_key])
-        b.store_bulk_transactions([tx_create_signed])
+        b.models.store_bulk_transactions([tx_create_signed])
 
         # get input
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
-        owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
 
         expected_owned_inputs_user1 = [TransactionLink(tx_create.id, 0), TransactionLink(tx_create.id, 1)]
         assert owned_inputs_user1 == expected_owned_inputs_user1
@@ -249,10 +246,10 @@ class TestMultipleInputs(object):
             tx_create.to_inputs(), [([user2_pk], 1), ([user2_pk], 1)], asset_ids=[tx_create.id]
         )
         tx_transfer_signed = tx_transfer.sign([user_sk])
-        b.store_bulk_transactions([tx_transfer_signed])
+        b.models.store_bulk_transactions([tx_transfer_signed])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
-        owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
         assert owned_inputs_user1 == expected_owned_inputs_user1
         assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0), TransactionLink(tx_transfer.id, 1)]
 
@@ -263,10 +260,10 @@ class TestMultipleInputs(object):
         tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)])
         tx = tx.sign([alice.private_key])
 
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
-        owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user_pk)
         expected_owned_inputs_user1 = [TransactionLink(tx.id, 0)]
 
         assert owned_inputs_user1 == owned_inputs_user2
@@ -274,11 +271,11 @@ class TestMultipleInputs(object):
 
         tx = Transfer.generate(tx.to_inputs(), [([user3_pk], 1)], asset_ids=[tx.id])
         tx = tx.sign([user_sk, user2_sk])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
-        owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk)
-        spent_user1 = b.get_spent(tx.id, 0)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user2 = b.models.fastquery.get_outputs_by_public_key(user2_pk)
+        spent_user1 = b.models.get_spent(tx.id, 0)
 
         assert owned_inputs_user1 == owned_inputs_user2
         assert not spent_user1
@@ -288,21 +285,21 @@ class TestMultipleInputs(object):
 
         tx = Create.generate([alice.public_key], [([user_pk], 1)])
         tx = tx.sign([alice.private_key])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk).pop()
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk).pop()
 
         # check spents
         input_txid = owned_inputs_user1.txid
-        spent_inputs_user1 = b.get_spent(input_txid, 0)
+        spent_inputs_user1 = b.models.get_spent(input_txid, 0)
         assert spent_inputs_user1 is None
 
         # create a transaction and send it
         tx = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_ids=[tx.id])
         tx = tx.sign([user_sk])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
-        spent_inputs_user1 = b.get_spent(input_txid, 0)
+        spent_inputs_user1 = b.models.get_spent(input_txid, 0)
         assert spent_inputs_user1 == tx.to_dict()
 
     def test_get_spent_single_tx_multiple_outputs(self, b, user_sk, user_pk, alice):
@@ -312,29 +309,29 @@ class TestMultipleInputs(object):
         # create a divisible asset with 3 outputs
         tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1), ([user_pk], 1)])
         tx_create_signed = tx_create.sign([alice.private_key])
-        b.store_bulk_transactions([tx_create_signed])
+        b.models.store_bulk_transactions([tx_create_signed])
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
 
         # check spents
         for input_tx in owned_inputs_user1:
-            assert b.get_spent(input_tx.txid, input_tx.output) is None
+            assert b.models.get_spent(input_tx.txid, input_tx.output) is None
 
         # transfer the first 2 inputs
         tx_transfer = Transfer.generate(
             tx_create.to_inputs()[:2], [([user2_pk], 1), ([user2_pk], 1)], asset_ids=[tx_create.id]
         )
         tx_transfer_signed = tx_transfer.sign([user_sk])
-        b.store_bulk_transactions([tx_transfer_signed])
+        b.models.store_bulk_transactions([tx_transfer_signed])
 
         # check that used inputs are marked as spent
         for ffill in tx_create.to_inputs()[:2]:
-            spent_tx = b.get_spent(ffill.fulfills.txid, ffill.fulfills.output)
+            spent_tx = b.models.get_spent(ffill.fulfills.txid, ffill.fulfills.output)
             assert spent_tx == tx_transfer_signed.to_dict()
 
         # check if remaining transaction that was unspent is also perceived
         # spendable by Planetmint
-        assert b.get_spent(tx_create.to_inputs()[2].fulfills.txid, 2) is None
+        assert b.models.get_spent(tx_create.to_inputs()[2].fulfills.txid, 2) is None
 
     def test_get_spent_multiple_owners(self, b, user_sk, user_pk, alice):
         user2_sk, user2_pk = crypto.generate_key_pair()
@@ -347,71 +344,71 @@ class TestMultipleInputs(object):
         tx = tx.sign([alice.private_key])
         transactions.append(tx)
 
-        b.store_bulk_transactions(transactions)
+        b.models.store_bulk_transactions(transactions)
 
-        owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk)
+        owned_inputs_user1 = b.models.fastquery.get_outputs_by_public_key(user_pk)
         # check spents
         for input_tx in owned_inputs_user1:
-            assert b.get_spent(input_tx.txid, input_tx.output) is None
+            assert b.models.get_spent(input_tx.txid, input_tx.output) is None
 
         # create a transaction
         tx = Transfer.generate(transactions[0].to_inputs(), [([user3_pk], 1)], asset_ids=[transactions[0].id])
         tx = tx.sign([user_sk, user2_sk])
-        b.store_bulk_transactions([tx])
+        b.models.store_bulk_transactions([tx])
 
         # check that used inputs are marked as spent
-        assert b.get_spent(transactions[0].id, 0) == tx.to_dict()
+        assert b.models.get_spent(transactions[0].id, 0) == tx.to_dict()
         # check that the other remain marked as unspent
         for unspent in transactions[1:]:
-            assert b.get_spent(unspent.id, 0) is None
+            assert b.models.get_spent(unspent.id, 0) is None
 
 
-def test_get_outputs_filtered_only_unspent():
+def test_get_outputs_filtered_only_unspent(b):
     from transactions.common.transaction import TransactionLink
 
-    from planetmint.lib import Planetmint
+    go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
 
-    go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key"
     with patch(go) as get_outputs:
         get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
-        fs = "planetmint.fastquery.FastQuery.filter_spent_outputs"
+        fs = "planetmint.model.fastquery.FastQuery.filter_spent_outputs"
         with patch(fs) as filter_spent:
             filter_spent.return_value = [TransactionLink("b", 2)]
-            out = Planetmint().get_outputs_filtered("abc", spent=False)
+            out = b.models.get_outputs_filtered("abc", spent=False)
         get_outputs.assert_called_once_with("abc")
         assert out == [TransactionLink("b", 2)]
 
 
-def test_get_outputs_filtered_only_spent():
+def test_get_outputs_filtered_only_spent(b):
     from transactions.common.transaction import TransactionLink
 
-    from planetmint.lib import Planetmint
+    go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
 
-    go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key"
     with patch(go) as get_outputs:
         get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
-        fs = "planetmint.fastquery.FastQuery.filter_unspent_outputs"
+        fs = "planetmint.model.fastquery.FastQuery.filter_unspent_outputs"
        with patch(fs) as filter_spent:
             filter_spent.return_value = [TransactionLink("b", 2)]
-            out = Planetmint().get_outputs_filtered("abc", spent=True)
+            out = b.models.get_outputs_filtered("abc", spent=True)
         get_outputs.assert_called_once_with("abc")
         assert out == [TransactionLink("b", 2)]
 
 
-@patch("planetmint.fastquery.FastQuery.filter_unspent_outputs")
+# @patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")
|
||||||
@patch("planetmint.fastquery.FastQuery.filter_spent_outputs")
|
# @patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
|
||||||
def test_get_outputs_filtered(filter_spent, filter_unspent):
|
def test_get_outputs_filtered(
|
||||||
|
b,
|
||||||
|
mocker,
|
||||||
|
):
|
||||||
from transactions.common.transaction import TransactionLink
|
from transactions.common.transaction import TransactionLink
|
||||||
|
|
||||||
from planetmint.lib import Planetmint
|
mock_filter_spent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_spent_outputs")
|
||||||
|
mock_filter_unspent_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.filter_unspent_outputs")
|
||||||
|
|
||||||
go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key"
|
go = "planetmint.model.fastquery.FastQuery.get_outputs_by_public_key"
|
||||||
with patch(go) as get_outputs:
|
with patch(go) as get_outputs:
|
||||||
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
|
get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)]
|
||||||
out = Planetmint().get_outputs_filtered("abc")
|
out = b.models.get_outputs_filtered("abc")
|
||||||
get_outputs.assert_called_once_with("abc")
|
get_outputs.assert_called_once_with("abc")
|
||||||
filter_spent.assert_not_called()
|
mock_filter_spent_outputs.assert_not_called()
|
||||||
filter_unspent.assert_not_called()
|
mock_filter_unspent_outputs.assert_not_called()
|
||||||
assert out == get_outputs.return_value
|
assert out == get_outputs.return_value
|
||||||
|
|
||||||
|
|
||||||
@ -425,7 +422,7 @@ def test_cant_spend_same_input_twice_in_tx(b, alice):
|
|||||||
tx_create = Create.generate([alice.public_key], [([alice.public_key], 100)])
|
tx_create = Create.generate([alice.public_key], [([alice.public_key], 100)])
|
||||||
tx_create_signed = tx_create.sign([alice.private_key])
|
tx_create_signed = tx_create.sign([alice.private_key])
|
||||||
assert b.validate_transaction(tx_create_signed) == tx_create_signed
|
assert b.validate_transaction(tx_create_signed) == tx_create_signed
|
||||||
b.store_bulk_transactions([tx_create_signed])
|
b.models.store_bulk_transactions([tx_create_signed])
|
||||||
|
|
||||||
# Create a transfer transaction with duplicated fulfillments
|
# Create a transfer transaction with duplicated fulfillments
|
||||||
dup_inputs = tx_create.to_inputs() + tx_create.to_inputs()
|
dup_inputs = tx_create.to_inputs() + tx_create.to_inputs()
|
||||||
@ -450,6 +447,6 @@ def test_transaction_unicode(b, alice):
|
|||||||
)
|
)
|
||||||
|
|
||||||
tx_1 = copy.deepcopy(tx)
|
tx_1 = copy.deepcopy(tx)
|
||||||
b.store_bulk_transactions([tx])
|
b.models.store_bulk_transactions([tx])
|
||||||
|
|
||||||
assert beer_json["data"] in serialize(tx_1.to_dict())
|
assert beer_json["data"] in serialize(tx_1.to_dict())
|
||||||
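Taken together, the hunks above replace direct calls on the node object with the `b.models` facade and repoint every mock target from `planetmint.fastquery` to `planetmint.model.fastquery`. A minimal sketch of the new convention, assuming the same `b` and `mocker` pytest fixtures the tests use:

def test_outputs_filtered_sketch(b, mocker):
    # Patch the relocated FastQuery and drive it through the models facade.
    get_outputs = mocker.patch("planetmint.model.fastquery.FastQuery.get_outputs_by_public_key")
    get_outputs.return_value = []
    assert b.models.get_outputs_filtered("abc") == []
    get_outputs.assert_called_once_with("abc")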

@@ -1,7 +1,7 @@
 import pytest

 from tests.utils import generate_election, generate_validators
-from planetmint.lib import Block
+from planetmint.abci.block import Block
 from transactions.types.elections.election import Election
 from transactions.types.elections.chain_migration_election import ChainMigrationElection
 from transactions.types.elections.validator_election import ValidatorElection
@@ -10,7 +10,7 @@ from transactions.types.elections.validator_election import ValidatorElection
 @pytest.mark.bdb
 def test_process_block_concludes_all_elections(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])

     new_validator = generate_validators([1])[0]

@@ -29,18 +29,18 @@ def test_process_block_concludes_all_elections(b):
         txs += [election]
         total_votes += votes

-    b.store_abci_chain(1, "chain-X")
+    b.models.store_abci_chain(1, "chain-X")
     b.process_block(1, txs)
-    b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)

     b.process_block(2, total_votes)

-    validators = b.get_validators()
+    validators = b.models.get_validators()
     assert len(validators) == 5
     assert new_validator["storage"] in validators

-    chain = b.get_latest_abci_chain()
+    chain = b.models.get_latest_abci_chain()
     assert chain
     assert chain == {
         "height": 2,
@@ -49,13 +49,13 @@ def test_process_block_concludes_all_elections(b):
     }

     for tx in txs:
-        assert b.get_election(tx.id)["is_concluded"]
+        assert b.models.get_election(tx.id)["is_concluded"]


 @pytest.mark.bdb
 def test_process_block_approves_only_one_validator_update(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])

     new_validator = generate_validators([1])[0]

@@ -78,24 +78,24 @@ def test_process_block_approves_only_one_validator_update(b):
     total_votes += votes

     b.process_block(1, txs)
-    b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)

     b.process_block(2, total_votes)

-    validators = b.get_validators()
+    validators = b.models.get_validators()
     assert len(validators) == 5
     assert new_validator["storage"] in validators
     assert another_validator["storage"] not in validators

-    assert b.get_election(txs[0].id)["is_concluded"]
+    assert b.models.get_election(txs[0].id)["is_concluded"]
-    assert not b.get_election(txs[1].id)["is_concluded"]
+    assert not b.models.get_election(txs[1].id)["is_concluded"]


 @pytest.mark.bdb
 def test_process_block_approves_after_pending_validator_update(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])

     new_validator = generate_validators([1])[0]

@@ -122,29 +122,33 @@ def test_process_block_approves_after_pending_validator_update(b):
         txs += [election]
         total_votes += votes

-    b.store_abci_chain(1, "chain-X")
+    b.models.store_abci_chain(1, "chain-X")
     b.process_block(1, txs)
-    b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)

     b.process_block(2, total_votes)

-    validators = b.get_validators()
+    validators = b.models.get_validators()
     assert len(validators) == 5
     assert new_validator["storage"] in validators
     assert another_validator["storage"] not in validators

-    assert b.get_election(txs[0].id)["is_concluded"]
+    assert b.models.get_election(txs[0].id)["is_concluded"]
-    assert not b.get_election(txs[1].id)["is_concluded"]
+    assert not b.models.get_election(txs[1].id)["is_concluded"]
-    assert b.get_election(txs[2].id)["is_concluded"]
+    assert b.models.get_election(txs[2].id)["is_concluded"]

-    assert b.get_latest_abci_chain() == {"height": 2, "chain_id": "chain-X-migrated-at-height-1", "is_synced": False}
+    assert b.models.get_latest_abci_chain() == {
+        "height": 2,
+        "chain_id": "chain-X-migrated-at-height-1",
+        "is_synced": False,
+    }


 @pytest.mark.bdb
 def test_process_block_does_not_approve_after_validator_update(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])

     new_validator = generate_validators([1])[0]

@@ -158,9 +162,9 @@ def test_process_block_does_not_approve_after_validator_update(b):
     txs = [election]
     total_votes = votes

-    b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
     b.process_block(1, txs)
-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)

     second_election, second_votes = generate_election(
         b, ChainMigrationElection, public_key, private_key, [{"data": {}}], voter_keys
@@ -168,19 +172,21 @@ def test_process_block_does_not_approve_after_validator_update(b):

     b.process_block(2, total_votes + [second_election])

-    b.store_block(Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict())
+    b.models.store_block(
+        Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict()
+    )

-    b.store_abci_chain(1, "chain-X")
+    b.models.store_abci_chain(1, "chain-X")
     b.process_block(3, second_votes)

-    assert not b.get_election(second_election.id)["is_concluded"]
+    assert not b.models.get_election(second_election.id)["is_concluded"]
-    assert b.get_latest_abci_chain() == {"height": 1, "chain_id": "chain-X", "is_synced": True}
+    assert b.models.get_latest_abci_chain() == {"height": 1, "chain_id": "chain-X", "is_synced": True}


 @pytest.mark.bdb
 def test_process_block_applies_only_one_migration(b):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])

     public_key = validators[0]["public_key"]
     private_key = validators[0]["private_key"]
@@ -195,13 +201,13 @@ def test_process_block_applies_only_one_migration(b):
         txs += [election]
         total_votes += votes

-    b.store_abci_chain(1, "chain-X")
+    b.models.store_abci_chain(1, "chain-X")
     b.process_block(1, txs)
-    b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)

     b.process_block(1, total_votes)
-    chain = b.get_latest_abci_chain()
+    chain = b.models.get_latest_abci_chain()
     assert chain
     assert chain == {
         "height": 2,
@@ -209,8 +215,8 @@ def test_process_block_applies_only_one_migration(b):
         "chain_id": "chain-X-migrated-at-height-1",
     }

-    assert b.get_election(txs[0].id)["is_concluded"]
+    assert b.models.get_election(txs[0].id)["is_concluded"]
-    assert not b.get_election(txs[1].id)["is_concluded"]
+    assert not b.models.get_election(txs[1].id)["is_concluded"]


 def test_process_block_gracefully_handles_empty_block(b):
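With `Block` now exported from `planetmint.abci.block` and persistence routed through `b.models`, the recurring setup step in these election tests reduces to the sketch below (a fragment, assuming a test body that already has the `b` fixture and a list `txs` of transactions):

from planetmint.abci.block import Block

# Persist the block metadata and the transactions it contains.
b.models.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict())
b.models.store_bulk_transactions(txs)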

@@ -15,14 +15,13 @@ from transactions import ValidatorElection, ChainMigrationElection
 from transactions.common.crypto import generate_key_pair
 from transactions.types.assets.create import Create
 from transactions.types.assets.transfer import Transfer
-from planetmint import App
+from planetmint.abci.core import App
 from planetmint.backend import query
-from planetmint.core import OkCode, CodeTypeError, rollback
+from planetmint.abci.core import OkCode, CodeTypeError
-from planetmint.lib import Block
+from planetmint.abci.block import Block
-from planetmint.tendermint_utils import new_validator_set
+from planetmint.abci.tendermint_utils import new_validator_set
-from planetmint.tendermint_utils import public_key_to_base64
+from planetmint.abci.tendermint_utils import public_key_to_base64
 from planetmint.version import __tm_supported_versions__
-from planetmint.backend.tarantool.const import TARANT_TABLE_GOVERNANCE
 from tests.utils import generate_election, generate_validators

 pytestmark = pytest.mark.bdb
@@ -52,9 +51,9 @@ def test_init_chain_successfully_registers_chain(b):
     request = generate_init_chain_request("chain-XYZ")
     res = App(b).init_chain(request)
     assert res == types.ResponseInitChain()
-    chain = query.get_latest_abci_chain(b.connection)
+    chain = query.get_latest_abci_chain(b.models.connection)
     assert chain == {"height": 0, "chain_id": "chain-XYZ", "is_synced": True}
-    assert query.get_latest_block(b.connection) == {
+    assert query.get_latest_block(b.models.connection) == {
         "height": 0,
         "app_hash": "",
         "transaction_ids": [],
@@ -67,7 +66,7 @@ def test_init_chain_ignores_invalid_init_chain_requests(b):
     res = App(b).init_chain(request)
     assert res == types.ResponseInitChain()

-    validator_set = query.get_validator_set(b.connection)
+    validator_set = query.get_validator_set(b.models.connection)

     invalid_requests = [
         request,  # the same request again
@@ -80,11 +79,11 @@ def test_init_chain_ignores_invalid_init_chain_requests(b):
         with pytest.raises(SystemExit):
             App(b).init_chain(r)
         # assert nothing changed - neither validator set, nor chain ID
-        new_validator_set = query.get_validator_set(b.connection)
+        new_validator_set = query.get_validator_set(b.models.connection)
         assert new_validator_set == validator_set
-        new_chain_id = query.get_latest_abci_chain(b.connection)["chain_id"]
+        new_chain_id = query.get_latest_abci_chain(b.models.connection)["chain_id"]
         assert new_chain_id == "chain-XYZ"
-        assert query.get_latest_block(b.connection) == {
+        assert query.get_latest_block(b.models.connection) == {
             "height": 0,
             "app_hash": "",
             "transaction_ids": [],
@@ -97,10 +96,10 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
     res = App(b).init_chain(request)
     assert res == types.ResponseInitChain()

-    validator_set = query.get_validator_set(b.connection)["validators"]
+    validator_set = query.get_validator_set(b.models.connection)["validators"]

     # simulate a migration
-    query.store_block(b.connection, Block(app_hash="", height=1, transactions=[])._asdict())
+    query.store_block(b.models.connection, Block(app_hash="", height=1, transactions=[])._asdict())
     b.migrate_abci_chain()

     # the same or other mismatching requests are ignored
@@ -113,12 +112,12 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
     for r in invalid_requests:
         with pytest.raises(SystemExit):
             App(b).init_chain(r)
-        assert query.get_latest_abci_chain(b.connection) == {
+        assert query.get_latest_abci_chain(b.models.connection) == {
             "chain_id": "chain-XYZ-migrated-at-height-1",
             "is_synced": False,
             "height": 2,
         }
-        new_validator_set = query.get_validator_set(b.connection)["validators"]
+        new_validator_set = query.get_validator_set(b.models.connection)["validators"]
         assert new_validator_set == validator_set

     # a request with the matching chain ID and matching validator set
@@ -126,12 +125,12 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
     request = generate_init_chain_request("chain-XYZ-migrated-at-height-1", validators)
     res = App(b).init_chain(request)
     assert res == types.ResponseInitChain()
-    assert query.get_latest_abci_chain(b.connection) == {
+    assert query.get_latest_abci_chain(b.models.connection) == {
         "chain_id": "chain-XYZ-migrated-at-height-1",
         "is_synced": True,
         "height": 2,
     }
-    assert query.get_latest_block(b.connection) == {
+    assert query.get_latest_block(b.models.connection) == {
         "height": 2,
         "app_hash": "",
         "transaction_ids": [],
@@ -146,14 +145,14 @@ def test_init_chain_recognizes_new_chain_after_migration(b):
     for r in invalid_requests:
         with pytest.raises(SystemExit):
             App(b).init_chain(r)
-        assert query.get_latest_abci_chain(b.connection) == {
+        assert query.get_latest_abci_chain(b.models.connection) == {
            "chain_id": "chain-XYZ-migrated-at-height-1",
            "is_synced": True,
            "height": 2,
        }
-        new_validator_set = query.get_validator_set(b.connection)["validators"]
+        new_validator_set = query.get_validator_set(b.models.connection)["validators"]
         assert new_validator_set == validator_set
-        assert query.get_latest_block(b.connection) == {
+        assert query.get_latest_block(b.models.connection) == {
             "height": 2,
             "app_hash": "",
             "transaction_ids": [],
@@ -168,28 +167,28 @@ def test_info(b):
     assert res.last_block_height == 0
     assert res.last_block_app_hash == b""

-    b.store_block(Block(app_hash="1", height=1, transactions=[])._asdict())
+    b.models.store_block(Block(app_hash="1", height=1, transactions=[])._asdict())
     res = app.info(r)
     assert res.last_block_height == 1
     assert res.last_block_app_hash == b"1"

     # simulate a migration and assert the height is shifted
-    b.store_abci_chain(2, "chain-XYZ")
+    b.models.store_abci_chain(2, "chain-XYZ")
     app = App(b)
-    b.store_block(Block(app_hash="2", height=2, transactions=[])._asdict())
+    b.models.store_block(Block(app_hash="2", height=2, transactions=[])._asdict())
     res = app.info(r)
     assert res.last_block_height == 0
     assert res.last_block_app_hash == b"2"

-    b.store_block(Block(app_hash="3", height=3, transactions=[])._asdict())
+    b.models.store_block(Block(app_hash="3", height=3, transactions=[])._asdict())
     res = app.info(r)
     assert res.last_block_height == 1
     assert res.last_block_app_hash == b"3"

     # it's always the latest migration that is taken into account
-    b.store_abci_chain(4, "chain-XYZ-new")
+    b.models.store_abci_chain(4, "chain-XYZ-new")
     app = App(b)
-    b.store_block(Block(app_hash="4", height=4, transactions=[])._asdict())
+    b.models.store_block(Block(app_hash="4", height=4, transactions=[])._asdict())
     res = app.info(r)
     assert res.last_block_height == 0
     assert res.last_block_app_hash == b"4"
@@ -236,7 +235,7 @@ def test_deliver_tx__valid_create_updates_db_and_emits_event(b, init_chain_reque

     app.end_block(types.RequestEndBlock(height=99))
     app.commit()
-    assert b.get_transaction(tx.id).id == tx.id
+    assert b.models.get_transaction(tx.id).id == tx.id
     block_event = events.get()
     assert block_event.data["transactions"] == [tx]

@@ -265,7 +264,7 @@ def test_deliver_tx__double_spend_fails(b, init_chain_request):

     app.end_block(types.RequestEndBlock(height=99))
     app.commit()
-    assert b.get_transaction(tx.id).id == tx.id
+    assert b.models.get_transaction(tx.id).id == tx.id
     result = app.deliver_tx(encode_tx_to_bytes(tx))
     assert result.code == CodeTypeError

@@ -312,7 +311,7 @@ def test_end_block_return_validator_updates(b, init_chain_request):

     # generate a block containing a concluded validator election
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])

     new_validator = generate_validators([1])[0]

@@ -323,8 +322,8 @@ def test_end_block_return_validator_updates(b, init_chain_request):
     election, votes = generate_election(
         b, ValidatorElection, public_key, private_key, [{"data": new_validator["election"]}], voter_keys
     )
-    b.store_block(Block(height=1, transactions=[election.id], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[election.id], app_hash="")._asdict())
-    b.store_bulk_transactions([election])
+    b.models.store_bulk_transactions([election])
     b.process_block(1, [election])

     app.block_transactions = votes
@@ -336,7 +335,7 @@ def test_end_block_return_validator_updates(b, init_chain_request):


 def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request):
-    from planetmint import App
+    from planetmint.abci.core import App
     from planetmint.backend import query

     tx = Create.generate(
@@ -353,32 +352,32 @@ def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request):
     app.deliver_tx(encode_tx_to_bytes(tx))
     app.end_block(types.RequestEndBlock(height=99))

-    resp = query.get_pre_commit_state(b.connection)
+    resp = query.get_pre_commit_state(b.models.connection)
     assert resp["height"] == 99
     assert resp["transactions"] == [tx.id]

     app.begin_block(begin_block)
     app.deliver_tx(encode_tx_to_bytes(tx))
     app.end_block(types.RequestEndBlock(height=100))
-    resp = query.get_pre_commit_state(b.connection)
+    resp = query.get_pre_commit_state(b.models.connection)
     assert resp["height"] == 100
     assert resp["transactions"] == [tx.id]

     # simulate a chain migration and assert the height is shifted
-    b.store_abci_chain(100, "new-chain")
+    b.models.store_abci_chain(100, "new-chain")
     app = App(b)
     app.begin_block(begin_block)
     app.deliver_tx(encode_tx_to_bytes(tx))
     app.end_block(types.RequestEndBlock(height=1))
-    resp = query.get_pre_commit_state(b.connection)
+    resp = query.get_pre_commit_state(b.models.connection)
     assert resp["height"] == 101
     assert resp["transactions"] == [tx.id]


-def test_rollback_pre_commit_state_after_crash(b):
+def test_rollback_pre_commit_state_after_crash(b, test_models):
     validators = generate_validators([1] * 4)
-    b.store_validator_set(1, [v["storage"] for v in validators])
+    b.models.store_validator_set(1, [v["storage"] for v in validators])
-    b.store_block(Block(height=1, transactions=[], app_hash="")._asdict())
+    b.models.store_block(Block(height=1, transactions=[], app_hash="")._asdict())

     public_key = validators[0]["public_key"]
     private_key = validators[0]["private_key"]
@@ -399,35 +398,35 @@ def test_rollback_pre_commit_state_after_crash(b):
     total_votes += votes
     txs += [validator_election, *votes]

-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)
-    b.store_abci_chain(2, "new_chain")
+    b.models.store_abci_chain(2, "new_chain")
-    b.store_validator_set(2, [v["storage"] for v in validators])
+    b.models.store_validator_set(2, [v["storage"] for v in validators])
     # TODO change to `4` when upgrading to Tendermint 0.22.4.
-    b.store_validator_set(3, [new_validator["storage"]])
+    b.models.store_validator_set(3, [new_validator["storage"]])
-    b.store_election(migration_election.id, 2, is_concluded=False)
+    b.models.store_election(migration_election.id, 2, is_concluded=False)
-    b.store_election(validator_election.id, 2, is_concluded=True)
+    b.models.store_election(validator_election.id, 2, is_concluded=True)

     # no pre-commit state
-    rollback(b)
+    b.rollback()

     for tx in txs:
-        assert b.get_transaction(tx.id)
+        assert b.models.get_transaction(tx.id)
-    assert b.get_latest_abci_chain()
+    assert b.models.get_latest_abci_chain()
-    assert len(b.get_validator_set()["validators"]) == 1
+    assert len(b.models.get_validator_set()["validators"]) == 1
-    assert b.get_election(migration_election.id)
+    assert b.models.get_election(migration_election.id)
-    assert b.get_election(validator_election.id)
+    assert b.models.get_election(validator_election.id)

-    b.store_pre_commit_state({"height": 2, "transactions": [tx.id for tx in txs]})
+    b.models.store_pre_commit_state({"height": 2, "transactions": [tx.id for tx in txs]})

-    rollback(b)
+    b.rollback()

     for tx in txs:
-        assert not b.get_transaction(tx.id)
+        assert not b.models.get_transaction(tx.id)
-    assert not b.get_latest_abci_chain()
+    assert not b.models.get_latest_abci_chain()
-    assert len(b.get_validator_set()["validators"]) == 4
+    assert len(b.models.get_validator_set()["validators"]) == 4
-    assert len(b.get_validator_set(2)["validators"]) == 4
+    assert len(b.models.get_validator_set(2)["validators"]) == 4
-    assert not b.get_election(migration_election.id)
+    assert not b.models.get_election(migration_election.id)
-    assert not b.get_election(validator_election.id)
+    assert not b.models.get_election(validator_election.id)


 def test_new_validator_set(b):
@@ -452,8 +451,8 @@ def test_new_validator_set(b):

     validators = [node1]
     updates = [node1_new_power, node2]
-    b.store_validator_set(1, validators)
+    b.models.store_validator_set(1, validators)
-    updated_validator_set = new_validator_set(b.get_validators(1), updates)
+    updated_validator_set = new_validator_set(b.models.get_validators(1), updates)

     updated_validators = []
     for u in updates:
@@ -468,42 +467,42 @@ def test_new_validator_set(b):


 def test_info_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, "chain-XYZ", False)
+    b.models.store_abci_chain(0, "chain-XYZ", False)

     with pytest.raises(SystemExit):
         App(b).info(types.RequestInfo())


 def test_check_tx_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, "chain-XYZ", False)
+    b.models.store_abci_chain(0, "chain-XYZ", False)

     with pytest.raises(SystemExit):
         App(b).check_tx("some bytes")


 def test_begin_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, "chain-XYZ", False)
+    b.models.store_abci_chain(0, "chain-XYZ", False)

     with pytest.raises(SystemExit):
         App(b).info(types.RequestBeginBlock())


 def test_deliver_tx_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, "chain-XYZ", False)
+    b.models.store_abci_chain(0, "chain-XYZ", False)

     with pytest.raises(SystemExit):
         App(b).deliver_tx("some bytes")


 def test_end_block_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, "chain-XYZ", False)
+    b.models.store_abci_chain(0, "chain-XYZ", False)

     with pytest.raises(SystemExit):
         App(b).info(types.RequestEndBlock())


 def test_commit_aborts_if_chain_is_not_synced(b):
-    b.store_abci_chain(0, "chain-XYZ", False)
+    b.models.store_abci_chain(0, "chain-XYZ", False)

     with pytest.raises(SystemExit):
         App(b).commit()
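The crash-recovery path changes shape here as well: the module-level `rollback(b)` from `planetmint.core` becomes a `rollback()` method on the node object, while all reads go through `b.models`. A condensed sketch of the behaviour the test asserts, under the same fixture assumptions:

# With a pre-commit state stored above the last committed block,
# rollback() discards the uncommitted writes.
b.models.store_pre_commit_state({"height": 2, "transactions": [tx.id for tx in txs]})
b.rollback()
assert not b.models.get_transaction(txs[0].id)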

@@ -13,40 +13,40 @@ pytestmark = pytest.mark.bdb


 @pytest.fixture
-def txns(b, user_pk, user_sk, user2_pk, user2_sk):
+def txns(b, user_pk, user_sk, user2_pk, user2_sk, test_models):
     txs = [
         Create.generate([user_pk], [([user2_pk], 1)]).sign([user_sk]),
         Create.generate([user2_pk], [([user_pk], 1)]).sign([user2_sk]),
         Create.generate([user_pk], [([user_pk], 1), ([user2_pk], 1)]).sign([user_sk]),
     ]
-    b.store_bulk_transactions(txs)
+    b.models.store_bulk_transactions(txs)
     return txs


-def test_get_outputs_by_public_key(b, user_pk, user2_pk, txns):
+def test_get_outputs_by_public_key(b, user_pk, user2_pk, txns, test_models):
     expected = [TransactionLink(txns[1].id, 0), TransactionLink(txns[2].id, 0)]
-    actual = b.fastquery.get_outputs_by_public_key(user_pk)
+    actual = test_models.fastquery.get_outputs_by_public_key(user_pk)

     _all_txs = set([tx.txid for tx in expected + actual])
     assert len(_all_txs) == 2
-    # assert b.fastquery.get_outputs_by_public_key(user_pk) == [  # OLD VERIFICATION
+    # assert b.models.fastquery.get_outputs_by_public_key(user_pk) == [  # OLD VERIFICATION
     #     TransactionLink(txns[1].id, 0),
     #     TransactionLink(txns[2].id, 0)
     # ]
-    actual_1 = b.fastquery.get_outputs_by_public_key(user2_pk)
+    actual_1 = test_models.fastquery.get_outputs_by_public_key(user2_pk)
     expected_1 = [
         TransactionLink(txns[0].id, 0),
         TransactionLink(txns[2].id, 1),
     ]
     _all_tx_1 = set([tx.txid for tx in actual_1 + expected_1])
     assert len(_all_tx_1) == 2
-    # assert b.fastquery.get_outputs_by_public_key(user2_pk) == [  # OLD VERIFICATION
+    # assert b.models.fastquery.get_outputs_by_public_key(user2_pk) == [  # OLD VERIFICATION
     #     TransactionLink(txns[0].id, 0),
     #     TransactionLink(txns[2].id, 1),
     # ]


-def test_filter_spent_outputs(b, user_pk, user_sk):
+def test_filter_spent_outputs(b, user_pk, user_sk, test_models):
     out = [([user_pk], 1)]
     tx1 = Create.generate([user_pk], out * 2)
     tx1.sign([user_sk])
@@ -57,10 +57,10 @@ def test_filter_spent_outputs(b, user_pk, user_sk):
     tx2.sign([user_sk])

     # tx2 produces a new unspent. inputs[1] remains unspent.
-    b.store_bulk_transactions([tx1, tx2])
+    b.models.store_bulk_transactions([tx1, tx2])

-    outputs = b.fastquery.get_outputs_by_public_key(user_pk)
+    outputs = test_models.fastquery.get_outputs_by_public_key(user_pk)
-    unspents = b.fastquery.filter_spent_outputs(outputs)
+    unspents = test_models.fastquery.filter_spent_outputs(outputs)

     assert set(unsp for unsp in unspents) == {
         inputs[1].fulfills,
@@ -68,7 +68,7 @@ def test_filter_spent_outputs(b, user_pk, user_sk):
     }


-def test_filter_unspent_outputs(b, user_pk, user_sk):
+def test_filter_unspent_outputs(b, user_pk, user_sk, test_models):
     out = [([user_pk], 1)]
     tx1 = Create.generate([user_pk], out * 2)
     tx1.sign([user_sk])
@@ -79,27 +79,27 @@ def test_filter_unspent_outputs(b, user_pk, user_sk):
     tx2.sign([user_sk])

     # tx2 produces a new unspent. input[1] remains unspent.
-    b.store_bulk_transactions([tx1, tx2])
+    b.models.store_bulk_transactions([tx1, tx2])

-    outputs = b.fastquery.get_outputs_by_public_key(user_pk)
+    outputs = test_models.fastquery.get_outputs_by_public_key(user_pk)
-    spents = b.fastquery.filter_unspent_outputs(outputs)
+    spents = test_models.fastquery.filter_unspent_outputs(outputs)

     assert set(sp for sp in spents) == {
         inputs[0].fulfills,
     }


-def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
+def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk, test_models, test_validator):
     from planetmint import backend
     from planetmint.backend.connection import Connection
     from planetmint.backend import query

     tx1 = Create.generate([user_pk], [([user_pk], 3), ([user_pk], 2), ([user_pk], 1)]).sign([user_sk])
-    b.store_bulk_transactions([tx1])
+    b.models.store_bulk_transactions([tx1])

     inputs = tx1.to_inputs()
     tx2 = Transfer.generate([inputs[1]], [([user2_pk], 2)], [tx1.id]).sign([user_sk])
-    assert b.validate_transaction(tx2)
+    assert test_validator.validate_transaction(tx2)

     tx2_dict = tx2.to_dict()
     fulfills = tx2_dict["inputs"][0]["fulfills"]
@@ -107,28 +107,28 @@ def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
         "transaction_id": fulfills["transaction_id"],
         "output_index": fulfills["output_index"],
     }
-    backend.query.store_transactions(b.connection, [tx2_dict])
+    backend.query.store_transactions(test_models.connection, [tx2_dict])

-    outputs = b.get_outputs_filtered(user_pk, spent=False)
+    outputs = test_models.get_outputs_filtered(user_pk, spent=False)
     assert len(outputs) == 2

-    outputs = b.get_outputs_filtered(user2_pk, spent=False)
+    outputs = test_models.get_outputs_filtered(user2_pk, spent=False)
     assert len(outputs) == 1

     # clean the transaction, metdata and asset collection
     connection = Connection()
-    query.delete_transactions(connection, txn_ids=[tx1.id, tx2.id])
+    query.delete_transactions(test_models.connection, txn_ids=[tx1.id, tx2.id])

-    b.store_bulk_transactions([tx1])
+    b.models.store_bulk_transactions([tx1])
     tx2_dict = tx2.to_dict()
     tx2_dict["inputs"][0]["fulfills"] = {
         "output_index": fulfills["output_index"],
         "transaction_id": fulfills["transaction_id"],
     }

-    backend.query.store_transactions(b.connection, [tx2_dict])
+    backend.query.store_transactions(test_models.connection, [tx2_dict])
-    outputs = b.get_outputs_filtered(user_pk, spent=False)
+    outputs = test_models.get_outputs_filtered(user_pk, spent=False)
     assert len(outputs) == 2

-    outputs = b.get_outputs_filtered(user2_pk, spent=False)
+    outputs = test_models.get_outputs_filtered(user2_pk, spent=False)
     assert len(outputs) == 1
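These tests now receive the models layer directly through a `test_models` fixture instead of reaching it via `b`, and output filtering composes the two FastQuery helpers. A sketch, assuming the fixtures above:

outputs = test_models.fastquery.get_outputs_by_public_key(user_pk)
unspents = test_models.fastquery.filter_spent_outputs(outputs)  # keeps the links that are still unspent
spents = test_models.fastquery.filter_unspent_outputs(outputs)  # keeps the links that are already spent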

@@ -13,14 +13,16 @@ from tendermint.abci import types_pb2 as types
 from abci.server import ProtocolHandler
 from abci.utils import read_messages
 from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT, BROADCAST_TX_SYNC

+from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
 from planetmint.version import __tm_supported_versions__
 from io import BytesIO


 @pytest.mark.bdb
 def test_app(b, eventqueue_fixture, init_chain_request):
-    from planetmint import App
+    from planetmint.abci.core import App
-    from planetmint.tendermint_utils import calculate_hash
+    from planetmint.abci.tendermint_utils import calculate_hash
     from transactions.common.crypto import generate_key_pair

     app = App(b, eventqueue_fixture)
@@ -31,16 +33,16 @@ def test_app(b, eventqueue_fixture, init_chain_request):
     assert res
     assert res.info.last_block_app_hash == b""
     assert res.info.last_block_height == 0
-    assert not b.get_latest_block()
+    assert not b.models.get_latest_block()

     p.process("init_chain", types.Request(init_chain=init_chain_request))
-    block0 = b.get_latest_block()
+    block0 = b.models.get_latest_block()
     assert block0
     assert block0["height"] == 0
     assert block0["app_hash"] == ""

     pk = codecs.encode(init_chain_request.validators[0].pub_key.ed25519, "base64").decode().strip("\n")
-    [validator] = b.get_validators(height=1)
+    [validator] = b.models.get_validators(height=1)
     assert validator["public_key"]["value"] == pk
     assert validator["voting_power"] == 10

@@ -78,9 +80,9 @@ def test_app(b, eventqueue_fixture, init_chain_request):
     data = p.process("commit", None)
     res = next(read_messages(BytesIO(data), types.Response))
     assert res.commit.data == new_block_hash.encode("utf-8")
-    assert b.get_transaction(tx.id).id == tx.id
+    assert b.models.get_transaction(tx.id).id == tx.id

-    block0 = b.get_latest_block()
+    block0 = b.models.get_latest_block()
     assert block0
     assert block0["height"] == 1
     assert block0["app_hash"] == new_block_hash
@@ -98,7 +100,7 @@ def test_app(b, eventqueue_fixture, init_chain_request):
     res = next(read_messages(BytesIO(data), types.Response))
     assert res.commit.data == new_block_hash.encode("utf-8")

-    block0 = b.get_latest_block()
+    block0 = b.models.get_latest_block()
     assert block0
     assert block0["height"] == 2

@@ -107,7 +109,7 @@ def test_app(b, eventqueue_fixture, init_chain_request):


 @pytest.mark.abci
-def test_post_transaction_responses(tendermint_ws_url, b):
+def test_post_transaction_responses(tendermint_ws_url, b, test_abci_rpc):
     from transactions.common.crypto import generate_key_pair

     alice = generate_key_pair()
@@ -118,14 +120,18 @@ def test_post_transaction_responses(tendermint_ws_url, b):
         assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}],
     ).sign([alice.private_key])

-    code, message = b.write_transaction(tx, BROADCAST_TX_COMMIT)
+    code, message = test_abci_rpc.write_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT
+    )
     assert code == 202

     tx_transfer = Transfer.generate(tx.to_inputs(), [([bob.public_key], 1)], asset_ids=[tx.id]).sign(
         [alice.private_key]
     )

-    code, message = b.write_transaction(tx_transfer, BROADCAST_TX_COMMIT)
+    code, message = test_abci_rpc.write_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx_transfer, BROADCAST_TX_COMMIT
+    )
     assert code == 202

     carly = generate_key_pair()
@@ -135,6 +141,8 @@ def test_post_transaction_responses(tendermint_ws_url, b):
         asset_ids=[tx.id],
     ).sign([alice.private_key])
     for mode in (BROADCAST_TX_SYNC, BROADCAST_TX_COMMIT):
-        code, message = b.write_transaction(double_spend, mode)
+        code, message = test_abci_rpc.write_transaction(
+            MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, double_spend, mode
+        )
         assert code == 500
         assert message == "Transaction validation failed"
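This file shows the core of the refactor named in the commit message: broadcasting moves off the node object and onto the new ABCI_RPC class, so callers now pass the connection mode and the Tendermint endpoint explicitly. A sketch of the new call shape, assuming `test_abci_rpc` is a fixture wrapping ABCI_RPC and `tx` is a signed transaction:

from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT

code, message = test_abci_rpc.write_transaction(
    MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT
)
assert code == 202  # accepted by Tendermint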
|
@ -10,19 +10,20 @@ import pytest
|
|||||||
from unittest.mock import patch
|
from unittest.mock import patch
|
||||||
from transactions.types.assets.create import Create
|
from transactions.types.assets.create import Create
|
||||||
from transactions.types.assets.transfer import Transfer
|
from transactions.types.assets.transfer import Transfer
|
||||||
from operator import index
|
|
||||||
from hashlib import sha3_256
|
from hashlib import sha3_256
|
||||||
from pymongo import MongoClient
|
|
||||||
from planetmint import backend
|
from planetmint import backend
|
||||||
from transactions.common.transaction_mode_types import (
|
from transactions.common.transaction_mode_types import (
|
||||||
BROADCAST_TX_COMMIT,
|
BROADCAST_TX_COMMIT,
|
||||||
BROADCAST_TX_ASYNC,
|
BROADCAST_TX_ASYNC,
|
||||||
BROADCAST_TX_SYNC,
|
BROADCAST_TX_SYNC,
|
||||||
)
|
)
|
||||||
from planetmint.lib import Block
|
from planetmint.abci.block import Block
|
||||||
from ipld import marshal, multihash
|
from ipld import marshal, multihash
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST
|
||||||
|
from tests.utils import delete_unspent_outputs, get_utxoset_merkle_root, store_unspent_outputs, update_utxoset
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.bdb
|
@pytest.mark.bdb
|
||||||
def test_asset_is_separated_from_transaciton(b):
|
def test_asset_is_separated_from_transaciton(b):
@ -30,7 +31,7 @@ def test_asset_is_separated_from_transaciton(b):
     from transactions.common.crypto import generate_key_pair
     from planetmint.backend.tarantool.connection import TarantoolDBConnection

-    if isinstance(b.connection, TarantoolDBConnection):
+    if isinstance(b.models.connection, TarantoolDBConnection):
         pytest.skip("This specific function is skipped because, assets are stored differently if using Tarantool")

     alice = generate_key_pair()
@ -60,29 +61,29 @@ def test_asset_is_separated_from_transaciton(b):
         [alice.private_key]
     )

-    # with store_bulk_transactions we use `insert_many` where PyMongo
+    # with b.models.store_bulk_transactions we use `insert_many` where PyMongo
     # automatically adds an `_id` field to the tx, therefore we need the
     # deepcopy, for more info see:
     # https://api.mongodb.com/python/current/faq.html#writes-and-ids
     tx_dict = copy.deepcopy(tx.to_dict())

-    b.store_bulk_transactions([tx])
-    assert "asset" not in backend.query.get_transaction_single(b.connection, tx.id)
-    assert backend.query.get_asset(b.connection, tx.id).data == assets[0]
-    assert b.get_transaction(tx.id).to_dict() == tx_dict
+    b.models.store_bulk_transactions([tx])
+    assert "asset" not in backend.query.get_transaction_single(b.models.connection, tx.id)
+    assert backend.query.get_asset(b.models.connection, tx.id).data == assets[0]
+    assert b.models.get_transaction(tx.id).to_dict() == tx_dict


 @pytest.mark.bdb
 def test_get_latest_block(b):
-    from planetmint.lib import Block
+    from planetmint.abci.block import Block

     for i in range(10):
         app_hash = os.urandom(16).hex()
         txn_id = os.urandom(16).hex()
         block = Block(app_hash=app_hash, height=i, transactions=[txn_id])._asdict()
-        b.store_block(block)
+        b.models.store_block(block)

-    block = b.get_latest_block()
+    block = b.models.get_latest_block()
     assert block["height"] == 9
@ -99,9 +100,9 @@ def test_validation_error(b):


 @patch("requests.post")
-def test_write_and_post_transaction(mock_post, b):
+def test_write_and_post_transaction(mock_post, b, test_abci_rpc):
     from transactions.common.crypto import generate_key_pair
-    from planetmint.tendermint_utils import encode_transaction
+    from planetmint.abci.tendermint_utils import encode_transaction

     alice = generate_key_pair()
     tx = (
@ -109,7 +110,9 @@ def test_write_and_post_transaction(mock_post, b):
     )

     tx = b.validate_transaction(tx)
-    b.write_transaction(tx, BROADCAST_TX_ASYNC)
+    test_abci_rpc.write_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_ASYNC
+    )

     assert mock_post.called
     args, kwargs = mock_post.call_args
@ -120,7 +123,7 @@ def test_write_and_post_transaction(mock_post, b):

 @patch("requests.post")
 @pytest.mark.parametrize("mode", [BROADCAST_TX_SYNC, BROADCAST_TX_ASYNC, BROADCAST_TX_COMMIT])
-def test_post_transaction_valid_modes(mock_post, b, mode):
+def test_post_transaction_valid_modes(mock_post, b, mode, test_abci_rpc):
     from transactions.common.crypto import generate_key_pair

     alice = generate_key_pair()
@ -128,13 +131,13 @@ def test_post_transaction_valid_modes(mock_post, b, mode):
         Create.generate([alice.public_key], [([alice.public_key], 1)], assets=None).sign([alice.private_key]).to_dict()
     )
     tx = b.validate_transaction(tx)
-    b.write_transaction(tx, mode)
+    test_abci_rpc.write_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, mode)

     args, kwargs = mock_post.call_args
     assert mode == kwargs["json"]["method"]


-def test_post_transaction_invalid_mode(b):
+def test_post_transaction_invalid_mode(b, test_abci_rpc):
     from transactions.common.crypto import generate_key_pair
     from transactions.common.exceptions import ValidationError
@ -144,18 +147,18 @@ def test_post_transaction_invalid_mode(b):
     )
     tx = b.validate_transaction(tx)
     with pytest.raises(ValidationError):
-        b.write_transaction(tx, "nope")
+        test_abci_rpc.write_transaction(MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, "nope")


 @pytest.mark.bdb
 def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn):
-    b.update_utxoset(signed_create_tx)
+    update_utxoset(b.models.connection, signed_create_tx)
     utxoset = db_conn.get_space("utxos")
     assert utxoset.select().rowcount == 1
     utxo = utxoset.select().data
     assert utxo[0][1] == signed_create_tx.id
     assert utxo[0][2] == 0
-    b.update_utxoset(signed_transfer_tx)
+    update_utxoset(b.models.connection, signed_transfer_tx)
     assert utxoset.select().rowcount == 1
     utxo = utxoset.select().data
     assert utxo[0][1] == signed_transfer_tx.id
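test_update_utxoset shows the third pattern: utxo bookkeeping moves off the node object into plain functions that take the backend connection explicitly. Their call shapes, as exercised across the following hunks (signatures inferred from these call sites; the definitions themselves are added to tests/utils.py at the end of this diff):

    from tests.utils import delete_unspent_outputs, get_utxoset_merkle_root, store_unspent_outputs, update_utxoset

    update_utxoset(b.models.connection, signed_create_tx)      # fold one transaction into the utxo set
    store_unspent_outputs(b.models.connection, *utxos)         # bulk-insert utxo records; no-op when empty
    delete_unspent_outputs(b.models.connection, *spent_utxos)  # drop spent entries; no-op when empty
    root = get_utxoset_merkle_root(b.models.connection)        # hex-encoded merkle root of the whole set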
@ -165,26 +168,26 @@ def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn):

 @pytest.mark.bdb
 def test_store_transaction(mocker, b, signed_create_tx, signed_transfer_tx):
     mocked_store_transaction = mocker.patch("planetmint.backend.query.store_transactions")
-    b.store_bulk_transactions([signed_create_tx])
-    mocked_store_transaction.assert_any_call(b.connection, [signed_create_tx.to_dict()], "transactions")
+    b.models.store_bulk_transactions([signed_create_tx])
+    mocked_store_transaction.assert_any_call(b.models.connection, [signed_create_tx.to_dict()], "transactions")
     mocked_store_transaction.reset_mock()
-    b.store_bulk_transactions([signed_transfer_tx])
+    b.models.store_bulk_transactions([signed_transfer_tx])


 @pytest.mark.bdb
 def test_store_bulk_transaction(mocker, b, signed_create_tx, signed_transfer_tx):
     mocked_store_transactions = mocker.patch("planetmint.backend.query.store_transactions")
-    b.store_bulk_transactions((signed_create_tx,))
-    mocked_store_transactions.assert_any_call(b.connection, [signed_create_tx.to_dict()], "transactions")
+    b.models.store_bulk_transactions((signed_create_tx,))
+    mocked_store_transactions.assert_any_call(b.models.connection, [signed_create_tx.to_dict()], "transactions")
     mocked_store_transactions.reset_mock()
-    b.store_bulk_transactions((signed_transfer_tx,))
+    b.models.store_bulk_transactions((signed_transfer_tx,))


 @pytest.mark.bdb
 def test_delete_zero_unspent_outputs(b, utxoset):
     unspent_outputs, utxo_collection = utxoset
     num_rows_before_operation = utxo_collection.select().rowcount
-    delete_res = b.delete_unspent_outputs()  # noqa: F841
+    delete_res = delete_unspent_outputs(b.models.connection)  # noqa: F841
     num_rows_after_operation = utxo_collection.select().rowcount
     # assert delete_res is None
     assert num_rows_before_operation == num_rows_after_operation
@ -192,12 +195,12 @@ def test_delete_zero_unspent_outputs(b, utxoset):

 @pytest.mark.bdb
 def test_delete_one_unspent_outputs(b, dummy_unspent_outputs):
-    utxo_space = b.connection.get_space("utxos")
+    utxo_space = b.models.connection.get_space("utxos")
     for utxo in dummy_unspent_outputs:
         res = utxo_space.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
         assert res

-    b.delete_unspent_outputs(dummy_unspent_outputs[0])
+    delete_unspent_outputs(b.models.connection, dummy_unspent_outputs[0])
     res1 = utxo_space.select(["a", 1], index="utxo_by_transaction_id_and_output_index").data
     res2 = utxo_space.select(["b", 0], index="utxo_by_transaction_id_and_output_index").data
     assert len(res1) + len(res2) == 2
@ -207,12 +210,12 @@ def test_delete_one_unspent_outputs(b, dummy_unspent_outputs):

 @pytest.mark.bdb
 def test_delete_many_unspent_outputs(b, dummy_unspent_outputs):
-    utxo_space = b.connection.get_space("utxos")
+    utxo_space = b.models.connection.get_space("utxos")
     for utxo in dummy_unspent_outputs:
         res = utxo_space.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
         assert res

-    b.delete_unspent_outputs(*dummy_unspent_outputs[::2])
+    delete_unspent_outputs(b.models.connection, *dummy_unspent_outputs[::2])
     res1 = utxo_space.select(["a", 0], index="utxo_by_transaction_id_and_output_index").data
     res2 = utxo_space.select(["b", 0], index="utxo_by_transaction_id_and_output_index").data
     assert len(res1) + len(res2) == 0
@ -222,9 +225,9 @@ def test_delete_many_unspent_outputs(b, dummy_unspent_outputs):

 @pytest.mark.bdb
 def test_store_zero_unspent_output(b):
-    utxos = b.connection.get_space("utxos")
+    utxos = b.models.connection.get_space("utxos")
     num_rows_before_operation = utxos.select().rowcount
-    res = b.store_unspent_outputs()
+    res = store_unspent_outputs(b.models.connection)
     num_rows_after_operation = utxos.select().rowcount
     assert res is None
     assert num_rows_before_operation == num_rows_after_operation
@ -234,8 +237,8 @@ def test_store_zero_unspent_output(b):
 def test_store_one_unspent_output(b, unspent_output_1, utxo_collection):
     from planetmint.backend.tarantool.connection import TarantoolDBConnection

-    res = b.store_unspent_outputs(unspent_output_1)
-    if not isinstance(b.connection, TarantoolDBConnection):
+    res = store_unspent_outputs(b.models.connection, unspent_output_1)
+    if not isinstance(b.models.connection, TarantoolDBConnection):
         assert res.acknowledged
         assert len(list(res)) == 1
         assert (
@ -248,7 +251,7 @@ def test_store_one_unspent_output(b, unspent_output_1, utxo_collection):
             == 1
         )
     else:
-        utx_space = b.connection.get_space("utxos")
+        utx_space = b.models.connection.get_space("utxos")
         res = utx_space.select(
             [unspent_output_1["transaction_id"], unspent_output_1["output_index"]],
             index="utxo_by_transaction_id_and_output_index",
@ -258,25 +261,25 @@ def test_store_one_unspent_output(b, unspent_output_1, utxo_collection):

 @pytest.mark.bdb
 def test_store_many_unspent_outputs(b, unspent_outputs):
-    b.store_unspent_outputs(*unspent_outputs)
-    utxo_space = b.connection.get_space("utxos")
+    store_unspent_outputs(b.models.connection, *unspent_outputs)
+    utxo_space = b.models.connection.get_space("utxos")
     res = utxo_space.select([unspent_outputs[0]["transaction_id"]], index="utxos_by_transaction_id")
     assert len(res.data) == 3


 def test_get_utxoset_merkle_root_when_no_utxo(b):
-    assert b.get_utxoset_merkle_root() == sha3_256(b"").hexdigest()
+    assert get_utxoset_merkle_root(b.models.connection) == sha3_256(b"").hexdigest()


 @pytest.mark.bdb
 def test_get_utxoset_merkle_root(b, dummy_unspent_outputs):
-    utxo_space = b.connection.get_space("utxos")
+    utxo_space = b.models.connection.get_space("utxos")
     for utxo in dummy_unspent_outputs:
         res = utxo_space.insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
         assert res

     expected_merkle_root = "86d311c03115bf4d287f8449ca5828505432d69b82762d47077b1c00fe426eac"
-    merkle_root = b.get_utxoset_merkle_root()
+    merkle_root = get_utxoset_merkle_root(b.models.connection)
     assert merkle_root == expected_merkle_root
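The two merkle-root tests pin down the contract of get_utxoset_merkle_root: an empty utxo set hashes to sha3_256(b"").hexdigest(), and a populated set yields a deterministic root. A hedged sketch of that computation, with the leaf encoding inferred from the (txid, output_index) docstring added to tests/utils.py at the end of this diff, and assuming merkleroot falls back to hashing b"" for an empty list:

    from hashlib import sha3_256
    from planetmint.abci.tendermint_utils import merkleroot

    def utxoset_merkle_root(utxos):
        # one leaf per utxo, hashed from its transaction id and output index;
        # leaves are sorted so the root is independent of insertion order
        hashes = [
            sha3_256("{}{}".format(u["transaction_id"], u["output_index"]).encode()).digest()
            for u in utxos
        ]
        return merkleroot(sorted(hashes))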
@ -300,20 +303,20 @@ def test_get_spent_transaction_double_spend(b, alice, bob, carol):
         tx.to_inputs() + tx.to_inputs(), [([bob.public_key], 1)], asset_ids=[tx.id]
     ).sign([alice.private_key])

-    b.store_bulk_transactions([tx])
+    b.models.store_bulk_transactions([tx])

     with pytest.raises(DoubleSpend):
         b.validate_transaction(same_input_double_spend)

-    assert b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer])
+    assert b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer])

     with pytest.raises(DoubleSpend):
-        b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer, double_spend])
+        b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer, double_spend])

-    b.store_bulk_transactions([tx_transfer])
+    b.models.store_bulk_transactions([tx_transfer])

     with pytest.raises(DoubleSpend):
-        b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [double_spend])
+        b.models.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [double_spend])


 def test_validation_with_transaction_buffer(b):
@ -340,7 +343,7 @@ def test_validation_with_transaction_buffer(b):

 @pytest.mark.bdb
 def test_migrate_abci_chain_yields_on_genesis(b):
     b.migrate_abci_chain()
-    latest_chain = b.get_latest_abci_chain()
+    latest_chain = b.models.get_latest_abci_chain()
     assert latest_chain is None


@ -361,10 +364,10 @@ def test_migrate_abci_chain_yields_on_genesis(b):
     ],
 )
 def test_migrate_abci_chain_generates_new_chains(b, chain, block_height, expected):
-    b.store_abci_chain(*chain)
-    b.store_block(Block(app_hash="", height=block_height, transactions=[])._asdict())
+    b.models.store_abci_chain(*chain)
+    b.models.store_block(Block(app_hash="", height=block_height, transactions=[])._asdict())
     b.migrate_abci_chain()
-    latest_chain = b.get_latest_abci_chain()
+    latest_chain = b.models.get_latest_abci_chain()
     assert latest_chain == expected


@ -377,13 +380,13 @@ def test_get_spent_key_order(b, user_pk, user_sk, user2_pk, user2_sk):
     bob = generate_key_pair()

     tx1 = Create.generate([user_pk], [([alice.public_key], 3), ([user_pk], 2)]).sign([user_sk])
-    b.store_bulk_transactions([tx1])
+    b.models.store_bulk_transactions([tx1])

     inputs = tx1.to_inputs()
     tx2 = Transfer.generate([inputs[1]], [([user2_pk], 2)], [tx1.id]).sign([user_sk])
     assert b.validate_transaction(tx2)

-    b.store_bulk_transactions([tx2])
+    b.models.store_bulk_transactions([tx2])

     tx3 = Transfer.generate([inputs[1]], [([bob.public_key], 2)], [tx1.id]).sign([user_sk])

@ -11,7 +11,7 @@ from hashlib import sha3_256


 def test_encode_decode_transaction(b):
-    from planetmint.tendermint_utils import encode_transaction, decode_transaction
+    from planetmint.abci.tendermint_utils import encode_transaction, decode_transaction

     asset = {"value": "key"}

@ -25,7 +25,7 @@ def test_encode_decode_transaction(b):


 def test_calculate_hash_no_key(b):
-    from planetmint.tendermint_utils import calculate_hash
+    from planetmint.abci.tendermint_utils import calculate_hash

     # pass an empty list
     assert calculate_hash([]) == ""
@ -33,7 +33,7 @@ def test_calculate_hash_no_key(b):

 # TODO test for the case of an empty list of hashes, and possibly other cases.
 def test_merkleroot():
-    from planetmint.tendermint_utils import merkleroot
+    from planetmint.abci.tendermint_utils import merkleroot

     hashes = [sha3_256(i.encode()).digest() for i in "abc"]
     assert merkleroot(hashes) == ("78c7c394d3158c218916b7ae0ebdea502e0f4e85c08e3b371e3dfd824d389fa3")
@ -49,14 +49,14 @@ SAMPLE_PUBLIC_KEY = {
     reason="ripemd160, the core of pulbic_key64_to_address is no longer supported by hashlib (from python 3.9.13 on)"
 )
 def test_convert_base64_public_key_to_address():
-    from planetmint.tendermint_utils import public_key64_to_address
+    from planetmint.abci.tendermint_utils import public_key64_to_address

     address = public_key64_to_address(SAMPLE_PUBLIC_KEY["pub_key"]["value"])
     assert address == SAMPLE_PUBLIC_KEY["address"]


 def test_public_key_encoding_decoding():
-    from planetmint.tendermint_utils import public_key_from_base64, public_key_to_base64
+    from planetmint.abci.tendermint_utils import public_key_from_base64, public_key_to_base64

     public_key = public_key_from_base64(SAMPLE_PUBLIC_KEY["pub_key"]["value"])
     base64_public_key = public_key_to_base64(public_key)
@ -5,7 +5,6 @@

 import pytest

-from planetmint.version import __tm_supported_versions__
 from transactions.types.assets.create import Create
 from transactions.types.assets.transfer import Transfer

@ -39,13 +38,13 @@ def config(request, monkeypatch):


 def test_bigchain_class_default_initialization(config):
-    from planetmint import Planetmint
-    from planetmint.validation import BaseValidationRules
+    from planetmint.application import Validator
+    from planetmint.application.basevalidationrules import BaseValidationRules

-    planet = Planetmint()
-    assert planet.connection.host == config["database"]["host"]
-    assert planet.connection.port == config["database"]["port"]
-    assert planet.validation == BaseValidationRules
+    validator = Validator()
+    assert validator.models.connection.host == config["database"]["host"]
+    assert validator.models.connection.port == config["database"]["port"]
+    assert validator.validation == BaseValidationRules


 @pytest.mark.bdb
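test_bigchain_class_default_initialization documents the new object model in miniature: the old Planetmint god object gives way to planetmint.application.Validator, which owns the database layer as its models attribute. A sketch of default construction, with attribute names taken from the assertions in the hunk above (the Validator constructor arguments, if any, are not shown in this diff):

    from planetmint.application import Validator
    from planetmint.application.basevalidationrules import BaseValidationRules

    validator = Validator()
    host = validator.models.connection.host   # backend connection hangs off the models facade
    port = validator.models.connection.port
    assert validator.validation == BaseValidationRules  # pluggable rule set stays on the validator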
@ -55,7 +54,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
         [([carol.public_key], 8)],
     ).sign([carol.private_key])
     assert b.validate_transaction(tx_1)
-    b.store_bulk_transactions([tx_1])
+    b.models.store_bulk_transactions([tx_1])

     tx_2 = Transfer.generate(
         tx_1.to_inputs(),
@ -63,7 +62,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
         asset_ids=[tx_1.id],
     ).sign([carol.private_key])
     assert b.validate_transaction(tx_2)
-    b.store_bulk_transactions([tx_2])
+    b.models.store_bulk_transactions([tx_2])

     tx_3 = Transfer.generate(
         tx_2.to_inputs()[2:3],
@ -71,7 +70,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
         asset_ids=[tx_1.id],
     ).sign([carol.private_key])
     assert b.validate_transaction(tx_3)
-    b.store_bulk_transactions([tx_3])
+    b.models.store_bulk_transactions([tx_3])

     tx_4 = Transfer.generate(
         tx_2.to_inputs()[1:2] + tx_3.to_inputs()[0:1],
@ -79,7 +78,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
         asset_ids=[tx_1.id],
     ).sign([alice.private_key])
     assert b.validate_transaction(tx_4)
-    b.store_bulk_transactions([tx_4])
+    b.models.store_bulk_transactions([tx_4])

     tx_5 = Transfer.generate(
         tx_2.to_inputs()[0:1],
@ -88,8 +87,8 @@ def test_get_spent_issue_1271(b, alice, bob, carol):
     ).sign([bob.private_key])
     assert b.validate_transaction(tx_5)

-    b.store_bulk_transactions([tx_5])
-    assert b.get_spent(tx_2.id, 0) == tx_5.to_dict()
-    assert not b.get_spent(tx_5.id, 0)
-    assert b.get_outputs_filtered(alice.public_key)
-    assert b.get_outputs_filtered(alice.public_key, spent=False)
+    b.models.store_bulk_transactions([tx_5])
+    assert b.models.get_spent(tx_2.id, 0) == tx_5.to_dict()
+    assert not b.models.get_spent(tx_5.id, 0)
+    assert b.models.get_outputs_filtered(alice.public_key)
+    assert b.models.get_outputs_filtered(alice.public_key, spent=False)
@ -5,10 +5,11 @@

 import pytest

+from planetmint.ipc.events import EventTypes, Event, POISON_PILL
+from planetmint.ipc.exchange import Exchange


 def test_event_handler():
-    from planetmint.events import EventTypes, Event, Exchange
-
     # create and event
     event_data = {"msg": "some data"}
     event = Event(EventTypes.BLOCK_VALID, event_data)
@ -43,8 +44,6 @@ def test_event_handler():


 def test_event_handler_raises_when_called_after_start():
-    from planetmint.events import Exchange, POISON_PILL
-
     exchange = Exchange()
     publisher_queue = exchange.get_publisher_queue()
     publisher_queue.put(POISON_PILL)
@ -55,8 +54,6 @@ def test_event_handler_raises_when_called_after_start():


 def test_exchange_stops_with_poison_pill():
-    from planetmint.events import EventTypes, Event, Exchange, POISON_PILL
-
     # create and event
     event_data = {"msg": "some data"}
     event = Event(EventTypes.BLOCK_VALID, event_data)
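The event-plumbing hunks swap function-local imports of planetmint.events for module-level imports from the new planetmint.ipc package; the API itself is unchanged. A condensed sketch of what these tests exercise:

    from planetmint.ipc.events import EventTypes, Event, POISON_PILL
    from planetmint.ipc.exchange import Exchange

    exchange = Exchange()
    queue = exchange.get_publisher_queue()
    queue.put(Event(EventTypes.BLOCK_VALID, {"msg": "some data"}))
    queue.put(POISON_PILL)  # sentinel that tells the exchange loop to stop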
@ -25,7 +25,7 @@ def generate_create_and_transfer(keypair=None):

 def test_validation_worker_process_multiple_transactions(b):
     import multiprocessing as mp
-    from planetmint.parallel_validation import ValidationWorker, RESET, EXIT
+    from planetmint.abci.parallel_validation import ValidationWorker, RESET, EXIT

     keypair = generate_key_pair()
     create_tx, transfer_tx = generate_create_and_transfer(keypair)
@ -73,7 +73,7 @@ def test_parallel_validator_routes_transactions_correctly(b, monkeypatch):
     from collections import defaultdict
     import multiprocessing as mp
     from json import dumps
-    from planetmint.parallel_validation import ParallelValidator
+    from planetmint.abci.parallel_validation import ParallelValidator

     # We want to make sure that the load is distributed across all workers.
     # Since introspection on an object running on a different process is
@ -87,7 +87,7 @@ def test_parallel_validator_routes_transactions_correctly(b, monkeypatch):
         validation_called_by.put((os.getpid(), dict_transaction["id"]))
         return dict_transaction

-    monkeypatch.setattr("planetmint.parallel_validation.ValidationWorker.validate", validate)
+    monkeypatch.setattr("planetmint.abci.parallel_validation.ValidationWorker.validate", validate)

     # Transaction routing uses the `id` of the transaction. This test strips
     # down a transaction to just its `id`. We have two workers, so even ids
@ -23,7 +23,7 @@ def txlist(b, user_pk, user2_pk, user_sk, user2_sk):
     # Create a TRANSFER transactions
     transfer1 = Transfer.generate(create1.to_inputs(), [([user_pk], 8)], [create1.id]).sign([user2_sk])

-    b.store_bulk_transactions([create1, create2, transfer1])
+    b.models.store_bulk_transactions([create1, create2, transfer1])

     return type(
         "",
@ -37,11 +37,11 @@ def txlist(b, user_pk, user2_pk, user_sk, user2_sk):

 @pytest.mark.bdb
 def test_get_txlist_by_asset(b, txlist):
-    res = b.get_transactions_filtered([txlist.create1.id])
+    res = b.models.get_transactions_filtered([txlist.create1.id])
     assert sorted(set(tx.id for tx in res)) == sorted(set([txlist.transfer1.id, txlist.create1.id]))


 @pytest.mark.bdb
 def test_get_txlist_by_operation(b, txlist):
-    res = b.get_transactions_filtered([txlist.create1.id], operation="CREATE")
+    res = b.models.get_transactions_filtered([txlist.create1.id], operation="CREATE")
     assert set(tx.id for tx in res) == {txlist.create1.id}
@ -118,7 +118,7 @@ def test_pool_raises_empty_exception_when_timeout(mock_queue):

 @patch("multiprocessing.Process")
 def test_process_group_instantiates_and_start_processes(mock_process):
-    from planetmint.utils import ProcessGroup
+    from tests.utils import ProcessGroup

     def noop():
         pass
@ -25,15 +25,15 @@ def fixed_seed_election(b_mock, node_key, new_validator):

 @pytest.fixture
 def concluded_election(b, ongoing_validator_election, ed25519_node_keys):
-    query.store_election(b.connection, ongoing_validator_election.id, 2, is_concluded=True)
+    query.store_election(b.models.connection, ongoing_validator_election.id, 2, is_concluded=True)
     return ongoing_validator_election


 @pytest.fixture
 def inconclusive_election(b, ongoing_validator_election, new_validator):
-    validators = b.get_validators(height=1)
+    validators = b.models.get_validators(height=1)
     validators[0]["voting_power"] = 15
     validator_update = {"validators": validators, "height": 2, "election_id": "some_other_election"}

-    query.store_validator_set(b.connection, validator_update)
+    query.store_validator_set(b.models.connection, validator_update)
     return ongoing_validator_election
@ -6,7 +6,8 @@
 import pytest
 import codecs

-from planetmint.tendermint_utils import public_key_to_base64
+from planetmint.abci.rpc import MODE_LIST, MODE_COMMIT
+from planetmint.abci.tendermint_utils import public_key_to_base64

 from transactions.types.elections.validator_election import ValidatorElection
 from transactions.common.exceptions import AmountError
@ -15,6 +16,7 @@ from transactions.common.exceptions import ValidationError
 from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
 from transactions.types.elections.vote import Vote
 from transactions.types.elections.validator_utils import election_id_to_public_key

 from tests.utils import generate_block, gen_vote

 pytestmark = [pytest.mark.execute]
@ -22,7 +24,7 @@ pytestmark = [pytest.mark.execute]

 @pytest.mark.bdb
 def test_upsert_validator_valid_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
-    b_mock.store_bulk_transactions([valid_upsert_validator_election])
+    b_mock.models.store_bulk_transactions([valid_upsert_validator_election])

     input0 = valid_upsert_validator_election.to_inputs()[0]
     votes = valid_upsert_validator_election.outputs[0].amount
@ -39,7 +41,7 @@ def test_upsert_validator_valid_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):

 @pytest.mark.bdb
 def test_upsert_validator_valid_non_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
-    b_mock.store_bulk_transactions([valid_upsert_validator_election])
+    b_mock.models.store_bulk_transactions([valid_upsert_validator_election])

     input0 = valid_upsert_validator_election.to_inputs()[0]
     votes = valid_upsert_validator_election.outputs[0].amount
@ -59,7 +61,7 @@ def test_upsert_validator_valid_non_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
 def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     alice = generate_key_pair()

-    b_mock.store_bulk_transactions([valid_upsert_validator_election])
+    b_mock.models.store_bulk_transactions([valid_upsert_validator_election])

     input0 = valid_upsert_validator_election.to_inputs()[0]
     votes = valid_upsert_validator_election.outputs[0].amount
@ -74,7 +76,7 @@ def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):

     assert b_mock.validate_transaction(delegate_vote)

-    b_mock.store_bulk_transactions([delegate_vote])
+    b_mock.models.store_bulk_transactions([delegate_vote])
     election_pub_key = election_id_to_public_key(valid_upsert_validator_election.id)

     alice_votes = delegate_vote.to_inputs()[0]
@ -92,7 +94,7 @@ def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):

 @pytest.mark.bdb
 def test_upsert_validator_invalid_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
-    b_mock.store_bulk_transactions([valid_upsert_validator_election])
+    b_mock.models.store_bulk_transactions([valid_upsert_validator_election])

     input0 = valid_upsert_validator_election.to_inputs()[0]
     votes = valid_upsert_validator_election.outputs[0].amount
@ -112,7 +114,7 @@ def test_upsert_validator_invalid_election_vote(b_mock, valid_upsert_validator_election, ed25519_node_keys):
 @pytest.mark.bdb
 def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     alice = generate_key_pair()
-    b_mock.store_bulk_transactions([valid_upsert_validator_election])
+    b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
     assert b_mock.get_commited_votes(valid_upsert_validator_election) == 0

     input0 = valid_upsert_validator_election.to_inputs()[0]
@ -126,7 +128,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, ed25519_node_keys):
         [([alice.public_key], 4), ([key0.public_key], votes - 4)],
         election_ids=[valid_upsert_validator_election.id],
     ).sign([key0.private_key])
-    b_mock.store_bulk_transactions([delegate_vote])
+    b_mock.models.store_bulk_transactions([delegate_vote])
     assert b_mock.get_commited_votes(valid_upsert_validator_election) == 0

     election_public_key = election_id_to_public_key(valid_upsert_validator_election.id)
@ -140,7 +142,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     ).sign([alice.private_key])

     assert b_mock.validate_transaction(alice_casted_vote)
-    b_mock.store_bulk_transactions([alice_casted_vote])
+    b_mock.models.store_bulk_transactions([alice_casted_vote])

     # Check if the delegated vote is count as valid vote
     assert b_mock.get_commited_votes(valid_upsert_validator_election) == 2
@ -150,7 +152,7 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     ).sign([key0.private_key])

     assert b_mock.validate_transaction(key0_casted_vote)
-    b_mock.store_bulk_transactions([key0_casted_vote])
+    b_mock.models.store_bulk_transactions([key0_casted_vote])
     assert b_mock.get_commited_votes(valid_upsert_validator_election) == votes - 2


@ -164,7 +166,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     assert b_mock.validate_transaction(tx_vote0)

     # store election
-    b_mock.store_bulk_transactions([valid_upsert_validator_election])
+    b_mock.models.store_bulk_transactions([valid_upsert_validator_election])
     # cannot conclude election as not votes exist
     assert not b_mock.has_election_concluded(valid_upsert_validator_election)

@ -172,7 +174,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     assert b_mock.validate_transaction(tx_vote0)
     assert not b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote0])

-    b_mock.store_bulk_transactions([tx_vote0])
+    b_mock.models.store_bulk_transactions([tx_vote0])
     assert not b_mock.has_election_concluded(valid_upsert_validator_election)

     # Node 1: cast vote
@ -190,7 +192,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     # 2/3 is achieved in the same block so the election can be.has_concludedd
     assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote1, tx_vote2])

-    b_mock.store_bulk_transactions([tx_vote1])
+    b_mock.models.store_bulk_transactions([tx_vote1])
     assert not b_mock.has_election_concluded(valid_upsert_validator_election)

     assert b_mock.validate_transaction(tx_vote2)
@ -200,7 +202,7 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote2])
     assert b_mock.has_election_concluded(valid_upsert_validator_election, [tx_vote2, tx_vote3])

-    b_mock.store_bulk_transactions([tx_vote2])
+    b_mock.models.store_bulk_transactions([tx_vote2])

     # Once the blockchain records >2/3 of the votes the election is assumed to be.has_concludedd
     # so any invocation of `.has_concluded` for that election should return False
@ -213,18 +215,18 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed25519_node_keys):


 @pytest.mark.abci
-def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys):
-    if b.get_latest_block()["height"] == 0:
-        generate_block(b)
+def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys, test_abci_rpc):
+    if b.models.get_latest_block()["height"] == 0:
+        generate_block(b, test_abci_rpc)

     (node_pub, _) = list(node_keys.items())[0]

     validators = [{"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10}]

-    latest_block = b.get_latest_block()
+    latest_block = b.models.get_latest_block()
     # reset the validator set
-    b.store_validator_set(latest_block["height"], validators)
-    generate_block(b)
+    b.models.store_validator_set(latest_block["height"], validators)
+    generate_block(b, test_abci_rpc)

     power = 1
     public_key = "9B3119650DF82B9A5D8A12E38953EA47475C09F0C48A4E6A0ECE182944B24403"
@ -243,22 +245,26 @@ def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys):
     election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign(
         [node_key.private_key]
     )
-    code, message = b.write_transaction(election, BROADCAST_TX_COMMIT)
+    code, message = test_abci_rpc.write_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT
+    )
     assert code == 202
-    assert b.get_transaction(election.id)
+    assert b.models.get_transaction(election.id)

     tx_vote = gen_vote(election, 0, ed25519_node_keys)
     assert b.validate_transaction(tx_vote)
-    code, message = b.write_transaction(tx_vote, BROADCAST_TX_COMMIT)
+    code, message = test_abci_rpc.write_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx_vote, BROADCAST_TX_COMMIT
+    )
    assert code == 202

-    resp = b.get_validators()
+    resp = b.models.get_validators()
     validator_pub_keys = []
     for v in resp:
         validator_pub_keys.append(v["public_key"]["value"])

     assert public_key64 in validator_pub_keys
-    new_validator_set = b.get_validators()
+    new_validator_set = b.models.get_validators()
     validator_pub_keys = []
     for v in new_validator_set:
         validator_pub_keys.append(v["public_key"]["value"])
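Read end to end, test_upsert_validator is the whole validator-upsert lifecycle under the new APIs: writes ride the ABCI_RPC helper, reads ride the models facade. Condensed, with every call taken from the hunk above:

    election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign(
        [node_key.private_key]
    )
    code, _ = test_abci_rpc.write_transaction(
        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, election, BROADCAST_TX_COMMIT
    )
    assert code == 202                   # election accepted by Tendermint
    tx_vote = gen_vote(election, 0, ed25519_node_keys)
    code, _ = test_abci_rpc.write_transaction(
        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx_vote, BROADCAST_TX_COMMIT
    )
    assert code == 202                   # vote accepted
    current_set = b.models.get_validators()  # updated set read via the facade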
@ -285,7 +291,7 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
     voters = b.get_recipients_list()
     election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key])
     # store election
-    b.store_bulk_transactions([election])
+    b.models.store_bulk_transactions([election])

     tx_vote0 = gen_vote(election, 0, ed25519_node_keys)
     tx_vote1 = gen_vote(election, 1, ed25519_node_keys)
@ -317,13 +323,13 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
     voters = b.get_recipients_list()
     election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key])
     # store election
-    b.store_bulk_transactions([election])
+    b.models.store_bulk_transactions([election])

     tx_vote0 = gen_vote(election, 0, ed25519_node_keys)
     tx_vote1 = gen_vote(election, 1, ed25519_node_keys)
     tx_vote2 = gen_vote(election, 2, ed25519_node_keys)

-    b.store_bulk_transactions([tx_vote0, tx_vote1])
+    b.models.store_bulk_transactions([tx_vote0, tx_vote1])

     update = b.process_block(9, [tx_vote2])
     assert len(update) == 1
@ -331,7 +337,7 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
     assert update_public_key == public_key64

     # assert that the public key is not a part of the current validator set
-    for v in b.get_validators(10):
+    for v in b.models.get_validators(10):
         assert not v["public_key"]["value"] == public_key64


@ -344,4 +350,4 @@ def reset_validator_set(b, node_keys, height):
     validators = []
     for node_pub, _ in node_keys.items():
         validators.append({"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10})
-    b.store_validator_set(height, validators)
+    b.models.store_validator_set(height, validators)
@ -7,7 +7,7 @@ import pytest

 from argparse import Namespace
 from unittest.mock import patch
-from planetmint.tendermint_utils import public_key_to_base64
+from planetmint.abci.tendermint_utils import public_key_to_base64
 from transactions.types.elections.validator_election import ValidatorElection
 from transactions.common.exceptions import (
     DuplicateTransaction,
@ -17,6 +17,7 @@ from transactions.common.exceptions import (
     InvalidPowerChange,
 )

 pytestmark = pytest.mark.bdb

@ -82,7 +83,7 @@ def test_upsert_validator_invalid_election(b_mock, new_validator, node_key, fixed_seed_election):
     with pytest.raises(DuplicateTransaction):
         b_mock.validate_election(fixed_seed_election, [duplicate_election])

-    b_mock.store_bulk_transactions([fixed_seed_election])
+    b_mock.models.store_bulk_transactions([fixed_seed_election])

     with pytest.raises(DuplicateTransaction):
         b_mock.validate_election(duplicate_election)
@ -166,8 +167,8 @@ def test_get_status_inconclusive(b, inconclusive_election, new_validator):
         },
     ]

-    b.get_validators = custom_mock_get_validators
-    b.get_latest_block = set_block_height_to_3
+    b.models.get_validators = custom_mock_get_validators
+    b.models.get_latest_block = set_block_height_to_3
     status = ValidatorElection.INCONCLUSIVE
     resp = b.get_election_status(inconclusive_election)
     assert resp == status

tests/utils.py (110 lines changed)
@ -2,12 +2,16 @@
 # Planetmint and IPDB software contributors.
 # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
 # Code is Apache-2.0 and docs are CC-BY-4.0
+import multiprocessing
+from hashlib import sha3_256

 import base58
 import base64
 import random

 from functools import singledispatch

+from planetmint import backend
 from planetmint.backend.localmongodb.connection import LocalMongoDBConnection
 from planetmint.backend.tarantool.connection import TarantoolDBConnection
 from planetmint.backend.schema import TABLES
@ -16,7 +20,8 @@ from transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT
 from transactions.types.assets.create import Create
 from transactions.types.elections.vote import Vote
 from transactions.types.elections.validator_utils import election_id_to_public_key
-from planetmint.tendermint_utils import key_to_base64
+from planetmint.abci.tendermint_utils import key_to_base64, merkleroot
+from planetmint.abci.rpc import MODE_COMMIT, MODE_LIST


 @singledispatch
@ -36,7 +41,7 @@ def flush_tarantool_db(connection, dbname):
     connection.connect().call("init")


-def generate_block(planet):
+def generate_block(planet, test_abci_rpc):
     from transactions.common.crypto import generate_key_pair

     alice = generate_key_pair()
@ -44,7 +49,9 @@ def generate_block(planet):
         [alice.private_key]
     )

-    code, message = planet.write_transaction(tx, BROADCAST_TX_COMMIT)
+    code, message = test_abci_rpc.write_transaction(
+        MODE_LIST, test_abci_rpc.tendermint_rpc_endpoint, MODE_COMMIT, tx, BROADCAST_TX_COMMIT
+    )
     assert code == 202
|
||||||
|
|
||||||
|
|
||||||
@ -118,3 +125,100 @@ def generate_election(b, cls, public_key, private_key, asset_data, voter_keys):
|
|||||||
v.sign([key])
|
v.sign([key])
|
||||||
|
|
||||||
return election, votes
|
return election, votes
|
||||||
|
|
||||||
|
|
||||||
|
def delete_unspent_outputs(connection, *unspent_outputs):
|
||||||
|
"""Deletes the given ``unspent_outputs`` (utxos).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
*unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
|
||||||
|
length tuple or list of unspent outputs.
|
||||||
|
"""
|
||||||
|
if unspent_outputs:
|
||||||
|
return backend.query.delete_unspent_outputs(connection, *unspent_outputs)
|
||||||
|
|
||||||
|
|
||||||
|
def get_utxoset_merkle_root(connection):
|
||||||
|
"""Returns the merkle root of the utxoset. This implies that
|
||||||
|
the utxoset is first put into a merkle tree.
|
||||||
|
|
||||||
|
For now, the merkle tree and its root will be computed each
|
||||||
|
time. This obviously is not efficient and a better approach
|
||||||
|
that limits the repetition of the same computation when
|
||||||
|
unnecesary should be sought. For instance, future optimizations
|
||||||
|
could simply re-compute the branches of the tree that were
|
||||||
|
affected by a change.
|
||||||
|
|
||||||
|
The transaction hash (id) and output index should be sufficient
|
||||||
|
to uniquely identify a utxo, and consequently only that
|
||||||
|
information from a utxo record is needed to compute the merkle
|
||||||
|
root. Hence, each node of the merkle tree should contain the
|
||||||
|
tuple (txid, output_index).
|
||||||
|
|
||||||
|
.. important:: The leaves of the tree will need to be sorted in
|
||||||
|
some kind of lexicographical order.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Merkle root in hexadecimal form.
|
||||||
|
"""
|
||||||
|
utxoset = backend.query.get_unspent_outputs(connection)
|
||||||
|
# TODO Once ready, use the already pre-computed utxo_hash field.
|
||||||
|
# See common/transactions.py for details.
|
||||||
|
hashes = [
|
||||||
|
sha3_256("{}{}".format(utxo["transaction_id"], utxo["output_index"]).encode()).digest() for utxo in utxoset
|
||||||
|
]
|
||||||
|
# TODO Notice the sorted call!
|
||||||
|
return merkleroot(sorted(hashes))
|
||||||
|
|
||||||
|
|
||||||
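Note: the docstring above fixes the leaf format, so the root can be reproduced independently: each leaf is the SHA3-256 digest of the transaction id concatenated with the output index, and the leaves are sorted before the tree is built. A self-contained worked example (the pairwise `merkleroot` below is an illustrative stand-in for `planetmint.abci.tendermint_utils.merkleroot`, whose exact tree rules this diff does not show):

    from hashlib import sha3_256

    def merkleroot(hashes):
        # Illustrative pairwise reduction, assuming the last hash is
        # duplicated on odd levels; the real helper may differ in detail.
        if not hashes:
            return sha3_256(b"").hexdigest()
        while len(hashes) > 1:
            if len(hashes) % 2:
                hashes.append(hashes[-1])
            hashes = [sha3_256(left + right).digest() for left, right in zip(hashes[::2], hashes[1::2])]
        return hashes[0].hex()

    utxoset = [
        {"transaction_id": "ab" * 32, "output_index": 0},
        {"transaction_id": "cd" * 32, "output_index": 1},
    ]
    leaves = [
        sha3_256("{}{}".format(u["transaction_id"], u["output_index"]).encode()).digest() for u in utxoset
    ]
    # sorted() makes the root independent of the order in which utxos
    # come back from the backend query.
    print(merkleroot(sorted(leaves)))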
+def store_unspent_outputs(connection, *unspent_outputs):
+    """Store the given ``unspent_outputs`` (utxos).
+
+    Args:
+        *unspent_outputs (:obj:`tuple` of :obj:`dict`): Variable
+            length tuple or list of unspent outputs.
+    """
+    if unspent_outputs:
+        return backend.query.store_unspent_outputs(connection, *unspent_outputs)
+
+
+def update_utxoset(connection, transaction):
+    """
+    Update the UTXO set given ``transaction``. That is, remove
+    the outputs that the given ``transaction`` spends, and add the
+    outputs that the given ``transaction`` creates.
+
+    Args:
+        transaction (:obj:`~planetmint.models.Transaction`): A new
+            transaction incoming into the system for which the UTXO
+            set needs to be updated.
+    """
+    spent_outputs = [spent_output for spent_output in transaction.spent_outputs]
+    if spent_outputs:
+        delete_unspent_outputs(connection, *spent_outputs)
+    store_unspent_outputs(connection, *[utxo._asdict() for utxo in transaction.unspent_outputs])
+
+
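Note: `update_utxoset` is the entire UTXO bookkeeping rule in two steps: delete what the transaction spends, store what it creates. A hedged usage sketch (the `connection` and the two transaction objects are illustrative fixtures; `spent_outputs` and `unspent_outputs` come from the transaction model used above):

    def test_utxoset_follows_a_spend(connection, create_tx, transfer_tx):
        # After a CREATE and then a TRANSFER that spends it, only the
        # TRANSFER's outputs should remain unspent.
        update_utxoset(connection, create_tx)
        update_utxoset(connection, transfer_tx)
        utxos = list(backend.query.get_unspent_outputs(connection))
        assert {u["transaction_id"] for u in utxos} == {transfer_tx.id}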
+class ProcessGroup(object):
+    def __init__(self, concurrency=None, group=None, target=None, name=None, args=None, kwargs=None, daemon=None):
+        self.concurrency = concurrency or multiprocessing.cpu_count()
+        self.group = group
+        self.target = target
+        self.name = name
+        self.args = args or ()
+        self.kwargs = kwargs or {}
+        self.daemon = daemon
+        self.processes = []
+
+    def start(self):
+        for i in range(self.concurrency):
+            proc = multiprocessing.Process(
+                group=self.group,
+                target=self.target,
+                name=self.name,
+                args=self.args,
+                kwargs=self.kwargs,
+                daemon=self.daemon,
+            )
+            proc.start()
+            self.processes.append(proc)
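Note: `ProcessGroup` moved into `tests/utils.py` because only the tests use it; it fans a single target callable out over N worker processes. A minimal usage sketch (the `worker` function is illustrative):

    import multiprocessing

    def worker(queue):
        # Each worker reports its own pid so the caller can count processes.
        queue.put(multiprocessing.current_process().pid)

    if __name__ == "__main__":
        q = multiprocessing.Queue()
        group = ProcessGroup(concurrency=4, target=worker, args=(q,), daemon=True)
        group.start()
        pids = {q.get() for _ in range(4)}
        assert len(pids) == 4  # four distinct worker processes ran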
@@ -9,10 +9,10 @@ import pytest
 @pytest.fixture
 def app(request):
     from planetmint.web import server
-    from planetmint.lib import Planetmint
+    from planetmint.application import Validator

     if request.config.getoption("--database-backend") == "localmongodb":
-        app = server.create_app(debug=True, planetmint_factory=Planetmint)
+        app = server.create_app(debug=True, planetmint_factory=Validator)
     else:
         app = server.create_app(debug=True)

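Note: the web server is now handed the `Validator` class as its node factory instead of the old monolithic `Planetmint` class. A hedged sketch of what the fixture builds (only `create_app(debug=..., planetmint_factory=...)` is shown in this diff; the test-client call is standard Flask, and the endpoint string is borrowed from the outputs tests later in this commit):

    from planetmint.web import server
    from planetmint.application import Validator

    # Assumption: create_app instantiates the factory and wires it into
    # the request handlers, as the fixture above implies.
    app = server.create_app(debug=True, planetmint_factory=Validator)
    client = app.test_client()  # Flask's built-in test client
    resp = client.get("/api/v1/outputs/?public_key=abc")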
@@ -8,6 +8,7 @@ import pytest
 from transactions.types.assets.create import Create
 from ipld import marshal, multihash


 ASSETS_ENDPOINT = "/api/v1/assets/"


@@ -17,7 +18,7 @@ def test_get_assets_tendermint(client, b, alice):
     assets = [{"data": multihash(marshal({"msg": "abc"}))}]
     tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=assets).sign([alice.private_key])

-    b.store_bulk_transactions([tx])
+    b.models.store_bulk_transactions([tx])

     res = client.get(ASSETS_ENDPOINT + assets[0]["data"])
     assert res.status_code == 200
@@ -32,7 +33,7 @@ def test_get_assets_tendermint_limit(client, b, alice, bob):
     tx_1 = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=assets).sign([alice.private_key])
     tx_2 = Create.generate([bob.public_key], [([bob.public_key], 1)], assets=assets).sign([bob.private_key])

-    b.store_bulk_transactions([tx_1, tx_2])
+    b.models.store_bulk_transactions([tx_1, tx_2])

     res = client.get(ASSETS_ENDPOINT + assets[0]["data"] + "?limit=1")
     assert res.status_code == 200
@@ -6,9 +6,10 @@
 import pytest

 from transactions.types.assets.create import Create
-from planetmint.lib import Block
+from planetmint.abci.block import Block
 from ipld import marshal, multihash


 BLOCKS_ENDPOINT = "/api/v1/blocks/"


@@ -22,15 +23,15 @@ def test_get_block_endpoint(b, client, alice):
     )
     tx = tx.sign([alice.private_key])

-    # with store_bulk_transactions we use `insert_many` where PyMongo
+    # with b.models.store_bulk_transactions we use `insert_many` where PyMongo
     # automatically adds an `_id` field to the tx, therefore we need the
     # deepcopy, for more info see:
     # https://api.mongodb.com/python/current/faq.html#writes-and-ids
     tx_dict = copy.deepcopy(tx.to_dict())
-    b.store_bulk_transactions([tx])
+    b.models.store_bulk_transactions([tx])

     block = Block(app_hash="random_utxo", height=31, transactions=[tx.id])
-    b.store_block(block._asdict())
+    b.models.store_block(block._asdict())

     res = client.get(BLOCKS_ENDPOINT + str(block.height))
     expected_response = {"app_hash": "random_utxo", "height": block.height, "transaction_ids": [tx.id]}
@@ -54,10 +55,10 @@ def test_get_block_containing_transaction(b, client, alice):
         [alice.public_key], [([alice.public_key], 1)], assets=[{"data": multihash(marshal({"cycle": "hero"}))}]
     )
     tx = tx.sign([alice.private_key])
-    b.store_bulk_transactions([tx])
+    b.models.store_bulk_transactions([tx])

     block = Block(app_hash="random_utxo", height=13, transactions=[tx.id])
-    b.store_block(block._asdict())
+    b.models.store_block(block._asdict())
     res = client.get("{}?transaction_id={}".format(BLOCKS_ENDPOINT, tx.id))
     expected_height = block.height
     assert res.json["height"] == expected_height
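Note: `Block` moved from `planetmint.lib` to `planetmint.abci.block`, but the tests above still construct it from `app_hash`, `height`, and transaction ids and persist `block._asdict()`. A compatible sketch (the namedtuple layout is an assumption inferred from the `_asdict()` calls, not taken from the module itself):

    from collections import namedtuple

    # Assumed shape: _asdict() exists on namedtuple-like classes.
    Block = namedtuple("Block", ("app_hash", "height", "transactions"))

    block = Block(app_hash="random_utxo", height=31, transactions=["<tx id>"])
    assert block._asdict() == {
        "app_hash": "random_utxo",
        "height": 31,
        "transactions": ["<tx id>"],
    }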
@@ -8,6 +8,7 @@ import pytest
 from transactions.types.assets.create import Create
 from ipld import marshal, multihash


 METADATA_ENDPOINT = "/api/v1/metadata/"


@@ -26,7 +27,7 @@ def test_get_metadata_tendermint(client, b, alice):
         [alice.private_key]
     )

-    b.store_bulk_transactions([tx])
+    b.models.store_bulk_transactions([tx])

     # test that metadata is returned
     res = client.get(METADATA_ENDPOINT + metadata)
@@ -43,13 +44,13 @@ def test_get_metadata_limit_tendermint(client, b, alice):
     tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta, assets=assets1).sign(
         [alice.private_key]
     )
-    b.store_bulk_transactions([tx1])
+    b.models.store_bulk_transactions([tx1])

     assets2 = [{"data": multihash(marshal({"msg": "abc 2"}))}]
     tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta, assets=assets2).sign(
         [alice.private_key]
     )
-    b.store_bulk_transactions([tx2])
+    b.models.store_bulk_transactions([tx2])

     # test that both assets are returned without limit
     res = client.get(METADATA_ENDPOINT + meta)
@@ -9,7 +9,6 @@ from transactions.types.assets.create import Create
 from transactions.types.assets.transfer import Transfer
 from unittest.mock import MagicMock, patch


 OUTPUTS_ENDPOINT = "/api/v1/outputs/"


@@ -19,7 +18,7 @@ def test_get_outputs_endpoint(client, user_pk):
     m = MagicMock()
     m.txid = "a"
     m.output = 0
-    with patch("planetmint.Planetmint.get_outputs_filtered") as gof:
+    with patch("planetmint.model.models.Models.get_outputs_filtered") as gof:
         gof.return_value = [m, m]
         res = client.get(OUTPUTS_ENDPOINT + "?public_key={}".format(user_pk))
     assert res.json == [{"transaction_id": "a", "output_index": 0}, {"transaction_id": "a", "output_index": 0}]
@@ -31,7 +30,7 @@ def test_get_outputs_endpoint_unspent(client, user_pk):
     m = MagicMock()
     m.txid = "a"
     m.output = 0
-    with patch("planetmint.Planetmint.get_outputs_filtered") as gof:
+    with patch("planetmint.model.models.Models.get_outputs_filtered") as gof:
         gof.return_value = [m]
         params = "?spent=False&public_key={}".format(user_pk)
         res = client.get(OUTPUTS_ENDPOINT + params)
@@ -46,7 +45,7 @@ def test_get_outputs_endpoint_spent(client, user_pk):
     m = MagicMock()
     m.txid = "a"
     m.output = 0
-    with patch("planetmint.Planetmint.get_outputs_filtered") as gof:
+    with patch("planetmint.model.models.Models.get_outputs_filtered") as gof:
         gof.return_value = [m]
         params = "?spent=true&public_key={}".format(user_pk)
         res = client.get(OUTPUTS_ENDPOINT + params)
|
|||||||
TX_ENDPOINT = "/api/v1/transactions"
|
TX_ENDPOINT = "/api/v1/transactions"
|
||||||
|
|
||||||
def mine(tx_list):
|
def mine(tx_list):
|
||||||
b.store_bulk_transactions(tx_list)
|
b.models.store_bulk_transactions(tx_list)
|
||||||
|
|
||||||
alice_priv, alice_pub = crypto.generate_key_pair()
|
alice_priv, alice_pub = crypto.generate_key_pair()
|
||||||
# bob_priv, bob_pub = crypto.generate_key_pair()
|
# bob_priv, bob_pub = crypto.generate_key_pair()
|
||||||
@ -119,7 +118,7 @@ def test_get_divisble_transactions_returns_500(b, client):
|
|||||||
TX_ENDPOINT = "/api/v1/transactions"
|
TX_ENDPOINT = "/api/v1/transactions"
|
||||||
|
|
||||||
def mine(tx_list):
|
def mine(tx_list):
|
||||||
b.store_bulk_transactions(tx_list)
|
b.models.store_bulk_transactions(tx_list)
|
||||||
|
|
||||||
alice_priv, alice_pub = crypto.generate_key_pair()
|
alice_priv, alice_pub = crypto.generate_key_pair()
|
||||||
bob_priv, bob_pub = crypto.generate_key_pair()
|
bob_priv, bob_pub = crypto.generate_key_pair()
|
||||||
|
@@ -30,7 +30,6 @@ from transactions.common.transaction import (
 from transactions.common.utils import _fulfillment_from_details
 from transactions.common.crypto import generate_key_pair


 TX_ENDPOINT = "/api/v1/transactions/"


@@ -107,7 +106,7 @@ def test_post_create_transaction_endpoint(b, client):
 def test_post_create_transaction_with_language(b, client, nested, language, expected_status_code):
     from planetmint.backend.localmongodb.connection import LocalMongoDBConnection

-    if isinstance(b.connection, LocalMongoDBConnection):
+    if isinstance(b.models.connection, LocalMongoDBConnection):
         user_priv, user_pub = crypto.generate_key_pair()
         lang_obj = {"language": language}

@@ -148,7 +147,7 @@ def test_post_create_transaction_with_invalid_key(b, client, field, value, err_k

     user_priv, user_pub = crypto.generate_key_pair()

-    if isinstance(b.connection, LocalMongoDBConnection):
+    if isinstance(b.models.connection, LocalMongoDBConnection):
         if field == "asset":
             tx = Create.generate([user_pub], [([user_pub], 1)], assets=value)
         elif field == "metadata":
@@ -405,7 +404,7 @@ def test_transactions_get_list_good(client):

     asset_ids = ["1" * 64]

-    with patch("planetmint.Planetmint.get_transactions_filtered", get_txs_patched):
+    with patch("planetmint.model.models.Models.get_transactions_filtered", get_txs_patched):
         url = TX_ENDPOINT + "?asset_ids=" + ",".join(asset_ids)
         assert client.get(url).json == [
             ["asset_ids", asset_ids],
@@ -431,7 +430,7 @@ def test_transactions_get_list_bad(client):
         assert False

     with patch(
-        "planetmint.Planetmint.get_transactions_filtered",
+        "planetmint.model.models.Models.get_transactions_filtered",
         lambda *_, **__: should_not_be_called(),
     ):
         # Test asset id validated
@@ -487,7 +486,7 @@ def test_post_transaction_compose_valid_wo_abci(b, _bdb):
         assets=[{"data": "QmW5GVMW98D3mktSDfWHS8nX2UiCd8gP1uCiujnFX4yK97"}],
     ).sign([alice.private_key])
     validated = b.validate_transaction(tx)
-    b.store_bulk_transactions([validated])
+    b.models.store_bulk_transactions([validated])

     tx_obj = tx
     tx = tx.to_dict()
@@ -500,7 +499,7 @@ def test_post_transaction_compose_valid_wo_abci(b, _bdb):
     compose_dict = signed_compose_tx.to_dict()
     compose_obj = Transaction.from_dict(compose_dict)
     validated_compose = b.validate_transaction(compose_obj)
-    b.store_bulk_transactions([validated_compose])
+    b.models.store_bulk_transactions([validated_compose])


 @pytest.mark.abci
@@ -14,7 +14,7 @@ def test_get_validators_endpoint(b, client):
             "voting_power": 10,
         }
     ]
-    b.store_validator_set(23, validator_set)
+    b.models.store_validator_set(23, validator_set)

     res = client.get(VALIDATORS_ENDPOINT)
     assert is_validator(res.json[0])
@@ -13,7 +13,7 @@ import pytest
 from transactions.types.assets.create import Create
 from transactions.types.assets.transfer import Transfer
 from transactions.common import crypto
-from planetmint import events
+from planetmint.ipc import events
 from planetmint.web.websocket_server import init_app, EVENTS_ENDPOINT, EVENTS_ENDPOINT_BLOCKS
 from ipld import multihash, marshal

@@ -195,7 +195,7 @@ async def test_websocket_transaction_event(aiohttp_client, event_loop):

 @pytest.mark.asyncio
 async def test_websocket_string_event(aiohttp_client, event_loop):
-    from planetmint.events import POISON_PILL
+    from planetmint.ipc.events import POISON_PILL
     from planetmint.web.websocket_server import init_app, EVENTS_ENDPOINT

     blk_source = asyncio.Queue(loop=event_loop)
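Note: the events module now lives under `planetmint.ipc`, and the `POISON_PILL` sentinel moves with it. The test exercises the classic queue-shutdown pattern: a sentinel pushed onto the event queue tells the websocket dispatcher to stop. A hedged sketch of that pattern (the consumer loop is illustrative, not the actual dispatcher code):

    import asyncio

    from planetmint.ipc.events import POISON_PILL

    async def drain(queue):
        # Forward events until the sentinel arrives, then exit cleanly.
        while True:
            event = await queue.get()
            if event == POISON_PILL:
                break

    async def main():
        queue = asyncio.Queue()
        consumer = asyncio.ensure_future(drain(queue))
        await queue.put("some-event")
        await queue.put(POISON_PILL)
        await consumer

    asyncio.run(main())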
@@ -1,18 +0,0 @@
-# This is a TOML config file.
-# For more information, see https://github.com/toml-lang/toml
-
-proxy_app = "tcp://planetmint:26658"
-moniker = "anonymous"
-fast_sync = true
-db_backend = "leveldb"
-log_level = "state:debug,*:error"
-
-[consensus]
-create_empty_blocks = false
-
-[rpc]
-laddr = "tcp://0.0.0.0:26657"
-
-[p2p]
-laddr = "tcp://0.0.0.0:26656"
-seeds = ""