Merge branch 'master' into activating-tests

This commit is contained in:
codegeschrei 2018-08-14 16:59:26 +02:00
commit 0901717cd7
18 changed files with 258 additions and 425 deletions

@ -21,7 +21,7 @@ RUN apk --update add sudo bash \
RUN apk --update add mongodb monit
# Install Tendermint
RUN wget https://github.com/tendermint/tendermint/releases/download/v${TM_VERSION}-autodraft/tendermint_${TM_VERSION}_linux_amd64.zip \
RUN wget https://github.com/tendermint/tendermint/releases/download/v${TM_VERSION}/tendermint_${TM_VERSION}_linux_amd64.zip \
&& unzip tendermint_${TM_VERSION}_linux_amd64.zip \
&& mv tendermint /usr/local/bin/ \
&& rm tendermint_${TM_VERSION}_linux_amd64.zip

@ -34,4 +34,3 @@ COPY . /usr/src/app/
WORKDIR /usr/src/app
RUN pip install --no-cache-dir --process-dependency-links -e .[dev]
RUN bigchaindb -y configure

@ -2,9 +2,8 @@
(including pre-release versions) from PyPI,
so show the latest GitHub release instead.
--->
<!--- Codecov isn't working for us lately, so comment it out for now:
[![Codecov branch](https://img.shields.io/codecov/c/github/bigchaindb/bigchaindb/master.svg)](https://codecov.io/github/bigchaindb/bigchaindb?branch=master)
--->
[![Latest release](https://img.shields.io/github/release/bigchaindb/bigchaindb/all.svg)](https://github.com/bigchaindb/bigchaindb/releases)
[![Status on PyPI](https://img.shields.io/pypi/status/bigchaindb.svg)](https://pypi.org/project/BigchainDB/)
[![Travis branch](https://img.shields.io/travis/bigchaindb/bigchaindb/master.svg)](https://travis-ci.org/bigchaindb/bigchaindb)

@ -9,19 +9,19 @@ import copy
import json
import sys
from bigchaindb.utils import load_node_key
from bigchaindb.common.exceptions import (DatabaseAlreadyExists,
DatabaseDoesNotExist,
MultipleValidatorOperationError)
OperationError)
import bigchaindb
from bigchaindb import backend
from bigchaindb import backend, ValidatorElection, BigchainDB
from bigchaindb.backend import schema
from bigchaindb.backend import query
from bigchaindb.backend.query import VALIDATOR_UPDATE_ID, PRE_COMMIT_ID
from bigchaindb.backend.query import PRE_COMMIT_ID
from bigchaindb.commands import utils
from bigchaindb.commands.utils import (configure_bigchaindb,
input_on_stderr)
from bigchaindb.log import setup_logging
from bigchaindb.tendermint_utils import public_key_from_base64
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
@ -95,21 +95,48 @@ def run_configure(args):
@configure_bigchaindb
def run_upsert_validator(args):
"""Store validators which should be synced with Tendermint"""
"""Initiate and manage elections to change the validator set"""
b = bigchaindb.BigchainDB()
public_key = public_key_from_base64(args.public_key)
validator = {'pub_key': {'type': 'ed25519',
'data': public_key},
'power': args.power}
validator_update = {'validator': validator,
'update_id': VALIDATOR_UPDATE_ID}
try:
query.store_validator_update(b.connection, validator_update)
except MultipleValidatorOperationError:
logger.error('A validator update is pending to be applied. '
'Please re-try after the current update has '
'been processed.')
b = BigchainDB()
# Call the function specified by args.action, as defined above
globals()[f'run_upsert_validator_{args.action}'](args, b)
def run_upsert_validator_new(args, bigchain):
"""Initiates an election to add/update/remove a validator to an existing BigchainDB network
:param args: dict
args = {
'public_key': the public key of the proposed peer, (str)
'power': the proposed validator power for the new peer, (str)
'node_id': the node_id of the new peer (str)
'sk': the path to the private key of the node calling the election (str)
}
:param bigchain: an instance of BigchainDB
:return: election_id (tx_id)
:raises: OperationError if the write transaction fails for any reason
"""
new_validator = {
'public_key': args.public_key,
'power': args.power,
'node_id': args.node_id
}
key = load_node_key(args.sk)
voters = ValidatorElection.recipients(bigchain)
election = ValidatorElection.generate([key.public_key],
voters,
new_validator, None).sign([key.private_key])
election.validate(bigchain)
resp = bigchain.write_transaction(election, 'broadcast_tx_commit')
if resp == (202, ''):
return election.id
else:
raise OperationError('Failed to commit election')
def _run_init():
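The new `run_upsert_validator` dispatches to `run_upsert_validator_{args.action}` via `globals()`, and `run_upsert_validator_new` generates the election transaction, signs it with the node's key, validates it and broadcasts it. Below is a minimal sketch of calling the handler directly, mirroring the tests added later in this commit; the public key, node id and key path are placeholder assumptions, and a running BigchainDB/Tendermint node is assumed.
```python
# Sketch only: drive the new election handler directly. The key material,
# node_id and priv_validator.json path are placeholders, and a running
# BigchainDB/Tendermint node is assumed.
from argparse import Namespace

import bigchaindb
from bigchaindb.commands.bigchaindb import run_upsert_validator_new

args = Namespace(action='new',
                 public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=',
                 power=1,
                 node_id='12345',
                 sk='/home/user/.tendermint/config/priv_validator.json',
                 config={})

b = bigchaindb.BigchainDB()
election_id = run_upsert_validator_new(args, b)  # tx id of the new election
```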
@ -208,16 +235,30 @@ def create_parser():
help='The backend to use. It can only be '
'"localmongodb", currently.')
# parser for managing validator elections
validator_parser = subparsers.add_parser('upsert-validator',
help='Add/update/delete a validator')
help='Add/update/delete a validator.')
validator_parser.add_argument('public_key',
help='Public key of the validator.')
validator_subparser = validator_parser.add_subparsers(title='Action',
dest='action')
validator_parser.add_argument('power',
new_election_parser = validator_subparser.add_parser('new',
help='Calls a new election.')
new_election_parser.add_argument('public_key',
help='Public key of the validator to be added/updated/removed.')
new_election_parser.add_argument('power',
type=int,
help='Voting power of the validator. '
'Setting it to 0 will delete the validator.')
help='The proposed power for the validator. '
'Setting to 0 will remove the validator.')
new_election_parser.add_argument('node_id',
help='The node_id of the validator.')
new_election_parser.add_argument('--private-key',
dest='sk',
help='Path to the private key of the election initiator.')
# parsers for showing/exporting config values
subparsers.add_parser('show-config',
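A sketch of how the reworked parser handles the new sub-command, based on the updated `test_make_sure_we_dont_remove_any_command` test later in this commit; it assumes `create_parser` is importable from `bigchaindb.commands.bigchaindb`, and the argument values are placeholders.
```python
# Sketch: parse the new `upsert-validator new` sub-command. Values are
# placeholders; assumes create_parser is exposed by bigchaindb.commands.bigchaindb.
from bigchaindb.commands.bigchaindb import create_parser

parser = create_parser()
args = parser.parse_args(['upsert-validator', 'new',
                          'TEMP_PUB_KEY', '10', 'TEMP_NODE_ID',
                          '--private-key', 'TEMP_PATH_TO_PRIVATE_KEY'])
assert args.action == 'new'                   # from dest='action'
assert args.power == 10                       # parsed as int (type=int)
assert args.sk == 'TEMP_PATH_TO_PRIVATE_KEY'  # from dest='sk'
```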

@ -96,10 +96,6 @@ class ThresholdTooDeep(ValidationError):
"""Raised if threshold condition is too deep"""
class GenesisBlockAlreadyExistsError(ValidationError):
"""Raised when trying to create the already existing genesis block"""
class MultipleValidatorOperationError(ValidationError):
"""Raised when a validator update pending but new request is submited"""

@ -4,7 +4,3 @@ class BigchainDBError(Exception):
class CriticalDoubleSpend(BigchainDBError):
"""Data integrity error that requires attention"""
class CriticalDoubleInclusion(BigchainDBError):
"""Data integrity error that requires attention"""

@ -420,6 +420,7 @@ class BigchainDB(object):
validators = result['validators']
for v in validators:
v.pop('address')
v['voting_power'] = int(v['voting_power'])
return validators
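For reference, a validator entry returned by `get_validators()` looks roughly like the mock used in this commit's tests (an assumption based on that mock, not an API guarantee); the hunk above only makes sure `voting_power` is an `int`.
```python
# Shape of a single get_validators() entry, inferred from the test mock.
validator = {
    'pub_key': {'type': 'tendermint/PubKeyEd25519',
                'value': 'zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM='},
    'voting_power': 10,  # cast to int by the line added above
}
assert isinstance(validator['voting_power'], int)
```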

@ -2,9 +2,13 @@ import contextlib
import threading
import queue
import multiprocessing as mp
import json
import setproctitle
from bigchaindb.tendermint_utils import key_from_base64
from bigchaindb.common.crypto import key_pair_from_ed25519_key
class ProcessGroup(object):
@ -31,7 +35,8 @@ class ProcessGroup(object):
class Process(mp.Process):
"""Wrapper around multiprocessing.Process that uses
setproctitle to set the name of the process when running
the target task."""
the target task.
"""
def run(self):
setproctitle.setproctitle(self.name)
@ -167,3 +172,12 @@ class Lazy:
self.stack = []
return last
# Load Tendermint's public and private key from the file path
def load_node_key(path):
with open(path) as json_data:
priv_validator = json.load(json_data)
priv_key = priv_validator['priv_key']['value']
hex_private_key = key_from_base64(priv_key)
return key_pair_from_ed25519_key(hex_private_key)
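A short usage sketch for the new `load_node_key` helper; the path is only the example used in the docs changed by this commit, so adjust it to your node.
```python
# Sketch: load the Tendermint node key pair from priv_validator.json.
# The path is just the example from the docs; adjust it for your setup.
from bigchaindb.utils import load_node_key

key = load_node_key('/home/user/.tendermint/config/priv_validator.json')
print(key.public_key, key.private_key)  # object exposing both keys
```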

@ -23,7 +23,7 @@ Find [the version number of the latest Tendermint release](https://github.com/te
```bash
$ sudo apt install -y unzip
$ wget https://github.com/tendermint/tendermint/releases/download/v0.22.8-autodraft/tendermint_0.22.8_linux_amd64.zip
$ wget https://github.com/tendermint/tendermint/releases/download/v0.22.8/tendermint_0.22.8_linux_amd64.zip
$ unzip tendermint_0.22.8_linux_amd64.zip
$ rm tendermint_0.22.8_linux_amd64.zip
$ sudo mv tendermint /usr/local/bin

@ -2,13 +2,14 @@
The word _immutable_ means "unchanging over time or unable to be changed." For example, the decimal digits of π are immutable (3.14159…).
The blockchain community often describes blockchains as “immutable.” If we interpret that word literally, it means that blockchain data is unchangeable or permanent, which is absurd. The data _can_ be changed. For example, a plague might drive humanity extinct; the data would then get corrupted over time due to water damage, thermal noise, and the general increase of entropy. In the case of Bitcoin, nothing so drastic is required: a 51% attack will suffice.
The blockchain community often describes blockchains as “immutable.” If we interpret that word literally, it means that blockchain data is unchangeable or permanent, which is absurd. The data _can_ be changed. For example, a plague might drive humanity extinct; the data would then get corrupted over time due to water damage, thermal noise, and the general increase of entropy.
It's true that blockchain data is more difficult to change (or delete) than usual. It's more than just "tamper-resistant" (which implies intent); blockchain data also resists random changes that can happen without any intent, such as data corruption on a hard drive. Therefore, in the context of blockchains, we interpret the word “immutable” to mean *practically* immutable, for all intents and purposes. (Linguists would say that the word “immutable” is a _term of art_ in the blockchain community.)
Blockchain data can achieve immutability in several ways:
Blockchain data can be made immutable in several ways:
1. **Replication.** All data is replicated (copied) to several different places. The replication factor can be set by the consortium. The higher the replication factor, the more difficult it becomes to change or delete all replicas.
1. **No APIs for changing or deleting data.** Blockchain software usually doesn't expose any APIs for changing or deleting the data stored in the blockchain. BigchainDB has no such APIs. This doesn't prevent changes or deletions from happening in _other_ ways; it's just one line of defense.
1. **Replication.** All data is replicated (copied) to several different places. The higher the replication factor, the more difficult it becomes to change or delete all replicas.
1. **Internal watchdogs.** All nodes monitor all changes and if some unallowed change happens, then appropriate action can be taken.
1. **External watchdogs.** A consortium may opt to have trusted third-parties to monitor and audit their data, looking for irregularities. For a consortium with publicly-readable data, the public can act as an auditor.
1. **Economic incentives.** Some blockchain systems make it very expensive to change old stored data. Examples include proof-of-work and proof-of-stake systems. BigchainDB doesn't use explicit incentives like those.
@ -17,5 +18,3 @@ Blockchain data can achieve immutability in several ways:
1. **Full or partial backups** may be recorded from time to time, possibly on magnetic tape storage, other blockchains, printouts, etc.
1. **Strong security.** Node owners can adopt and enforce strong security policies.
1. **Node diversity.** Diversity makes it so that no one thing (e.g. natural disaster or operating system bug) can compromise enough of the nodes. See [the section on the kinds of node diversity](diversity.html).
Some of these things come "for free" as part of the BigchainDB software, and others require some extra effort from the consortium and node owners.

@ -11,10 +11,11 @@ Libraries and Tools Maintained by the BigchainDB Team
Community-Driven Libraries and Tools
------------------------------------
.. note::
.. warning::
Some of these projects are a work in progress,
but may still be useful.
Others might not work with the latest version of BigchainDB.
* `Haskell transaction builder <https://github.com/bigchaindb/bigchaindb-hs>`_
* `Go driver <https://github.com/zbo14/envoke/blob/master/bigchain/bigchain.go>`_

@ -1,19 +0,0 @@
Glossary
========
.. glossary::
:sorted:
associative array
A collection of key/value (or name/value) pairs
such that each possible key appears at most once
in the collection.
In JavaScript (and JSON), all objects behave as associative arrays
with string-valued keys.
In Python and .NET, associative arrays are called *dictionaries*.
In Java and Go, they are called *maps*.
In Ruby, they are called *hashes*.
See also: Wikipedia's articles for
`Associative array <https://en.wikipedia.org/wiki/Associative_array>`_
and
`Comparison of programming languages (associative array) <https://en.wikipedia.org/wiki/Comparison_of_programming_languages_(associative_array)>`_

@ -301,7 +301,7 @@ Assets
Currently this endpoint is only supported if using MongoDB.
.. http:get:: /api/v1/assets?search={search}
.. http:get:: /api/v1/assets/?search={search}
Return all assets that match a given text search.
@ -310,6 +310,10 @@ Assets
The ``id`` of the asset
is the same ``id`` of the CREATE transaction that created the asset.
.. note::
You can use ``assets/?search`` or ``assets?search``.
If no assets match the text search it returns an empty list.
If the text string is empty or the server does not support text search,
@ -425,6 +429,10 @@ Transaction Metadata
The ``id`` of the metadata
is the same ``id`` of the transaction where it was defined.
.. note::
You can use ``metadata/?search`` or ``metadata?search``.
If no metadata objects match the text search it returns an empty list.
If the text string is empty or the server does not support text search,
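A minimal sketch of hitting the text-search endpoints described above with `requests`; the host and port are assumptions (the docs elsewhere in this commit use port 9984).
```python
# Sketch: text search against the assets and metadata endpoints.
# Host/port are assumptions; the docs elsewhere use port 9984.
import requests

base = 'http://localhost:9984/api/v1'
assets = requests.get(base + '/assets', params={'search': 'bigchaindb'}).json()
metadata = requests.get(base + '/metadata', params={'search': 'bigchaindb'}).json()
# Both return a list; no matches yields an empty list.
```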

@ -18,5 +18,4 @@ BigchainDB Server Documentation
data-models/index
k8s-deployment-template/index
release-notes
glossary
appendices/index

@ -80,22 +80,27 @@ configuration file as documented under
**This is an experimental feature. Users are advised not to use it in production.**
Add, update, or remove a validator from the validators set of the local node. The command implements [3/UPSERT-VALIDATORS](https://github.com/bigchaindb/BEPs/tree/master/3), check it out if you need more details on how this is orchestrated.
Manage elections to add, update, or remove a validator from the validators set of the local node. The upsert-validator subcommands implement [BEP-21](https://github.com/bigchaindb/BEPs/tree/master/21). Check it out if you need more details on how this is orchestrated.
Below is the command line syntax,
Election management is broken into several subcommands. Below is the command line syntax for each,
#### upsert-validator new
Calls a new election, proposing a change to the validator set.
Below is the command line syntax and the return value,
```bash
$ bigchaindb upsert-validator PUBLIC_KEY_OF_VALIDATOR POWER
$ bigchaindb upsert-validator new E_PUBKEY E_POWER E_NODE_ID --private-key PATH_TO_YOUR_PRIVATE_KEY
<election_id>
```
Here, `E_PUBKEY`, `E_POWER`, and `E_NODE_ID` are the public key, proposed power, and node id of the validator being voted on. `--private-key` should be the path to wherever the private key for your validator node is stored (*not* the private key itself). For example, to add a new validator, provide the public key and node id of some node not already in the validator set, along with whatever voting power you'd like them to have. To remove an existing validator, provide their public key and node id, and set `E_POWER` to `0`.
Example usage,
```bash
$ bigchaindb upsert-validator B0E42D2589A455EAD339A035D6CE1C8C3E25863F268120AA0162AD7D003A4014 10
$ bigchaindb upsert-validator new B0E42D2589A455EAD339A035D6CE1C8C3E25863F268120AA0162AD7D003A4014 1 12345 --private-key /home/user/.tendermint/config/priv_validator.json
```
If the command returns without any error, then a request to update the validator set has been successfully submitted. Note that successful execution of the command does not by itself mean that the validator set has been updated. To check whether the change has been applied, the node operator can execute `curl http://node_ip:9984/api/v1/validators`, which will list the current validator set. Refer to the [validators](/http-client-server-api.html#validators) section of the HTTP API docs for more detail.
Note:
- When `POWER` is set to `0`, the validator will be removed from the validator set.
- Upsert requests are handled once per block, i.e. the validator set is updated once a new block is committed. The node operator therefore cannot submit a new upsert request until the current request has been processed. Furthermore, if Tendermint is started with `--consensus.create_empty_blocks=false` and there are no new incoming transactions, the validator set update is delayed until new transactions are received and a new block can be committed.
If the command succeeds, it will create an election and return an `election_id`. Elections consist of one vote token per voting power, issued to the members of the validator set. Validators can cast their votes to approve the change to the validator set by spending their vote tokens. The status of the election can be monitored by providing the `election_id` to the `show` subcommand.
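As noted above, the effect of a successful election can be verified against the HTTP API; a small sketch in Python (the node address is a placeholder).
```python
# Sketch: check whether the validator-set change has been applied.
# node_ip is a placeholder for your node's address.
import requests

current_validators = requests.get('http://node_ip:9984/api/v1/validators').json()
print(current_validators)
```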

@ -18,7 +18,8 @@ def test_make_sure_we_dont_remove_any_command():
assert parser.parse_args(['init']).command
assert parser.parse_args(['drop']).command
assert parser.parse_args(['start']).command
assert parser.parse_args(['upsert-validator', 'TEMP_PUB_KEYPAIR', '10']).command
assert parser.parse_args(['upsert-validator', 'new', 'TEMP_PUB_KEYPAIR', '10', 'TEMP_NODE_ID',
'--private-key', 'TEMP_PATH_TO_PRIVATE_KEY']).command
@pytest.mark.tendermint
@ -341,15 +342,70 @@ class MockResponse():
return {'result': {'latest_block_height': self.height}}
# @pytest.mark.execute
# @patch('bigchaindb.lib.BigchainDB.get_validators')
# @pytest.mark.abci
@pytest.mark.skip
@patch('bigchaindb.config_utils.autoconfigure')
@patch('bigchaindb.backend.query.store_validator_update')
def test_upsert_validator_new_with_tendermint(b, priv_validator_path, user_sk, monkeypatch):
"""WIP: Will be fixed and activated in the next PR
"""
from bigchaindb.commands.bigchaindb import run_upsert_validator_new
import time
time.sleep(3)
def mock_get():
return [
{'pub_key': {'value': 'zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=',
'type': 'tendermint/PubKeyEd25519'},
'voting_power': 10}
]
# b.get_validators = mock_get
# mock_get_validators = mock_get
# monkeypatch.setattr('requests.get', mock_get)
proposer_key = b.get_validators()[0]['pub_key']['value']
args = Namespace(action='new',
public_key=proposer_key,
power=1,
node_id='12345',
sk=priv_validator_path,
config={})
resp = run_upsert_validator_new(args, b)
time.sleep(3)
assert b.get_transaction(resp)
@pytest.mark.tendermint
def test_upsert_validator(mock_autoconfigure, mock_store_validator_update):
from bigchaindb.commands.bigchaindb import run_upsert_validator
@pytest.mark.bdb
def test_upsert_validator_new_without_tendermint(b, priv_validator_path, user_sk, monkeypatch):
from bigchaindb.commands.bigchaindb import run_upsert_validator_new
args = Namespace(public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=',
power='10', config={})
run_upsert_validator(args)
def mock_get():
return [
{'pub_key': {'value': 'zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=',
'type': 'tendermint/PubKeyEd25519'},
'voting_power': 10}
]
assert mock_store_validator_update.called
def mock_write(tx, mode):
b.store_transaction(tx)
return (202, '')
b.get_validators = mock_get
b.write_transaction = mock_write
monkeypatch.setattr('requests.get', mock_get)
args = Namespace(action='new',
public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=',
power=1,
node_id='12345',
sk=priv_validator_path,
config={})
resp = run_upsert_validator_new(args, b)
assert b.get_transaction(resp)

@ -4,10 +4,11 @@ Tasks:
1. setup test database before starting the tests
2. delete test database after running the tests
"""
import json
import os
import copy
import random
import tempfile
from collections import namedtuple
from logging import getLogger
from logging.config import dictConfig
@ -329,17 +330,19 @@ def _get_height(b):
def inputs(user_pk, b, alice):
from bigchaindb.models import Transaction
# create blocks with transactions for `USER` to spend
for block in range(4):
for height in range(1, 4):
transactions = [
Transaction.create(
[alice_pubkey(alice)],
[([user_pk], 1)],
metadata={'msg': random.random()},
).sign([alice_privkey(alice)]).to_dict()
).sign([alice_privkey(alice)])
for _ in range(10)
]
block = Block(app_hash='', height=_get_height(b), transactions=transactions)
tx_ids = [tx.id for tx in transactions]
block = Block(app_hash='hash'+str(height), height=height, transactions=tx_ids)
b.store_block(block._asdict())
b.store_bulk_transactions(transactions)
@pytest.fixture
@ -668,3 +671,27 @@ def node_keys():
'83VINXdj2ynOHuhvSZz5tGuOE5oYzIi0mEximkX1KYMlt/Csu8JUjA4+by2Pz3fqSLshhuYYeM+IpvqcBl6BEA==',
'PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=':
'uz8bYgoL4rHErWT1gjjrnA+W7bgD/uDQWSRKDmC8otc95wnnxJo1GxYlmh0OaqOkJaobpu13BcUcvITjRFiVgw=='}
@pytest.fixture(scope='session')
def priv_validator_path(node_keys):
(public_key, private_key) = list(node_keys.items())[0]
priv_validator = {
'address': '84F787D95E196DC5DE5F972666CFECCA36801426',
'pub_key': {
'type': 'AC26791624DE60',
'value': public_key
},
'last_height': 0,
'last_round': 0,
'last_step': 0,
'priv_key': {
'type': '954568A3288910',
'value': private_key
}
}
fd, path = tempfile.mkstemp()
socket = os.fdopen(fd, 'w')
json.dump(priv_validator, socket)
socket.close()
return path

@ -7,196 +7,74 @@ from base58 import b58decode
pytestmark = pytest.mark.bdb
@pytest.mark.skipif(reason='Some tests throw a ResourceWarning that might result in some weird '
'exceptions while running the tests. The problem seems to *not* '
'interfere with the correctness of the tests. ')
def test_remove_unclosed_sockets():
pass
class TestBigchainApi(object):
@pytest.mark.genesis
def test_get_last_voted_block_cyclic_blockchain(self, b, monkeypatch, alice):
from bigchaindb.common.crypto import PrivateKey
from bigchaindb.common.exceptions import CyclicBlockchainError
from bigchaindb.common.utils import serialize
from bigchaindb.models import Transaction
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
monkeypatch.setattr('time.time', lambda: 1)
block1 = b.create_block([tx])
b.write_block(block1)
# Manipulate vote to create a cyclic Blockchain
vote = b.vote(block1.id, b.get_last_voted_block().id, True)
vote['vote']['previous_block'] = block1.id
vote_data = serialize(vote['vote'])
vote['signature'] = PrivateKey(alice.private_key).sign(vote_data.encode())
b.write_vote(vote)
with pytest.raises(CyclicBlockchainError):
b.get_last_voted_block()
@pytest.mark.genesis
def test_try_voting_while_constructing_cyclic_blockchain(self, b,
monkeypatch, alice):
from bigchaindb.common.exceptions import CyclicBlockchainError
from bigchaindb.models import Transaction
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
block1 = b.create_block([tx])
# We can simply submit the same block id twice and check if `Bigchain`
# throws
with pytest.raises(CyclicBlockchainError):
b.vote(block1.id, block1.id, True)
@pytest.mark.genesis
def test_has_previous_vote_when_already_voted(self, b, monkeypatch, alice):
@pytest.mark.tendermint
def test_get_spent_with_double_inclusion_detected(self, b, alice):
from bigchaindb.models import Transaction
from bigchaindb.backend.exceptions import OperationError
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
monkeypatch.setattr('time.time', lambda: 1)
block = b.create_block([tx])
b.write_block(block)
b.store_bulk_transactions([tx])
assert b.has_previous_vote(block.id) is False
vote = b.vote(block.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
assert b.has_previous_vote(block.id) is True
@pytest.mark.genesis
def test_get_spent_with_double_inclusion_detected(self, b, monkeypatch, alice):
from bigchaindb.exceptions import CriticalDoubleInclusion
from bigchaindb.models import Transaction
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
monkeypatch.setattr('time.time', lambda: 1000000000)
block1 = b.create_block([tx])
b.write_block(block1)
monkeypatch.setattr('time.time', lambda: 1000000020)
transfer_tx = Transaction.transfer(tx.to_inputs(), [([alice.public_key], 1)],
asset_id=tx.id)
transfer_tx = transfer_tx.sign([alice.private_key])
block2 = b.create_block([transfer_tx])
b.write_block(block2)
b.store_bulk_transactions([transfer_tx])
monkeypatch.setattr('time.time', lambda: 1000000030)
transfer_tx2 = Transaction.transfer(tx.to_inputs(), [([alice.public_key], 1)],
asset_id=tx.id)
transfer_tx2 = transfer_tx2.sign([alice.private_key])
block3 = b.create_block([transfer_tx2])
b.write_block(block3)
with pytest.raises(OperationError):
b.store_bulk_transactions([transfer_tx2])
# Vote both block2 and block3 valid
vote = b.vote(block2.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
vote = b.vote(block3.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
with pytest.raises(CriticalDoubleInclusion):
b.get_spent(tx.id, 0)
@pytest.mark.genesis
def test_get_spent_with_double_spend_detected(self, b, monkeypatch, alice):
from bigchaindb.exceptions import CriticalDoubleSpend
@pytest.mark.tendermint
def test_get_spent_with_double_spend_detected(self, b, alice):
from bigchaindb.models import Transaction
from bigchaindb.common.exceptions import DoubleSpend
from bigchaindb.exceptions import CriticalDoubleSpend
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
monkeypatch.setattr('time.time', lambda: 1000000000)
block1 = b.create_block([tx])
b.write_block(block1)
b.store_bulk_transactions([tx])
monkeypatch.setattr('time.time', lambda: 1000000020)
transfer_tx = Transaction.transfer(tx.to_inputs(), [([alice.public_key], 1)],
asset_id=tx.id)
transfer_tx = transfer_tx.sign([alice.private_key])
block2 = b.create_block([transfer_tx])
b.write_block(block2)
monkeypatch.setattr('time.time', lambda: 1000000030)
transfer_tx2 = Transaction.transfer(tx.to_inputs(), [([alice.public_key], 2)],
asset_id=tx.id)
transfer_tx2 = transfer_tx2.sign([alice.private_key])
block3 = b.create_block([transfer_tx2])
b.write_block(block3)
# Vote both block2 and block3 valid
vote = b.vote(block2.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
vote = b.vote(block3.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
with pytest.raises(DoubleSpend):
b.validate_transaction(transfer_tx2, [transfer_tx])
b.store_bulk_transactions([transfer_tx])
with pytest.raises(DoubleSpend):
b.validate_transaction(transfer_tx2)
b.store_bulk_transactions([transfer_tx2])
with pytest.raises(CriticalDoubleSpend):
b.get_spent(tx.id, 0)
@pytest.mark.genesis
def test_get_block_status_for_tx_with_double_inclusion(self, b, monkeypatch, alice):
from bigchaindb.exceptions import CriticalDoubleInclusion
@pytest.mark.tendermint
def test_get_block_status_for_tx_with_double_inclusion(self, b, alice):
from bigchaindb.models import Transaction
from bigchaindb.backend.exceptions import OperationError
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)])
tx = tx.sign([alice.private_key])
monkeypatch.setattr('time.time', lambda: 1000000000)
block1 = b.create_block([tx])
b.write_block(block1)
b.store_bulk_transactions([tx])
monkeypatch.setattr('time.time', lambda: 1000000020)
block2 = b.create_block([tx])
b.write_block(block2)
with pytest.raises(OperationError):
b.store_bulk_transactions([tx])
# Vote both blocks valid (creating a double spend)
vote = b.vote(block1.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
vote = b.vote(block2.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
with pytest.raises(CriticalDoubleInclusion):
b.get_blocks_status_containing_tx(tx.id)
@pytest.mark.genesis
def test_get_transaction_in_invalid_and_valid_block(self, monkeypatch, b, alice):
from bigchaindb.models import Transaction
monkeypatch.setattr('time.time', lambda: 1000000000)
tx1 = Transaction.create([alice.public_key], [([alice.public_key], 1)],
metadata={'msg': 1})
tx1 = tx1.sign([alice.private_key])
block1 = b.create_block([tx1])
b.write_block(block1)
monkeypatch.setattr('time.time', lambda: 1000000020)
tx2 = Transaction.create([alice.public_key], [([alice.public_key], 1)],
metadata={'msg': 2})
tx2 = tx2.sign([alice.private_key])
block2 = b.create_block([tx2])
b.write_block(block2)
# vote the first block invalid
vote = b.vote(block1.id, b.get_last_voted_block().id, False)
b.write_vote(vote)
# vote the second block valid
vote = b.vote(block2.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
assert b.get_transaction(tx1.id) is None
assert b.get_transaction(tx2.id) == tx2
@pytest.mark.genesis
@pytest.mark.tendermint
def test_text_search(self, b, alice):
from bigchaindb.models import Transaction
from bigchaindb.backend.exceptions import OperationError
@ -215,13 +93,8 @@ class TestBigchainApi(object):
tx3 = Transaction.create([alice.public_key], [([alice.public_key], 1)],
asset=asset3).sign([alice.private_key])
# create the block
block = b.create_block([tx1, tx2, tx3])
b.write_block(block)
# vote valid
vote = b.vote(block.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
# write the transactions to the DB
b.store_bulk_transactions([tx1, tx2, tx3])
# get the assets through text search
# this query only works with MongoDB
@ -232,182 +105,13 @@ class TestBigchainApi(object):
else:
assert len(assets) == 3
@pytest.mark.genesis
def test_text_search_returns_valid_only(self, monkeypatch, b, alice):
from bigchaindb.models import Transaction
from bigchaindb.backend.exceptions import OperationError
from bigchaindb.backend.localmongodb.connection import LocalMongoDBConnection
asset_valid = {'msg': 'Hello BigchainDB!'}
asset_invalid = {'msg': 'Goodbye BigchainDB!'}
monkeypatch.setattr('time.time', lambda: 1000000000)
tx1 = Transaction.create([alice.public_key], [([alice.public_key], 1)],
asset=asset_valid)
tx1 = tx1.sign([alice.private_key])
block1 = b.create_block([tx1])
b.write_block(block1)
monkeypatch.setattr('time.time', lambda: 1000000020)
tx2 = Transaction.create([alice.public_key], [([alice.public_key], 1)],
asset=asset_invalid)
tx2 = tx2.sign([alice.private_key])
block2 = b.create_block([tx2])
b.write_block(block2)
# vote the first block valid
vote = b.vote(block1.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
# vote the second block invalid
vote = b.vote(block2.id, b.get_last_voted_block().id, False)
b.write_vote(vote)
# get assets with text search
try:
assets = list(b.text_search('bigchaindb'))
except OperationError:
assert not isinstance(b.connection, LocalMongoDBConnection)
return
# should only return one asset
assert len(assets) == 1
# should return the asset created by tx1
assert assets[0] == {
'data': {'msg': 'Hello BigchainDB!'},
'id': tx1.id
}
@pytest.mark.usefixtures('inputs')
def test_write_transaction(self, b, user_pk, user_sk):
from bigchaindb.models import Transaction
input_tx = b.get_owned_ids(user_pk).pop()
input_tx = b.get_transaction(input_tx.txid)
inputs = input_tx.to_inputs()
tx = Transaction.transfer(inputs, [([user_pk], 1)],
asset_id=input_tx.id)
tx = tx.sign([user_sk])
b.write_transaction(tx)
tx_from_db, status = b.get_transaction(tx.id, include_status=True)
assert tx_from_db.to_dict() == tx.to_dict()
@pytest.mark.usefixtures('inputs')
def test_read_transaction(self, b, user_pk, user_sk):
from bigchaindb.models import Transaction
input_tx = b.get_owned_ids(user_pk).pop()
input_tx = b.get_transaction(input_tx.txid)
inputs = input_tx.to_inputs()
tx = Transaction.transfer(inputs, [([user_pk], 1)],
asset_id=input_tx.id)
tx = tx.sign([user_sk])
b.write_transaction(tx)
# create block and write it to the bigchain before retrieving the transaction
block = b.create_block([tx])
b.write_block(block)
response, status = b.get_transaction(tx.id, include_status=True)
# add validity information, which will be returned
assert tx.to_dict() == response.to_dict()
@pytest.mark.usefixtures('inputs')
def test_read_transaction_invalid_block(self, b, user_pk, user_sk):
from bigchaindb.models import Transaction
input_tx = b.get_owned_ids(user_pk).pop()
input_tx = b.get_transaction(input_tx.txid)
inputs = input_tx.to_inputs()
tx = Transaction.transfer(inputs, [([user_pk], 1)],
asset_id=input_tx.id)
tx = tx.sign([user_sk])
# There's no need to b.write_transaction(tx) to the backlog
# create block
block = b.create_block([tx])
b.write_block(block)
# vote the block invalid
vote = b.vote(block.id, b.get_last_voted_block().id, False)
b.write_vote(vote)
response = b.get_transaction(tx.id)
# should be None, because invalid blocks are ignored
# and a copy of the tx is not in the backlog
assert response is None
@pytest.mark.usefixtures('inputs')
def test_genesis_block(self, b):
from bigchaindb.backend import query
block = query.get_genesis_block(b.connection)
assert len(block['block']['transactions']) == 1
assert block['block']['transactions'][0]['operation'] == 'GENESIS'
assert block['block']['transactions'][0]['inputs'][0]['fulfills'] is None
@pytest.mark.genesis
def test_create_genesis_block_fails_if_table_not_empty(self, b):
from bigchaindb.common.exceptions import GenesisBlockAlreadyExistsError
with pytest.raises(GenesisBlockAlreadyExistsError):
b.create_genesis_block()
@pytest.mark.skipif(reason='This test may not make sense after changing the chainification mode')
def test_get_last_block(self, b):
from bigchaindb.backend import query
# get the number of blocks
num_blocks = query.count_blocks(b.connection)
# get the last block
last_block = b.get_last_block()
assert last_block['block']['block_number'] == num_blocks - 1
@pytest.mark.skipif(reason='This test may not make sense after changing the chainification mode')
def test_get_last_block_id(self, b):
last_block = b.get_last_block()
last_block_id = b.get_last_block_id()
assert last_block_id == last_block['id']
@pytest.mark.skipif(reason='This test may not make sense after changing the chainification mode')
def test_get_previous_block(self, b):
last_block = b.get_last_block()
new_block = b.create_block([])
b.write_block(new_block)
prev_block = b.get_previous_block(new_block)
assert prev_block == last_block
@pytest.mark.skipif(reason='This test may not make sense after changing the chainification mode')
def test_get_previous_block_id(self, b):
last_block = b.get_last_block()
new_block = b.create_block([])
b.write_block(new_block)
prev_block_id = b.get_previous_block_id(new_block)
assert prev_block_id == last_block['id']
def test_create_empty_block(self, b):
from bigchaindb.common.exceptions import OperationError
with pytest.raises(OperationError) as excinfo:
b.create_block([])
assert excinfo.value.args[0] == 'Empty block creation is not allowed'
@pytest.mark.usefixtures('inputs')
@pytest.mark.tendermint
def test_non_create_input_not_found(self, b, user_pk):
from cryptoconditions import Ed25519Sha256
from bigchaindb.common.exceptions import InputDoesNotExist
from bigchaindb.common.transaction import Input, TransactionLink
from bigchaindb.models import Transaction
from bigchaindb import BigchainDB
# Create an input for a non existing transaction
input = Input(Ed25519Sha256(public_key=b58decode(user_pk)),
@ -415,21 +119,28 @@ class TestBigchainApi(object):
TransactionLink('somethingsomething', 0))
tx = Transaction.transfer([input], [([user_pk], 1)],
asset_id='mock_asset_link')
with pytest.raises(InputDoesNotExist):
tx.validate(BigchainDB())
tx.validate(b)
def test_count_backlog(self, b, user_pk, alice):
from bigchaindb.backend import query
@pytest.mark.tendermint
def test_write_transaction(self, b, user_sk, user_pk, alice, create_tx):
from bigchaindb.models import Transaction
for i in range(4):
tx = Transaction.create([alice.public_key], [([user_pk], 1)],
metadata={'msg': i}) \
.sign([alice.private_key])
b.write_transaction(tx)
asset1 = {'msg': 'BigchainDB 1'}
assert query.count_backlog(b.connection) == 4
tx = Transaction.create([alice.public_key], [([alice.public_key], 1)],
asset=asset1).sign([alice.private_key])
b.store_bulk_transactions([tx])
tx_from_db = b.get_transaction(tx.id)
before = tx.to_dict()
after = tx_from_db.to_dict()
assert before['asset']['data'] == after['asset']['data']
before.pop('asset', None)
after.pop('asset', None)
assert before == after
class TestTransactionValidation(object):