Merge remote-tracking branch 'remotes/origin/master' into feat/api/votes

This commit is contained in:
diminator 2017-01-12 09:59:39 +01:00
commit af2e8e14a5
No known key found for this signature in database
GPG Key ID: C3D8590E6D0D439A
27 changed files with 687 additions and 147 deletions

View File

@ -4,6 +4,9 @@ set -e -x
if [[ "${TOXENV}" == *-rdb ]]; then
rethinkdb --daemon
elif [[ "${TOXENV}" == *-mdb ]]; then
sudo service mongod start
elif [[ "${BIGCHAINDB_DATABASE_BACKEND}" == mongodb ]]; then
wget http://downloads.mongodb.org/linux/mongodb-linux-x86_64-3.4.1.tgz -O /tmp/mongodb.tgz
tar -xvf /tmp/mongodb.tgz
mkdir /tmp/mongodb-data
${PWD}/mongodb-linux-x86_64-3.4.1/bin/mongod --dbpath=/tmp/mongodb-data --replSet=rs0 &> /dev/null &
fi

View File

@ -4,6 +4,8 @@ set -e -x
if [[ -n ${TOXENV} ]]; then
tox -e ${TOXENV}
elif [[ "${BIGCHAINDB_DATABASE_BACKEND}" == mongodb ]]; then
pytest -v --database-backend=mongodb --cov=bigchaindb
else
pytest -v -n auto --cov=bigchaindb
fi

View File

@ -12,8 +12,6 @@ env:
matrix:
fast_finish: true
allow_failures:
- env: BIGCHAINDB_DATABASE_BACKEND=mongodb
exclude:
- python: 3.4
env: TOXENV=flake8
@ -26,18 +24,19 @@ matrix:
addons:
rethinkdb: '2.3.5'
env: BIGCHAINDB_DATABASE_BACKEND=rethinkdb
- python: 3.5
services: mongodb
env: BIGCHAINDB_DATABASE_BACKEND=mongodb
- python: 3.5
addons:
rethinkdb: '2.3.5'
env: BIGCHAINDB_DATABASE_BACKEND=rethinkdb
- python: 3.5
env: BIGCHAINDB_DATABASE_BACKEND=mongodb
before_install: sudo .ci/travis-before-install.sh
install: .ci/travis-install.sh
before_script: .ci/travis-before-script.sh
script: .ci/travis_script.sh
after_success: .ci/travis-after-success.sh

View File

@ -1,6 +1,16 @@
FROM rethinkdb:2.3
RUN apt-get update
# From http://stackoverflow.com/a/38553499
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y locales
RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && \
echo 'LANG="en_US.UTF-8"'>/etc/default/locale && \
dpkg-reconfigure --frontend=noninteractive locales && \
update-locale LANG=en_US.UTF-8
ENV LANG en_US.UTF-8
RUN apt-get -y install python3 python3-pip libffi-dev
RUN pip3 install --upgrade pip
RUN pip3 install --upgrade setuptools

View File

@ -2,9 +2,9 @@
This document is for whoever has the ability to merge pull requests in the Git repositories associated with BigchainDB.
If the pull request is from an employee of ascribe GmbH, then you can ignore this document.
If the pull request is from an employee of BigchainDB GmbH, then you can ignore this document.
If the pull request is from someone who is _not_ an employee of ascribe, then:
If the pull request is from someone who is _not_ an employee of BigchainDB, then:
* Have they agreed to the Individual Contributor Agreement in the past? (Troy, Greg, and others have a list.) If yes, then you can merge the PR and ignore the rest of this document.
* Do they belong to a company or organization which agreed to the Entity Contributor Agreement in the past, and will they be contributing on behalf of that company or organization? (Troy, Greg, and others have a list.) If yes, then you can merge the PR and ignore the rest of this document.
@ -34,7 +34,7 @@ We will email you (or your employer) with further instructions.
(END OF COMMENT)
Once they click SEND, we (ascribe) will get an email with the information in the form. (Troy gets those emails for sure, I'm not sure who else.) The next step is to send an email to the email address submitted in the form, saying something like (where the stuff in [square brackets] should be replaced):
Once they click SEND, we (BigchainDB) will get an email with the information in the form. (Troy gets those emails for sure, I'm not sure who else.) The next step is to send an email to the email address submitted in the form, saying something like (where the stuff in [square brackets] should be replaced):
Hi [NAME],

View File

@ -67,10 +67,10 @@ class MongoDBChangeFeed(ChangeFeed):
# See https://github.com/bigchaindb/bigchaindb/issues/992
if is_insert and (self.operation & ChangeFeed.INSERT):
record['o'].pop('_id', None)
doc = record['o']
self.outqueue.put(record['o'])
elif is_delete and (self.operation & ChangeFeed.DELETE):
# on delete it only returns the id of the document
doc = record['o']
self.outqueue.put(record['o'])
elif is_update and (self.operation & ChangeFeed.UPDATE):
# the oplog entry for updates only returns the update
# operations to apply to the document and not the
@ -78,7 +78,7 @@ class MongoDBChangeFeed(ChangeFeed):
# and then return it.
doc = self.connection.conn[dbname][table]\
.find_one(record['o2'], projection={'_id': False})
self.outqueue.put(doc)
self.outqueue.put(doc)
@register_changefeed(MongoDBConnection)

View File

@ -1,8 +1,10 @@
"""Query implementation for MongoDB"""
from time import time
from itertools import chain
from pymongo import ReturnDocument
from pymongo import errors
from bigchaindb import backend
from bigchaindb.common.exceptions import CyclicBlockchainError
@ -15,7 +17,10 @@ register_query = module_dispatch_registrar(backend.query)
@register_query(MongoDBConnection)
def write_transaction(conn, signed_transaction):
return conn.db['backlog'].insert_one(signed_transaction)
try:
return conn.db['backlog'].insert_one(signed_transaction)
except errors.DuplicateKeyError:
return
@register_query(MongoDBConnection)
@ -36,29 +41,39 @@ def delete_transaction(conn, *transaction_id):
@register_query(MongoDBConnection)
def get_stale_transactions(conn, reassign_delay):
return conn.db['backlog']\
.find({'assignment_timestamp': {'$lt': time() - reassign_delay}})
.find({'assignment_timestamp': {'$lt': time() - reassign_delay}},
projection={'_id': False})
@register_query(MongoDBConnection)
def get_transaction_from_block(conn, transaction_id, block_id):
return conn.db['bigchain'].aggregate([
{'$match': {'id': block_id}},
{'$project': {
'block.transactions': {
'$filter': {
'input': '$block.transactions',
'as': 'transaction',
'cond': {
'$eq': ['$$transaction.id', transaction_id]
try:
return conn.db['bigchain'].aggregate([
{'$match': {'id': block_id}},
{'$project': {
'block.transactions': {
'$filter': {
'input': '$block.transactions',
'as': 'transaction',
'cond': {
'$eq': ['$$transaction.id', transaction_id]
}
}
}
}
}}]).next()['block']['transactions'][0]
}}]).next()['block']['transactions'].pop()
except (StopIteration, IndexError):
# StopIteration is raised if the block was not found
# IndexError is returned if the block is found but no transactions
# match
return
@register_query(MongoDBConnection)
def get_transaction_from_backlog(conn, transaction_id):
return conn.db['backlog'].find_one({'id': transaction_id})
return conn.db['backlog']\
.find_one({'id': transaction_id},
projection={'_id': False, 'assignee': False,
'assignment_timestamp': False})
@register_query(MongoDBConnection)
@ -70,33 +85,83 @@ def get_blocks_status_from_transaction(conn, transaction_id):
@register_query(MongoDBConnection)
def get_txids_by_asset_id(conn, asset_id):
return conn.db['bigchain']\
.find({'block.transactions.asset.id': asset_id},
projection=['id'])
# get the txid of the create transaction for asset_id
cursor = conn.db['bigchain'].aggregate([
{'$match': {
'block.transactions.id': asset_id,
'block.transactions.operation': 'CREATE'
}},
{'$unwind': '$block.transactions'},
{'$match': {
'block.transactions.id': asset_id,
'block.transactions.operation': 'CREATE'
}},
{'$project': {'block.transactions.id': True}}
])
create_tx_txids = (elem['block']['transactions']['id'] for elem in cursor)
# get txids of transfer transaction with asset_id
cursor = conn.db['bigchain'].aggregate([
{'$match': {
'block.transactions.asset.id': asset_id
}},
{'$unwind': '$block.transactions'},
{'$match': {
'block.transactions.asset.id': asset_id
}},
{'$project': {'block.transactions.id': True}}
])
transfer_tx_ids = (elem['block']['transactions']['id'] for elem in cursor)
return chain(create_tx_txids, transfer_tx_ids)
@register_query(MongoDBConnection)
def get_asset_by_id(conn, asset_id):
return conn.db['bigchain']\
.find_one({'block.transactions.asset.id': asset_id,
'block.transactions.asset.operation': 'CREATE'},
projection=['block.transactions.asset'])
cursor = conn.db['bigchain'].aggregate([
{'$match': {
'block.transactions.id': asset_id,
'block.transactions.operation': 'CREATE'
}},
{'$unwind': '$block.transactions'},
{'$match': {
'block.transactions.id': asset_id,
'block.transactions.operation': 'CREATE'
}},
{'$project': {'block.transactions.asset': True}}
])
# we need to access some nested fields before returning so lets use a
# generator to avoid having to read all records on the cursor at this point
return (elem['block']['transactions'] for elem in cursor)
@register_query(MongoDBConnection)
def get_spent(conn, transaction_id, condition_id):
return conn.db['bigchain']\
.find_one({'block.transactions.fulfillments.input.txid':
transaction_id,
'block.transactions.fulfillments.input.cid':
condition_id})
def get_spent(conn, transaction_id, output):
cursor = conn.db['bigchain'].aggregate([
{'$unwind': '$block.transactions'},
{'$match': {
'block.transactions.inputs.fulfills.txid': transaction_id,
'block.transactions.inputs.fulfills.output': output
}}
])
# we need to access some nested fields before returning so lets use a
# generator to avoid having to read all records on the cursor at this point
return (elem['block']['transactions'] for elem in cursor)
@register_query(MongoDBConnection)
def get_owned_ids(conn, owner):
return conn.db['bigchain']\
.find({'block.transactions.transaction.conditions.owners_after':
owner})
cursor = conn.db['bigchain'].aggregate([
{'$unwind': '$block.transactions'},
{'$match': {
'block.transactions.outputs.public_keys': {
'$elemMatch': {'$eq': owner}
}
}}
])
# we need to access some nested fields before returning so lets use a
# generator to avoid having to read all records on the cursor at this point
return (elem['block']['transactions'] for elem in cursor)
@register_query(MongoDBConnection)
@ -121,7 +186,8 @@ def write_block(conn, block):
@register_query(MongoDBConnection)
def get_block(conn, block_id):
return conn.db['bigchain'].find_one({'id': block_id})
return conn.db['bigchain'].find_one({'id': block_id},
projection={'_id': False})
@register_query(MongoDBConnection)
@ -147,9 +213,10 @@ def write_vote(conn, vote):
@register_query(MongoDBConnection)
def get_genesis_block(conn):
return conn.db['bigchain'].find_one({
'block.transactions.0.operation': 'GENESIS'
})
return conn.db['bigchain'].find_one(
{'block.transactions.0.operation': 'GENESIS'},
{'_id': False}
)
@register_query(MongoDBConnection)
@ -184,4 +251,18 @@ def get_last_voted_block(conn, node_pubkey):
@register_query(MongoDBConnection)
def get_unvoted_blocks(conn, node_pubkey):
pass
return conn.db['bigchain'].aggregate([
{'$lookup': {
'from': 'votes',
'localField': 'id',
'foreignField': 'vote.voting_for_block',
'as': 'votes'
}},
{'$match': {
'votes.node_pubkey': {'$ne': node_pubkey},
'block.transactions.operation': {'$ne': 'GENESIS'}
}},
{'$project': {
'votes': False, '_id': False
}}
])

View File

@ -72,6 +72,12 @@ def create_bigchain_secondary_index(conn, dbname):
def create_backlog_secondary_index(conn, dbname):
logger.info('Create `backlog` secondary index.')
# secondary index on the transaction id with a uniqueness constraint
# to make sure there are no duplicated transactions in the backlog
conn.conn[dbname]['backlog'].create_index('id',
name='transaction_id',
unique=True)
# compound index to read transactions from the backlog per assignee
conn.conn[dbname]['backlog']\
.create_index([('assignee', ASCENDING),

View File

@ -1,28 +1,29 @@
"""Custom exceptions used in the `bigchaindb` package.
"""
from bigchaindb.exceptions import BigchainDBError
class ConfigurationError(Exception):
class ConfigurationError(BigchainDBError):
"""Raised when there is a problem with server configuration"""
class OperationError(Exception):
class OperationError(BigchainDBError):
"""Raised when an operation cannot go through"""
class TransactionDoesNotExist(Exception):
class TransactionDoesNotExist(BigchainDBError):
"""Raised if the transaction is not in the database"""
class TransactionOwnerError(Exception):
class TransactionOwnerError(BigchainDBError):
"""Raised if a user tries to transfer a transaction they don't own"""
class DoubleSpend(Exception):
class DoubleSpend(BigchainDBError):
"""Raised if a double spend is found"""
class ValidationError(Exception):
class ValidationError(BigchainDBError):
"""Raised if there was an error in validation"""
@ -35,56 +36,56 @@ class SchemaValidationError(ValidationError):
"""Raised if there was any error validating an object's schema"""
class InvalidSignature(Exception):
class InvalidSignature(BigchainDBError):
"""Raised if there was an error checking the signature for a particular
operation"""
class DatabaseAlreadyExists(Exception):
class DatabaseAlreadyExists(BigchainDBError):
"""Raised when trying to create the database but the db is already there"""
class DatabaseDoesNotExist(Exception):
class DatabaseDoesNotExist(BigchainDBError):
"""Raised when trying to delete the database but the db is not there"""
class KeypairNotFoundException(Exception):
class KeypairNotFoundException(BigchainDBError):
"""Raised if operation cannot proceed because the keypair was not given"""
class KeypairMismatchException(Exception):
class KeypairMismatchException(BigchainDBError):
"""Raised if the private key(s) provided for signing don't match any of the
current owner(s)"""
class StartupError(Exception):
class StartupError(BigchainDBError):
"""Raised when there is an error starting up the system"""
class ImproperVoteError(Exception):
class ImproperVoteError(BigchainDBError):
"""Raised if a vote is not constructed correctly, or signed incorrectly"""
class MultipleVotesError(Exception):
class MultipleVotesError(BigchainDBError):
"""Raised if a voter has voted more than once"""
class GenesisBlockAlreadyExistsError(Exception):
class GenesisBlockAlreadyExistsError(BigchainDBError):
"""Raised when trying to create the already existing genesis block"""
class CyclicBlockchainError(Exception):
class CyclicBlockchainError(BigchainDBError):
"""Raised when there is a cycle in the blockchain"""
class TransactionNotInValidBlock(Exception):
class TransactionNotInValidBlock(BigchainDBError):
"""Raised when a transfer transaction is attempting to fulfill the
outputs of a transaction that is in an invalid or undecided block"""
class AssetIdMismatch(Exception):
class AssetIdMismatch(BigchainDBError):
"""Raised when multiple transaction inputs related to different assets"""
class AmountError(Exception):
class AmountError(BigchainDBError):
"""Raised when there is a problem with a transaction's output amounts"""

2
bigchaindb/exceptions.py Normal file
View File

@ -0,0 +1,2 @@
class BigchainDBError(Exception):
    """Base class for all BigchainDB-specific exceptions.

    Catching this type catches every exception raised by the
    ``bigchaindb`` package.
    """

View File

@ -3,7 +3,8 @@ from flask_restful import Api
from bigchaindb.web.views import (
info,
statuses,
transactions as tx, unspents,
transactions as tx,
unspents,
votes,
)

View File

@ -23,25 +23,25 @@ class StatusApi(Resource):
parser.add_argument('block_id', type=str)
args = parser.parse_args(strict=True)
tx_id = args['tx_id']
block_id = args['block_id']
if sum(arg is not None for arg in args.values()) != 1:
# logical xor - exactly one query argument required
if bool(tx_id) == bool(block_id):
return make_error(400, "Provide exactly one query parameter. Choices are: block_id, tx_id")
pool = current_app.config['bigchain_pool']
status, links = None, None
with pool() as bigchain:
if args['tx_id']:
status = bigchain.get_status(args['tx_id'])
if tx_id:
status = bigchain.get_status(tx_id)
links = {
"tx": "/transactions/{}".format(args['tx_id'])
"tx": "/transactions/{}".format(tx_id)
}
elif args['block_id']:
block = bigchain.get_block(block_id=args['block_id'])
if not block:
return make_error(404)
status = bigchain.block_election_status(block['id'], block['block']['voters'])
elif block_id:
_, status = bigchain.get_block(block_id=block_id, include_status=True)
# TODO: enable once blocks endpoint is available
# links = {
# "block": "/blocks/{}".format(args['block_id'])

View File

@ -2,9 +2,10 @@ version: '2'
services:
mdb:
image: mongo
image: mongo:3.4.1
ports:
- "27017"
command: mongod --replSet=rs0
rdb:
image: rethinkdb
@ -15,7 +16,7 @@ services:
- rdb-data
rdb-data:
image: rethinkdb
image: rethinkdb:2.3.5
volumes:
- /data
command: "true"

View File

@ -127,3 +127,26 @@ If there is only one *current owner*, the fulfillment will be a simple signature
See the reference on :ref:`inputs <Input>` for descriptions of the meaning of each field.
Multiple Current Owners
```````````````````````
If there are multiple *current owners*, the fulfillment will be a little different from `One Current Owner`_. Suppose it has two current owners.
.. code-block:: json
{
"owners_before": ["<public key of the first owner before the transaction happened>","<public key of the second owner before the transaction happened>"],
"fulfillment": "cf:2:AQIBAgEBYwAEYEv6O5HjHGl7OWo2Tu5mWcWQcL_OGrFuUjyej-dK3LM99TbZsRd8c9luQhU30xCH5AdNaupxg-pLHuk8DoSaDA1MHQGXUZ80a_cV-4UaaaCpdey8K0CEcJxre0X96hTHCwABAWMABGBnsuHExhuSj5Mdm-q0KoPgX4nAt0s00k1WTMCzuUpQIp6aStLoTSMlsvS4fmDtOSv9gubekKLuHTMAk-LQFSKF1JdzwaVWAA2UOv0v_OS2gY3A-r0kRq8HtzjYdcmVswUA",
"fulfills": {
"output": 0,
"txid": "e4805f1bfc999d6409b38e3a4c3b2fafad7c1280eb0d441da7083e945dd89eb8"
}
}
- ``owners_before``: A list of public keys of the owners before the transaction; in this case it has two owners, hence two public keys.
- ``fulfillment``: A crypto-conditions URI that encodes the cryptographic fulfillments like signatures and others; 'cf' indicates this is a fulfillment, '2' indicates the condition type is THRESHOLD-SHA-256 (while '4' in `One Current Owner`_ indicates its condition type is ED25519).
- ``fulfills``: Pointer to an output from a previous transaction that is being spent
- ``output``: The index of the output in a previous transaction
- ``txid``: ID of the transaction

View File

@ -4,3 +4,6 @@ test=pytest
[coverage:run]
source = .
omit = *test*
[flake8]
max_line_length = 119

View File

@ -8,7 +8,8 @@ A few notes:
- [`tests/common/`](./common/) contains self-contained tests only testing
[`bigchaindb/common/`](../bigchaindb/common/)
- [`tests/db/`](./db/) contains tests requiring the database backend (e.g. RethinkDB)
- [`tests/backend/`](./backend/) contains tests requiring
the database backend (RethinkDB or MongoDB)
## Writing Tests
@ -20,9 +21,24 @@ We write unit and integration tests for our Python code using the [pytest](http:
### Running Tests Directly
If you installed BigchainDB Server using `pip install bigchaindb`, then you didn't install the tests. Before you can run all the tests, you must install BigchainDB from source. The [`CONTRIBUTING.md` file](../CONTRIBUTING.md) has instructions for how to do that.
If you installed BigchainDB Server using `pip install bigchaindb`, then you
didn't install the tests. Before you can run all the tests, you must install
BigchainDB from source. The [`CONTRIBUTING.md` file](../CONTRIBUTING.md) has
instructions for how to do that.
Next, make sure you have RethinkDB running in the background (e.g. using `rethinkdb --daemon`).
Next, make sure you have RethinkDB or MongoDB running in the background. You
can run RethinkDB using `rethinkdb --daemon` or MongoDB using `mongod
--replSet=rs0`.
The `pytest` command has many options. If you want to learn about all the
things you can do with pytest, see [the pytest
documentation](http://pytest.org/latest/). We've also added a customization to
pytest:
`--database-backend`: Defines the backend to use for the tests. It defaults to
`rethinkdb`
It must be one of the backends available in the [server
configuration](https://docs.bigchaindb.com/projects/server/en/latest/server-reference/configuration.html).
Now you can run all tests using:
```text
@ -39,18 +55,21 @@ or:
python setup.py test
```
**Note**: the above pytest commands default to use RethinkDB as the backend. If
you wish to run the tests against MongoDB add the `--database-backend=mongodb`
to the `pytest` command.
How does `python setup.py test` work? The documentation for [pytest-runner](https://pypi.python.org/pypi/pytest-runner) explains.
The `pytest` command has many options. If you want to learn about all the things you can do with pytest, see [the pytest documentation](http://pytest.org/latest/). We've also added a customization to pytest:
`--database-backend`: Defines the backend to use for the tests.
It must be one of the backends available in the [server configuration](https://docs.bigchaindb.com/projects/server/en/latest/server-reference/configuration.html).
### Running Tests with Docker Compose
You can also use [Docker Compose](https://docs.docker.com/compose/) to run all the tests.
#### With RethinkDB as the backend
First, start `RethinkDB` in the background:
```text
@ -63,14 +82,29 @@ then run the tests using:
$ docker-compose run --rm bdb py.test -v
```
If you've upgraded to a newer version of BigchainDB, you might have to rebuild the images before
being able to run the tests. Run:
#### With MongoDB as the backend
First, start `MongoDB` in the background:
```text
$ docker-compose up -d mdb
```
then run the tests using:
```text
$ docker-compose run --rm bdb-mdb py.test -v
```
If you've upgraded to a newer version of BigchainDB, you might have to rebuild
the images before being able to run the tests. Run:
```text
$ docker-compose build
```
to rebuild all the images (usually you only need to rebuild the `bdb` image).
to rebuild all the images (usually you only need to rebuild the `bdb` and
`bdb-mdb` images).
## Automated Testing of All Pull Requests

View File

@ -0,0 +1 @@
"""MongoDB changefeed tests"""

View File

@ -0,0 +1,367 @@
import pytest
pytestmark = pytest.mark.bdb
def test_write_transaction(signed_create_tx):
    """A transaction written through the query layer lands in the backlog."""
    from bigchaindb.backend import connect, query

    connection = connect()
    query.write_transaction(connection, signed_create_tx.to_dict())

    # read the document straight from the collection, dropping mongo's _id
    stored = connection.db.backlog.find_one({'id': signed_create_tx.id},
                                            {'_id': False})
    assert stored == signed_create_tx.to_dict()
def test_update_transaction(signed_create_tx):
    """update_transaction overwrites the requested fields of a backlog doc."""
    from bigchaindb.backend import connect, query

    connection = connect()

    # update_transaction can touch any field; exercise the same two fields
    # that bigchaindb core updates (assignee / assignment_timestamp)
    tx_doc = signed_create_tx.to_dict()
    tx_doc.update({'assignee': 'aaa', 'assignment_timestamp': 10})
    connection.db.backlog.insert_one(tx_doc)

    query.update_transaction(connection, tx_doc['id'],
                             {'assignee': 'bbb', 'assignment_timestamp': 20})

    stored = connection.db.backlog.find_one({'id': tx_doc['id']},
                                            {'_id': False})
    assert stored['assignee'] == 'bbb'
    assert stored['assignment_timestamp'] == 20
def test_delete_transaction(signed_create_tx):
    """Deleting a transaction removes its document from the backlog."""
    from bigchaindb.backend import connect, query

    connection = connect()
    inserted = connection.db.backlog.insert_one(signed_create_tx.to_dict())

    query.delete_transaction(connection, signed_create_tx.id)

    # look the document up via the _id mongo assigned at insert time
    assert connection.db.backlog.find_one(
        {'_id': inserted.inserted_id}) is None
def test_get_stale_transactions(signed_create_tx):
    """Only transactions assigned longer than reassign_delay ago are stale."""
    import time
    from bigchaindb.backend import connect, query

    connection = connect()
    now = time.time()

    # two copies of the same transaction: one freshly assigned, one 60s old
    fresh = signed_create_tx.to_dict()
    fresh.update({'id': 'notstale', 'assignment_timestamp': now})
    stale = signed_create_tx.to_dict()
    stale.update({'id': 'stale', 'assignment_timestamp': now - 60})
    connection.db.backlog.insert_one(fresh)
    connection.db.backlog.insert_one(stale)

    # with a 30 second delay only the old transaction qualifies
    result = list(query.get_stale_transactions(connection, 30))
    assert [tx['id'] for tx in result] == ['stale']
def test_get_transaction_from_block(user_pk):
    """A transaction is found only for a matching (tx id, block id) pair."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Transaction, Block

    connection = connect()

    # one block holding two CREATE transactions
    transactions = [Transaction.create([user_pk], [([user_pk], 1)]),
                    Transaction.create([user_pk], [([user_pk], 1)])]
    block = Block(transactions=transactions)
    connection.db.bigchain.insert_one(block.to_dict())

    found = query.get_transaction_from_block(connection,
                                             transactions[0].id, block.id)
    assert found == transactions[0].to_dict()

    # a wrong block id or a wrong transaction id both yield None
    assert query.get_transaction_from_block(
        connection, transactions[0].id, 'aaa') is None
    assert query.get_transaction_from_block(
        connection, 'aaa', block.id) is None
def test_get_transaction_from_backlog(create_tx):
    """The backlog query returns the transaction without mongo metadata."""
    from bigchaindb.backend import connect, query

    connection = connect()
    connection.db.backlog.insert_one(create_tx.to_dict())

    found = query.get_transaction_from_backlog(connection, create_tx.id)
    assert found == create_tx.to_dict()
def test_get_block_status_from_transaction(create_tx):
    """Exactly one block (with its voters) matches a contained transaction."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    block = Block(transactions=[create_tx], voters=['aaa', 'bbb', 'ccc'])
    connection.db.bigchain.insert_one(block.to_dict())

    matches = list(query.get_blocks_status_from_transaction(connection,
                                                            create_tx.id))
    assert len(matches) == 1
    only = matches.pop()
    assert only['id'] == block.id
    assert only['block']['voters'] == block.voters
def test_get_txids_by_asset_id(signed_create_tx, signed_transfer_tx):
    """CREATE and TRANSFER txids of an asset are returned, create first."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()

    # one block per transaction: the create and the transfer
    for tx in (signed_create_tx, signed_transfer_tx):
        connection.db.bigchain.insert_one(Block(transactions=[tx]).to_dict())

    txids = list(query.get_txids_by_asset_id(connection, signed_create_tx.id))
    assert txids == [signed_create_tx.id, signed_transfer_tx.id]
def test_get_asset_by_id(create_tx):
    """The asset payload of a CREATE transaction is retrievable by asset id."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()

    # give the create transaction an asset and store it inside a block
    create_tx.asset = {'msg': 'aaa'}
    connection.db.bigchain.insert_one(
        Block(transactions=[create_tx]).to_dict())

    assets = list(query.get_asset_by_id(connection, create_tx.id))
    assert len(assets) == 1
    assert assets[0]['asset'] == create_tx.asset
def test_get_spent(signed_create_tx, signed_transfer_tx):
    """Spending output 0 of the create tx is reported via the transfer tx."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()

    # one block per transaction: the create and the transfer spending it
    for tx in (signed_create_tx, signed_transfer_tx):
        connection.db.bigchain.insert_one(Block(transactions=[tx]).to_dict())

    spents = list(query.get_spent(connection, signed_create_tx.id, 0))
    assert spents == [signed_transfer_tx.to_dict()]
def test_get_owned_ids(signed_create_tx, user_pk):
    """Transactions whose outputs belong to a public key are returned."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    connection.db.bigchain.insert_one(
        Block(transactions=[signed_create_tx]).to_dict())

    owned = list(query.get_owned_ids(connection, user_pk))
    assert owned == [signed_create_tx.to_dict()]
def test_get_votes_by_block_id(signed_create_tx, structurally_valid_vote):
    """All votes cast for a given block id must be returned."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block
    conn = connect()

    # create and insert a block
    block = Block(transactions=[signed_create_tx])
    conn.db.bigchain.insert_one(block.to_dict())

    # insert the same vote document twice; insert_one adds '_id' to the
    # dict in place, so it is popped before the second insert so that
    # mongo assigns a fresh one
    structurally_valid_vote['vote']['voting_for_block'] = block.id
    conn.db.votes.insert_one(structurally_valid_vote)
    structurally_valid_vote['vote']['voting_for_block'] = block.id
    structurally_valid_vote.pop('_id')
    conn.db.votes.insert_one(structurally_valid_vote)

    votes = list(query.get_votes_by_block_id(conn, block.id))

    assert len(votes) == 2
    assert votes[0]['vote']['voting_for_block'] == block.id
    assert votes[1]['vote']['voting_for_block'] == block.id
def test_get_votes_by_block_id_and_voter(signed_create_tx,
                                         structurally_valid_vote):
    """Only the votes cast by the given node for the block are returned."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block
    conn = connect()

    # create and insert a block
    block = Block(transactions=[signed_create_tx])
    conn.db.bigchain.insert_one(block.to_dict())

    # insert two votes for the block from different nodes; the '_id' that
    # insert_one adds to the dict in place is popped before re-inserting
    structurally_valid_vote['vote']['voting_for_block'] = block.id
    structurally_valid_vote['node_pubkey'] = 'aaa'
    conn.db.votes.insert_one(structurally_valid_vote)
    structurally_valid_vote['vote']['voting_for_block'] = block.id
    structurally_valid_vote['node_pubkey'] = 'bbb'
    structurally_valid_vote.pop('_id')
    conn.db.votes.insert_one(structurally_valid_vote)

    votes = list(query.get_votes_by_block_id_and_voter(conn, block.id, 'aaa'))

    assert len(votes) == 1
    assert votes[0]['node_pubkey'] == 'aaa'
def test_write_block(signed_create_tx):
    """write_block accepts a Block object and persists its dict form."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    block = Block(transactions=[signed_create_tx])
    query.write_block(connection, block)

    stored = connection.db.bigchain.find_one({'id': block.id}, {'_id': False})
    assert stored == block.to_dict()
def test_get_block(signed_create_tx):
    """get_block returns the stored block dict without mongo metadata."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    block = Block(transactions=[signed_create_tx])
    connection.db.bigchain.insert_one(block.to_dict())

    assert query.get_block(connection, block.id) == block.to_dict()
def test_has_transaction(signed_create_tx):
    """has_transaction is truthy for stored txids and False otherwise."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    connection.db.bigchain.insert_one(
        Block(transactions=[signed_create_tx]).to_dict())

    assert query.has_transaction(connection, signed_create_tx.id)
    assert query.has_transaction(connection, 'aaa') is False
def test_count_blocks(signed_create_tx):
    """count_blocks reports the number of documents in the bigchain table."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    block = Block(transactions=[signed_create_tx])

    # insert the block twice; each to_dict() call builds a fresh document,
    # so the second insert gets its own mongo _id
    connection.db.bigchain.insert_one(block.to_dict())
    connection.db.bigchain.insert_one(block.to_dict())

    assert query.count_blocks(connection) == 2
def test_count_backlog(signed_create_tx):
    """count_backlog reports the number of transactions in the backlog."""
    from bigchaindb.backend import connect, query

    connection = connect()

    # insert two transaction documents; the metadata change makes the
    # second document distinct from the first
    connection.db.backlog.insert_one(signed_create_tx.to_dict())
    signed_create_tx.metadata = {'msg': 'aaa'}
    connection.db.backlog.insert_one(signed_create_tx.to_dict())

    assert query.count_backlog(connection) == 2
def test_write_vote(structurally_valid_vote):
    """A vote written through the query layer lands in the votes table."""
    from bigchaindb.backend import connect, query

    connection = connect()
    query.write_vote(connection, structurally_valid_vote)

    # the insert adds '_id' to the vote dict in place, so an exact
    # comparison against the stored document still holds
    stored = connection.db.votes.find_one(
        {'node_pubkey': structurally_valid_vote['node_pubkey']})
    assert stored == structurally_valid_vote
def test_get_genesis_block(genesis_block):
    """The genesis block created by the fixture is found by the query."""
    from bigchaindb.backend import connect, query

    connection = connect()
    assert query.get_genesis_block(connection) == genesis_block.to_dict()
def test_get_last_voted_block(genesis_block, signed_create_tx, b):
    """Follow the vote chain to the newest voted block; a cycle must raise."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block
    from bigchaindb.common.exceptions import CyclicBlockchainError
    conn = connect()

    # with no votes beyond the fixture's, the last voted block is genesis
    assert query.get_last_voted_block(conn, b.me) == genesis_block.to_dict()

    # create and insert a new block plus a vote chaining it after genesis
    block = Block(transactions=[signed_create_tx])
    conn.db.bigchain.insert_one(block.to_dict())
    vote = b.vote(block.id, genesis_block.id, True)
    conn.db.votes.insert_one(vote)

    assert query.get_last_voted_block(conn, b.me) == block.to_dict()

    # force a bad chain: re-insert the vote (popping the '_id' that
    # insert_one added in place) pointing back at genesis, which closes
    # the cycle genesis -> block -> genesis
    vote.pop('_id')
    vote['vote']['voting_for_block'] = genesis_block.id
    vote['vote']['previous_block'] = block.id
    conn.db.votes.insert_one(vote)

    with pytest.raises(CyclicBlockchainError):
        query.get_last_voted_block(conn, b.me)
def test_get_unvoted_blocks(signed_create_tx):
    """A block with no votes from a node is reported as unvoted for it."""
    from bigchaindb.backend import connect, query
    from bigchaindb.models import Block

    connection = connect()
    block = Block(transactions=[signed_create_tx], node_pubkey='aaa')
    connection.db.bigchain.insert_one(block.to_dict())

    unvoted = list(query.get_unvoted_blocks(connection, 'aaa'))
    assert unvoted == [block.to_dict()]

View File

@ -25,7 +25,8 @@ def test_init_creates_db_tables_and_indexes():
'transaction_id']
indexes = conn.conn[dbname]['backlog'].index_information().keys()
assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp']
assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
'transaction_id']
indexes = conn.conn[dbname]['votes'].index_information().keys()
assert sorted(indexes) == ['_id_', 'block_and_voter']
@ -85,26 +86,22 @@ def test_create_secondary_indexes():
# Backlog table
indexes = conn.conn[dbname]['backlog'].index_information().keys()
assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp']
assert sorted(indexes) == ['_id_', 'assignee__transaction_timestamp',
'transaction_id']
# Votes table
indexes = conn.conn[dbname]['votes'].index_information().keys()
assert sorted(indexes) == ['_id_', 'block_and_voter']
def test_drop():
import bigchaindb
def test_drop(dummy_db):
from bigchaindb import backend
from bigchaindb.backend import schema
conn = backend.connect()
dbname = bigchaindb.config['database']['name']
# The db is set up by fixtures
assert dbname in conn.conn.database_names()
schema.drop_database(conn, dbname)
assert dbname not in conn.conn.database_names()
assert dummy_db in conn.conn.database_names()
schema.drop_database(conn, dummy_db)
assert dummy_db not in conn.conn.database_names()
def test_get_replica_set_name_not_enabled():

View File

@ -154,8 +154,7 @@ def test_invalid_output_initialization(cond_uri, user_pub):
Output(cond_uri, [user_pub], 0)
def test_generate_output_split_half_recursive(user_pub, user2_pub,
user3_pub):
def test_generate_output_split_half_recursive(user_pub, user2_pub, user3_pub):
from bigchaindb.common.transaction import Output
from cryptoconditions import Ed25519Fulfillment, ThresholdSha256Fulfillment
@ -174,8 +173,8 @@ def test_generate_output_split_half_recursive(user_pub, user2_pub,
assert cond.fulfillment.to_dict() == expected.to_dict()
def test_generate_outputs_split_half_single_owner(user_pub, user2_pub,
user3_pub):
def test_generate_outputs_split_half_single_owner(user_pub,
user2_pub, user3_pub):
from bigchaindb.common.transaction import Output
from cryptoconditions import Ed25519Fulfillment, ThresholdSha256Fulfillment
@ -231,8 +230,7 @@ def test_generate_output_single_owner_with_output(user_pub):
assert cond.fulfillment.to_dict() == expected.to_dict()
def test_generate_output_invalid_parameters(user_pub, user2_pub,
user3_pub):
def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub):
from bigchaindb.common.transaction import Output
with raises(ValueError):
@ -289,7 +287,6 @@ def test_create_default_asset_on_tx_initialization(asset_definition):
def test_transaction_serialization(user_input, user_output, data):
from bigchaindb.common.transaction import Transaction
from .utils import validate_transaction_model
tx_id = 'l0l'
@ -559,7 +556,7 @@ def test_validate_multiple_inputs(user_input, user_output, user_priv,
expected_first_bytes = str(expected_first).encode()
expected_first.inputs[0].fulfillment.sign(expected_first_bytes,
PrivateKey(user_priv))
PrivateKey(user_priv))
expected_second_bytes = str(expected_second).encode()
expected_second.inputs[0].fulfillment.sign(expected_second_bytes,
PrivateKey(user_priv))

View File

@ -136,11 +136,18 @@ def _configure_bigchaindb(request):
def _setup_database(_configure_bigchaindb):
from bigchaindb import config
from bigchaindb.backend import connect, schema
from bigchaindb.backend.mongodb.schema import initialize_replica_set
from bigchaindb.common.exceptions import DatabaseDoesNotExist
print('Initializing test db')
dbname = config['database']['name']
conn = connect()
# if we are setting up mongodb for the first time we need to make sure
# that the replica set is initialized before doing any operation in the
# database
if config['database']['backend'] == 'mongodb':
initialize_replica_set(conn)
try:
schema.drop_database(conn, dbname)
except DatabaseDoesNotExist:
@ -268,9 +275,11 @@ def inputs(user_pk, b, genesis_block):
prev_block_id = genesis_block.id
for block in range(4):
transactions = [
Transaction.create([b.me], [([user_pk], 1)],
metadata={'msg': random.random()})
.sign([b.me_private])
Transaction.create(
[b.me],
[([user_pk], 1)],
metadata={'msg': random.random()},
).sign([b.me_private])
for _ in range(10)
]
block = b.create_block(transactions)
@ -290,9 +299,11 @@ def inputs_shared(user_pk, user2_pk, genesis_block):
prev_block_id = genesis_block.id
for block in range(4):
transactions = [
Transaction.create([b.me], [user_pk, user2_pk],
metadata={'msg': random.random()})
.sign([b.me_private])
Transaction.create(
[b.me],
[user_pk, user2_pk],
metadata={'msg': random.random()},
).sign([b.me_private])
for _ in range(10)
]
block = b.create_block(transactions)
@ -315,10 +326,10 @@ def dummy_db(request):
if xdist_suffix:
dbname = '{}_{}'.format(dbname, xdist_suffix)
try:
schema.create_database(conn, dbname)
schema.init_database(conn, dbname)
except DatabaseAlreadyExists:
schema.drop_database(conn, dbname)
schema.create_database(conn, dbname)
schema.init_database(conn, dbname)
yield dbname
try:
schema.drop_database(conn, dbname)

View File

@ -97,18 +97,18 @@ class TestBigchainApi(object):
tx = Transaction.create([b.me], [([b.me], 1)])
tx = tx.sign([b.me_private])
monkeypatch.setattr('time.time', lambda: 1)
monkeypatch.setattr('time.time', lambda: 1000000000)
block1 = b.create_block([tx])
b.write_block(block1)
monkeypatch.setattr('time.time', lambda: 2)
monkeypatch.setattr('time.time', lambda: 1000000020)
transfer_tx = Transaction.transfer(tx.to_inputs(), [([b.me], 1)],
asset_id=tx.id)
transfer_tx = transfer_tx.sign([b.me_private])
block2 = b.create_block([transfer_tx])
b.write_block(block2)
monkeypatch.setattr('time.time', lambda: 3333333333)
monkeypatch.setattr('time.time', lambda: 1000000030)
transfer_tx2 = Transaction.transfer(tx.to_inputs(), [([b.me], 1)],
asset_id=tx.id)
transfer_tx2 = transfer_tx2.sign([b.me_private])
@ -132,11 +132,11 @@ class TestBigchainApi(object):
tx = Transaction.create([b.me], [([b.me], 1)])
tx = tx.sign([b.me_private])
monkeypatch.setattr('time.time', lambda: 1)
monkeypatch.setattr('time.time', lambda: 1000000000)
block1 = b.create_block([tx])
b.write_block(block1)
monkeypatch.setattr('time.time', lambda: 2222222222)
monkeypatch.setattr('time.time', lambda: 1000000020)
block2 = b.create_block([tx])
b.write_block(block2)
@ -160,7 +160,7 @@ class TestBigchainApi(object):
block1 = b.create_block([tx1])
b.write_block(block1)
monkeypatch.setattr('time.time', lambda: 2000000000)
monkeypatch.setattr('time.time', lambda: 1000000020)
tx2 = Transaction.create([b.me], [([b.me], 1)],
metadata={'msg': random.random()})
tx2 = tx2.sign([b.me_private])
@ -180,6 +180,7 @@ class TestBigchainApi(object):
@pytest.mark.usefixtures('inputs')
def test_write_transaction(self, b, user_pk, user_sk):
from bigchaindb import Bigchain
from bigchaindb.models import Transaction
input_tx = b.get_owned_ids(user_pk).pop()
@ -188,14 +189,12 @@ class TestBigchainApi(object):
tx = Transaction.transfer(inputs, [([user_pk], 1)],
asset_id=input_tx.id)
tx = tx.sign([user_sk])
response = b.write_transaction(tx)
b.write_transaction(tx)
assert response['skipped'] == 0
assert response['deleted'] == 0
assert response['unchanged'] == 0
assert response['errors'] == 0
assert response['replaced'] == 0
assert response['inserted'] == 1
tx_from_db, status = b.get_transaction(tx.id, include_status=True)
assert tx_from_db.to_dict() == tx.to_dict()
assert status == Bigchain.TX_IN_BACKLOG
@pytest.mark.usefixtures('inputs')
def test_read_transaction(self, b, user_pk, user_sk):

View File

@ -279,7 +279,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
tx = Transaction.create([b.me], [([test_user_pub], 1)])
tx = tx.sign([b.me_private])
monkeypatch.setattr('time.time', lambda: 1111111111)
monkeypatch.setattr('time.time', lambda: 1000000000)
block = b.create_block([tx])
b.write_block(block)
@ -289,7 +289,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
asset_id=tx.id)
tx2 = tx2.sign([test_user_priv])
monkeypatch.setattr('time.time', lambda: 2222222222)
monkeypatch.setattr('time.time', lambda: 2000000000)
block2 = b.create_block([tx2])
b.write_block(block2)
@ -314,7 +314,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
'previous_block': genesis_block.id,
'is_block_valid': True,
'invalid_reason': None,
'timestamp': '2222222222'}
'timestamp': '2000000000'}
serialized_vote = utils.serialize(vote_doc['vote']).encode()
assert vote_doc['node_pubkey'] == b.me
@ -328,7 +328,7 @@ def test_valid_block_voting_with_transfer_transactions(monkeypatch,
'previous_block': block.id,
'is_block_valid': True,
'invalid_reason': None,
'timestamp': '2222222222'}
'timestamp': '2000000000'}
serialized_vote2 = utils.serialize(vote2_doc['vote']).encode()
assert vote2_doc['node_pubkey'] == b.me
@ -498,15 +498,15 @@ def test_voter_considers_unvoted_blocks_when_single_node(monkeypatch, b):
outpipe = Pipe()
monkeypatch.setattr('time.time', lambda: 1111111111)
monkeypatch.setattr('time.time', lambda: 1000000000)
block_ids = []
# insert blocks in the database while the voter process is not listening
# (these blocks won't appear in the changefeed)
monkeypatch.setattr('time.time', lambda: 2222222222)
monkeypatch.setattr('time.time', lambda: 1000000020)
block_1 = dummy_block(b)
block_ids.append(block_1.id)
monkeypatch.setattr('time.time', lambda: 3333333333)
monkeypatch.setattr('time.time', lambda: 1000000030)
b.write_block(block_1)
block_2 = dummy_block(b)
block_ids.append(block_2.id)
@ -522,7 +522,7 @@ def test_voter_considers_unvoted_blocks_when_single_node(monkeypatch, b):
outpipe.get()
# create a new block that will appear in the changefeed
monkeypatch.setattr('time.time', lambda: 4444444444)
monkeypatch.setattr('time.time', lambda: 1000000040)
block_3 = dummy_block(b)
block_ids.append(block_3.id)
b.write_block(block_3)
@ -546,15 +546,15 @@ def test_voter_chains_blocks_with_the_previous_ones(monkeypatch, b):
outpipe = Pipe()
monkeypatch.setattr('time.time', lambda: 1111111111)
monkeypatch.setattr('time.time', lambda: 1000000000)
block_ids = []
monkeypatch.setattr('time.time', lambda: 2222222222)
monkeypatch.setattr('time.time', lambda: 1000000020)
block_1 = dummy_block(b)
block_ids.append(block_1.id)
b.write_block(block_1)
monkeypatch.setattr('time.time', lambda: 3333333333)
monkeypatch.setattr('time.time', lambda: 1000000030)
block_2 = dummy_block(b)
block_ids.append(block_2.id)
b.write_block(block_2)
@ -588,9 +588,9 @@ def test_voter_checks_for_previous_vote(monkeypatch, b):
inpipe = Pipe()
outpipe = Pipe()
monkeypatch.setattr('time.time', lambda: 1111111111)
monkeypatch.setattr('time.time', lambda: 1000000000)
monkeypatch.setattr('time.time', lambda: 2222222222)
monkeypatch.setattr('time.time', lambda: 1000000020)
block_1 = dummy_block(b)
inpipe.put(block_1.to_dict())
assert len(list(query.get_votes_by_block_id(b.connection, block_1.id))) == 0
@ -603,11 +603,11 @@ def test_voter_checks_for_previous_vote(monkeypatch, b):
outpipe.get()
# queue block for voting AGAIN
monkeypatch.setattr('time.time', lambda: 3333333333)
monkeypatch.setattr('time.time', lambda: 1000000030)
inpipe.put(block_1.to_dict())
# queue another block
monkeypatch.setattr('time.time', lambda: 4444444444)
monkeypatch.setattr('time.time', lambda: 1000000040)
block_2 = dummy_block(b)
inpipe.put(block_2.to_dict())

View File

@ -102,7 +102,10 @@ def test_env_config(monkeypatch):
def test_autoconfigure_read_both_from_file_and_env(monkeypatch, request):
file_config = {
'database': {'host': 'test-host'},
'database': {
'host': 'test-host',
'backend': request.config.getoption('--database-backend')
},
'backlog_reassign_delay': 5
}
monkeypatch.setattr('bigchaindb.config_utils.file_config', lambda *args, **kwargs: file_config)

View File

@ -158,4 +158,3 @@ def test_post_invalid_transfer_transaction_returns_400(b, client, user_pk, user_
res = client.post(TX_ENDPOINT, data=json.dumps(transfer_tx.to_dict()))
assert res.status_code == 400

View File

@ -23,7 +23,7 @@ deps =
flake8
skip_install = True
extras = None
commands = flake8 --max-line-length 119 bigchaindb
commands = flake8 bigchaindb tests
[testenv:docsroot]
basepython = {[base]basepython}