Merge remote-tracking branch 'remotes/origin/master' into kyber-master

This commit is contained in:
diminator 2017-05-01 12:53:10 +02:00
commit 5bf2736bf1
25 changed files with 873 additions and 271 deletions

View File

@ -132,7 +132,8 @@ definitions:
- public_keys - public_keys
properties: properties:
amount: amount:
type: integer type: string
pattern: "^[0-9]{1,20}$"
description: | description: |
Integral amount of the asset represented by this output. Integral amount of the asset represented by this output.
In the case of a non divisible asset, this will always be 1. In the case of a non divisible asset, this will always be 1.
@ -158,10 +159,6 @@ definitions:
"$ref": "#/definitions/public_keys" "$ref": "#/definitions/public_keys"
description: | description: |
List of public keys associated with the conditions on an output. List of public keys associated with the conditions on an output.
amount:
type: integer
description: |
Integral amount of the asset represented by this condition.
input: input:
type: "object" type: "object"
description: description:

View File

@ -212,6 +212,8 @@ class Output(object):
owners before a Transaction was confirmed. owners before a Transaction was confirmed.
""" """
MAX_AMOUNT = 9 * 10 ** 18
def __init__(self, fulfillment, public_keys=None, amount=1): def __init__(self, fulfillment, public_keys=None, amount=1):
"""Create an instance of a :class:`~.Output`. """Create an instance of a :class:`~.Output`.
@ -232,6 +234,8 @@ class Output(object):
raise TypeError('`amount` must be an int') raise TypeError('`amount` must be an int')
if amount < 1: if amount < 1:
raise AmountError('`amount` must be greater than 0') raise AmountError('`amount` must be greater than 0')
if amount > self.MAX_AMOUNT:
raise AmountError('`amount` must be <= %s' % self.MAX_AMOUNT)
self.fulfillment = fulfillment self.fulfillment = fulfillment
self.amount = amount self.amount = amount
@ -267,7 +271,7 @@ class Output(object):
output = { output = {
'public_keys': self.public_keys, 'public_keys': self.public_keys,
'condition': condition, 'condition': condition,
'amount': self.amount 'amount': str(self.amount),
} }
return output return output
@ -384,7 +388,11 @@ class Output(object):
except KeyError: except KeyError:
# NOTE: Hashlock condition case # NOTE: Hashlock condition case
fulfillment = data['condition']['uri'] fulfillment = data['condition']['uri']
return cls(fulfillment, data['public_keys'], data['amount']) try:
amount = int(data['amount'])
except ValueError:
raise AmountError('Invalid amount: %s' % data['amount'])
return cls(fulfillment, data['public_keys'], amount)
class Transaction(object): class Transaction(object):

View File

@ -37,6 +37,7 @@ services:
- ./bigchaindb:/usr/src/app/bigchaindb - ./bigchaindb:/usr/src/app/bigchaindb
- ./tests:/usr/src/app/tests - ./tests:/usr/src/app/tests
- ./docs:/usr/src/app/docs - ./docs:/usr/src/app/docs
- ./k8s:/usr/src/app/k8s
- ./setup.py:/usr/src/app/setup.py - ./setup.py:/usr/src/app/setup.py
- ./setup.cfg:/usr/src/app/setup.cfg - ./setup.cfg:/usr/src/app/setup.cfg
- ./pytest.ini:/usr/src/app/pytest.ini - ./pytest.ini:/usr/src/app/pytest.ini
@ -58,6 +59,7 @@ services:
- ./bigchaindb:/usr/src/app/bigchaindb - ./bigchaindb:/usr/src/app/bigchaindb
- ./tests:/usr/src/app/tests - ./tests:/usr/src/app/tests
- ./docs:/usr/src/app/docs - ./docs:/usr/src/app/docs
- ./k8s:/usr/src/app/k8s
- ./setup.py:/usr/src/app/setup.py - ./setup.py:/usr/src/app/setup.py
- ./setup.cfg:/usr/src/app/setup.cfg - ./setup.cfg:/usr/src/app/setup.cfg
- ./pytest.ini:/usr/src/app/pytest.ini - ./pytest.ini:/usr/src/app/pytest.ini

View File

@ -18,3 +18,4 @@ If you find the cloud deployment templates for nodes helpful, then you may also
add-node-on-kubernetes add-node-on-kubernetes
upgrade-on-kubernetes upgrade-on-kubernetes
first-node first-node
log-analytics

View File

@ -0,0 +1,256 @@
Log Analytics on Azure
======================
This section documents how to create and configure a Log Analytics workspace on
Azure, for a Kubernetes-based deployment.
The documented approach is based on an integration of Microsoft's Operations
Management Suite (OMS) with a Kubernetes-based Azure Container Service cluster.
The :ref:`oms-k8s-references` contains links to more detailed documentation on
Azure, and Kubernetes.
There are three main steps involved:
1. Create a workspace (``LogAnalyticsOMS``).
2. Create a ``ContainersOMS`` solution under the workspace.
3. Deploy the OMS agent(s).
Steps 1 and 2 rely on `Azure Resource Manager templates`_ and can be done with
one template so we'll cover them together. Step 3 relies on a
`Kubernetes DaemonSet`_ and will be covered separately.
Minimum Requirements
--------------------
This document assumes that you have already deployed a Kubernetes cluster, and
that you have the Kubernetes command line ``kubectl`` installed.
Creating a workspace and adding a containers solution
-----------------------------------------------------
For the sake of this document and example, we'll assume an existing resource
group named:
* ``resource_group``
and the workspace we'll create will be named:
* ``work_space``
If you feel creative you may replace these names by more interesting ones.
.. code-block:: bash
$ az group deployment create --debug \
--resource-group resource_group \
--name "Microsoft.LogAnalyticsOMS" \
--template-file log_analytics_oms.json \
--parameters @log_analytics_oms.parameters.json
An example of a simple template file (``--template-file``):
.. code-block:: json
{
"$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"sku": {
"type": "String"
},
"workspaceName": {
"type": "String"
},
"solutionType": {
"type": "String"
}
},
"resources": [
{
"apiVersion": "2015-03-20",
"type": "Microsoft.OperationalInsights/workspaces",
"name": "[parameters('workspaceName')]",
"location": "[resourceGroup().location]",
"properties": {
"sku": {
"name": "[parameters('sku')]"
}
},
"resources": [
{
"apiVersion": "2015-11-01-preview",
"location": "[resourceGroup().location]",
"name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]",
"type": "Microsoft.OperationsManagement/solutions",
"id": "[Concat(resourceGroup().id, '/providers/Microsoft.OperationsManagement/solutions/', parameters('solutionType'), '(', parameters('workspaceName'), ')')]",
"dependsOn": [
"[concat('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]"
],
"properties": {
"workspaceResourceId": "[resourceId('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]"
},
"plan": {
"publisher": "Microsoft",
"product": "[Concat('OMSGallery/', parameters('solutionType'))]",
"name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]",
"promotionCode": ""
}
}
]
}
]
}
An example of the associated parameter file (``--parameters``):
.. code-block:: json
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"sku": {
"value": "Free"
},
"workspaceName": {
"value": "work_space"
},
"solutionType": {
"value": "Containers"
}
}
}
Deploying the OMS agent(s)
--------------------------
In order to deploy an OMS agent two important pieces of information are needed:
* workspace id
* workspace key
Obtaining the workspace id:
.. code-block:: bash
$ az resource show \
    --resource-group resource_group \
    --resource-type Microsoft.OperationalInsights/workspaces \
    --name work_space \
    | grep customerId
"customerId": "12345678-1234-1234-1234-123456789012",
Obtaining the workspace key:
Until we figure out a way to do this via the command line, please see the
instructions under `Obtain your workspace ID and key
<https://docs.microsoft.com/en-us/azure/container-service/container-service-kubernetes-oms#obtain-your-workspace-id-and-key>`_.
Once you have the workspace id and key you can include them in the following
YAML file (:download:`oms-daemonset.yaml
<../../../../k8s/logging-and-monitoring/oms-daemonset.yaml>`):
.. code-block:: yaml
# oms-daemonset.yaml
apiVersion: extensions/v1beta1
kind: DaemonSet
metadata:
name: omsagent
spec:
template:
metadata:
labels:
app: omsagent
spec:
containers:
- env:
- name: WSID
value: <workspace_id>
- name: KEY
value: <workspace_key>
image: microsoft/oms
name: omsagent
ports:
- containerPort: 25225
protocol: TCP
securityContext:
privileged: true
volumeMounts:
- mountPath: /var/run/docker.sock
name: docker-sock
volumes:
- name: docker-sock
hostPath:
path: /var/run/docker.sock
To deploy the agent simply run the following command:
.. code-block:: bash
$ kubectl create -f oms-daemonset.yaml
Some useful management tasks
----------------------------
List workspaces:
.. code-block:: bash
$ az resource list \
--resource-group resource_group \
--resource-type Microsoft.OperationalInsights/workspaces
List solutions:
.. code-block:: bash
$ az resource list \
--resource-group resource_group \
--resource-type Microsoft.OperationsManagement/solutions
Deleting the containers solution:
.. code-block:: bash
$ az group deployment delete --debug \
--resource-group resource_group \
--name Microsoft.ContainersOMS
.. code-block:: bash
$ az resource delete \
--resource-group resource_group \
--resource-type Microsoft.OperationsManagement/solutions \
--name "Containers(work_space)"
Deleting the workspace:
.. code-block:: bash
$ az group deployment delete --debug \
--resource-group resource_group \
--name Microsoft.LogAnalyticsOMS
.. code-block:: bash
$ az resource delete \
--resource-group resource_group \
--resource-type Microsoft.OperationalInsights/workspaces \
--name work_space
.. _oms-k8s-references:
References
----------
* `Monitor an Azure Container Service cluster with Microsoft Operations Management Suite (OMS) <https://docs.microsoft.com/en-us/azure/container-service/container-service-kubernetes-oms>`_
* `Manage Log Analytics using Azure Resource Manager templates <https://docs.microsoft.com/en-us/azure/log-analytics/log-analytics-template-workspace-configuration>`_
* `azure commands for deployments <https://docs.microsoft.com/en-us/cli/azure/group/deployment>`_
(``az group deployment``)
* `Understand the structure and syntax of Azure Resource Manager templates <https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-authoring-templates>`_
* `Kubernetes DaemonSet`_
.. _Azure Resource Manager templates: https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-authoring-templates
.. _Kubernetes DaemonSet: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/

View File

@ -1,29 +1,27 @@
Drivers & Clients Drivers & Clients
================= =================
Currently, the only language-native driver is written in the Python language. Libraries and Tools Maintained by the BigchainDB Team
-----------------------------------------------------
We also provide the Transaction CLI to be able to script the building of * `The Python Driver <https://docs.bigchaindb.com/projects/py-driver/en/latest/index.html>`_
transactions. You may be able to wrap this tool inside the language of * `The Transaction CLI <https://docs.bigchaindb.com/projects/cli/en/latest/>`_ is
your choice, and then use the HTTP API directly to post transactions. a command-line interface for building BigchainDB transactions.
You may be able to call it from inside the language of
If you use a language other than Python, you may want to look at the current your choice, and then use :ref:`the HTTP API <The HTTP Client-Server API>`
community projects listed below. to post transactions.
.. toctree::
:maxdepth: 1
The Python Driver <https://docs.bigchaindb.com/projects/py-driver/en/latest/index.html>
Transaction CLI <https://docs.bigchaindb.com/projects/cli/en/latest/>
Community-Driven Libraries and Tools Community-Driven Libraries and Tools
------------------------------------ ------------------------------------
Please note that some of these projects may be work in progress, but may
nevertheless be very useful. .. note::
Some of these projects are a work in progress,
but may still be useful.
* `Javascript transaction builder <https://github.com/sohkai/js-bigchaindb-quickstart>`_ * `Javascript transaction builder <https://github.com/sohkai/js-bigchaindb-quickstart>`_
* `Haskell transaction builder <https://github.com/bigchaindb/bigchaindb-hs>`_ * `Haskell transaction builder <https://github.com/bigchaindb/bigchaindb-hs>`_
* `Go driver <https://github.com/zbo14/envoke/blob/master/bigchain/bigchain.go>`_ * `Go driver <https://github.com/zbo14/envoke/blob/master/bigchain/bigchain.go>`_
* `Java driver <https://github.com/mgrand/bigchaindb-java-driver>`_ * `Java driver <https://github.com/mgrand/bigchaindb-java-driver>`_
* `Ruby driver <https://github.com/LicenseRocks/bigchaindb_ruby>`_

View File

@ -0,0 +1,49 @@
{
"$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"sku": {
"type": "String"
},
"workspaceName": {
"type": "String"
},
"solutionType": {
"type": "String"
}
},
"resources": [
{
"apiVersion": "2015-03-20",
"type": "Microsoft.OperationalInsights/workspaces",
"name": "[parameters('workspaceName')]",
"location": "[resourceGroup().location]",
"properties": {
"sku": {
"name": "[parameters('sku')]"
}
},
"resources": [
{
"apiVersion": "2015-11-01-preview",
"location": "[resourceGroup().location]",
"name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]",
"type": "Microsoft.OperationsManagement/solutions",
"id": "[Concat(resourceGroup().id, '/providers/Microsoft.OperationsManagement/solutions/', parameters('solutionType'), '(', parameters('workspaceName'), ')')]",
"dependsOn": [
"[concat('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]"
],
"properties": {
"workspaceResourceId": "[resourceId('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]"
},
"plan": {
"publisher": "Microsoft",
"product": "[Concat('OMSGallery/', parameters('solutionType'))]",
"name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]",
"promotionCode": ""
}
}
]
}
]
}

View File

@ -0,0 +1,15 @@
{
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"sku": {
"value": "Free"
},
"workspaceName": {
"value": "rg-abc-logs"
},
"solutionType": {
"value": "Containers"
}
}
}

View File

@ -0,0 +1,30 @@
apiVersion: extensions/v1beta1
kind: DaemonSet
metadata:
name: omsagent
spec:
template:
metadata:
labels:
app: omsagent
spec:
containers:
- env:
- name: WSID
value: <insert-workspace-id-here>
- name: KEY
value: <insert-workspace-key-here>
image: microsoft/oms
name: omsagent
ports:
- containerPort: 25225
protocol: TCP
securityContext:
privileged: true
volumeMounts:
- mountPath: /var/run/docker.sock
name: docker-sock
volumes:
- name: docker-sock
hostPath:
path: /var/run/docker.sock

View File

@ -12,9 +12,10 @@ if [[ -z "${mms_api_key}" ]]; then
fi fi
sed -i '/mmsApiKey/d' $MONGODB_BACKUP_CONF_FILE sed -i '/mmsApiKey/d' $MONGODB_BACKUP_CONF_FILE
sed -i '/mothership/d' $MONGODB_BACKUP_CONF_FILE
echo "mmsApiKey="${mms_api_key} >> $MONGODB_BACKUP_CONF_FILE echo "mmsApiKey="${mms_api_key} >> $MONGODB_BACKUP_CONF_FILE
echo "mothership=api-backup.eu-west-1.mongodb.com" >> $MONGODB_BACKUP_CONF_FILE
echo "INFO: starting mdb backup..." echo "INFO: starting mdb backup..."
exec mongodb-mms-backup-agent \ exec mongodb-mms-backup-agent -c $MONGODB_BACKUP_CONF_FILE
-c $MONGODB_BACKUP_CONF_FILE

View File

@ -19,7 +19,7 @@ spec:
terminationGracePeriodSeconds: 10 terminationGracePeriodSeconds: 10
containers: containers:
- name: nginx-3scale - name: nginx-3scale
image: bigchaindb/nginx_3scale:1.0 image: bigchaindb/nginx_3scale:1.1
# TODO(Krish): Change later to IfNotPresent # TODO(Krish): Change later to IfNotPresent
imagePullPolicy: Always imagePullPolicy: Always
env: env:
@ -68,6 +68,10 @@ spec:
hostPort: 443 hostPort: 443
name: public-bdb-port name: public-bdb-port
protocol: TCP protocol: TCP
- containerPort: 80
hostPort: 80
name: https-msg-port
protocol: TCP
- containerPort: 8888 - containerPort: 8888
hostPort: 8888 hostPort: 8888
name: health-check name: health-check

View File

@ -14,6 +14,10 @@ spec:
selector: selector:
app: ngx-instance-0-dep app: ngx-instance-0-dep
ports: ports:
- port: 80
targetPort: 80
name: ngx-public-bdb-port-http
protocol: TCP
- port: 443 - port: 443
targetPort: 443 targetPort: 443
name: ngx-public-bdb-port name: ngx-public-bdb-port

View File

@ -102,7 +102,7 @@ setup(
zip_safe=False, zip_safe=False,
classifiers=[ classifiers=[
'Development Status :: 3 - Alpha', 'Development Status :: 4 - Beta',
'Intended Audience :: Developers', 'Intended Audience :: Developers',
'Topic :: Database', 'Topic :: Database',
'Topic :: Database :: Database Engines/Servers', 'Topic :: Database :: Database Engines/Servers',

12
snap/README.md Normal file
View File

@ -0,0 +1,12 @@
This is the packaging metadata for the BigchainDB snap.
Snaps and the snap store allow for the secure installation of apps that work
in most Linux distributions. For more information, go to https://snapcraft.io/
To build and install this snap in Ubuntu 16.04:
$ sudo apt install git snapcraft
$ git clone https://github.com/bigchaindb/bigchaindb
$ cd bigchaindb
$ snapcraft
$ sudo snap install *.snap --dangerous --devmode

23
snap/snapcraft.yaml Normal file
View File

@ -0,0 +1,23 @@
name: bigchaindb
version: master
summary: a scalable blockchain database
description: |
With high throughput, sub-second latency and powerful functionality to
automate business processes, BigchainDB looks, acts and feels like a database
with added blockchain characteristics.
# grade must be 'stable' to release into candidate/stable channels
grade: devel
# strict confinement requires https://github.com/snapcore/snapd/pull/2749
confinement: devmode
apps:
bigchaindb:
command: bigchaindb
plugs: [network, network-bind]
parts:
bigchaindb:
source: .
plugin: python
build-packages: [g++, libffi-dev]

View File

@ -1,4 +1,3 @@
from bigchaindb.common.exceptions import ValidationError
import pytest import pytest
import random import random
@ -19,18 +18,6 @@ def test_asset_transfer(b, user_pk, user_sk):
assert tx_transfer_signed.asset['id'] == tx_create.id assert tx_transfer_signed.asset['id'] == tx_create.id
def test_validate_bad_asset_creation(b, user_pk):
from bigchaindb.models import Transaction
# `data` needs to be a dictionary
tx = Transaction.create([b.me], [([user_pk], 1)])
tx.asset['data'] = 'a'
tx_signed = tx.sign([b.me_private])
with pytest.raises(ValidationError):
Transaction.from_dict(tx_signed.to_dict())
@pytest.mark.bdb @pytest.mark.bdb
@pytest.mark.usefixtures('inputs') @pytest.mark.usefixtures('inputs')
def test_validate_transfer_asset_id_mismatch(b, user_pk, user_sk): def test_validate_transfer_asset_id_mismatch(b, user_pk, user_sk):
@ -91,19 +78,6 @@ def test_asset_id_mismatch(b, user_pk):
Transaction.get_asset_id([tx1, tx2]) Transaction.get_asset_id([tx1, tx2])
def test_create_invalid_divisible_asset(b, user_pk, user_sk):
from bigchaindb.models import Transaction
from bigchaindb.common.exceptions import ValidationError
# Asset amount must be more than 0
tx = Transaction.create([user_pk], [([user_pk], 1)])
tx.outputs[0].amount = 0
tx.sign([user_sk])
with pytest.raises(ValidationError):
Transaction.from_dict(tx.to_dict())
def test_create_valid_divisible_asset(b, user_pk, user_sk): def test_create_valid_divisible_asset(b, user_pk, user_sk):
from bigchaindb.models import Transaction from bigchaindb.models import Transaction

View File

@ -635,88 +635,3 @@ def test_divide(b, user_pk, user_sk):
assert len(tx_transfer_signed.outputs) == 3 assert len(tx_transfer_signed.outputs) == 3
for output in tx_transfer_signed.outputs: for output in tx_transfer_signed.outputs:
assert output.amount == 1 assert output.amount == 1
# Check that negative inputs are caught when creating a TRANSFER transaction
@pytest.mark.skip(reason='part of tx structural tests')
@pytest.mark.bdb
@pytest.mark.usefixtures('inputs')
def test_non_positive_amounts_on_transfer(b, user_pk):
from bigchaindb.models import Transaction
from bigchaindb.common.exceptions import AmountError
# CREATE divisible asset with 1 output with amount 3
tx_create = Transaction.create([b.me], [([user_pk], 3)])
tx_create_signed = tx_create.sign([b.me_private])
# create block
block = b.create_block([tx_create_signed])
assert block.validate(b) == block
b.write_block(block)
# vote
vote = b.vote(block.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
with pytest.raises(AmountError):
Transaction.transfer(tx_create.to_inputs(),
[([b.me], 4), ([b.me], -1)],
asset_id=tx_create.id)
# Check that negative inputs are caught when validating a TRANSFER transaction
@pytest.mark.skip(reason='part of tx structural tests')
@pytest.mark.bdb
@pytest.mark.usefixtures('inputs')
def test_non_positive_amounts_on_transfer_validate(b, user_pk, user_sk):
from bigchaindb.models import Transaction
from bigchaindb.common.exceptions import AmountError
# CREATE divisible asset with 1 output with amount 3
tx_create = Transaction.create([b.me], [([user_pk], 3)])
tx_create_signed = tx_create.sign([b.me_private])
# create block
block = b.create_block([tx_create_signed])
assert block.validate(b) == block
b.write_block(block)
# vote
vote = b.vote(block.id, b.get_last_voted_block().id, True)
b.write_vote(vote)
# create a transfer transaction with 3 outputs and check if the amount
# of each output is 1
tx_transfer = Transaction.transfer(tx_create.to_inputs(),
[([b.me], 4), ([b.me], 1)],
asset_id=tx_create.id)
tx_transfer.outputs[1].amount = -1
tx_transfer_signed = tx_transfer.sign([user_sk])
with pytest.raises(AmountError):
tx_transfer_signed.validate(b)
# Check that negative inputs are caught when creating a CREATE transaction
@pytest.mark.bdb
@pytest.mark.usefixtures('inputs')
def test_non_positive_amounts_on_create(b, user_pk):
from bigchaindb.models import Transaction
from bigchaindb.common.exceptions import AmountError
# CREATE divisible asset with 1 output with amount 3
with pytest.raises(AmountError):
Transaction.create([b.me], [([user_pk], -3)])
# Check that negative inputs are caught when validating a CREATE transaction
@pytest.mark.skip(reason='part of tx structural tests')
@pytest.mark.bdb
@pytest.mark.usefixtures('inputs')
def test_non_positive_amounts_on_create_validate(b, user_pk):
from bigchaindb.models import Transaction
from bigchaindb.common.exceptions import AmountError
# CREATE divisible asset with 1 output with amount 3
tx_create = Transaction.create([b.me], [([user_pk], 3)])
tx_create.outputs[0].amount = -3
tx_create_signed = tx_create.sign([b.me_private])
with pytest.raises(AmountError):
tx_create_signed.validate(b)

View File

@ -1,60 +0,0 @@
from pytest import raises
from bigchaindb.common.exceptions import SchemaValidationError
from bigchaindb.common.schema import validate_transaction_schema
def test_validate_transaction_create(create_tx):
validate_transaction_schema(create_tx.to_dict())
def test_validate_transaction_signed_create(signed_create_tx):
validate_transaction_schema(signed_create_tx.to_dict())
def test_validate_transaction_signed_transfer(signed_transfer_tx):
validate_transaction_schema(signed_transfer_tx.to_dict())
def test_validate_transaction_fails():
with raises(SchemaValidationError):
validate_transaction_schema({})
def test_validate_fails_metadata_empty_dict(create_tx):
create_tx.metadata = {'a': 1}
validate_transaction_schema(create_tx.to_dict())
create_tx.metadata = None
validate_transaction_schema(create_tx.to_dict())
create_tx.metadata = {}
with raises(SchemaValidationError):
validate_transaction_schema(create_tx.to_dict())
def test_transfer_asset_schema(signed_transfer_tx):
tx = signed_transfer_tx.to_dict()
validate_transaction_schema(tx)
tx['asset']['data'] = {}
with raises(SchemaValidationError):
validate_transaction_schema(tx)
del tx['asset']['data']
tx['asset']['id'] = 'b' * 63
with raises(SchemaValidationError):
validate_transaction_schema(tx)
def test_create_single_input(create_tx):
tx = create_tx.to_dict()
tx['inputs'] += tx['inputs']
with raises(SchemaValidationError):
validate_transaction_schema(tx)
tx['inputs'] = []
with raises(SchemaValidationError):
validate_transaction_schema(tx)
def test_create_tx_no_fulfills(create_tx):
tx = create_tx.to_dict()
tx['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0}
with raises(SchemaValidationError):
validate_transaction_schema(tx)

View File

@ -1,13 +0,0 @@
from pytest import raises
from bigchaindb.common.exceptions import SchemaValidationError
from bigchaindb.common.schema import validate_vote_schema
def test_validate_vote(structurally_valid_vote):
validate_vote_schema(structurally_valid_vote)
def test_validate_vote_fails():
with raises(SchemaValidationError):
validate_vote_schema({})

View File

@ -1,6 +1,18 @@
from bigchaindb.common.schema import ( """
TX_SCHEMA_COMMON, VOTE_SCHEMA, drop_schema_descriptions) This module is tests related to schema checking, but _not_ of granular schematic
properties related to validation.
"""
from pytest import raises
from bigchaindb.common.exceptions import SchemaValidationError
from bigchaindb.common.schema import (
TX_SCHEMA_COMMON, VOTE_SCHEMA, drop_schema_descriptions,
validate_transaction_schema, validate_vote_schema)
################################################################################
# Test of schema utils
def _test_additionalproperties(node, path=''): def _test_additionalproperties(node, path=''):
""" """
@ -67,3 +79,37 @@ def test_drop_descriptions():
} }
drop_schema_descriptions(node) drop_schema_descriptions(node)
assert node == expected assert node == expected
################################################################################
# Test call transaction schema
def test_validate_transaction_create(create_tx):
validate_transaction_schema(create_tx.to_dict())
def test_validate_transaction_signed_create(signed_create_tx):
validate_transaction_schema(signed_create_tx.to_dict())
def test_validate_transaction_signed_transfer(signed_transfer_tx):
validate_transaction_schema(signed_transfer_tx.to_dict())
def test_validate_transaction_fails():
with raises(SchemaValidationError):
validate_transaction_schema({})
################################################################################
# Test call vote schema
def test_validate_vote(structurally_valid_vote):
validate_vote_schema(structurally_valid_vote)
def test_validate_vote_fails():
with raises(SchemaValidationError):
validate_vote_schema({})

View File

@ -1,3 +1,8 @@
"""
These are tests of the API of the Transaction class and associated classes.
Tests for transaction validation are separate.
"""
from pytest import raises from pytest import raises
@ -78,7 +83,7 @@ def test_output_serialization(user_Ed25519, user_pub):
'details': user_Ed25519.to_dict(), 'details': user_Ed25519.to_dict(),
}, },
'public_keys': [user_pub], 'public_keys': [user_pub],
'amount': 1, 'amount': '1',
} }
cond = Output(user_Ed25519, [user_pub], 1) cond = Output(user_Ed25519, [user_pub], 1)
@ -96,7 +101,7 @@ def test_output_deserialization(user_Ed25519, user_pub):
'details': user_Ed25519.to_dict() 'details': user_Ed25519.to_dict()
}, },
'public_keys': [user_pub], 'public_keys': [user_pub],
'amount': 1, 'amount': '1',
} }
cond = Output.from_dict(cond) cond = Output.from_dict(cond)
@ -115,7 +120,7 @@ def test_output_hashlock_serialization():
'uri': hashlock, 'uri': hashlock,
}, },
'public_keys': None, 'public_keys': None,
'amount': 1, 'amount': '1',
} }
cond = Output(hashlock, amount=1) cond = Output(hashlock, amount=1)
@ -135,7 +140,7 @@ def test_output_hashlock_deserialization():
'uri': hashlock 'uri': hashlock
}, },
'public_keys': None, 'public_keys': None,
'amount': 1, 'amount': '1',
} }
cond = Output.from_dict(cond) cond = Output.from_dict(cond)
@ -232,6 +237,7 @@ def test_generate_output_single_owner_with_output(user_pub):
def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub): def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub):
from bigchaindb.common.transaction import Output from bigchaindb.common.transaction import Output
from bigchaindb.common.exceptions import AmountError
with raises(ValueError): with raises(ValueError):
Output.generate([], 1) Output.generate([], 1)
@ -241,6 +247,8 @@ def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub):
Output.generate([[user_pub, [user2_pub, [user3_pub]]]], 1) Output.generate([[user_pub, [user2_pub, [user3_pub]]]], 1)
with raises(ValueError): with raises(ValueError):
Output.generate([[user_pub]], 1) Output.generate([[user_pub]], 1)
with raises(AmountError):
Output.generate([[user_pub]], -1)
def test_invalid_transaction_initialization(asset_definition): def test_invalid_transaction_initialization(asset_definition):
@ -341,28 +349,6 @@ def test_transaction_deserialization(user_input, user_output, data):
validate_transaction_model(tx) validate_transaction_model(tx)
def test_tx_serialization_with_incorrect_hash(utx):
from bigchaindb.common.transaction import Transaction
from bigchaindb.common.exceptions import InvalidHash
utx_dict = utx.to_dict()
utx_dict['id'] = 'a' * 64
with raises(InvalidHash):
Transaction.from_dict(utx_dict)
utx_dict.pop('id')
def test_tx_serialization_hash_function(tx):
import sha3
import json
tx_dict = tx.to_dict()
tx_dict['inputs'][0]['fulfillment'] = None
del tx_dict['id']
payload = json.dumps(tx_dict, skipkeys=False, sort_keys=True,
separators=(',', ':'))
assert sha3.sha3_256(payload.encode()).hexdigest() == tx.id
def test_invalid_input_initialization(user_input, user_pub): def test_invalid_input_initialization(user_input, user_pub):
from bigchaindb.common.transaction import Input from bigchaindb.common.transaction import Input
@ -984,35 +970,11 @@ def test_cant_add_empty_input():
tx.add_input(None) tx.add_input(None)
def test_validate_version(utx): def test_output_from_dict_invalid_amount(user_output):
import re from bigchaindb.common.transaction import Output
import bigchaindb.version from bigchaindb.common.exceptions import AmountError
from .utils import validate_transaction_model
from bigchaindb.common.exceptions import SchemaValidationError
short_ver = bigchaindb.version.__short_version__ out = user_output.to_dict()
assert utx.version == re.match(r'^(.*\d)', short_ver).group(1) out['amount'] = 'a'
with raises(AmountError):
validate_transaction_model(utx) Output.from_dict(out)
# At version 1, transaction version will break step with server version.
utx.version = '1.0.0'
with raises(SchemaValidationError):
validate_transaction_model(utx)
def test_create_tx_no_asset_id(b, utx):
from bigchaindb.common.exceptions import SchemaValidationError
from .utils import validate_transaction_model
utx.asset['id'] = 'b' * 64
with raises(SchemaValidationError):
validate_transaction_model(utx)
def test_transfer_tx_asset_schema(transfer_utx):
from bigchaindb.common.exceptions import SchemaValidationError
from .utils import validate_transaction_model
tx = transfer_utx
tx.asset['data'] = {}
with raises(SchemaValidationError):
validate_transaction_model(tx)

View File

@ -0,0 +1,207 @@
from copy import deepcopy
import pytest
import random
import bigchaindb
from bigchaindb.core import Bigchain
from contextlib import contextmanager
from bigchaindb.common.crypto import generate_key_pair
from tests.pipelines.stepping import create_stepper
################################################################################
# Test setup code
@contextmanager
def federation(n):
    """
    Return a list of Bigchain objects and pipeline steppers to represent
    a BigchainDB federation.

    Each node gets its own keypair; every *other* node's public key goes
    into its keyring so the nodes recognise each other as members.
    Yields ``zip(*[(Bigchain, stepper), ...])`` i.e. a pair of tuples:
    (bigchain instances, steppers).
    """
    keys = [generate_key_pair() for _ in range(n)]
    # Remember the real global config so it can be restored afterwards.
    config_orig = bigchaindb.config

    @contextmanager
    def make_nodes(i):
        """
        make_nodes is a recursive context manager. Essentially it is doing:

            with f(a[0]) as b0:
                with f(a[1]) as b1:
                    with f(a[2]) as b2:
                        yield [b0, b1, b2]

        with an arbitrary depth. It is also temporarily patching global
        configuration to simulate nodes with separate identities.
        """
        nonlocal keys
        if i == 0:
            yield []
        else:
            config = deepcopy(config_orig)
            keys = [keys[-1]] + keys[:-1]  # Rotate keys
            # keys[0] is "this" node's pair; everyone else's public key
            # makes up the keyring.
            config['keyring'] = [pub for _, pub in keys[1:]]
            config['keypair']['private'] = keys[0][0]
            config['keypair']['public'] = keys[0][1]
            # Patch the global config so Bigchain() below picks up this
            # node's identity; restored once all nodes are constructed.
            bigchaindb.config = config
            stepper = create_stepper()
            with stepper.start():
                node = (Bigchain(), stepper)
                with make_nodes(i-1) as rest:
                    yield [node] + rest

    with make_nodes(n) as steppers:
        # All nodes are created; restore the original global config.
        bigchaindb.config = config_orig
        yield zip(*steppers)
@pytest.fixture
def federation_3():
    """Fixture: a three-node federation as (bigchains, steppers)."""
    with federation(3) as nodes:
        yield nodes
def process_tx(steps):
    """Push one transaction through the block pipeline of a single node.

    Consumes a changefeed event; if the tx survives the filter stage it is
    validated, flushed into a block (timeout=True — presumably forces block
    creation before the block is full; TODO confirm against stepper docs),
    the block is written, and the tx removed from the backlog.
    """
    steps.block_changefeed(timeout=1)
    if steps.block_filter_tx():
        steps.block_validate_tx()
        steps.block_create(timeout=True)
        steps.block_write()
        steps.block_delete_tx()
def input_single_create(b):
    """Write a self-owned single-output CREATE tx to node *b* and return it.

    Random metadata guarantees each generated transaction is unique.
    """
    from bigchaindb.common.transaction import Transaction
    unique_meta = {'r': random.random()}
    created = Transaction.create([b.me], [([b.me], 1)], unique_meta)
    signed = created.sign([b.me_private])
    b.write_transaction(signed)
    return signed
def process_vote(steps, result=None):
    """Drive one block through the vote pipeline of a single node.

    If *result* is not None, the validity flag queued for the vote stage
    is overridden with it, forcing a valid/invalid vote. Returns the vote
    that was written.
    """
    for stage in (steps.vote_changefeed, steps.vote_validate_block,
                  steps.vote_ungroup, steps.vote_validate_tx):
        stage()
    if result is not None:
        steps.queues['vote_vote'][0][0] = result
    cast = steps.vote_vote()
    steps.vote_write_vote()
    return cast
################################################################################
# Tests here on down
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_valid(federation_3):
    """A block becomes valid once a majority of the 3 nodes vote valid."""
    bx, (s0, s1, s2) = federation_3

    def statuses(tx_id):
        return [node.get_transaction(tx_id, True)[1] for node in bx]

    tx = input_single_create(bx[0])
    for stepper in (s0, s1, s2):
        process_tx(stepper)

    # One invalid vote: still undecided everywhere.
    process_vote(s2, False)
    assert statuses(tx.id) == ['undecided'] * 3
    # 1 valid vs 1 invalid: still no majority.
    process_vote(s0, True)
    assert statuses(tx.id) == ['undecided'] * 3
    # Second valid vote completes the majority.
    process_vote(s1, True)
    assert statuses(tx.id) == ['valid'] * 3
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_invalid(federation_3):
    """Once a majority votes invalid, the tx disappears (status None)."""
    bx, (s0, s1, s2) = federation_3

    def statuses(tx_id):
        return [node.get_transaction(tx_id, True)[1] for node in bx]

    tx = input_single_create(bx[0])
    for stepper in (s0, s1, s2):
        process_tx(stepper)

    process_vote(s1, True)
    assert statuses(tx.id) == ['undecided'] * 3
    process_vote(s2, False)
    assert statuses(tx.id) == ['undecided'] * 3
    # Second invalid vote forms a majority; tx is gone on every node.
    process_vote(s0, False)
    assert statuses(tx.id) == [None] * 3
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_disagree_prev_block(federation_3):
    """Votes naming different previous blocks don't elect a block valid."""
    bx, (s0, s1, s2) = federation_3

    def statuses(tx_id):
        return [node.get_transaction(tx_id, True)[1] for node in bx]

    tx = input_single_create(bx[0])
    for stepper in (s0, s1, s2):
        process_tx(stepper)

    process_vote(s0, True)
    assert statuses(tx.id) == ['undecided'] * 3

    # Nodes 1 and 2 each claim a different previous block in their vote.
    s1.vote.last_voted_id = '5' * 64
    process_vote(s1, True)
    assert statuses(tx.id) == ['undecided'] * 3

    s2.vote.last_voted_id = '6' * 64
    process_vote(s2, True)
    assert statuses(tx.id) == [None] * 3
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_sybill(federation_3):
    """Votes signed with unknown (Sybil) keys never decide an election."""
    bx, (s0, s1, s2) = federation_3

    tx = input_single_create(bx[0])
    for stepper in (s0, s1, s2):
        process_tx(stepper)

    # What we need is some votes from unknown nodes!
    # Incorrectly signed votes are ineligible.
    for stepper in (s0, s1, s2):
        stepper.vote.bigchain.me_private = generate_key_pair()[0]
    for stepper in (s0, s1, s2):
        process_vote(stepper, True)

    for node in bx:
        assert node.get_transaction(tx.id, True)[1] == 'undecided'
@pytest.mark.skip()
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_dos(federation_3):
    """
    https://github.com/bigchaindb/bigchaindb/issues/1314

    Test that a node cannot block another node's opportunity to vote
    on a block by writing an incorrectly signed vote
    """
    # Placeholder: intentionally unimplemented and skipped until the
    # linked issue is addressed.
    raise NotImplementedError()
@pytest.mark.skip('Revisit when we have block election status cache')
@pytest.mark.bdb
@pytest.mark.genesis
def test_elect_bad_block_voters_list(federation_3):
    """
    See https://github.com/bigchaindb/bigchaindb/issues/1224

    A block whose voters list doesn't match the federation keyrings must
    end up invalid on every node.
    """
    bx, (s0, s1, s2) = federation_3
    node0 = s0.block.bigchain
    # First remove other nodes from node 0 so that it self assigns the tx
    node0.nodes_except_me = []
    tx = input_single_create(node0)
    # Now create a block voters list which will not match other keyrings
    node0.nodes_except_me = [bx[1].me]
    process_tx(s0)
    for stepper in (s0, s1, s2):
        process_vote(stepper)
    for node in bx:
        assert node.get_transaction(tx.id, True)[1] == 'invalid'

View File

@ -106,7 +106,7 @@ class MultipipesStepper:
for item in items: for item in items:
if type(item) != tuple: if type(item) != tuple:
item = (item,) item = (item,)
queue.append(item) queue.append(list(item))
def step(self, name, **kwargs): def step(self, name, **kwargs):
""" Advance pipeline stage. Throws Empty if no data to consume. """ """ Advance pipeline stage. Throws Empty if no data to consume. """
@ -163,6 +163,8 @@ def _update_stepper(stepper, prefix, pipeline):
n1 = (nodes + [None])[i+1] n1 = (nodes + [None])[i+1]
f = stepper.add_input if i == 0 else stepper.add_stage f = stepper.add_input if i == 0 else stepper.add_stage
f(prefix, n0, n1) f(prefix, n0, n1)
# Expose pipeline state
setattr(stepper, prefix, nodes[-1].target.__self__)
def create_stepper(): def create_stepper():

View File

@ -0,0 +1,169 @@
"""
All tests of transaction structure. The concern here is that transaction
structural / schematic issues are caught when reading a transaction
(ie going from dict -> transaction).
"""
import pytest
from bigchaindb.common.exceptions import (AmountError, InvalidHash,
SchemaValidationError)
from bigchaindb.models import Transaction
################################################################################
# Helper functions
def validate(tx):
    """Round-trip *tx* (model instance or dict) through Transaction.from_dict,
    which performs the schema/structure validation under test."""
    tx_dict = tx.to_dict() if isinstance(tx, Transaction) else tx
    Transaction.from_dict(tx_dict)
def validate_raises(tx, exc=SchemaValidationError):
    """Assert that validating *tx* raises *exc* (schema error by default)."""
    with pytest.raises(exc):
        validate(tx)
# We should test that validation works when we expect it to
def test_validation_passes(create_tx):
    """Sanity check: an untouched CREATE tx round-trips without error."""
    validate(create_tx)
################################################################################
# ID
def test_tx_serialization_hash_function(create_tx):
    """The tx id equals sha3-256 of the canonical JSON body (sorted keys,
    compact separators) with the fulfillment nulled and the id removed."""
    import json
    import sha3

    body = create_tx.to_dict()
    body['inputs'][0]['fulfillment'] = None
    del body['id']
    canonical = json.dumps(body, skipkeys=False, sort_keys=True,
                           separators=(',', ':'))
    assert sha3.sha3_256(canonical.encode()).hexdigest() == create_tx.id
def test_tx_serialization_with_incorrect_hash(create_tx):
    """A tampered id must be rejected with InvalidHash."""
    body = create_tx.to_dict()
    body['id'] = 'a' * 64
    validate_raises(body, InvalidHash)
def test_tx_serialization_with_no_hash(create_tx):
    """A tx missing its id field fails schema validation."""
    body = create_tx.to_dict()
    del body['id']
    validate_raises(body)
################################################################################
# Operation
def test_validate_invalid_operation(create_tx):
    """Only operations enumerated in the schema are accepted."""
    create_tx.operation = 'something invalid'
    validate_raises(create_tx)
################################################################################
# Metadata
def test_validate_fails_metadata_empty_dict(create_tx):
    """Metadata may be None or a non-empty dict, but never an empty dict."""
    create_tx.metadata = None
    validate(create_tx)

    create_tx.metadata = {'a': 1}
    validate(create_tx)

    create_tx.metadata = {}
    validate_raises(create_tx)
################################################################################
# Asset
def test_transfer_asset_schema(signed_transfer_tx):
    """A TRANSFER asset must reference a 64-char id and carry no data."""
    body = signed_transfer_tx.to_dict()
    validate(body)
    # A TRANSFER may not embed asset data of its own...
    body['asset']['data'] = {}
    validate_raises(body)
    # ...and its asset id must be a full 64-character hash.
    del body['asset']['data']
    body['asset']['id'] = 'b' * 63
    validate_raises(body)
def test_create_tx_no_asset_id(create_tx):
    """A CREATE tx must not carry an explicit asset id."""
    create_tx.asset['id'] = 'b' * 64
    validate_raises(create_tx)
def test_create_tx_asset_type(create_tx):
    """Asset data must be an object, not a bare string."""
    create_tx.asset['data'] = 'a'
    validate_raises(create_tx)
################################################################################
# Inputs
def test_no_inputs(create_tx):
    """Every transaction needs at least one input."""
    create_tx.inputs = []
    validate_raises(create_tx)
def test_create_single_input(create_tx):
    """A CREATE tx must have exactly one input: not two, not zero."""
    body = create_tx.to_dict()
    body['inputs'] = body['inputs'] * 2
    validate_raises(body)
    body['inputs'] = []
    validate_raises(body)
def test_create_tx_no_fulfills(create_tx):
    """A CREATE input may not fulfill a previous transaction's output."""
    body = create_tx.to_dict()
    body['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0}
    validate_raises(body)
def test_transfer_has_inputs(signed_transfer_tx):
    """A TRANSFER with no inputs fails validation."""
    signed_transfer_tx.inputs = []
    validate_raises(signed_transfer_tx)
################################################################################
# Outputs
def test_low_amounts(create_tx, signed_transfer_tx):
    """Amounts below 1 are rejected for both CREATE and TRANSFER:
    0 trips the model's AmountError; -1 fails the schema (the amount
    pattern admits digits only, so a minus sign can't appear)."""
    for tx in (create_tx, signed_transfer_tx):
        tx.outputs[0].amount = 0
        validate_raises(tx, AmountError)
        tx.outputs[0].amount = -1
        validate_raises(tx)
def test_high_amounts(create_tx):
    """The amount ceiling is 9 * 10**18 (the model's MAX_AMOUNT)."""
    ceiling = 9 * 10 ** 18
    # 10**21 has 22 digits, so the schema's 20-digit pattern rejects it
    # before it is ever converted to int (SchemaValidationError).
    create_tx.outputs[0].amount = 10 ** 21
    validate_raises(create_tx)
    # One over the ceiling passes the schema but trips the model check.
    create_tx.outputs[0].amount = ceiling + 1
    validate_raises(create_tx, AmountError)
    # Exactly at the ceiling is accepted.
    create_tx.outputs[0].amount = ceiling
    validate(create_tx)
################################################################################
# Version
def test_validate_version(create_tx):
    """A tx's version must match the server's short version (for now)."""
    import re
    import bigchaindb.version

    short_ver = bigchaindb.version.__short_version__
    expected = re.match(r'^(.*\d)', short_ver).group(1)
    assert create_tx.version == expected
    validate(create_tx)

    # At version 1, transaction version will break step with server version.
    create_tx.version = '1.0.0'
    validate_raises(create_tx)