diff --git a/bigchaindb/common/schema/transaction.yaml b/bigchaindb/common/schema/transaction.yaml index 86e5947b..f63b652e 100644 --- a/bigchaindb/common/schema/transaction.yaml +++ b/bigchaindb/common/schema/transaction.yaml @@ -132,7 +132,8 @@ definitions: - public_keys properties: amount: - type: integer + type: string + pattern: "^[0-9]{1,20}$" description: | Integral amount of the asset represented by this output. In the case of a non divisible asset, this will always be 1. @@ -158,10 +159,6 @@ definitions: "$ref": "#/definitions/public_keys" description: | List of public keys associated with the conditions on an output. - amount: - type: integer - description: | - Integral amount of the asset represented by this condition. input: type: "object" description: diff --git a/bigchaindb/common/transaction.py b/bigchaindb/common/transaction.py index 3b7be321..5feb6abf 100644 --- a/bigchaindb/common/transaction.py +++ b/bigchaindb/common/transaction.py @@ -212,6 +212,8 @@ class Output(object): owners before a Transaction was confirmed. """ + MAX_AMOUNT = 9 * 10 ** 18 + def __init__(self, fulfillment, public_keys=None, amount=1): """Create an instance of a :class:`~.Output`. 
@@ -232,6 +234,8 @@ class Output(object): raise TypeError('`amount` must be an int') if amount < 1: raise AmountError('`amount` must be greater than 0') + if amount > self.MAX_AMOUNT: + raise AmountError('`amount` must be <= %s' % self.MAX_AMOUNT) self.fulfillment = fulfillment self.amount = amount @@ -267,7 +271,7 @@ class Output(object): output = { 'public_keys': self.public_keys, 'condition': condition, - 'amount': self.amount + 'amount': str(self.amount), } return output @@ -384,7 +388,11 @@ class Output(object): except KeyError: # NOTE: Hashlock condition case fulfillment = data['condition']['uri'] - return cls(fulfillment, data['public_keys'], data['amount']) + try: + amount = int(data['amount']) + except ValueError: + raise AmountError('Invalid amount: %s' % data['amount']) + return cls(fulfillment, data['public_keys'], amount) class Transaction(object): diff --git a/docker-compose.yml b/docker-compose.yml index 322cbcf6..c7f3c584 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -37,6 +37,7 @@ services: - ./bigchaindb:/usr/src/app/bigchaindb - ./tests:/usr/src/app/tests - ./docs:/usr/src/app/docs + - ./k8s:/usr/src/app/k8s - ./setup.py:/usr/src/app/setup.py - ./setup.cfg:/usr/src/app/setup.cfg - ./pytest.ini:/usr/src/app/pytest.ini @@ -58,6 +59,7 @@ services: - ./bigchaindb:/usr/src/app/bigchaindb - ./tests:/usr/src/app/tests - ./docs:/usr/src/app/docs + - ./k8s:/usr/src/app/k8s - ./setup.py:/usr/src/app/setup.py - ./setup.cfg:/usr/src/app/setup.cfg - ./pytest.ini:/usr/src/app/pytest.ini diff --git a/docs/server/source/cloud-deployment-templates/index.rst b/docs/server/source/cloud-deployment-templates/index.rst index d5b60a0e..41eec0ed 100644 --- a/docs/server/source/cloud-deployment-templates/index.rst +++ b/docs/server/source/cloud-deployment-templates/index.rst @@ -18,3 +18,4 @@ If you find the cloud deployment templates for nodes helpful, then you may also add-node-on-kubernetes upgrade-on-kubernetes first-node + log-analytics diff --git 
a/docs/server/source/cloud-deployment-templates/log-analytics.rst b/docs/server/source/cloud-deployment-templates/log-analytics.rst new file mode 100644 index 00000000..fbef70d2 --- /dev/null +++ b/docs/server/source/cloud-deployment-templates/log-analytics.rst @@ -0,0 +1,256 @@ +Log Analytics on Azure +====================== + +This section documents how to create and configure a Log Analytics workspace on +Azure, for a Kubernetes-based deployment. + +The documented approach is based on an integration of Microsoft's Operations +Management Suite (OMS) with a Kubernetes-based Azure Container Service cluster. + +The :ref:`oms-k8s-references` section contains links to more detailed documentation on +Azure and Kubernetes. + +There are three main steps involved: + +1. Create a workspace (``LogAnalyticsOMS``). +2. Create a ``ContainersOMS`` solution under the workspace. +3. Deploy the OMS agent(s). + +Steps 1 and 2 rely on `Azure Resource Manager templates`_ and can be done with +one template so we'll cover them together. Step 3 relies on a +`Kubernetes DaemonSet`_ and will be covered separately. + +Minimum Requirements +-------------------- +This document assumes that you have already deployed a Kubernetes cluster, and +that you have the Kubernetes command line ``kubectl`` installed. + +Creating a workspace and adding a containers solution +----------------------------------------------------- +For the sake of this document and example, we'll assume an existing resource +group named: + +* ``resource_group`` + +and the workspace we'll create will be named: + +* ``work_space`` + +If you feel creative you may replace these names with more interesting ones. + +.. code-block:: bash + + $ az group deployment create --debug \ + --resource-group resource_group \ + --name "Microsoft.LogAnalyticsOMS" \ + --template-file log_analytics_oms.json \ + --parameters @log_analytics_oms.parameters.json + +An example of a simple template file (``--template-file``): + +..
code-block:: json + + { + "$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "type": "String" + }, + "workspaceName": { + "type": "String" + }, + "solutionType": { + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2015-03-20", + "type": "Microsoft.OperationalInsights/workspaces", + "name": "[parameters('workspaceName')]", + "location": "[resourceGroup().location]", + "properties": { + "sku": { + "name": "[parameters('sku')]" + } + }, + "resources": [ + { + "apiVersion": "2015-11-01-preview", + "location": "[resourceGroup().location]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "type": "Microsoft.OperationsManagement/solutions", + "id": "[Concat(resourceGroup().id, '/providers/Microsoft.OperationsManagement/solutions/', parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "dependsOn": [ + "[concat('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + ], + "properties": { + "workspaceResourceId": "[resourceId('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + }, + "plan": { + "publisher": "Microsoft", + "product": "[Concat('OMSGallery/', parameters('solutionType'))]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "promotionCode": "" + } + } + ] + } + ] + } + +An example of the associated parameter file (``--parameters``): + +..
code-block:: json + + { + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "value": "Free" + }, + "workspaceName": { + "value": "work_space" + }, + "solutionType": { + "value": "Containers" + } + } + } + +Deploying the OMS agent(s) +-------------------------- +In order to deploy an OMS agent two important pieces of information are needed: + +* workspace id +* workspace key + +Obtaining the workspace id: + +.. code-block:: bash + + $ az resource show \ + --resource-group resource_group \ + --resource-type Microsoft.OperationalInsights/workspaces \ + --name work_space \ + | grep customerId + "customerId": "12345678-1234-1234-1234-123456789012", + +Obtaining the workspace key: + +Until we figure out a way to do this via the command line please see instructions +under `Obtain your workspace ID and key +`_. + +Once you have the workspace id and key you can include them in the following +YAML file (:download:`oms-daemonset.yaml +<../../../../k8s/logging-and-monitoring/oms-daemonset.yaml>`): + +.. code-block:: yaml + + # oms-daemonset.yaml + apiVersion: extensions/v1beta1 + kind: DaemonSet + metadata: + name: omsagent + spec: + template: + metadata: + labels: + app: omsagent + spec: + containers: + - env: + - name: WSID + value: + - name: KEY + value: + image: microsoft/oms + name: omsagent + ports: + - containerPort: 25225 + protocol: TCP + securityContext: + privileged: true + volumeMounts: + - mountPath: /var/run/docker.sock + name: docker-sock + volumes: + - name: docker-sock + hostPath: + path: /var/run/docker.sock + +To deploy the agent simply run the following command: + +.. code-block:: bash + + $ kubectl create -f oms-daemonset.yaml + + +Some useful management tasks +---------------------------- +List workspaces: + +..
code-block:: bash + + $ az resource list \ + --resource-group resource_group \ + --resource-type Microsoft.OperationalInsights/workspaces + +List solutions: + +.. code-block:: bash + + $ az resource list \ + --resource-group resource_group \ + --resource-type Microsoft.OperationsManagement/solutions + +Deleting the containers solution: + +.. code-block:: bash + + $ az group deployment delete --debug \ + --resource-group resource_group \ + --name Microsoft.ContainersOMS + +.. code-block:: bash + + $ az resource delete \ + --resource-group resource_group \ + --resource-type Microsoft.OperationsManagement/solutions \ + --name "Containers(work_space)" + +Deleting the workspace: + +.. code-block:: bash + + $ az group deployment delete --debug \ + --resource-group resource_group \ + --name Microsoft.LogAnalyticsOMS + +.. code-block:: bash + + $ az resource delete \ + --resource-group resource_group \ + --resource-type Microsoft.OperationalInsights/workspaces \ + --name work_space + + +.. _oms-k8s-references: + +References +---------- + +* `Monitor an Azure Container Service cluster with Microsoft Operations Management Suite (OMS) `_ +* `Manage Log Analytics using Azure Resource Manager templates `_ +* `azure commands for deployments `_ + (``az group deployment``) +* `Understand the structure and syntax of Azure Resource Manager templates `_ +* `Kubernetes DaemonSet`_ + + + +.. _Azure Resource Manager templates: https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-group-authoring-templates +.. _Kubernetes DaemonSet: https://kubernetes.io/docs/concepts/workloads/controllers/daemonset/ diff --git a/docs/server/source/drivers-clients/index.rst b/docs/server/source/drivers-clients/index.rst index 127d6309..0bfde7ad 100644 --- a/docs/server/source/drivers-clients/index.rst +++ b/docs/server/source/drivers-clients/index.rst @@ -1,29 +1,27 @@ Drivers & Clients ================= -Currently, the only language-native driver is written in the Python language. 
+Libraries and Tools Maintained by the BigchainDB Team +----------------------------------------------------- -We also provide the Transaction CLI to be able to script the building of -transactions. You may be able to wrap this tool inside the language of -your choice, and then use the HTTP API directly to post transactions. - -If you use a language other than Python, you may want to look at the current -community projects listed below. - - -.. toctree:: - :maxdepth: 1 - - The Python Driver - Transaction CLI +* `The Python Driver `_ +* `The Transaction CLI `_ is + a command-line interface for building BigchainDB transactions. + You may be able to call it from inside the language of + your choice, and then use :ref:`the HTTP API ` + to post transactions. Community-Driven Libraries and Tools ------------------------------------ -Please note that some of these projects may be work in progress, but may -nevertheless be very useful. + +.. note:: + + Some of these projects are a work in progress, + but may still be useful. 
* `Javascript transaction builder `_ * `Haskell transaction builder `_ * `Go driver `_ * `Java driver `_ +* `Ruby driver `_ diff --git a/k8s/logging-and-monitoring/log_analytics_oms.json b/k8s/logging-and-monitoring/log_analytics_oms.json new file mode 100644 index 00000000..44b9d05a --- /dev/null +++ b/k8s/logging-and-monitoring/log_analytics_oms.json @@ -0,0 +1,49 @@ +{ + "$schema": "http://schema.management.azure.com/schemas/2014-04-01-preview/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "type": "String" + }, + "workspaceName": { + "type": "String" + }, + "solutionType": { + "type": "String" + } + }, + "resources": [ + { + "apiVersion": "2015-03-20", + "type": "Microsoft.OperationalInsights/workspaces", + "name": "[parameters('workspaceName')]", + "location": "[resourceGroup().location]", + "properties": { + "sku": { + "name": "[parameters('sku')]" + } + }, + "resources": [ + { + "apiVersion": "2015-11-01-preview", + "location": "[resourceGroup().location]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "type": "Microsoft.OperationsManagement/solutions", + "id": "[Concat(resourceGroup().id, '/providers/Microsoft.OperationsManagement/solutions/', parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "dependsOn": [ + "[concat('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + ], + "properties": { + "workspaceResourceId": "[resourceId('Microsoft.OperationalInsights/workspaces/', parameters('workspaceName'))]" + }, + "plan": { + "publisher": "Microsoft", + "product": "[Concat('OMSGallery/', parameters('solutionType'))]", + "name": "[Concat(parameters('solutionType'), '(', parameters('workspaceName'), ')')]", + "promotionCode": "" + } + } + ] + } + ] +} diff --git a/k8s/logging-and-monitoring/log_analytics_oms.parameters.json b/k8s/logging-and-monitoring/log_analytics_oms.parameters.json new file mode 100644 index 00000000..895cbb15 
--- /dev/null +++ b/k8s/logging-and-monitoring/log_analytics_oms.parameters.json @@ -0,0 +1,15 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "sku": { + "value": "Free" + }, + "workspaceName": { + "value": "rg-abc-logs" + }, + "solutionType": { + "value": "Containers" + } + } +} diff --git a/k8s/logging-and-monitoring/oms-daemonset.yaml b/k8s/logging-and-monitoring/oms-daemonset.yaml new file mode 100644 index 00000000..99cf14fe --- /dev/null +++ b/k8s/logging-and-monitoring/oms-daemonset.yaml @@ -0,0 +1,30 @@ +apiVersion: extensions/v1beta1 +kind: DaemonSet +metadata: + name: omsagent +spec: + template: + metadata: + labels: + app: omsagent + spec: + containers: + - env: + - name: WSID + value: + - name: KEY + value: + image: microsoft/oms + name: omsagent + ports: + - containerPort: 25225 + protocol: TCP + securityContext: + privileged: true + volumeMounts: + - mountPath: /var/run/docker.sock + name: docker-sock + volumes: + - name: docker-sock + hostPath: + path: /var/run/docker.sock diff --git a/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash b/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash index 3eb20633..ef3895ea 100755 --- a/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash +++ b/k8s/mongodb-backup-agent/container/mongodb_backup_agent_entrypoint.bash @@ -12,9 +12,10 @@ if [[ -z "${mms_api_key}" ]]; then fi sed -i '/mmsApiKey/d' $MONGODB_BACKUP_CONF_FILE +sed -i '/mothership/d' $MONGODB_BACKUP_CONF_FILE echo "mmsApiKey="${mms_api_key} >> $MONGODB_BACKUP_CONF_FILE +echo "mothership=api-backup.eu-west-1.mongodb.com" >> $MONGODB_BACKUP_CONF_FILE echo "INFO: starting mdb backup..." 
-exec mongodb-mms-backup-agent \ - -c $MONGODB_BACKUP_CONF_FILE +exec mongodb-mms-backup-agent -c $MONGODB_BACKUP_CONF_FILE diff --git a/k8s/nginx-3scale/nginx-3scale-dep.yaml b/k8s/nginx-3scale/nginx-3scale-dep.yaml index 8b1fa673..964cbf8b 100644 --- a/k8s/nginx-3scale/nginx-3scale-dep.yaml +++ b/k8s/nginx-3scale/nginx-3scale-dep.yaml @@ -19,7 +19,7 @@ spec: terminationGracePeriodSeconds: 10 containers: - name: nginx-3scale - image: bigchaindb/nginx_3scale:1.0 + image: bigchaindb/nginx_3scale:1.1 # TODO(Krish): Change later to IfNotPresent imagePullPolicy: Always env: @@ -68,6 +68,10 @@ spec: hostPort: 443 name: public-bdb-port protocol: TCP + - containerPort: 80 + hostPort: 80 + name: https-msg-port + protocol: TCP - containerPort: 8888 hostPort: 8888 name: health-check diff --git a/k8s/nginx-3scale/nginx-3scale-svc.yaml b/k8s/nginx-3scale/nginx-3scale-svc.yaml index db212222..9150c24d 100644 --- a/k8s/nginx-3scale/nginx-3scale-svc.yaml +++ b/k8s/nginx-3scale/nginx-3scale-svc.yaml @@ -14,6 +14,10 @@ spec: selector: app: ngx-instance-0-dep ports: + - port: 80 + targetPort: 80 + name: ngx-public-bdb-port-http + protocol: TCP - port: 443 targetPort: 443 name: ngx-public-bdb-port diff --git a/setup.py b/setup.py index eac9a066..c6a5f63d 100644 --- a/setup.py +++ b/setup.py @@ -102,7 +102,7 @@ setup( zip_safe=False, classifiers=[ - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Database', 'Topic :: Database :: Database Engines/Servers', diff --git a/snap/README.md b/snap/README.md new file mode 100644 index 00000000..14bfdbce --- /dev/null +++ b/snap/README.md @@ -0,0 +1,12 @@ +This is the packaging metadata for the BigchainDB snap. + +Snaps and the snap store allows for the secure installation of apps that work +in most Linux distributions. 
For more information, go to https://snapcraft.io/ + +To build and install this snap in Ubuntu 16.04: + + $ sudo apt install git snapcraft + $ git clone https://github.com/bigchaindb/bigchaindb + $ cd bigchaindb + $ snapcraft + $ sudo snap install *.snap --dangerous --devmode diff --git a/snap/snapcraft.yaml b/snap/snapcraft.yaml new file mode 100644 index 00000000..aa3a9bca --- /dev/null +++ b/snap/snapcraft.yaml @@ -0,0 +1,23 @@ +name: bigchaindb +version: master +summary: a scalable blockchain database +description: | + With high throughput, sub-second latency and powerful functionality to + automate business processes, BigchainDB looks, acts and feels like a database + with added blockchain characteristics. + +# grade must be 'stable' to release into candidate/stable channels +grade: devel +# strict confinement requires https://github.com/snapcore/snapd/pull/2749 +confinement: devmode + +apps: + bigchaindb: + command: bigchaindb + plugs: [network, network-bind] + +parts: + bigchaindb: + source: . 
+ plugin: python + build-packages: [g++, libffi-dev] diff --git a/tests/assets/test_digital_assets.py b/tests/assets/test_digital_assets.py index 7630ec6f..6378ed97 100644 --- a/tests/assets/test_digital_assets.py +++ b/tests/assets/test_digital_assets.py @@ -1,4 +1,3 @@ -from bigchaindb.common.exceptions import ValidationError import pytest import random @@ -19,18 +18,6 @@ def test_asset_transfer(b, user_pk, user_sk): assert tx_transfer_signed.asset['id'] == tx_create.id -def test_validate_bad_asset_creation(b, user_pk): - from bigchaindb.models import Transaction - - # `data` needs to be a dictionary - tx = Transaction.create([b.me], [([user_pk], 1)]) - tx.asset['data'] = 'a' - tx_signed = tx.sign([b.me_private]) - - with pytest.raises(ValidationError): - Transaction.from_dict(tx_signed.to_dict()) - - @pytest.mark.bdb @pytest.mark.usefixtures('inputs') def test_validate_transfer_asset_id_mismatch(b, user_pk, user_sk): @@ -91,19 +78,6 @@ def test_asset_id_mismatch(b, user_pk): Transaction.get_asset_id([tx1, tx2]) -def test_create_invalid_divisible_asset(b, user_pk, user_sk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import ValidationError - - # Asset amount must be more than 0 - tx = Transaction.create([user_pk], [([user_pk], 1)]) - tx.outputs[0].amount = 0 - tx.sign([user_sk]) - - with pytest.raises(ValidationError): - Transaction.from_dict(tx.to_dict()) - - def test_create_valid_divisible_asset(b, user_pk, user_sk): from bigchaindb.models import Transaction diff --git a/tests/assets/test_divisible_assets.py b/tests/assets/test_divisible_assets.py index 61c8d63f..007436ec 100644 --- a/tests/assets/test_divisible_assets.py +++ b/tests/assets/test_divisible_assets.py @@ -635,88 +635,3 @@ def test_divide(b, user_pk, user_sk): assert len(tx_transfer_signed.outputs) == 3 for output in tx_transfer_signed.outputs: assert output.amount == 1 - - -# Check that negative inputs are caught when creating a TRANSFER transaction 
-@pytest.mark.skip(reason='part of tx structural tests') -@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_transfer(b, user_pk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - tx_create = Transaction.create([b.me], [([user_pk], 3)]) - tx_create_signed = tx_create.sign([b.me_private]) - # create block - block = b.create_block([tx_create_signed]) - assert block.validate(b) == block - b.write_block(block) - # vote - vote = b.vote(block.id, b.get_last_voted_block().id, True) - b.write_vote(vote) - - with pytest.raises(AmountError): - Transaction.transfer(tx_create.to_inputs(), - [([b.me], 4), ([b.me], -1)], - asset_id=tx_create.id) - - -# Check that negative inputs are caught when validating a TRANSFER transaction -@pytest.mark.skip(reason='part of tx structural tests') -@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_transfer_validate(b, user_pk, user_sk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - tx_create = Transaction.create([b.me], [([user_pk], 3)]) - tx_create_signed = tx_create.sign([b.me_private]) - # create block - block = b.create_block([tx_create_signed]) - assert block.validate(b) == block - b.write_block(block) - # vote - vote = b.vote(block.id, b.get_last_voted_block().id, True) - b.write_vote(vote) - - # create a transfer transaction with 3 outputs and check if the amount - # of each output is 1 - tx_transfer = Transaction.transfer(tx_create.to_inputs(), - [([b.me], 4), ([b.me], 1)], - asset_id=tx_create.id) - tx_transfer.outputs[1].amount = -1 - tx_transfer_signed = tx_transfer.sign([user_sk]) - - with pytest.raises(AmountError): - tx_transfer_signed.validate(b) - - -# Check that negative inputs are caught when creating a CREATE transaction 
-@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_create(b, user_pk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - with pytest.raises(AmountError): - Transaction.create([b.me], [([user_pk], -3)]) - - -# Check that negative inputs are caught when validating a CREATE transaction -@pytest.mark.skip(reason='part of tx structural tests') -@pytest.mark.bdb -@pytest.mark.usefixtures('inputs') -def test_non_positive_amounts_on_create_validate(b, user_pk): - from bigchaindb.models import Transaction - from bigchaindb.common.exceptions import AmountError - - # CREATE divisible asset with 1 output with amount 3 - tx_create = Transaction.create([b.me], [([user_pk], 3)]) - tx_create.outputs[0].amount = -3 - tx_create_signed = tx_create.sign([b.me_private]) - - with pytest.raises(AmountError): - tx_create_signed.validate(b) diff --git a/tests/common/schema/test_transaction_schema.py b/tests/common/schema/test_transaction_schema.py deleted file mode 100644 index dca10e70..00000000 --- a/tests/common/schema/test_transaction_schema.py +++ /dev/null @@ -1,60 +0,0 @@ -from pytest import raises - -from bigchaindb.common.exceptions import SchemaValidationError -from bigchaindb.common.schema import validate_transaction_schema - - -def test_validate_transaction_create(create_tx): - validate_transaction_schema(create_tx.to_dict()) - - -def test_validate_transaction_signed_create(signed_create_tx): - validate_transaction_schema(signed_create_tx.to_dict()) - - -def test_validate_transaction_signed_transfer(signed_transfer_tx): - validate_transaction_schema(signed_transfer_tx.to_dict()) - - -def test_validate_transaction_fails(): - with raises(SchemaValidationError): - validate_transaction_schema({}) - - -def test_validate_fails_metadata_empty_dict(create_tx): - create_tx.metadata = {'a': 1} - validate_transaction_schema(create_tx.to_dict()) - 
create_tx.metadata = None - validate_transaction_schema(create_tx.to_dict()) - create_tx.metadata = {} - with raises(SchemaValidationError): - validate_transaction_schema(create_tx.to_dict()) - - -def test_transfer_asset_schema(signed_transfer_tx): - tx = signed_transfer_tx.to_dict() - validate_transaction_schema(tx) - tx['asset']['data'] = {} - with raises(SchemaValidationError): - validate_transaction_schema(tx) - del tx['asset']['data'] - tx['asset']['id'] = 'b' * 63 - with raises(SchemaValidationError): - validate_transaction_schema(tx) - - -def test_create_single_input(create_tx): - tx = create_tx.to_dict() - tx['inputs'] += tx['inputs'] - with raises(SchemaValidationError): - validate_transaction_schema(tx) - tx['inputs'] = [] - with raises(SchemaValidationError): - validate_transaction_schema(tx) - - -def test_create_tx_no_fulfills(create_tx): - tx = create_tx.to_dict() - tx['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0} - with raises(SchemaValidationError): - validate_transaction_schema(tx) diff --git a/tests/common/schema/test_vote_schema.py b/tests/common/schema/test_vote_schema.py deleted file mode 100644 index a9de9492..00000000 --- a/tests/common/schema/test_vote_schema.py +++ /dev/null @@ -1,13 +0,0 @@ -from pytest import raises - -from bigchaindb.common.exceptions import SchemaValidationError -from bigchaindb.common.schema import validate_vote_schema - - -def test_validate_vote(structurally_valid_vote): - validate_vote_schema(structurally_valid_vote) - - -def test_validate_vote_fails(): - with raises(SchemaValidationError): - validate_vote_schema({}) diff --git a/tests/common/schema/test_schema.py b/tests/common/test_schema.py similarity index 58% rename from tests/common/schema/test_schema.py rename to tests/common/test_schema.py index 3116fa7d..1db17170 100644 --- a/tests/common/schema/test_schema.py +++ b/tests/common/test_schema.py @@ -1,6 +1,18 @@ -from bigchaindb.common.schema import ( - TX_SCHEMA_COMMON, VOTE_SCHEMA, 
drop_schema_descriptions) +""" +This module is tests related to schema checking, but _not_ of granular schematic +properties related to validation. +""" +from pytest import raises + +from bigchaindb.common.exceptions import SchemaValidationError +from bigchaindb.common.schema import ( + TX_SCHEMA_COMMON, VOTE_SCHEMA, drop_schema_descriptions, + validate_transaction_schema, validate_vote_schema) + + +################################################################################ +# Test of schema utils def _test_additionalproperties(node, path=''): """ @@ -67,3 +79,37 @@ def test_drop_descriptions(): } drop_schema_descriptions(node) assert node == expected + + +################################################################################ +# Test call transaction schema + + +def test_validate_transaction_create(create_tx): + validate_transaction_schema(create_tx.to_dict()) + + +def test_validate_transaction_signed_create(signed_create_tx): + validate_transaction_schema(signed_create_tx.to_dict()) + + +def test_validate_transaction_signed_transfer(signed_transfer_tx): + validate_transaction_schema(signed_transfer_tx.to_dict()) + + +def test_validate_transaction_fails(): + with raises(SchemaValidationError): + validate_transaction_schema({}) + + +################################################################################ +# Test call vote schema + + +def test_validate_vote(structurally_valid_vote): + validate_vote_schema(structurally_valid_vote) + + +def test_validate_vote_fails(): + with raises(SchemaValidationError): + validate_vote_schema({}) diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index 205009ac..18750ad4 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -1,3 +1,8 @@ +""" +These are tests of the API of the Transaction class and associated classes. +Tests for transaction validation are separate. 
+""" + from pytest import raises @@ -78,7 +83,7 @@ def test_output_serialization(user_Ed25519, user_pub): 'details': user_Ed25519.to_dict(), }, 'public_keys': [user_pub], - 'amount': 1, + 'amount': '1', } cond = Output(user_Ed25519, [user_pub], 1) @@ -96,7 +101,7 @@ def test_output_deserialization(user_Ed25519, user_pub): 'details': user_Ed25519.to_dict() }, 'public_keys': [user_pub], - 'amount': 1, + 'amount': '1', } cond = Output.from_dict(cond) @@ -115,7 +120,7 @@ def test_output_hashlock_serialization(): 'uri': hashlock, }, 'public_keys': None, - 'amount': 1, + 'amount': '1', } cond = Output(hashlock, amount=1) @@ -135,7 +140,7 @@ def test_output_hashlock_deserialization(): 'uri': hashlock }, 'public_keys': None, - 'amount': 1, + 'amount': '1', } cond = Output.from_dict(cond) @@ -232,6 +237,7 @@ def test_generate_output_single_owner_with_output(user_pub): def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub): from bigchaindb.common.transaction import Output + from bigchaindb.common.exceptions import AmountError with raises(ValueError): Output.generate([], 1) @@ -241,6 +247,8 @@ def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub): Output.generate([[user_pub, [user2_pub, [user3_pub]]]], 1) with raises(ValueError): Output.generate([[user_pub]], 1) + with raises(AmountError): + Output.generate([[user_pub]], -1) def test_invalid_transaction_initialization(asset_definition): @@ -341,28 +349,6 @@ def test_transaction_deserialization(user_input, user_output, data): validate_transaction_model(tx) -def test_tx_serialization_with_incorrect_hash(utx): - from bigchaindb.common.transaction import Transaction - from bigchaindb.common.exceptions import InvalidHash - - utx_dict = utx.to_dict() - utx_dict['id'] = 'a' * 64 - with raises(InvalidHash): - Transaction.from_dict(utx_dict) - utx_dict.pop('id') - - -def test_tx_serialization_hash_function(tx): - import sha3 - import json - tx_dict = tx.to_dict() - 
tx_dict['inputs'][0]['fulfillment'] = None - del tx_dict['id'] - payload = json.dumps(tx_dict, skipkeys=False, sort_keys=True, - separators=(',', ':')) - assert sha3.sha3_256(payload.encode()).hexdigest() == tx.id - - def test_invalid_input_initialization(user_input, user_pub): from bigchaindb.common.transaction import Input @@ -984,35 +970,11 @@ def test_cant_add_empty_input(): tx.add_input(None) -def test_validate_version(utx): - import re - import bigchaindb.version - from .utils import validate_transaction_model - from bigchaindb.common.exceptions import SchemaValidationError +def test_output_from_dict_invalid_amount(user_output): + from bigchaindb.common.transaction import Output + from bigchaindb.common.exceptions import AmountError - short_ver = bigchaindb.version.__short_version__ - assert utx.version == re.match(r'^(.*\d)', short_ver).group(1) - - validate_transaction_model(utx) - - # At version 1, transaction version will break step with server version. - utx.version = '1.0.0' - with raises(SchemaValidationError): - validate_transaction_model(utx) - - -def test_create_tx_no_asset_id(b, utx): - from bigchaindb.common.exceptions import SchemaValidationError - from .utils import validate_transaction_model - utx.asset['id'] = 'b' * 64 - with raises(SchemaValidationError): - validate_transaction_model(utx) - - -def test_transfer_tx_asset_schema(transfer_utx): - from bigchaindb.common.exceptions import SchemaValidationError - from .utils import validate_transaction_model - tx = transfer_utx - tx.asset['data'] = {} - with raises(SchemaValidationError): - validate_transaction_model(tx) + out = user_output.to_dict() + out['amount'] = 'a' + with raises(AmountError): + Output.from_dict(out) diff --git a/tests/integration/test_federation.py b/tests/integration/test_federation.py new file mode 100644 index 00000000..00c59685 --- /dev/null +++ b/tests/integration/test_federation.py @@ -0,0 +1,207 @@ +from copy import deepcopy +import pytest +import random + +import 
bigchaindb +from bigchaindb.core import Bigchain +from contextlib import contextmanager +from bigchaindb.common.crypto import generate_key_pair +from tests.pipelines.stepping import create_stepper + + +################################################################################ +# Test setup code + + +@contextmanager +def federation(n): + """ + Return a list of Bigchain objects and pipeline steppers to represent + a BigchainDB federation + """ + keys = [generate_key_pair() for _ in range(n)] + config_orig = bigchaindb.config + + @contextmanager + def make_nodes(i): + """ + make_nodes is a recursive context manager. Essentially it is doing: + + with f(a[0]) as b0: + with f(a[1]) as b1: + with f(a[2]) as b2: + yield [b0, b1, b2] + + with an arbitrary depth. It is also temporarily patching global + configuration to simulate nodes with separate identities. + """ + nonlocal keys + if i == 0: + yield [] + else: + config = deepcopy(config_orig) + keys = [keys[-1]] + keys[:-1] # Rotate keys + config['keyring'] = [pub for _, pub in keys[1:]] + config['keypair']['private'] = keys[0][0] + config['keypair']['public'] = keys[0][1] + bigchaindb.config = config + stepper = create_stepper() + with stepper.start(): + node = (Bigchain(), stepper) + with make_nodes(i-1) as rest: + yield [node] + rest + + with make_nodes(n) as steppers: + bigchaindb.config = config_orig + yield zip(*steppers) + + +@pytest.fixture +def federation_3(): + with federation(3) as f: + yield f + + +def process_tx(steps): + steps.block_changefeed(timeout=1) + if steps.block_filter_tx(): + steps.block_validate_tx() + steps.block_create(timeout=True) + steps.block_write() + steps.block_delete_tx() + + +def input_single_create(b): + from bigchaindb.common.transaction import Transaction + metadata = {'r': random.random()} + tx = Transaction.create([b.me], [([b.me], 1)], metadata).sign([b.me_private]) + b.write_transaction(tx) + return tx + + +def process_vote(steps, result=None): + steps.vote_changefeed() + 
steps.vote_validate_block() + steps.vote_ungroup() + steps.vote_validate_tx() + if result is not None: + steps.queues['vote_vote'][0][0] = result + vote = steps.vote_vote() + steps.vote_write_vote() + return vote + + +################################################################################ +# Tests here on down + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_valid(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + process_vote(s2, False) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s0, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s1, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'valid' + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_invalid(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + process_vote(s1, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s2, False) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + process_vote(s0, False) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] is None + + +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_disagree_prev_block(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + process_vote(s0, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + s1.vote.last_voted_id = '5' * 64 + process_vote(s1, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + s2.vote.last_voted_id = '6' * 64 + process_vote(s2, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] is None + + +@pytest.mark.bdb 
+@pytest.mark.genesis +def test_elect_sybill(federation_3): + [bx, (s0, s1, s2)] = federation_3 + tx = input_single_create(bx[0]) + process_tx(s0) + process_tx(s1) + process_tx(s2) + # What we need is some votes from unknown nodes! + # Incorrectly signed votes are ineligible. + for s in [s0, s1, s2]: + s.vote.bigchain.me_private = generate_key_pair()[0] + process_vote(s0, True) + process_vote(s1, True) + process_vote(s2, True) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'undecided' + + +@pytest.mark.skip() +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_dos(federation_3): + """ + https://github.com/bigchaindb/bigchaindb/issues/1314 + Test that a node cannot block another node's opportunity to vote + on a block by writing an incorrectly signed vote + """ + raise NotImplementedError() + + +@pytest.mark.skip('Revisit when we have block election status cache') +@pytest.mark.bdb +@pytest.mark.genesis +def test_elect_bad_block_voters_list(federation_3): + """ + See https://github.com/bigchaindb/bigchaindb/issues/1224 + """ + [bx, (s0, s1, s2)] = federation_3 + b = s0.block.bigchain + # First remove other nodes from node 0 so that it self assigns the tx + b.nodes_except_me = [] + tx = input_single_create(b) + # Now create a block voters list which will not match other keyrings + b.nodes_except_me = [bx[1].me] + process_tx(s0) + process_vote(s0) + process_vote(s1) + process_vote(s2) + for i in range(3): + assert bx[i].get_transaction(tx.id, True)[1] == 'invalid' diff --git a/tests/pipelines/stepping.py b/tests/pipelines/stepping.py index 030863c6..36f68a6a 100644 --- a/tests/pipelines/stepping.py +++ b/tests/pipelines/stepping.py @@ -106,7 +106,7 @@ class MultipipesStepper: for item in items: if type(item) != tuple: item = (item,) - queue.append(item) + queue.append(list(item)) def step(self, name, **kwargs): """ Advance pipeline stage. Throws Empty if no data to consume. 
""" @@ -163,6 +163,8 @@ def _update_stepper(stepper, prefix, pipeline): n1 = (nodes + [None])[i+1] f = stepper.add_input if i == 0 else stepper.add_stage f(prefix, n0, n1) + # Expose pipeline state + setattr(stepper, prefix, nodes[-1].target.__self__) def create_stepper(): diff --git a/tests/test_models.py b/tests/test_block_model.py similarity index 100% rename from tests/test_models.py rename to tests/test_block_model.py diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py new file mode 100644 index 00000000..71ba2e5b --- /dev/null +++ b/tests/validation/test_transaction_structure.py @@ -0,0 +1,169 @@ +""" +All tests of transaction structure. The concern here is that transaction +structural / schematic issues are caught when reading a transaction +(ie going from dict -> transaction). +""" + +import pytest + +from bigchaindb.common.exceptions import (AmountError, InvalidHash, + SchemaValidationError) +from bigchaindb.models import Transaction + + +################################################################################ +# Helper functions + + +def validate(tx): + if isinstance(tx, Transaction): + tx = tx.to_dict() + Transaction.from_dict(tx) + + +def validate_raises(tx, exc=SchemaValidationError): + with pytest.raises(exc): + validate(tx) + + +# We should test that validation works when we expect it to +def test_validation_passes(create_tx): + validate(create_tx) + + +################################################################################ +# ID + + +def test_tx_serialization_hash_function(create_tx): + import sha3 + import json + tx = create_tx.to_dict() + tx['inputs'][0]['fulfillment'] = None + del tx['id'] + payload = json.dumps(tx, skipkeys=False, sort_keys=True, + separators=(',', ':')) + assert sha3.sha3_256(payload.encode()).hexdigest() == create_tx.id + + +def test_tx_serialization_with_incorrect_hash(create_tx): + tx = create_tx.to_dict() + tx['id'] = 'a' * 64 + validate_raises(tx, 
InvalidHash) + + +def test_tx_serialization_with_no_hash(create_tx): + tx = create_tx.to_dict() + del tx['id'] + validate_raises(tx) + + +################################################################################ +# Operation + +def test_validate_invalid_operation(create_tx): + create_tx.operation = 'something invalid' + validate_raises(create_tx) + + +################################################################################ +# Metadata + +def test_validate_fails_metadata_empty_dict(create_tx): + create_tx.metadata = {'a': 1} + validate(create_tx) + create_tx.metadata = None + validate(create_tx) + create_tx.metadata = {} + validate_raises(create_tx) + + +################################################################################ +# Asset + +def test_transfer_asset_schema(signed_transfer_tx): + tx = signed_transfer_tx.to_dict() + validate(tx) + tx['asset']['data'] = {} + validate_raises(tx) + del tx['asset']['data'] + tx['asset']['id'] = 'b' * 63 + validate_raises(tx) + + +def test_create_tx_no_asset_id(create_tx): + create_tx.asset['id'] = 'b' * 64 + validate_raises(create_tx) + + +def test_create_tx_asset_type(create_tx): + create_tx.asset['data'] = 'a' + validate_raises(create_tx) + + +################################################################################ +# Inputs + +def test_no_inputs(create_tx): + create_tx.inputs = [] + validate_raises(create_tx) + + +def test_create_single_input(create_tx): + tx = create_tx.to_dict() + tx['inputs'] += tx['inputs'] + validate_raises(tx) + tx['inputs'] = [] + validate_raises(tx) + + +def test_create_tx_no_fulfills(create_tx): + tx = create_tx.to_dict() + tx['inputs'][0]['fulfills'] = {'tx': 'a' * 64, 'output': 0} + validate_raises(tx) + + +def test_transfer_has_inputs(signed_transfer_tx): + signed_transfer_tx.inputs = [] + validate_raises(signed_transfer_tx) + + +################################################################################ +# Outputs + +def test_low_amounts(create_tx, 
signed_transfer_tx): + for tx in [create_tx, signed_transfer_tx]: + tx.outputs[0].amount = 0 + validate_raises(tx, AmountError) + tx.outputs[0].amount = -1 + validate_raises(tx) + + +def test_high_amounts(create_tx): + # Should raise a SchemaValidationError - don't want to allow ridiculously + # large numbers to get converted to int + create_tx.outputs[0].amount = 10 ** 21 + validate_raises(create_tx) + # Should raise AmountError + create_tx.outputs[0].amount = 9 * 10 ** 18 + 1 + validate_raises(create_tx, AmountError) + # Should pass + create_tx.outputs[0].amount -= 1 + validate(create_tx) + + +################################################################################ +# Version + +def test_validate_version(create_tx): + import re + import bigchaindb.version + + short_ver = bigchaindb.version.__short_version__ + assert create_tx.version == re.match(r'^(.*\d)', short_ver).group(1) + + validate(create_tx) + + # At version 1, transaction version will break step with server version. + create_tx.version = '1.0.0' + validate_raises(create_tx)