From 8abbef00fe6add9df1cf3dd6492ffbc4e1802786 Mon Sep 17 00:00:00 2001 From: Lorenz Herzberger <64837895+LaurentMontBlanc@users.noreply.github.com> Date: Thu, 18 Aug 2022 09:45:51 +0200 Subject: [PATCH] GitHub actions (#234) * creating first github action Signed-off-by: Lorenz Herzberger * fix syntax error Signed-off-by: Lorenz Herzberger * renamed action, using black stable Signed-off-by: Lorenz Herzberger * updated checkout action on workflow black Signed-off-by: Lorenz Herzberger * formatted code with black Signed-off-by: Lorenz Herzberger * replaced lint with black service Signed-off-by: Lorenz Herzberger * removed black service added black check to makefile Signed-off-by: Lorenz Herzberger * replaced flake8 with black Signed-off-by: Lorenz Herzberger * added pull_request to black actions trigger Signed-off-by: Lorenz Herzberger * replaced flake8 with black style checker (#212) * updated version number to 1.0.0 Signed-off-by: Lorenz Herzberger * creating first github action Signed-off-by: Lorenz Herzberger * fix syntax error Signed-off-by: Lorenz Herzberger * renamed action, using black stable Signed-off-by: Lorenz Herzberger * updated checkout action on workflow black Signed-off-by: Lorenz Herzberger * formatted code with black Signed-off-by: Lorenz Herzberger * version bumpt Signed-off-by: Lorenz Herzberger * removed some comments and unsused import Signed-off-by: Lorenz Herzberger * replaced lint with black service Signed-off-by: Lorenz Herzberger * removed black service added black check to makefile Signed-off-by: Lorenz Herzberger * replaced flake8 with black Signed-off-by: Lorenz Herzberger * added pull_request to black actions trigger Signed-off-by: Lorenz Herzberger * started on unit test workflow Signed-off-by: Lorenz Herzberger * removed run step Signed-off-by: Lorenz Herzberger * fixed typo Signed-off-by: Lorenz Herzberger * testing docker-compose Signed-off-by: Lorenz Herzberger * check docker-compose Signed-off-by: Lorenz Herzberger * try running 
pytest Signed-off-by: Lorenz Herzberger * check out -f Signed-off-by: Lorenz Herzberger * changed path Signed-off-by: Lorenz Herzberger * increased health check retries, added job dependency Signed-off-by: Lorenz Herzberger * added path to docker-compose.yml to test action Signed-off-by: Lorenz Herzberger * moved container startup to test step Signed-off-by: Lorenz Herzberger * added checkout step to test job Signed-off-by: Lorenz Herzberger * different kind of execution Signed-off-by: Lorenz Herzberger * checking build step Signed-off-by: Lorenz Herzberger * fixed missing keyword Signed-off-by: Lorenz Herzberger * added checkout to build step Signed-off-by: Lorenz Herzberger * storing artifacts Signed-off-by: Lorenz Herzberger * added needs Signed-off-by: Lorenz Herzberger * changed Dockerfile-dev to python-slim Signed-off-by: Lorenz Herzberger * added job matrix back in Signed-off-by: Lorenz Herzberger * added abci to build job matrix Signed-off-by: Lorenz Herzberger * updated test job steps Signed-off-by: Lorenz Herzberger * fixed typo Signed-off-by: Lorenz Herzberger * replaced docker exec with docker-compose exec for abci test Signed-off-by: Lorenz Herzberger * added first version of acceptance and integration test action Signed-off-by: Lorenz Herzberger * added runs-on Signed-off-by: Lorenz Herzberger * fixed syntax error Signed-off-by: Lorenz Herzberger * reverted to docker exec Signed-off-by: Lorenz Herzberger * added copyright notice and env to start container step Signed-off-by: Lorenz Herzberger * separated abci from non abci test job Signed-off-by: Lorenz Herzberger * renamed pytest workflow to unit-test Signed-off-by: Lorenz Herzberger * added codecov workflow Signed-off-by: Lorenz Herzberger * added pytest install to codecov step Signed-off-by: Lorenz Herzberger * added pip install Signed-off-by: Lorenz Herzberger * moved codecov to unit-test Signed-off-by: Lorenz Herzberger * show files Signed-off-by: Lorenz Herzberger * changed paths Signed-off-by: 
Lorenz Herzberger * removed debug job steps Signed-off-by: Lorenz Herzberger * renamed black to lint, added audit workflow Signed-off-by: Lorenz Herzberger * checking if dc down is necessary Signed-off-by: Lorenz Herzberger * removed dc down step from acceptance and integration Signed-off-by: Lorenz Herzberger * fixed lint error Signed-off-by: Lorenz Herzberger * added tox documentation to github acitons (#226) * added documentation job Signed-off-by: Lorenz Herzberger * added docs dependency install to docs workflow Signed-off-by: Lorenz Herzberger * add more dependencies Signed-off-by: Lorenz Herzberger * install rapidjson manually Signed-off-by: Lorenz Herzberger * added python-rapidjson to docs requirements text Signed-off-by: Lorenz Herzberger * changed gh config on tox.ini Signed-off-by: Lorenz Herzberger * added base58 to docs require Signed-off-by: Lorenz Herzberger * changed docs require to dev Signed-off-by: Lorenz Herzberger * reversed changes to docs require Signed-off-by: Lorenz Herzberger * changed gh to gh-actions Signed-off-by: Lorenz Herzberger * increased verbosity for debugging Signed-off-by: Lorenz Herzberger * added -e docsroot manually Signed-off-by: Lorenz Herzberger * removed verbosity Signed-off-by: Lorenz Herzberger * removed travis ci files Signed-off-by: Lorenz Herzberger * changed audit step to trigger on schedule Signed-off-by: Lorenz Herzberger Signed-off-by: Lorenz Herzberger Co-authored-by: enesturk --- .ci/travis-after-success.sh | 12 - .ci/travis-before-install.sh | 20 - .ci/travis-before-script.sh | 18 - .ci/travis-install.sh | 19 - .ci/travis_script.sh | 21 - .github/workflows/acceptance-test.yml | 21 + .github/workflows/audit.yml | 36 ++ .github/workflows/documenation.yml | 35 ++ .github/workflows/integration-test.yml | 18 + .github/workflows/lint.yml | 17 + .github/workflows/unit-test.yml | 109 ++++ .travis.yml | 64 -- Dockerfile-dev | 4 +- Makefile | 16 +- PYTHON_STYLE_GUIDE.md | 6 +- acceptance/python/src/test_basic.py | 46 
+- acceptance/python/src/test_divisible_asset.py | 95 ++- acceptance/python/src/test_double_spend.py | 22 +- acceptance/python/src/test_multiple_owners.py | 62 +- acceptance/python/src/test_naughty_strings.py | 72 ++- acceptance/python/src/test_stream.py | 20 +- acceptance/python/src/test_zenroom.py | 121 ++-- docker-compose.yml | 14 +- .../generate_http_server_api_documentation.py | 141 +++-- .../python-style-guide.md | 6 +- integration/python/src/helper/hosts.py | 3 +- integration/python/src/test_basic.py | 54 +- .../python/src/test_divisible_asset.py | 97 ++- integration/python/src/test_double_spend.py | 22 +- .../python/src/test_multiple_owners.py | 66 +- .../python/src/test_naughty_strings.py | 67 +- integration/python/src/test_stream.py | 20 +- integration/python/src/test_threshold.py | 167 +++-- integration/python/src/test_zenroom.py | 4 +- integration/scripts/genesis.py | 8 +- k8s/logging-and-monitoring/analyze.py | 37 +- planetmint/__init__.py | 2 +- planetmint/backend/connection.py | 105 ++-- planetmint/backend/localmongodb/__init__.py | 2 +- planetmint/backend/localmongodb/connection.py | 100 +-- planetmint/backend/localmongodb/convert.py | 5 +- planetmint/backend/localmongodb/query.py | 260 ++++---- planetmint/backend/localmongodb/schema.py | 62 +- planetmint/backend/query.py | 32 +- planetmint/backend/schema.py | 119 +++- planetmint/backend/tarantool/__init__.py | 2 +- planetmint/backend/tarantool/convert.py | 5 +- planetmint/backend/tarantool/query.py | 267 +++----- planetmint/backend/tarantool/schema.py | 200 +++--- .../backend/tarantool/transaction/tools.py | 75 +-- planetmint/backend/tarantool/utils.py | 6 +- planetmint/backend/utils.py | 10 +- planetmint/commands/election_types.py | 47 +- planetmint/commands/planetmint.py | 203 +++--- planetmint/commands/utils.py | 65 +- planetmint/config.py | 198 +++--- planetmint/config_utils.py | 46 +- planetmint/core.py | 120 ++-- planetmint/events.py | 6 +- planetmint/fastquery.py | 21 +- planetmint/lib.py | 
168 +++-- planetmint/log.py | 59 +- planetmint/models.py | 17 +- planetmint/parallel_validation.py | 14 +- planetmint/start.py | 9 +- planetmint/tendermint_utils.py | 23 +- planetmint/transactions/common/crypto.py | 17 +- planetmint/transactions/common/input.py | 64 +- planetmint/transactions/common/memoize.py | 8 +- .../transactions/common/schema/__init__.py | 25 +- planetmint/transactions/common/transaction.py | 148 ++--- .../transactions/common/transaction_link.py | 52 +- .../common/transaction_mode_types.py | 6 +- planetmint/transactions/common/utils.py | 6 +- .../transactions/types/assets/create.py | 62 +- .../transactions/types/assets/transfer.py | 78 +-- .../elections/chain_migration_election.py | 19 +- .../transactions/types/elections/election.py | 128 ++-- .../transactions/types/elections/vote.py | 12 +- planetmint/upsert_validator/__init__.py | 2 +- .../upsert_validator/validator_election.py | 27 +- .../upsert_validator/validator_utils.py | 49 +- planetmint/utils.py | 28 +- planetmint/validation.py | 2 +- planetmint/web/routes.py | 26 +- planetmint/web/server.py | 29 +- .../web/strip_content_type_middleware.py | 4 +- planetmint/web/views/assets.py | 19 +- planetmint/web/views/base.py | 18 +- planetmint/web/views/blocks.py | 10 +- planetmint/web/views/info.py | 45 +- planetmint/web/views/metadata.py | 21 +- planetmint/web/views/outputs.py | 13 +- planetmint/web/views/parameters.py | 32 +- planetmint/web/views/transactions.py | 16 +- planetmint/web/views/validators.py | 2 +- planetmint/web/websocket_dispatcher.py | 18 +- planetmint/web/websocket_server.py | 57 +- {.ci => scripts}/entrypoint.sh | 0 setup.cfg | 3 - setup.py | 53 +- tests/assets/test_digital_assets.py | 23 +- tests/assets/test_divisible_assets.py | 219 ++++--- tests/assets/test_zenroom_signing.py | 24 +- tests/backend/tarantool/test_queries.py | 126 ++-- tests/backend/test_connection.py | 11 +- tests/backend/test_generics.py | 42 +- tests/backend/test_utils.py | 10 +- 
tests/commands/conftest.py | 29 +- tests/commands/test_commands.py | 401 ++++++------ tests/commands/test_utils.py | 73 +-- tests/common/conftest.py | 291 +++++---- tests/common/test_memoize.py | 33 +- tests/common/test_schema.py | 93 +-- tests/common/test_transaction.py | 582 ++++++++---------- tests/conftest.py | 320 +++++----- tests/db/test_planetmint_api.py | 193 +++--- tests/elections/test_election.py | 201 +++--- tests/migrations/test_migration_election.py | 4 +- tests/tendermint/conftest.py | 9 +- tests/tendermint/test_core.py | 283 ++++----- tests/tendermint/test_fastquery.py | 36 +- tests/tendermint/test_integration.py | 71 +-- tests/tendermint/test_lib.py | 326 +++++----- tests/tendermint/test_utils.py | 34 +- tests/test_config_utils.py | 319 +++++----- tests/test_core.py | 63 +- tests/test_docs.py | 4 +- tests/test_events.py | 7 +- tests/test_parallel_validation.py | 27 +- tests/test_txlist.py | 28 +- tests/test_utils.py | 65 +- tests/upsert_validator/conftest.py | 19 +- .../test_upsert_validator_vote.py | 137 ++--- .../test_validator_election.py | 133 ++-- tests/utils.py | 72 +-- .../validation/test_transaction_structure.py | 83 +-- tests/web/conftest.py | 2 +- tests/web/test_assets.py | 35 +- tests/web/test_block_tendermint.py | 31 +- tests/web/test_blocks.py | 34 +- tests/web/test_content_type_middleware.py | 26 +- tests/web/test_info.py | 68 +- tests/web/test_metadata.py | 47 +- tests/web/test_outputs.py | 65 +- tests/web/test_parameters.py | 63 +- tests/web/test_server.py | 4 +- tests/web/test_transactions.py | 84 +-- tests/web/test_validators.py | 15 +- tests/web/test_websocket_server.py | 132 ++-- tox.ini | 18 +- 151 files changed, 4721 insertions(+), 5201 deletions(-) delete mode 100755 .ci/travis-after-success.sh delete mode 100755 .ci/travis-before-install.sh delete mode 100755 .ci/travis-before-script.sh delete mode 100755 .ci/travis-install.sh delete mode 100755 .ci/travis_script.sh create mode 100644 .github/workflows/acceptance-test.yml 
create mode 100644 .github/workflows/audit.yml create mode 100644 .github/workflows/documenation.yml create mode 100644 .github/workflows/integration-test.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/unit-test.yml delete mode 100644 .travis.yml rename {.ci => scripts}/entrypoint.sh (100%) diff --git a/.ci/travis-after-success.sh b/.ci/travis-after-success.sh deleted file mode 100755 index af77412..0000000 --- a/.ci/travis-after-success.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -# Copyright © 2020 Interplanetary Database Association e.V., -# Planetmint and IPDB software contributors. -# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) -# Code is Apache-2.0 and docs are CC-BY-4.0 - - -set -e -x - -if [[ -z ${TOXENV} ]] && [[ ${PLANETMINT_CI_ABCI} != 'enable' ]] && [[ ${PLANETMINT_ACCEPTANCE_TEST} != 'enable' ]]; then - codecov -v -f htmlcov/coverage.xml -fi diff --git a/.ci/travis-before-install.sh b/.ci/travis-before-install.sh deleted file mode 100755 index 4c53a86..0000000 --- a/.ci/travis-before-install.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash -# Copyright © 2020 Interplanetary Database Association e.V., -# Planetmint and IPDB software contributors. 
-# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) -# Code is Apache-2.0 and docs are CC-BY-4.0 - -if [[ -n ${TOXENV} ]]; then - sudo apt-get update - sudo apt-get install zsh -fi - -if [[ -z ${TOXENV} ]]; then - sudo apt-get update - sudo apt-get -y -o Dpkg::Options::="--force-confnew" install docker-ce - - sudo rm /usr/local/bin/docker-compose - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose - chmod +x docker-compose - sudo mv docker-compose /usr/local/bin -fi diff --git a/.ci/travis-before-script.sh b/.ci/travis-before-script.sh deleted file mode 100755 index bb55c38..0000000 --- a/.ci/travis-before-script.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -# Copyright © 2020 Interplanetary Database Association e.V., -# Planetmint and IPDB software contributors. -# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) -# Code is Apache-2.0 and docs are CC-BY-4.0 - - -set -e -x - -if [[ -z ${TOXENV} ]]; then - - if [[ ${PLANETMINT_CI_ABCI} == 'enable' ]]; then - docker-compose up -d planetmint - else - docker-compose up -d bdb - fi - -fi diff --git a/.ci/travis-install.sh b/.ci/travis-install.sh deleted file mode 100755 index 083f9bb..0000000 --- a/.ci/travis-install.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash -# Copyright © 2020 Interplanetary Database Association e.V., -# Planetmint and IPDB software contributors. 
-# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) -# Code is Apache-2.0 and docs are CC-BY-4.0 - - -set -e -x - -pip install --upgrade pip - -if [[ -n ${TOXENV} ]]; then - pip install --upgrade tox -elif [[ ${PLANETMINT_CI_ABCI} == 'enable' ]]; then - docker-compose build --no-cache --build-arg abci_status=enable planetmint -else - docker-compose build --no-cache planetmint - pip install --upgrade codecov -fi diff --git a/.ci/travis_script.sh b/.ci/travis_script.sh deleted file mode 100755 index 68398d6..0000000 --- a/.ci/travis_script.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -# Copyright © 2020 Interplanetary Database Association e.V., -# Planetmint and IPDB software contributors. -# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) -# Code is Apache-2.0 and docs are CC-BY-4.0 - - -set -e -x - -if [[ -n ${TOXENV} ]]; then - tox -e ${TOXENV} -elif [[ ${PLANETMINT_CI_ABCI} == 'enable' ]]; then - docker-compose exec planetmint pytest -v -m abci -elif [[ ${PLANETMINT_ACCEPTANCE_TEST} == 'enable' ]]; then - ./scripts/run-acceptance-test.sh -elif [[ ${PLANETMINT_INTEGRATION_TEST} == 'enable' ]]; then - docker-compose down # TODO: remove after ci optimization - ./scripts/run-integration-test.sh -else - docker-compose exec planetmint pytest -v --cov=planetmint --cov-report xml:htmlcov/coverage.xml -fi diff --git a/.github/workflows/acceptance-test.yml b/.github/workflows/acceptance-test.yml new file mode 100644 index 0000000..8ee1161 --- /dev/null +++ b/.github/workflows/acceptance-test.yml @@ -0,0 +1,21 @@ +# Copyright © 2020 Interplanetary Database Association e.V., +# Planetmint and IPDB software contributors. 
+# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) +# Code is Apache-2.0 and docs are CC-BY-4.0 + +name: Acceptance tests +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - name: Start container + run: docker-compose up -d planetmint + + - name: Run test + run: docker-compose -f docker-compose.yml run --rm python-acceptance pytest /src \ No newline at end of file diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml new file mode 100644 index 0000000..78a1622 --- /dev/null +++ b/.github/workflows/audit.yml @@ -0,0 +1,36 @@ +# Copyright © 2020 Interplanetary Database Association e.V., +# Planetmint and IPDB software contributors. +# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) +# Code is Apache-2.0 and docs are CC-BY-4.0 + +name: Audit +on: + schedule: + - cron: '0 2 * * *' + +jobs: + audit: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Install pip-audit + run: pip install --upgrade pip pip-audit + + - name: Install dependencies + run: pip install . + + - name: Create requirements.txt + run: pip freeze > requirements.txt + + - name: Audit dependencies + run: pip-audit + + \ No newline at end of file diff --git a/.github/workflows/documenation.yml b/.github/workflows/documenation.yml new file mode 100644 index 0000000..4cda540 --- /dev/null +++ b/.github/workflows/documenation.yml @@ -0,0 +1,35 @@ +# Copyright © 2020 Interplanetary Database Association e.V., +# Planetmint and IPDB software contributors. 
+# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) +# Code is Apache-2.0 and docs are CC-BY-4.0 + +name: Documentation +on: [push, pull_request] + +jobs: + documentation: + runs-on: ubuntu-latest + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - name: Setup python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + + - name: Install tox + run: python -m pip install --upgrade tox tox-gh-actions + + - name: Install dependencies + run: pip install .'[dev]' + + - name: Run tox + run: tox -e docsroot + + + + + + \ No newline at end of file diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml new file mode 100644 index 0000000..6ea5adb --- /dev/null +++ b/.github/workflows/integration-test.yml @@ -0,0 +1,18 @@ +# Copyright © 2020 Interplanetary Database Association e.V., +# Planetmint and IPDB software contributors. +# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) +# Code is Apache-2.0 and docs are CC-BY-4.0 + +name: Integration tests +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - name: Start test run + run: docker-compose -f docker-compose.integration.yml up test diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..43eaa30 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,17 @@ +# Copyright © 2020 Interplanetary Database Association e.V., +# Planetmint and IPDB software contributors. +# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) +# Code is Apache-2.0 and docs are CC-BY-4.0 + +name: Lint +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: psf/black@stable + with: + options: "--check -l 119" + src: "." 
diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml new file mode 100644 index 0000000..52adb0d --- /dev/null +++ b/.github/workflows/unit-test.yml @@ -0,0 +1,109 @@ +# Copyright © 2020 Interplanetary Database Association e.V., +# Planetmint and IPDB software contributors. +# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) +# Code is Apache-2.0 and docs are CC-BY-4.0 + +name: Unit tests +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + include: + - abci_enabled: "ABCI enabled" + abci: "enabled" + - abci_disabled: "ABCI disabled" + abci: "disabled" + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - name: Build container + run: | + if [[ "${{ matrix.abci }}" == "enabled" ]]; then + docker-compose -f docker-compose.yml build --no-cache --build-arg abci_status=enable planetmint + fi + if [[ "${{ matrix.abci }}" == "disabled" ]]; then + docker-compose -f docker-compose.yml build --no-cache planetmint + fi + + - name: Save image + run: docker save -o planetmint.tar planetmint_planetmint + + - name: Upload image + uses: actions/upload-artifact@v3 + with: + name: planetmint-abci-${{matrix.abci}} + path: planetmint.tar + retention-days: 5 + + + test-with-abci: + runs-on: ubuntu-latest + needs: build + strategy: + matrix: + include: + - db: "MongoDB with ABCI" + host: "mongodb" + port: 27017 + abci: "enabled" + - db: "Tarantool with ABCI" + host: "tarantool" + port: 3303 + abci: "enabled" + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - name: Download planetmint + uses: actions/download-artifact@v3 + with: + name: planetmint-abci-enabled + + - name: Load planetmint + run: docker load -i planetmint.tar + + - name: Start containers + run: docker-compose -f docker-compose.yml up -d planetmint + + - name: Run tests + run: docker exec planetmint_planetmint_1 pytest -v -m abci + + test-without-abci: + runs-on: ubuntu-latest + needs: build + 
strategy: + matrix: + include: + - db: "MongoDB without ABCI" + host: "mongodb" + port: 27017 + - db: "Tarantool without ABCI" + host: "tarantool" + port: 3303 + + steps: + - name: Check out repository code + uses: actions/checkout@v3 + + - name: Download planetmint + uses: actions/download-artifact@v3 + with: + name: planetmint-abci-disabled + + - name: Load planetmint + run: docker load -i planetmint.tar + + - name: Start containers + run: docker-compose -f docker-compose.yml up -d bdb + + - name: Run tests + run: docker exec planetmint_planetmint_1 pytest -v --cov=planetmint --cov-report xml:htmlcov/coverage.xml + + - name: Upload Coverage to Codecov + uses: codecov/codecov-action@v3 \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 534b6fd..0000000 --- a/.travis.yml +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright © 2020, 2021 Interplanetary Database Association e.V., -# Planetmint and IPDB software contributors. -# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) -# Code is Apache-2.0 and docs are CC-BY-4.0 - -sudo: required - -dist: focal - -services: - - docker - -language: python -cache: pip - -python: - - 3.9 - -env: - global: - - DOCKER_COMPOSE_VERSION=1.29.2 - matrix: - - TOXENV=flake8 - - TOXENV=docsroot - -matrix: - fast_finish: true - include: - - python: 3.9 - env: - - PLANETMINT_DATABASE_BACKEND=tarantool_db - - PLANETMINT_DATABASE_SSL= - - python: 3.9 - env: - - PLANETMINT_DATABASE_BACKEND=tarantool_db - - PLANETMINT_DATABASE_SSL= - - PLANETMINT_CI_ABCI=enable - - python: 3.9 - env: - - PLANETMINT_DATABASE_BACKEND=localmongodb - - PLANETMINT_DATABASE_SSL= - - python: 3.9 - env: - - PLANETMINT_DATABASE_BACKEND=localmongodb - - PLANETMINT_DATABASE_SSL= - - PLANETMINT_CI_ABCI=enable - - - python: 3.9 - env: - - PLANETMINT_ACCEPTANCE_TEST=enable - - python: 3.9 - env: - - PLANETMINT_INTEGRATION_TEST=enable - - -before_install: sudo .ci/travis-before-install.sh - -install: .ci/travis-install.sh - 
-before_script: .ci/travis-before-script.sh - -script: .ci/travis_script.sh - -after_success: .ci/travis-after-success.sh diff --git a/Dockerfile-dev b/Dockerfile-dev index 7ccb7dc..f2dc908 100644 --- a/Dockerfile-dev +++ b/Dockerfile-dev @@ -1,9 +1,9 @@ ARG python_version=3.9 -FROM python:${python_version} +FROM python:${python_version}-slim LABEL maintainer "contact@ipdb.global" RUN apt-get update \ - && apt-get install -y git zsh\ + && apt-get install -y git zsh curl\ && apt-get install -y tarantool-common\ && apt-get install -y vim build-essential cmake\ && pip install -U pip \ diff --git a/Makefile b/Makefile index b29ea0f..1df40b8 100644 --- a/Makefile +++ b/Makefile @@ -47,6 +47,7 @@ HELP := python -c "$$PRINT_HELP_PYSCRIPT" ECHO := /usr/bin/env echo IS_DOCKER_COMPOSE_INSTALLED := $(shell command -v docker-compose 2> /dev/null) +IS_BLACK_INSTALLED := $(shell command -v black 2> /dev/null) ################ # Main targets # @@ -70,8 +71,11 @@ stop: check-deps ## Stop Planetmint logs: check-deps ## Attach to the logs @$(DC) logs -f planetmint -lint: check-deps ## Lint the project - @$(DC) up lint +lint: check-py-deps ## Lint the project + black --check -l 119 . + +format: check-py-deps ## Format the project + black -l 119 . 
test: check-deps test-unit test-acceptance ## Run unit and acceptance tests @@ -132,3 +136,11 @@ ifndef IS_DOCKER_COMPOSE_INSTALLED @$(ECHO) @$(DC) # docker-compose is not installed, so we call it to generate an error and exit endif + +check-py-deps: +ifndef IS_BLACK_INSTALLED + @$(ECHO) "Error: black is not installed" + @$(ECHO) + @$(ECHO) "You need to activate your virtual environment and install the test dependencies" + black # black is not installed, so we call it to generate an error and exit +endif \ No newline at end of file diff --git a/PYTHON_STYLE_GUIDE.md b/PYTHON_STYLE_GUIDE.md index 65ffaf3..dff51f6 100644 --- a/PYTHON_STYLE_GUIDE.md +++ b/PYTHON_STYLE_GUIDE.md @@ -82,11 +82,11 @@ x = 'name: {}; score: {}'.format(name, n) we use the `format()` version. The [official Python documentation says](https://docs.python.org/2/library/stdtypes.html#str.format), "This method of string formatting is the new standard in Python 3, and should be preferred to the % formatting described in String Formatting Operations in new code." -## Running the Flake8 Style Checker +## Running the Black Style Checker -We use [Flake8](http://flake8.pycqa.org/en/latest/index.html) to check our Python code style. Once you have it installed, you can run it using: +We use [Black](https://black.readthedocs.io/en/stable/) to check our Python code style. Once you have it installed, you can run it using: ```text -flake8 --max-line-length 119 planetmint/ +black --check -l 119 . 
``` diff --git a/acceptance/python/src/test_basic.py b/acceptance/python/src/test_basic.py index 1b54348..9872362 100644 --- a/acceptance/python/src/test_basic.py +++ b/acceptance/python/src/test_basic.py @@ -31,7 +31,7 @@ def test_basic(): # connect to localhost, but you can override this value using the env variable # called `PLANETMINT_ENDPOINT`, a valid value must include the schema: # `https://example.com:9984` - bdb = Planetmint(os.environ.get('PLANETMINT_ENDPOINT')) + bdb = Planetmint(os.environ.get("PLANETMINT_ENDPOINT")) # ## Create keypairs # This test requires the interaction between two actors with their own keypair. @@ -41,33 +41,28 @@ def test_basic(): # ## Alice registers her bike in Planetmint # Alice has a nice bike, and here she creates the "digital twin" # of her bike. - bike = {'data': {'bicycle': {'serial_number': 420420}}} + bike = {"data": {"bicycle": {"serial_number": 420420}}} # She prepares a `CREATE` transaction... - prepared_creation_tx = bdb.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset=bike) + prepared_creation_tx = bdb.transactions.prepare(operation="CREATE", signers=alice.public_key, asset=bike) # ... and she fulfills it with her private key. - fulfilled_creation_tx = bdb.transactions.fulfill( - prepared_creation_tx, - private_keys=alice.private_key) + fulfilled_creation_tx = bdb.transactions.fulfill(prepared_creation_tx, private_keys=alice.private_key) # We will use the `id` of this transaction several time, so we store it in # a variable with a short and easy name - bike_id = fulfilled_creation_tx['id'] + bike_id = fulfilled_creation_tx["id"] # Now she is ready to send it to the Planetmint Network. sent_transfer_tx = bdb.transactions.send_commit(fulfilled_creation_tx) # And just to be 100% sure, she also checks if she can retrieve # it from the Planetmint node. 
- assert bdb.transactions.retrieve(bike_id), 'Cannot find transaction {}'.format(bike_id) + assert bdb.transactions.retrieve(bike_id), "Cannot find transaction {}".format(bike_id) # Alice is now the proud owner of one unspent asset. assert len(bdb.outputs.get(alice.public_key, spent=False)) == 1 - assert bdb.outputs.get(alice.public_key)[0]['transaction_id'] == bike_id + assert bdb.outputs.get(alice.public_key)[0]["transaction_id"] == bike_id # ## Alice transfers her bike to Bob # After registering her bike, Alice is ready to transfer it to Bob. @@ -75,11 +70,11 @@ def test_basic(): # A `TRANSFER` transaction contains a pointer to the original asset. The original asset # is identified by the `id` of the `CREATE` transaction that defined it. - transfer_asset = {'id': bike_id} + transfer_asset = {"id": bike_id} # Alice wants to spend the one and only output available, the one with index `0`. output_index = 0 - output = fulfilled_creation_tx['outputs'][output_index] + output = fulfilled_creation_tx["outputs"][output_index] # Here, she defines the `input` of the `TRANSFER` transaction. The `input` contains # several keys: @@ -87,29 +82,26 @@ def test_basic(): # - `fulfillment`, taken from the previous `CREATE` transaction. # - `fulfills`, that specifies which condition she is fulfilling. # - `owners_before`. - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_creation_tx['id']}, - 'owners_before': output['public_keys']} + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_creation_tx["id"]}, + "owners_before": output["public_keys"], + } # Now that all the elements are set, she creates the actual transaction... 
prepared_transfer_tx = bdb.transactions.prepare( - operation='TRANSFER', - asset=transfer_asset, - inputs=transfer_input, - recipients=bob.public_key) + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, recipients=bob.public_key + ) # ... and signs it with her private key. - fulfilled_transfer_tx = bdb.transactions.fulfill( - prepared_transfer_tx, - private_keys=alice.private_key) + fulfilled_transfer_tx = bdb.transactions.fulfill(prepared_transfer_tx, private_keys=alice.private_key) # She finally sends the transaction to a Planetmint node. sent_transfer_tx = bdb.transactions.send_commit(fulfilled_transfer_tx) # And just to be 100% sure, she also checks if she can retrieve # it from the Planetmint node. - assert bdb.transactions.retrieve(fulfilled_transfer_tx['id']) == sent_transfer_tx + assert bdb.transactions.retrieve(fulfilled_transfer_tx["id"]) == sent_transfer_tx # Now Alice has zero unspent transactions. assert len(bdb.outputs.get(alice.public_key, spent=False)) == 0 @@ -118,5 +110,5 @@ def test_basic(): assert len(bdb.outputs.get(bob.public_key, spent=False)) == 1 # Bob double checks what he got was the actual bike. - bob_tx_id = bdb.outputs.get(bob.public_key, spent=False)[0]['transaction_id'] + bob_tx_id = bdb.outputs.get(bob.public_key, spent=False)[0]["transaction_id"] assert bdb.transactions.retrieve(bob_tx_id) == sent_transfer_tx diff --git a/acceptance/python/src/test_divisible_asset.py b/acceptance/python/src/test_divisible_asset.py index 409788e..415a0bb 100644 --- a/acceptance/python/src/test_divisible_asset.py +++ b/acceptance/python/src/test_divisible_asset.py @@ -34,7 +34,7 @@ def test_divisible_assets(): # ## Set up a connection to Planetmint # Check [test_basic.py](./test_basic.html) to get some more details # about the endpoint. - bdb = Planetmint(os.environ.get('PLANETMINT_ENDPOINT')) + bdb = Planetmint(os.environ.get("PLANETMINT_ENDPOINT")) # Oh look, it is Alice again and she brought her friend Bob along. 
alice, bob = generate_keypair(), generate_keypair() @@ -48,13 +48,9 @@ def test_divisible_assets(): # the bike for one hour. bike_token = { - 'data': { - 'token_for': { - 'bike': { - 'serial_number': 420420 - } - }, - 'description': 'Time share token. Each token equals one hour of riding.', + "data": { + "token_for": {"bike": {"serial_number": 420420}}, + "description": "Time share token. Each token equals one hour of riding.", }, } @@ -62,28 +58,22 @@ def test_divisible_assets(): # Here, Alice defines in a tuple that she wants to assign # these 10 tokens to Bob. prepared_token_tx = bdb.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - recipients=[([bob.public_key], 10)], - asset=bike_token) + operation="CREATE", signers=alice.public_key, recipients=[([bob.public_key], 10)], asset=bike_token + ) # She fulfills and sends the transaction. - fulfilled_token_tx = bdb.transactions.fulfill( - prepared_token_tx, - private_keys=alice.private_key) + fulfilled_token_tx = bdb.transactions.fulfill(prepared_token_tx, private_keys=alice.private_key) bdb.transactions.send_commit(fulfilled_token_tx) # We store the `id` of the transaction to use it later on. - bike_token_id = fulfilled_token_tx['id'] + bike_token_id = fulfilled_token_tx["id"] # Let's check if the transaction was successful. - assert bdb.transactions.retrieve(bike_token_id), \ - 'Cannot find transaction {}'.format(bike_token_id) + assert bdb.transactions.retrieve(bike_token_id), "Cannot find transaction {}".format(bike_token_id) # Bob owns 10 tokens now. - assert bdb.transactions.retrieve(bike_token_id)['outputs'][0][ - 'amount'] == '10' + assert bdb.transactions.retrieve(bike_token_id)["outputs"][0]["amount"] == "10" # ## Bob wants to use the bike # Now that Bob got the tokens and the sun is shining, he wants to get out @@ -91,49 +81,45 @@ def test_divisible_assets(): # To use the bike he has to send the tokens back to Alice. 
# To learn about the details of transferring a transaction check out # [test_basic.py](./test_basic.html) - transfer_asset = {'id': bike_token_id} + transfer_asset = {"id": bike_token_id} output_index = 0 - output = fulfilled_token_tx['outputs'][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_token_tx[ - 'id']}, - 'owners_before': output['public_keys']} + output = fulfilled_token_tx["outputs"][output_index] + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_token_tx["id"]}, + "owners_before": output["public_keys"], + } # To use the tokens Bob has to reassign 7 tokens to himself and the # amount he wants to use to Alice. prepared_transfer_tx = bdb.transactions.prepare( - operation='TRANSFER', + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, - recipients=[([alice.public_key], 3), ([bob.public_key], 7)]) + recipients=[([alice.public_key], 3), ([bob.public_key], 7)], + ) # He signs and sends the transaction. - fulfilled_transfer_tx = bdb.transactions.fulfill( - prepared_transfer_tx, - private_keys=bob.private_key) + fulfilled_transfer_tx = bdb.transactions.fulfill(prepared_transfer_tx, private_keys=bob.private_key) sent_transfer_tx = bdb.transactions.send_commit(fulfilled_transfer_tx) # First, Bob checks if the transaction was successful. - assert bdb.transactions.retrieve( - fulfilled_transfer_tx['id']) == sent_transfer_tx + assert bdb.transactions.retrieve(fulfilled_transfer_tx["id"]) == sent_transfer_tx # There are two outputs in the transaction now. # The first output shows that Alice got back 3 tokens... - assert bdb.transactions.retrieve( - fulfilled_transfer_tx['id'])['outputs'][0]['amount'] == '3' + assert bdb.transactions.retrieve(fulfilled_transfer_tx["id"])["outputs"][0]["amount"] == "3" # ... while Bob still has 7 left. 
- assert bdb.transactions.retrieve( - fulfilled_transfer_tx['id'])['outputs'][1]['amount'] == '7' + assert bdb.transactions.retrieve(fulfilled_transfer_tx["id"])["outputs"][1]["amount"] == "7" # ## Bob wants to ride the bike again # It's been a week and Bob wants to right the bike again. # Now he wants to ride for 8 hours, that's a lot Bob! # He prepares the transaction again. - transfer_asset = {'id': bike_token_id} + transfer_asset = {"id": bike_token_id} # This time we need an `output_index` of 1, since we have two outputs # in the `fulfilled_transfer_tx` we created before. The first output with # index 0 is for Alice and the second output is for Bob. @@ -141,24 +127,21 @@ def test_divisible_assets(): # correct output with the correct amount of tokens. output_index = 1 - output = fulfilled_transfer_tx['outputs'][output_index] + output = fulfilled_transfer_tx["outputs"][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_transfer_tx['id']}, - 'owners_before': output['public_keys']} + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_transfer_tx["id"]}, + "owners_before": output["public_keys"], + } # This time Bob only provides Alice in the `recipients` because he wants # to spend all his tokens prepared_transfer_tx = bdb.transactions.prepare( - operation='TRANSFER', - asset=transfer_asset, - inputs=transfer_input, - recipients=[([alice.public_key], 8)]) + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, recipients=[([alice.public_key], 8)] + ) - fulfilled_transfer_tx = bdb.transactions.fulfill( - prepared_transfer_tx, - private_keys=bob.private_key) + fulfilled_transfer_tx = bdb.transactions.fulfill(prepared_transfer_tx, private_keys=bob.private_key) # Oh Bob, what have you done?! You tried to spend more tokens than you had. 
# Remember Bob, last time you spent 3 tokens already, @@ -169,10 +152,12 @@ def test_divisible_assets(): # Now Bob gets an error saying that the amount he wanted to spent is # higher than the amount of tokens he has left. assert error.value.args[0] == 400 - message = 'Invalid transaction (AmountError): The amount used in the ' \ - 'inputs `7` needs to be same as the amount used in the ' \ - 'outputs `8`' - assert error.value.args[2]['message'] == message + message = ( + "Invalid transaction (AmountError): The amount used in the " + "inputs `7` needs to be same as the amount used in the " + "outputs `8`" + ) + assert error.value.args[2]["message"] == message # We have to stop this test now, I am sorry, but Bob is pretty upset # about his mistake. See you next time :) diff --git a/acceptance/python/src/test_double_spend.py b/acceptance/python/src/test_double_spend.py index 8f8fab9..3478e4e 100644 --- a/acceptance/python/src/test_double_spend.py +++ b/acceptance/python/src/test_double_spend.py @@ -17,32 +17,30 @@ from planetmint_driver.crypto import generate_keypair def test_double_create(): - bdb = Planetmint(os.environ.get('PLANETMINT_ENDPOINT')) + bdb = Planetmint(os.environ.get("PLANETMINT_ENDPOINT")) alice = generate_keypair() results = queue.Queue() tx = bdb.transactions.fulfill( - bdb.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset={'data': {'uuid': str(uuid4())}}), - private_keys=alice.private_key) + bdb.transactions.prepare(operation="CREATE", signers=alice.public_key, asset={"data": {"uuid": str(uuid4())}}), + private_keys=alice.private_key, + ) def send_and_queue(tx): try: bdb.transactions.send_commit(tx) - results.put('OK') + results.put("OK") except planetmint_driver.exceptions.TransportError as e: - results.put('FAIL') + results.put("FAIL") - t1 = Thread(target=send_and_queue, args=(tx, )) - t2 = Thread(target=send_and_queue, args=(tx, )) + t1 = Thread(target=send_and_queue, args=(tx,)) + t2 = Thread(target=send_and_queue, 
args=(tx,)) t1.start() t2.start() results = [results.get(timeout=2), results.get(timeout=2)] - assert results.count('OK') == 1 - assert results.count('FAIL') == 1 + assert results.count("OK") == 1 + assert results.count("FAIL") == 1 diff --git a/acceptance/python/src/test_multiple_owners.py b/acceptance/python/src/test_multiple_owners.py index bd5e995..77e0464 100644 --- a/acceptance/python/src/test_multiple_owners.py +++ b/acceptance/python/src/test_multiple_owners.py @@ -31,7 +31,7 @@ def test_multiple_owners(): # ## Set up a connection to Planetmint # Check [test_basic.py](./test_basic.html) to get some more details # about the endpoint. - bdb = Planetmint(os.environ.get('PLANETMINT_ENDPOINT')) + bdb = Planetmint(os.environ.get("PLANETMINT_ENDPOINT")) # Hey Alice and Bob, nice to see you again! alice, bob = generate_keypair(), generate_keypair() @@ -41,40 +41,28 @@ def test_multiple_owners(): # high rents anymore. Bob suggests to get a dish washer for the # kitchen. Alice agrees and here they go, creating the asset for their # dish washer. - dw_asset = { - 'data': { - 'dish washer': { - 'serial_number': 1337 - } - } - } + dw_asset = {"data": {"dish washer": {"serial_number": 1337}}} # They prepare a `CREATE` transaction. To have multiple owners, both # Bob and Alice need to be the recipients. prepared_dw_tx = bdb.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - recipients=(alice.public_key, bob.public_key), - asset=dw_asset) + operation="CREATE", signers=alice.public_key, recipients=(alice.public_key, bob.public_key), asset=dw_asset + ) # Now they both sign the transaction by providing their private keys. # And send it afterwards. 
- fulfilled_dw_tx = bdb.transactions.fulfill( - prepared_dw_tx, - private_keys=[alice.private_key, bob.private_key]) + fulfilled_dw_tx = bdb.transactions.fulfill(prepared_dw_tx, private_keys=[alice.private_key, bob.private_key]) bdb.transactions.send_commit(fulfilled_dw_tx) # We store the `id` of the transaction to use it later on. - dw_id = fulfilled_dw_tx['id'] + dw_id = fulfilled_dw_tx["id"] # Let's check if the transaction was successful. - assert bdb.transactions.retrieve(dw_id), \ - 'Cannot find transaction {}'.format(dw_id) + assert bdb.transactions.retrieve(dw_id), "Cannot find transaction {}".format(dw_id) # The transaction should have two public keys in the outputs. - assert len( - bdb.transactions.retrieve(dw_id)['outputs'][0]['public_keys']) == 2 + assert len(bdb.transactions.retrieve(dw_id)["outputs"][0]["public_keys"]) == 2 # ## Alice and Bob transfer a transaction to Carol. # Alice and Bob save a lot of money living together. They often go out @@ -86,39 +74,33 @@ def test_multiple_owners(): # Alice and Bob prepare the transaction to transfer the dish washer to # Carol. - transfer_asset = {'id': dw_id} + transfer_asset = {"id": dw_id} output_index = 0 - output = fulfilled_dw_tx['outputs'][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_dw_tx[ - 'id']}, - 'owners_before': output['public_keys']} + output = fulfilled_dw_tx["outputs"][output_index] + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_dw_tx["id"]}, + "owners_before": output["public_keys"], + } # Now they create the transaction... prepared_transfer_tx = bdb.transactions.prepare( - operation='TRANSFER', - asset=transfer_asset, - inputs=transfer_input, - recipients=carol.public_key) + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, recipients=carol.public_key + ) # ... 
and sign it with their private keys, then send it. fulfilled_transfer_tx = bdb.transactions.fulfill( - prepared_transfer_tx, - private_keys=[alice.private_key, bob.private_key]) + prepared_transfer_tx, private_keys=[alice.private_key, bob.private_key] + ) sent_transfer_tx = bdb.transactions.send_commit(fulfilled_transfer_tx) # They check if the transaction was successful. - assert bdb.transactions.retrieve( - fulfilled_transfer_tx['id']) == sent_transfer_tx + assert bdb.transactions.retrieve(fulfilled_transfer_tx["id"]) == sent_transfer_tx # The owners before should include both Alice and Bob. - assert len( - bdb.transactions.retrieve(fulfilled_transfer_tx['id'])['inputs'][0][ - 'owners_before']) == 2 + assert len(bdb.transactions.retrieve(fulfilled_transfer_tx["id"])["inputs"][0]["owners_before"]) == 2 # While the new owner is Carol. - assert bdb.transactions.retrieve(fulfilled_transfer_tx['id'])[ - 'outputs'][0]['public_keys'][0] == carol.public_key + assert bdb.transactions.retrieve(fulfilled_transfer_tx["id"])["outputs"][0]["public_keys"][0] == carol.public_key diff --git a/acceptance/python/src/test_naughty_strings.py b/acceptance/python/src/test_naughty_strings.py index 6f1e93a..0bbb081 100644 --- a/acceptance/python/src/test_naughty_strings.py +++ b/acceptance/python/src/test_naughty_strings.py @@ -32,15 +32,36 @@ from planetmint_driver.exceptions import BadRequest naughty_strings = blns.all() skipped_naughty_strings = [ - '1.00', '$1.00', '-1.00', '-$1.00', '0.00', '0..0', '.', '0.0.0', - '-.', ",./;'[]\\-=", 'ثم نفس سقطت وبالتحديد،, جزيرتي باستخدام أن دنو. إذ هنا؟ الستار وتنصيب كان. أهّل ايطاليا، بريطانيا-فرنسا قد أخذ. 
سليمان، إتفاقية بين ما, يذكر الحدود أي بعد, معاملة بولندا، الإطلاق عل إيو.', - 'test\x00', 'Ṱ̺̺̕o͞ ̷i̲̬͇̪͙n̝̗͕v̟̜̘̦͟o̶̙̰̠kè͚̮̺̪̹̱̤ ̖t̝͕̳̣̻̪͞h̼͓̲̦̳̘̲e͇̣̰̦̬͎ ̢̼̻̱̘h͚͎͙̜̣̲ͅi̦̲̣̰̤v̻͍e̺̭̳̪̰-m̢iͅn̖̺̞̲̯̰d̵̼̟͙̩̼̘̳ ̞̥̱̳̭r̛̗̘e͙p͠r̼̞̻̭̗e̺̠̣͟s̘͇̳͍̝͉e͉̥̯̞̲͚̬͜ǹ̬͎͎̟̖͇̤t͍̬̤͓̼̭͘ͅi̪̱n͠g̴͉ ͏͉ͅc̬̟h͡a̫̻̯͘o̫̟̖͍̙̝͉s̗̦̲.̨̹͈̣', '̡͓̞ͅI̗̘̦͝n͇͇͙v̮̫ok̲̫̙͈i̖͙̭̹̠̞n̡̻̮̣̺g̲͈͙̭͙̬͎ ̰t͔̦h̞̲e̢̤ ͍̬̲͖f̴̘͕̣è͖ẹ̥̩l͖͔͚i͓͚̦͠n͖͍̗͓̳̮g͍ ̨o͚̪͡f̘̣̬ ̖̘͖̟͙̮c҉͔̫͖͓͇͖ͅh̵̤̣͚͔á̗̼͕ͅo̼̣̥s̱͈̺̖̦̻͢.̛̖̞̠̫̰', '̗̺͖̹̯͓Ṯ̤͍̥͇͈h̲́e͏͓̼̗̙̼̣͔ ͇̜̱̠͓͍ͅN͕͠e̗̱z̘̝̜̺͙p̤̺̹͍̯͚e̠̻̠͜r̨̤͍̺̖͔̖̖d̠̟̭̬̝͟i̦͖̩͓͔̤a̠̗̬͉̙n͚͜ ̻̞̰͚ͅh̵͉i̳̞v̢͇ḙ͎͟-҉̭̩̼͔m̤̭̫i͕͇̝̦n̗͙ḍ̟ ̯̲͕͞ǫ̟̯̰̲͙̻̝f ̪̰̰̗̖̭̘͘c̦͍̲̞͍̩̙ḥ͚a̮͎̟̙͜ơ̩̹͎s̤.̝̝ ҉Z̡̖̜͖̰̣͉̜a͖̰͙̬͡l̲̫̳͍̩g̡̟̼̱͚̞̬ͅo̗͜.̟', - '̦H̬̤̗̤͝e͜ ̜̥̝̻͍̟́w̕h̖̯͓o̝͙̖͎̱̮ ҉̺̙̞̟͈W̷̼̭a̺̪͍į͈͕̭͙̯̜t̶̼̮s̘͙͖̕ ̠̫̠B̻͍͙͉̳ͅe̵h̵̬͇̫͙i̹͓̳̳̮͎̫̕n͟d̴̪̜̖ ̰͉̩͇͙̲͞ͅT͖̼͓̪͢h͏͓̮̻e̬̝̟ͅ ̤̹̝W͙̞̝͔͇͝ͅa͏͓͔̹̼̣l̴͔̰̤̟͔ḽ̫.͕', '">', "'>", - '>', '', '< / script >< script >alert(document.title)< / script >', - ' onfocus=alert(document.title) autofocus ','" onfocus=alert(document.title) autofocus ', "' onfocus=alert(document.title) autofocus ", - '<script>alert(document.title)</script>', '/dev/null; touch /tmp/blns.fail ; echo', '../../../../../../../../../../../etc/passwd%00', - '../../../../../../../../../../../etc/hosts', '() { 0; }; touch /tmp/blns.shellshock1.fail;', - '() { _; } >_[$($())] { touch /tmp/blns.shellshock2.fail; }' + "1.00", + "$1.00", + "-1.00", + "-$1.00", + "0.00", + "0..0", + ".", + "0.0.0", + "-.", + ",./;'[]\\-=", + "ثم نفس سقطت وبالتحديد،, جزيرتي باستخدام أن دنو. إذ هنا؟ الستار وتنصيب كان. أهّل ايطاليا، بريطانيا-فرنسا قد أخذ. 
سليمان، إتفاقية بين ما, يذكر الحدود أي بعد, معاملة بولندا، الإطلاق عل إيو.", + "test\x00", + "Ṱ̺̺̕o͞ ̷i̲̬͇̪͙n̝̗͕v̟̜̘̦͟o̶̙̰̠kè͚̮̺̪̹̱̤ ̖t̝͕̳̣̻̪͞h̼͓̲̦̳̘̲e͇̣̰̦̬͎ ̢̼̻̱̘h͚͎͙̜̣̲ͅi̦̲̣̰̤v̻͍e̺̭̳̪̰-m̢iͅn̖̺̞̲̯̰d̵̼̟͙̩̼̘̳ ̞̥̱̳̭r̛̗̘e͙p͠r̼̞̻̭̗e̺̠̣͟s̘͇̳͍̝͉e͉̥̯̞̲͚̬͜ǹ̬͎͎̟̖͇̤t͍̬̤͓̼̭͘ͅi̪̱n͠g̴͉ ͏͉ͅc̬̟h͡a̫̻̯͘o̫̟̖͍̙̝͉s̗̦̲.̨̹͈̣", + "̡͓̞ͅI̗̘̦͝n͇͇͙v̮̫ok̲̫̙͈i̖͙̭̹̠̞n̡̻̮̣̺g̲͈͙̭͙̬͎ ̰t͔̦h̞̲e̢̤ ͍̬̲͖f̴̘͕̣è͖ẹ̥̩l͖͔͚i͓͚̦͠n͖͍̗͓̳̮g͍ ̨o͚̪͡f̘̣̬ ̖̘͖̟͙̮c҉͔̫͖͓͇͖ͅh̵̤̣͚͔á̗̼͕ͅo̼̣̥s̱͈̺̖̦̻͢.̛̖̞̠̫̰", + "̗̺͖̹̯͓Ṯ̤͍̥͇͈h̲́e͏͓̼̗̙̼̣͔ ͇̜̱̠͓͍ͅN͕͠e̗̱z̘̝̜̺͙p̤̺̹͍̯͚e̠̻̠͜r̨̤͍̺̖͔̖̖d̠̟̭̬̝͟i̦͖̩͓͔̤a̠̗̬͉̙n͚͜ ̻̞̰͚ͅh̵͉i̳̞v̢͇ḙ͎͟-҉̭̩̼͔m̤̭̫i͕͇̝̦n̗͙ḍ̟ ̯̲͕͞ǫ̟̯̰̲͙̻̝f ̪̰̰̗̖̭̘͘c̦͍̲̞͍̩̙ḥ͚a̮͎̟̙͜ơ̩̹͎s̤.̝̝ ҉Z̡̖̜͖̰̣͉̜a͖̰͙̬͡l̲̫̳͍̩g̡̟̼̱͚̞̬ͅo̗͜.̟", + "̦H̬̤̗̤͝e͜ ̜̥̝̻͍̟́w̕h̖̯͓o̝͙̖͎̱̮ ҉̺̙̞̟͈W̷̼̭a̺̪͍į͈͕̭͙̯̜t̶̼̮s̘͙͖̕ ̠̫̠B̻͍͙͉̳ͅe̵h̵̬͇̫͙i̹͓̳̳̮͎̫̕n͟d̴̪̜̖ ̰͉̩͇͙̲͞ͅT͖̼͓̪͢h͏͓̮̻e̬̝̟ͅ ̤̹̝W͙̞̝͔͇͝ͅa͏͓͔̹̼̣l̴͔̰̤̟͔ḽ̫.͕", + '">', + "'>", + ">", + "", + "< / script >< script >alert(document.title)< / script >", + " onfocus=alert(document.title) autofocus ", + '" onfocus=alert(document.title) autofocus ', + "' onfocus=alert(document.title) autofocus ", + "<script>alert(document.title)</script>", + "/dev/null; touch /tmp/blns.fail ; echo", + "../../../../../../../../../../../etc/passwd%00", + "../../../../../../../../../../../etc/hosts", + "() { 0; }; touch /tmp/blns.shellshock1.fail;", + "() { _; } >_[$($())] { touch /tmp/blns.shellshock2.fail; }", ] naughty_strings = [naughty for naughty in naughty_strings if naughty not in skipped_naughty_strings] @@ -50,22 +71,18 @@ def send_naughty_tx(asset, metadata): # ## Set up a connection to Planetmint # Check [test_basic.py](./test_basic.html) to get some more details # about the endpoint. - bdb = Planetmint(os.environ.get('PLANETMINT_ENDPOINT')) + bdb = Planetmint(os.environ.get("PLANETMINT_ENDPOINT")) # Here's Alice. 
alice = generate_keypair() # Alice is in a naughty mood today, so she creates a tx with some naughty strings prepared_transaction = bdb.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset=asset, - metadata=metadata) + operation="CREATE", signers=alice.public_key, asset=asset, metadata=metadata + ) # She fulfills the transaction - fulfilled_transaction = bdb.transactions.fulfill( - prepared_transaction, - private_keys=alice.private_key) + fulfilled_transaction = bdb.transactions.fulfill(prepared_transaction, private_keys=alice.private_key) # The fulfilled tx gets sent to the BDB network try: @@ -74,23 +91,24 @@ def send_naughty_tx(asset, metadata): sent_transaction = e # If her key contained a '.', began with a '$', or contained a NUL character - regex = '.*\..*|\$.*|.*\x00.*' + regex = ".*\..*|\$.*|.*\x00.*" key = next(iter(metadata)) if re.match(regex, key): # Then she expects a nicely formatted error code status_code = sent_transaction.status_code error = sent_transaction.error regex = ( - r'\{\s*\n*' + r"\{\s*\n*" r'\s*"message":\s*"Invalid transaction \(ValidationError\):\s*' - r'Invalid key name.*The key name cannot contain characters.*\n*' + r"Invalid key name.*The key name cannot contain characters.*\n*" r'\s*"status":\s*400\n*' - r'\s*\}\n*') + r"\s*\}\n*" + ) assert status_code == 400 assert re.fullmatch(regex, error), sent_transaction # Otherwise, she expects to see her transaction in the database - elif 'id' in sent_transaction.keys(): - tx_id = sent_transaction['id'] + elif "id" in sent_transaction.keys(): + tx_id = sent_transaction["id"] assert bdb.transactions.retrieve(tx_id) # If neither condition was true, then something weird happened... 
else: @@ -100,8 +118,8 @@ def send_naughty_tx(asset, metadata): @pytest.mark.parametrize("naughty_string", naughty_strings, ids=naughty_strings) def test_naughty_keys(naughty_string): - asset = {'data': {naughty_string: 'nice_value'}} - metadata = {naughty_string: 'nice_value'} + asset = {"data": {naughty_string: "nice_value"}} + metadata = {naughty_string: "nice_value"} send_naughty_tx(asset, metadata) @@ -109,7 +127,7 @@ def test_naughty_keys(naughty_string): @pytest.mark.parametrize("naughty_string", naughty_strings, ids=naughty_strings) def test_naughty_values(naughty_string): - asset = {'data': {'nice_key': naughty_string}} - metadata = {'nice_key': naughty_string} + asset = {"data": {"nice_key": naughty_string}} + metadata = {"nice_key": naughty_string} send_naughty_tx(asset, metadata) diff --git a/acceptance/python/src/test_stream.py b/acceptance/python/src/test_stream.py index c6d037b..93b0876 100644 --- a/acceptance/python/src/test_stream.py +++ b/acceptance/python/src/test_stream.py @@ -35,10 +35,10 @@ def test_stream(): # ## Set up the test # We use the env variable `BICHAINDB_ENDPOINT` to know where to connect. # Check [test_basic.py](./test_basic.html) for more information. - BDB_ENDPOINT = os.environ.get('PLANETMINT_ENDPOINT') + BDB_ENDPOINT = os.environ.get("PLANETMINT_ENDPOINT") # *That's pretty bad, but let's do like this for now.* - WS_ENDPOINT = 'ws://{}:9985/api/v1/streams/valid_transactions'.format(BDB_ENDPOINT.rsplit(':')[0]) + WS_ENDPOINT = "ws://{}:9985/api/v1/streams/valid_transactions".format(BDB_ENDPOINT.rsplit(":")[0]) bdb = Planetmint(BDB_ENDPOINT) @@ -90,11 +90,11 @@ def test_stream(): # random `uuid`. 
for _ in range(10): tx = bdb.transactions.fulfill( - bdb.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset={'data': {'uuid': str(uuid4())}}), - private_keys=alice.private_key) + bdb.transactions.prepare( + operation="CREATE", signers=alice.public_key, asset={"data": {"uuid": str(uuid4())}} + ), + private_keys=alice.private_key, + ) # We don't want to wait for each transaction to be in a block. By using # `async` mode, we make sure that the driver returns as soon as the # transaction is pushed to the Planetmint API. Remember: we expect all @@ -104,7 +104,7 @@ def test_stream(): bdb.transactions.send_async(tx) # The `id` of every sent transaction is then stored in a list. - sent.append(tx['id']) + sent.append(tx["id"]) # ## Check the valid transactions coming from Planetmint # Now we are ready to check if Planetmint did its job. A simple way to @@ -118,9 +118,9 @@ def test_stream(): # the timeout, then game over ¯\\\_(ツ)\_/¯ try: event = received.get(timeout=5) - txid = json.loads(event)['transaction_id'] + txid = json.loads(event)["transaction_id"] except queue.Empty: - assert False, 'Did not receive all expected transactions' + assert False, "Did not receive all expected transactions" # Last thing is to try to remove the `txid` from the set of sent # transactions. 
If this test is running in parallel with others, we diff --git a/acceptance/python/src/test_zenroom.py b/acceptance/python/src/test_zenroom.py index c5b33bf..914a2a7 100644 --- a/acceptance/python/src/test_zenroom.py +++ b/acceptance/python/src/test_zenroom.py @@ -9,106 +9,105 @@ from planetmint_driver import Planetmint from planetmint_driver.crypto import generate_keypair - - - -def test_zenroom_signing(gen_key_zencode, secret_key_to_private_key_zencode, - fulfill_script_zencode, zenroom_data, zenroom_house_assets, - condition_script_zencode): +def test_zenroom_signing( + gen_key_zencode, + secret_key_to_private_key_zencode, + fulfill_script_zencode, + zenroom_data, + zenroom_house_assets, + condition_script_zencode, +): biolabs = generate_keypair() - version = '2.0' - - alice = json.loads(zencode_exec(gen_key_zencode).output)['keyring'] - bob = json.loads(zencode_exec(gen_key_zencode).output)['keyring'] - - zen_public_keys = json.loads(zencode_exec(secret_key_to_private_key_zencode.format('Alice'), - keys=json.dumps({'keyring': alice})).output) - zen_public_keys.update(json.loads(zencode_exec(secret_key_to_private_key_zencode.format('Bob'), - keys=json.dumps({'keyring': bob})).output)) + version = "2.0" + alice = json.loads(zencode_exec(gen_key_zencode).output)["keyring"] + bob = json.loads(zencode_exec(gen_key_zencode).output)["keyring"] + zen_public_keys = json.loads( + zencode_exec(secret_key_to_private_key_zencode.format("Alice"), keys=json.dumps({"keyring": alice})).output + ) + zen_public_keys.update( + json.loads( + zencode_exec(secret_key_to_private_key_zencode.format("Bob"), keys=json.dumps({"keyring": bob})).output + ) + ) zenroomscpt = ZenroomSha256(script=fulfill_script_zencode, data=zenroom_data, keys=zen_public_keys) - print(F'zenroom is: {zenroomscpt.script}') - + print(f"zenroom is: {zenroomscpt.script}") + # CRYPTO-CONDITIONS: generate the condition uri - condition_uri_zen = zenroomscpt.condition.serialize_uri() - print(F'\nzenroom condition URI: 
{condition_uri_zen}') + condition_uri_zen = zenroomscpt.condition.serialize_uri() + print(f"\nzenroom condition URI: {condition_uri_zen}") # CRYPTO-CONDITIONS: construct an unsigned fulfillment dictionary unsigned_fulfillment_dict_zen = { - 'type': zenroomscpt.TYPE_NAME, - 'public_key': base58.b58encode(biolabs.public_key).decode(), + "type": zenroomscpt.TYPE_NAME, + "public_key": base58.b58encode(biolabs.public_key).decode(), } output = { - 'amount': '10', - 'condition': { - 'details': unsigned_fulfillment_dict_zen, - 'uri': condition_uri_zen, - + "amount": "10", + "condition": { + "details": unsigned_fulfillment_dict_zen, + "uri": condition_uri_zen, }, - 'public_keys': [biolabs.public_key,], + "public_keys": [ + biolabs.public_key, + ], } input_ = { - 'fulfillment': None, - 'fulfills': None, - 'owners_before': [biolabs.public_key,] + "fulfillment": None, + "fulfills": None, + "owners_before": [ + biolabs.public_key, + ], } - metadata = { - "result": { - "output": ["ok"] - } - } - + metadata = {"result": {"output": ["ok"]}} + token_creation_tx = { - 'operation': 'CREATE', - 'asset': zenroom_house_assets, - 'metadata': metadata, - 'outputs': [output,], - 'inputs': [input_,], - 'version': version, - 'id': None, + "operation": "CREATE", + "asset": zenroom_house_assets, + "metadata": metadata, + "outputs": [ + output, + ], + "inputs": [ + input_, + ], + "version": version, + "id": None, } # JSON: serialize the transaction-without-id to a json formatted string message = json.dumps( token_creation_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) # major workflow: # we store the fulfill script in the transaction/message (zenroom-sha) # the condition script is used to fulfill the transaction and create the signature - # + # # the server should ick the fulfill script and recreate the zenroom-sha and verify the signature - - message = zenroomscpt.sign(message, condition_script_zencode, alice) - 
assert(zenroomscpt.validate(message=message)) + assert zenroomscpt.validate(message=message) message = json.loads(message) fulfillment_uri_zen = zenroomscpt.serialize_uri() - - message['inputs'][0]['fulfillment'] = fulfillment_uri_zen + + message["inputs"][0]["fulfillment"] = fulfillment_uri_zen tx = message - tx['id'] = None - json_str_tx = json.dumps( - tx, - sort_keys=True, - skipkeys=False, - separators=(',', ':') - ) + tx["id"] = None + json_str_tx = json.dumps(tx, sort_keys=True, skipkeys=False, separators=(",", ":")) # SHA3: hash the serialized id-less transaction to generate the id shared_creation_txid = sha3_256(json_str_tx.encode()).hexdigest() - message['id'] = shared_creation_txid - + message["id"] = shared_creation_txid # `https://example.com:9984` - plntmnt = Planetmint(os.environ.get('PLANETMINT_ENDPOINT')) + plntmnt = Planetmint(os.environ.get("PLANETMINT_ENDPOINT")) sent_transfer_tx = plntmnt.transactions.send_commit(message) - print( f"\n\nstatus and result : + {sent_transfer_tx}") + print(f"\n\nstatus and result : + {sent_transfer_tx}") diff --git a/docker-compose.yml b/docker-compose.yml index 0d6d199..e7f7124 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -60,8 +60,8 @@ services: test: ["CMD", "bash", "-c", "curl http://planetmint:9984 && curl http://tendermint:26657/abci_query"] interval: 3s timeout: 5s - retries: 3 - command: '.ci/entrypoint.sh' + retries: 5 + command: 'scripts/entrypoint.sh' restart: always tendermint: @@ -119,16 +119,6 @@ services: volumes: - ./docs/root/build/html:/usr/share/nginx/html - # Lints project according to PEP8 - lint: - image: alpine/flake8 - command: --max-line-length 119 /planetmint /acceptance /integration /tests - volumes: - - ./planetmint:/planetmint - - ./acceptance:/acceptance - - ./integration:/integration - - ./tests:/tests - # Remove all build, test, coverage and Python artifacts clean: image: alpine diff --git a/docs/root/generate_http_server_api_documentation.py 
b/docs/root/generate_http_server_api_documentation.py index a51cae5..0bed660 100644 --- a/docs/root/generate_http_server_api_documentation.py +++ b/docs/root/generate_http_server_api_documentation.py @@ -20,28 +20,36 @@ from planetmint.web import server TPLS = {} -TPLS['index-response'] = """\ +TPLS[ + "index-response" +] = """\ HTTP/1.1 200 OK Content-Type: application/json %(index)s """ -TPLS['api-index-response'] = """\ +TPLS[ + "api-index-response" +] = """\ HTTP/1.1 200 OK Content-Type: application/json %(api_index)s """ -TPLS['get-tx-id-request'] = """\ +TPLS[ + "get-tx-id-request" +] = """\ GET /api/v1/transactions/%(txid)s HTTP/1.1 Host: example.com """ -TPLS['get-tx-id-response'] = """\ +TPLS[ + "get-tx-id-response" +] = """\ HTTP/1.1 200 OK Content-Type: application/json @@ -49,14 +57,18 @@ Content-Type: application/json """ -TPLS['get-tx-by-asset-request'] = """\ +TPLS[ + "get-tx-by-asset-request" +] = """\ GET /api/v1/transactions?operation=TRANSFER&asset_id=%(txid)s HTTP/1.1 Host: example.com """ -TPLS['get-tx-by-asset-response'] = """\ +TPLS[ + "get-tx-by-asset-response" +] = """\ HTTP/1.1 200 OK Content-Type: application/json @@ -64,7 +76,9 @@ Content-Type: application/json %(tx_transfer_last)s] """ -TPLS['post-tx-request'] = """\ +TPLS[ + "post-tx-request" +] = """\ POST /api/v1/transactions?mode=async HTTP/1.1 Host: example.com Content-Type: application/json @@ -73,7 +87,9 @@ Content-Type: application/json """ -TPLS['post-tx-response'] = """\ +TPLS[ + "post-tx-response" +] = """\ HTTP/1.1 202 Accepted Content-Type: application/json @@ -81,14 +97,18 @@ Content-Type: application/json """ -TPLS['get-block-request'] = """\ +TPLS[ + "get-block-request" +] = """\ GET /api/v1/blocks/%(blockid)s HTTP/1.1 Host: example.com """ -TPLS['get-block-response'] = """\ +TPLS[ + "get-block-response" +] = """\ HTTP/1.1 200 OK Content-Type: application/json @@ -96,14 +116,18 @@ Content-Type: application/json """ -TPLS['get-block-txid-request'] = """\ +TPLS[ + 
"get-block-txid-request" +] = """\ GET /api/v1/blocks?transaction_id=%(txid)s HTTP/1.1 Host: example.com """ -TPLS['get-block-txid-response'] = """\ +TPLS[ + "get-block-txid-response" +] = """\ HTTP/1.1 200 OK Content-Type: application/json @@ -112,7 +136,7 @@ Content-Type: application/json def main(): - """ Main function """ + """Main function""" ctx = {} @@ -121,90 +145,91 @@ def main(): client = server.create_app().test_client() - host = 'example.com:9984' + host = "example.com:9984" # HTTP Index - res = client.get('/', environ_overrides={'HTTP_HOST': host}) + res = client.get("/", environ_overrides={"HTTP_HOST": host}) res_data = json.loads(res.data.decode()) - ctx['index'] = pretty_json(res_data) + ctx["index"] = pretty_json(res_data) # API index - res = client.get('/api/v1/', environ_overrides={'HTTP_HOST': host}) - ctx['api_index'] = pretty_json(json.loads(res.data.decode())) + res = client.get("/api/v1/", environ_overrides={"HTTP_HOST": host}) + ctx["api_index"] = pretty_json(json.loads(res.data.decode())) # tx create - privkey = 'CfdqtD7sS7FgkMoGPXw55MVGGFwQLAoHYTcBhZDtF99Z' - pubkey = '4K9sWUMFwTgaDGPfdynrbxWqWS6sWmKbZoTjxLtVUibD' - asset = {'msg': 'Hello Planetmint!'} - tx = Create.generate([pubkey], [([pubkey], 1)], asset=asset, metadata={'sequence': 0}) + privkey = "CfdqtD7sS7FgkMoGPXw55MVGGFwQLAoHYTcBhZDtF99Z" + pubkey = "4K9sWUMFwTgaDGPfdynrbxWqWS6sWmKbZoTjxLtVUibD" + asset = {"msg": "Hello Planetmint!"} + tx = Create.generate([pubkey], [([pubkey], 1)], asset=asset, metadata={"sequence": 0}) tx = tx.sign([privkey]) - ctx['tx'] = pretty_json(tx.to_dict()) - ctx['public_keys'] = tx.outputs[0].public_keys[0] - ctx['txid'] = tx.id + ctx["tx"] = pretty_json(tx.to_dict()) + ctx["public_keys"] = tx.outputs[0].public_keys[0] + ctx["txid"] = tx.id # tx transfer - privkey_transfer = '3AeWpPdhEZzWLYfkfYHBfMFC2r1f8HEaGS9NtbbKssya' - pubkey_transfer = '3yfQPHeWAa1MxTX9Zf9176QqcpcnWcanVZZbaHb8B3h9' + privkey_transfer = 
"3AeWpPdhEZzWLYfkfYHBfMFC2r1f8HEaGS9NtbbKssya" + pubkey_transfer = "3yfQPHeWAa1MxTX9Zf9176QqcpcnWcanVZZbaHb8B3h9" cid = 0 - input_ = Input(fulfillment=tx.outputs[cid].fulfillment, - fulfills=TransactionLink(txid=tx.id, output=cid), - owners_before=tx.outputs[cid].public_keys) - tx_transfer = Transfer.generate([input_], [([pubkey_transfer], 1)], asset_id=tx.id, metadata={'sequence': 1}) + input_ = Input( + fulfillment=tx.outputs[cid].fulfillment, + fulfills=TransactionLink(txid=tx.id, output=cid), + owners_before=tx.outputs[cid].public_keys, + ) + tx_transfer = Transfer.generate([input_], [([pubkey_transfer], 1)], asset_id=tx.id, metadata={"sequence": 1}) tx_transfer = tx_transfer.sign([privkey]) - ctx['tx_transfer'] = pretty_json(tx_transfer.to_dict()) - ctx['public_keys_transfer'] = tx_transfer.outputs[0].public_keys[0] - ctx['tx_transfer_id'] = tx_transfer.id + ctx["tx_transfer"] = pretty_json(tx_transfer.to_dict()) + ctx["public_keys_transfer"] = tx_transfer.outputs[0].public_keys[0] + ctx["tx_transfer_id"] = tx_transfer.id # privkey_transfer_last = 'sG3jWDtdTXUidBJK53ucSTrosktG616U3tQHBk81eQe' - pubkey_transfer_last = '3Af3fhhjU6d9WecEM9Uw5hfom9kNEwE7YuDWdqAUssqm' + pubkey_transfer_last = "3Af3fhhjU6d9WecEM9Uw5hfom9kNEwE7YuDWdqAUssqm" cid = 0 - input_ = Input(fulfillment=tx_transfer.outputs[cid].fulfillment, - fulfills=TransactionLink(txid=tx_transfer.id, output=cid), - owners_before=tx_transfer.outputs[cid].public_keys) - tx_transfer_last = Transfer.generate([input_], [([pubkey_transfer_last], 1)], - asset_id=tx.id, metadata={'sequence': 2}) + input_ = Input( + fulfillment=tx_transfer.outputs[cid].fulfillment, + fulfills=TransactionLink(txid=tx_transfer.id, output=cid), + owners_before=tx_transfer.outputs[cid].public_keys, + ) + tx_transfer_last = Transfer.generate( + [input_], [([pubkey_transfer_last], 1)], asset_id=tx.id, metadata={"sequence": 2} + ) tx_transfer_last = tx_transfer_last.sign([privkey_transfer]) - ctx['tx_transfer_last'] = 
pretty_json(tx_transfer_last.to_dict()) - ctx['tx_transfer_last_id'] = tx_transfer_last.id - ctx['public_keys_transfer_last'] = tx_transfer_last.outputs[0].public_keys[0] + ctx["tx_transfer_last"] = pretty_json(tx_transfer_last.to_dict()) + ctx["tx_transfer_last_id"] = tx_transfer_last.id + ctx["public_keys_transfer_last"] = tx_transfer_last.outputs[0].public_keys[0] # block node_private = "5G2kE1zJAgTajkVSbPAQWo4c2izvtwqaNHYsaNpbbvxX" node_public = "DngBurxfeNVKZWCEcDnLj1eMPAS7focUZTE5FndFGuHT" signature = "53wxrEQDYk1dXzmvNSytbCfmNVnPqPkDQaTnAe8Jf43s6ssejPxezkCvUnGTnduNUmaLjhaan1iRLi3peu6s5DzA" - app_hash = 'f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056' + app_hash = "f6e0c49c6d94d6924351f25bb334cf2a99af4206339bf784e741d1a5ab599056" block = lib.Block(height=1, transactions=[tx.to_dict()], app_hash=app_hash) block_dict = block._asdict() - block_dict.pop('app_hash') - ctx['block'] = pretty_json(block_dict) - ctx['blockid'] = block.height + block_dict.pop("app_hash") + ctx["block"] = pretty_json(block_dict) + ctx["blockid"] = block.height # block status - block_list = [ - block.height - ] - ctx['block_list'] = pretty_json(block_list) + block_list = [block.height] + ctx["block_list"] = pretty_json(block_list) - - base_path = os.path.join(os.path.dirname(__file__), - 'source/connecting/http-samples') + base_path = os.path.join(os.path.dirname(__file__), "source/connecting/http-samples") if not os.path.exists(base_path): os.makedirs(base_path) for name, tpl in TPLS.items(): - path = os.path.join(base_path, name + '.http') + path = os.path.join(base_path, name + ".http") code = tpl % ctx - with open(path, 'w') as handle: + with open(path, "w") as handle: handle.write(code) def setup(*_): - """ Fool sphinx into think it's an extension muahaha """ + """Fool sphinx into think it's an extension muahaha""" main() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git 
a/docs/root/source/contributing/cross-project-policies/python-style-guide.md b/docs/root/source/contributing/cross-project-policies/python-style-guide.md index 65ffaf3..dff51f6 100644 --- a/docs/root/source/contributing/cross-project-policies/python-style-guide.md +++ b/docs/root/source/contributing/cross-project-policies/python-style-guide.md @@ -82,11 +82,11 @@ x = 'name: {}; score: {}'.format(name, n) we use the `format()` version. The [official Python documentation says](https://docs.python.org/2/library/stdtypes.html#str.format), "This method of string formatting is the new standard in Python 3, and should be preferred to the % formatting described in String Formatting Operations in new code." -## Running the Flake8 Style Checker +## Running the Black Style Checker -We use [Flake8](http://flake8.pycqa.org/en/latest/index.html) to check our Python code style. Once you have it installed, you can run it using: +We use [Black](https://black.readthedocs.io/en/stable/) to check our Python code style. Once you have it installed, you can run it using: ```text -flake8 --max-line-length 119 planetmint/ +black --check -l 119 . 
``` diff --git a/integration/python/src/helper/hosts.py b/integration/python/src/helper/hosts.py index b14f875..a76e238 100644 --- a/integration/python/src/helper/hosts.py +++ b/integration/python/src/helper/hosts.py @@ -32,5 +32,4 @@ class Hosts: def assert_transaction(self, tx_id) -> None: txs = self.get_transactions(tx_id) for tx in txs: - assert txs[0] == tx, \ - 'Cannot find transaction {}'.format(tx_id) + assert txs[0] == tx, "Cannot find transaction {}".format(tx_id) diff --git a/integration/python/src/test_basic.py b/integration/python/src/test_basic.py index 691dbc3..e5b4e15 100644 --- a/integration/python/src/test_basic.py +++ b/integration/python/src/test_basic.py @@ -14,7 +14,7 @@ import time def test_basic(): # Setup up connection to Planetmint integration test nodes - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm_alpha = hosts.get_connection() # genarate a keypair @@ -22,62 +22,64 @@ def test_basic(): # create a digital asset for Alice game_boy_token = { - 'data': { - 'hash': '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', - 'storageID': '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', + "data": { + "hash": "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + "storageID": "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", }, } # prepare the transaction with the digital asset and issue 10 tokens to bob prepared_creation_tx = pm_alpha.transactions.prepare( - operation='CREATE', + operation="CREATE", metadata={ - 'hash': '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', - 'storageID': '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', }, + "hash": "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + "storageID": "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + }, signers=alice.public_key, recipients=[([alice.public_key], 10)], - asset=game_boy_token) + asset=game_boy_token, + ) # fulfill and send the transaction - fulfilled_creation_tx = pm_alpha.transactions.fulfill( - prepared_creation_tx, - private_keys=alice.private_key) + fulfilled_creation_tx = 
pm_alpha.transactions.fulfill(prepared_creation_tx, private_keys=alice.private_key) pm_alpha.transactions.send_commit(fulfilled_creation_tx) time.sleep(1) - creation_tx_id = fulfilled_creation_tx['id'] + creation_tx_id = fulfilled_creation_tx["id"] # Assert that transaction is stored on all planetmint nodes hosts.assert_transaction(creation_tx_id) # Transfer # create the output and inout for the transaction - transfer_asset = {'id': creation_tx_id} + transfer_asset = {"id": creation_tx_id} output_index = 0 - output = fulfilled_creation_tx['outputs'][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': transfer_asset['id']}, - 'owners_before': output['public_keys']} + output = fulfilled_creation_tx["outputs"][output_index] + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": transfer_asset["id"]}, + "owners_before": output["public_keys"], + } # prepare the transaction and use 3 tokens prepared_transfer_tx = pm_alpha.transactions.prepare( - operation='TRANSFER', + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, - metadata={'hash': '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', - 'storageID': '0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF', }, - recipients=[([alice.public_key], 10)]) + metadata={ + "hash": "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + "storageID": "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + }, + recipients=[([alice.public_key], 10)], + ) # fulfill and send the transaction - fulfilled_transfer_tx = pm_alpha.transactions.fulfill( - prepared_transfer_tx, - private_keys=alice.private_key) + fulfilled_transfer_tx = pm_alpha.transactions.fulfill(prepared_transfer_tx, private_keys=alice.private_key) sent_transfer_tx = pm_alpha.transactions.send_commit(fulfilled_transfer_tx) time.sleep(1) - transfer_tx_id = sent_transfer_tx['id'] + transfer_tx_id = sent_transfer_tx["id"] # 
Assert that transaction is stored on both planetmint nodes hosts.assert_transaction(transfer_tx_id) diff --git a/integration/python/src/test_divisible_asset.py b/integration/python/src/test_divisible_asset.py index ca3427e..dfeb8e8 100644 --- a/integration/python/src/test_divisible_asset.py +++ b/integration/python/src/test_divisible_asset.py @@ -33,7 +33,7 @@ def test_divisible_assets(): # ## Set up a connection to Planetmint # Check [test_basic.py](./test_basic.html) to get some more details # about the endpoint. - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm = hosts.get_connection() # Oh look, it is Alice again and she brought her friend Bob along. @@ -48,13 +48,9 @@ def test_divisible_assets(): # the bike for one hour. bike_token = { - 'data': { - 'token_for': { - 'bike': { - 'serial_number': 420420 - } - }, - 'description': 'Time share token. Each token equals one hour of riding.', + "data": { + "token_for": {"bike": {"serial_number": 420420}}, + "description": "Time share token. Each token equals one hour of riding.", }, } @@ -62,28 +58,22 @@ def test_divisible_assets(): # Here, Alice defines in a tuple that she wants to assign # these 10 tokens to Bob. prepared_token_tx = pm.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - recipients=[([bob.public_key], 10)], - asset=bike_token) + operation="CREATE", signers=alice.public_key, recipients=[([bob.public_key], 10)], asset=bike_token + ) # She fulfills and sends the transaction. - fulfilled_token_tx = pm.transactions.fulfill( - prepared_token_tx, - private_keys=alice.private_key) + fulfilled_token_tx = pm.transactions.fulfill(prepared_token_tx, private_keys=alice.private_key) pm.transactions.send_commit(fulfilled_token_tx) # We store the `id` of the transaction to use it later on. - bike_token_id = fulfilled_token_tx['id'] + bike_token_id = fulfilled_token_tx["id"] # Let's check if the transaction was successful. 
- assert pm.transactions.retrieve(bike_token_id), \ - 'Cannot find transaction {}'.format(bike_token_id) + assert pm.transactions.retrieve(bike_token_id), "Cannot find transaction {}".format(bike_token_id) # Bob owns 10 tokens now. - assert pm.transactions.retrieve(bike_token_id)['outputs'][0][ - 'amount'] == '10' + assert pm.transactions.retrieve(bike_token_id)["outputs"][0]["amount"] == "10" # ## Bob wants to use the bike # Now that Bob got the tokens and the sun is shining, he wants to get out @@ -91,51 +81,47 @@ def test_divisible_assets(): # To use the bike he has to send the tokens back to Alice. # To learn about the details of transferring a transaction check out # [test_basic.py](./test_basic.html) - transfer_asset = {'id': bike_token_id} + transfer_asset = {"id": bike_token_id} output_index = 0 - output = fulfilled_token_tx['outputs'][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_token_tx[ - 'id']}, - 'owners_before': output['public_keys']} + output = fulfilled_token_tx["outputs"][output_index] + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_token_tx["id"]}, + "owners_before": output["public_keys"], + } # To use the tokens Bob has to reassign 7 tokens to himself and the # amount he wants to use to Alice. prepared_transfer_tx = pm.transactions.prepare( - operation='TRANSFER', + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, - recipients=[([alice.public_key], 3), ([bob.public_key], 7)]) + recipients=[([alice.public_key], 3), ([bob.public_key], 7)], + ) # He signs and sends the transaction. 
- fulfilled_transfer_tx = pm.transactions.fulfill( - prepared_transfer_tx, - private_keys=bob.private_key) + fulfilled_transfer_tx = pm.transactions.fulfill(prepared_transfer_tx, private_keys=bob.private_key) sent_transfer_tx = pm.transactions.send_commit(fulfilled_transfer_tx) # First, Bob checks if the transaction was successful. - assert pm.transactions.retrieve( - fulfilled_transfer_tx['id']) == sent_transfer_tx + assert pm.transactions.retrieve(fulfilled_transfer_tx["id"]) == sent_transfer_tx - hosts.assert_transaction(fulfilled_transfer_tx['id']) + hosts.assert_transaction(fulfilled_transfer_tx["id"]) # There are two outputs in the transaction now. # The first output shows that Alice got back 3 tokens... - assert pm.transactions.retrieve( - fulfilled_transfer_tx['id'])['outputs'][0]['amount'] == '3' + assert pm.transactions.retrieve(fulfilled_transfer_tx["id"])["outputs"][0]["amount"] == "3" # ... while Bob still has 7 left. - assert pm.transactions.retrieve( - fulfilled_transfer_tx['id'])['outputs'][1]['amount'] == '7' + assert pm.transactions.retrieve(fulfilled_transfer_tx["id"])["outputs"][1]["amount"] == "7" # ## Bob wants to ride the bike again # It's been a week and Bob wants to right the bike again. # Now he wants to ride for 8 hours, that's a lot Bob! # He prepares the transaction again. - transfer_asset = {'id': bike_token_id} + transfer_asset = {"id": bike_token_id} # This time we need an `output_index` of 1, since we have two outputs # in the `fulfilled_transfer_tx` we created before. The first output with # index 0 is for Alice and the second output is for Bob. @@ -143,24 +129,21 @@ def test_divisible_assets(): # correct output with the correct amount of tokens. 
output_index = 1 - output = fulfilled_transfer_tx['outputs'][output_index] + output = fulfilled_transfer_tx["outputs"][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_transfer_tx['id']}, - 'owners_before': output['public_keys']} + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_transfer_tx["id"]}, + "owners_before": output["public_keys"], + } # This time Bob only provides Alice in the `recipients` because he wants # to spend all his tokens prepared_transfer_tx = pm.transactions.prepare( - operation='TRANSFER', - asset=transfer_asset, - inputs=transfer_input, - recipients=[([alice.public_key], 8)]) + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, recipients=[([alice.public_key], 8)] + ) - fulfilled_transfer_tx = pm.transactions.fulfill( - prepared_transfer_tx, - private_keys=bob.private_key) + fulfilled_transfer_tx = pm.transactions.fulfill(prepared_transfer_tx, private_keys=bob.private_key) # Oh Bob, what have you done?! You tried to spend more tokens than you had. # Remember Bob, last time you spent 3 tokens already, @@ -171,10 +154,12 @@ def test_divisible_assets(): # Now Bob gets an error saying that the amount he wanted to spent is # higher than the amount of tokens he has left. assert error.value.args[0] == 400 - message = 'Invalid transaction (AmountError): The amount used in the ' \ - 'inputs `7` needs to be same as the amount used in the ' \ - 'outputs `8`' - assert error.value.args[2]['message'] == message + message = ( + "Invalid transaction (AmountError): The amount used in the " + "inputs `7` needs to be same as the amount used in the " + "outputs `8`" + ) + assert error.value.args[2]["message"] == message # We have to stop this test now, I am sorry, but Bob is pretty upset # about his mistake. 
See you next time :) diff --git a/integration/python/src/test_double_spend.py b/integration/python/src/test_double_spend.py index 1a17738..49586f1 100644 --- a/integration/python/src/test_double_spend.py +++ b/integration/python/src/test_double_spend.py @@ -16,33 +16,31 @@ from .helper.hosts import Hosts def test_double_create(): - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm = hosts.get_connection() alice = generate_keypair() results = queue.Queue() tx = pm.transactions.fulfill( - pm.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset={'data': {'uuid': str(uuid4())}}), - private_keys=alice.private_key) + pm.transactions.prepare(operation="CREATE", signers=alice.public_key, asset={"data": {"uuid": str(uuid4())}}), + private_keys=alice.private_key, + ) def send_and_queue(tx): try: pm.transactions.send_commit(tx) - results.put('OK') + results.put("OK") except planetmint_driver.exceptions.TransportError: - results.put('FAIL') + results.put("FAIL") - t1 = Thread(target=send_and_queue, args=(tx, )) - t2 = Thread(target=send_and_queue, args=(tx, )) + t1 = Thread(target=send_and_queue, args=(tx,)) + t2 = Thread(target=send_and_queue, args=(tx,)) t1.start() t2.start() results = [results.get(timeout=2), results.get(timeout=2)] - assert results.count('OK') == 1 - assert results.count('FAIL') == 1 + assert results.count("OK") == 1 + assert results.count("FAIL") == 1 diff --git a/integration/python/src/test_multiple_owners.py b/integration/python/src/test_multiple_owners.py index 9d4c8c1..12422b3 100644 --- a/integration/python/src/test_multiple_owners.py +++ b/integration/python/src/test_multiple_owners.py @@ -28,7 +28,7 @@ from .helper.hosts import Hosts def test_multiple_owners(): # Setup up connection to Planetmint integration test nodes - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm_alpha = hosts.get_connection() # Generate Keypairs for Alice and Bob! 
@@ -39,32 +39,22 @@ def test_multiple_owners(): # high rents anymore. Bob suggests to get a dish washer for the # kitchen. Alice agrees and here they go, creating the asset for their # dish washer. - dw_asset = { - 'data': { - 'dish washer': { - 'serial_number': 1337 - } - } - } + dw_asset = {"data": {"dish washer": {"serial_number": 1337}}} # They prepare a `CREATE` transaction. To have multiple owners, both # Bob and Alice need to be the recipients. prepared_dw_tx = pm_alpha.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - recipients=(alice.public_key, bob.public_key), - asset=dw_asset) + operation="CREATE", signers=alice.public_key, recipients=(alice.public_key, bob.public_key), asset=dw_asset + ) # Now they both sign the transaction by providing their private keys. # And send it afterwards. - fulfilled_dw_tx = pm_alpha.transactions.fulfill( - prepared_dw_tx, - private_keys=[alice.private_key, bob.private_key]) + fulfilled_dw_tx = pm_alpha.transactions.fulfill(prepared_dw_tx, private_keys=[alice.private_key, bob.private_key]) pm_alpha.transactions.send_commit(fulfilled_dw_tx) # We store the `id` of the transaction to use it later on. - dw_id = fulfilled_dw_tx['id'] + dw_id = fulfilled_dw_tx["id"] time.sleep(1) @@ -72,12 +62,10 @@ def test_multiple_owners(): hosts.assert_transaction(dw_id) # Let's check if the transaction was successful. - assert pm_alpha.transactions.retrieve(dw_id), \ - 'Cannot find transaction {}'.format(dw_id) + assert pm_alpha.transactions.retrieve(dw_id), "Cannot find transaction {}".format(dw_id) # The transaction should have two public keys in the outputs. - assert len( - pm_alpha.transactions.retrieve(dw_id)['outputs'][0]['public_keys']) == 2 + assert len(pm_alpha.transactions.retrieve(dw_id)["outputs"][0]["public_keys"]) == 2 # ## Alice and Bob transfer a transaction to Carol. # Alice and Bob save a lot of money living together. 
They often go out @@ -89,43 +77,39 @@ def test_multiple_owners(): # Alice and Bob prepare the transaction to transfer the dish washer to # Carol. - transfer_asset = {'id': dw_id} + transfer_asset = {"id": dw_id} output_index = 0 - output = fulfilled_dw_tx['outputs'][output_index] - transfer_input = {'fulfillment': output['condition']['details'], - 'fulfills': {'output_index': output_index, - 'transaction_id': fulfilled_dw_tx[ - 'id']}, - 'owners_before': output['public_keys']} + output = fulfilled_dw_tx["outputs"][output_index] + transfer_input = { + "fulfillment": output["condition"]["details"], + "fulfills": {"output_index": output_index, "transaction_id": fulfilled_dw_tx["id"]}, + "owners_before": output["public_keys"], + } # Now they create the transaction... prepared_transfer_tx = pm_alpha.transactions.prepare( - operation='TRANSFER', - asset=transfer_asset, - inputs=transfer_input, - recipients=carol.public_key) + operation="TRANSFER", asset=transfer_asset, inputs=transfer_input, recipients=carol.public_key + ) # ... and sign it with their private keys, then send it. fulfilled_transfer_tx = pm_alpha.transactions.fulfill( - prepared_transfer_tx, - private_keys=[alice.private_key, bob.private_key]) + prepared_transfer_tx, private_keys=[alice.private_key, bob.private_key] + ) sent_transfer_tx = pm_alpha.transactions.send_commit(fulfilled_transfer_tx) time.sleep(1) # Now compare if both nodes returned the same transaction - hosts.assert_transaction(fulfilled_transfer_tx['id']) + hosts.assert_transaction(fulfilled_transfer_tx["id"]) # They check if the transaction was successful. - assert pm_alpha.transactions.retrieve( - fulfilled_transfer_tx['id']) == sent_transfer_tx + assert pm_alpha.transactions.retrieve(fulfilled_transfer_tx["id"]) == sent_transfer_tx # The owners before should include both Alice and Bob. 
- assert len( - pm_alpha.transactions.retrieve(fulfilled_transfer_tx['id'])['inputs'][0][ - 'owners_before']) == 2 + assert len(pm_alpha.transactions.retrieve(fulfilled_transfer_tx["id"])["inputs"][0]["owners_before"]) == 2 # While the new owner is Carol. - assert pm_alpha.transactions.retrieve(fulfilled_transfer_tx['id'])[ - 'outputs'][0]['public_keys'][0] == carol.public_key + assert ( + pm_alpha.transactions.retrieve(fulfilled_transfer_tx["id"])["outputs"][0]["public_keys"][0] == carol.public_key + ) diff --git a/integration/python/src/test_naughty_strings.py b/integration/python/src/test_naughty_strings.py index 4a090c0..bf17f4e 100644 --- a/integration/python/src/test_naughty_strings.py +++ b/integration/python/src/test_naughty_strings.py @@ -27,6 +27,40 @@ from planetmint_driver.exceptions import BadRequest from .helper.hosts import Hosts naughty_strings = blns.all() +skipped_naughty_strings = [ + "1.00", + "$1.00", + "-1.00", + "-$1.00", + "0.00", + "0..0", + ".", + "0.0.0", + "-.", + ",./;'[]\\-=", + "ثم نفس سقطت وبالتحديد،, جزيرتي باستخدام أن دنو. إذ هنا؟ الستار وتنصيب كان. أهّل ايطاليا، بريطانيا-فرنسا قد أخذ. 
سليمان، إتفاقية بين ما, يذكر الحدود أي بعد, معاملة بولندا، الإطلاق عل إيو.", + "test\x00", + "Ṱ̺̺̕o͞ ̷i̲̬͇̪͙n̝̗͕v̟̜̘̦͟o̶̙̰̠kè͚̮̺̪̹̱̤ ̖t̝͕̳̣̻̪͞h̼͓̲̦̳̘̲e͇̣̰̦̬͎ ̢̼̻̱̘h͚͎͙̜̣̲ͅi̦̲̣̰̤v̻͍e̺̭̳̪̰-m̢iͅn̖̺̞̲̯̰d̵̼̟͙̩̼̘̳ ̞̥̱̳̭r̛̗̘e͙p͠r̼̞̻̭̗e̺̠̣͟s̘͇̳͍̝͉e͉̥̯̞̲͚̬͜ǹ̬͎͎̟̖͇̤t͍̬̤͓̼̭͘ͅi̪̱n͠g̴͉ ͏͉ͅc̬̟h͡a̫̻̯͘o̫̟̖͍̙̝͉s̗̦̲.̨̹͈̣", + "̡͓̞ͅI̗̘̦͝n͇͇͙v̮̫ok̲̫̙͈i̖͙̭̹̠̞n̡̻̮̣̺g̲͈͙̭͙̬͎ ̰t͔̦h̞̲e̢̤ ͍̬̲͖f̴̘͕̣è͖ẹ̥̩l͖͔͚i͓͚̦͠n͖͍̗͓̳̮g͍ ̨o͚̪͡f̘̣̬ ̖̘͖̟͙̮c҉͔̫͖͓͇͖ͅh̵̤̣͚͔á̗̼͕ͅo̼̣̥s̱͈̺̖̦̻͢.̛̖̞̠̫̰", + "̗̺͖̹̯͓Ṯ̤͍̥͇͈h̲́e͏͓̼̗̙̼̣͔ ͇̜̱̠͓͍ͅN͕͠e̗̱z̘̝̜̺͙p̤̺̹͍̯͚e̠̻̠͜r̨̤͍̺̖͔̖̖d̠̟̭̬̝͟i̦͖̩͓͔̤a̠̗̬͉̙n͚͜ ̻̞̰͚ͅh̵͉i̳̞v̢͇ḙ͎͟-҉̭̩̼͔m̤̭̫i͕͇̝̦n̗͙ḍ̟ ̯̲͕͞ǫ̟̯̰̲͙̻̝f ̪̰̰̗̖̭̘͘c̦͍̲̞͍̩̙ḥ͚a̮͎̟̙͜ơ̩̹͎s̤.̝̝ ҉Z̡̖̜͖̰̣͉̜a͖̰͙̬͡l̲̫̳͍̩g̡̟̼̱͚̞̬ͅo̗͜.̟", + "̦H̬̤̗̤͝e͜ ̜̥̝̻͍̟́w̕h̖̯͓o̝͙̖͎̱̮ ҉̺̙̞̟͈W̷̼̭a̺̪͍į͈͕̭͙̯̜t̶̼̮s̘͙͖̕ ̠̫̠B̻͍͙͉̳ͅe̵h̵̬͇̫͙i̹͓̳̳̮͎̫̕n͟d̴̪̜̖ ̰͉̩͇͙̲͞ͅT͖̼͓̪͢h͏͓̮̻e̬̝̟ͅ ̤̹̝W͙̞̝͔͇͝ͅa͏͓͔̹̼̣l̴͔̰̤̟͔ḽ̫.͕", + '">', + "'>", + ">", + "", + "< / script >< script >alert(document.title)< / script >", + " onfocus=alert(document.title) autofocus ", + '" onfocus=alert(document.title) autofocus ', + "' onfocus=alert(document.title) autofocus ", + "<script>alert(document.title)</script>", + "/dev/null; touch /tmp/blns.fail ; echo", + "../../../../../../../../../../../etc/passwd%00", + "../../../../../../../../../../../etc/hosts", + "() { 0; }; touch /tmp/blns.shellshock1.fail;", + "() { _; } >_[$($())] { touch /tmp/blns.shellshock2.fail; }", +] + +naughty_strings = [naughty for naughty in naughty_strings if naughty not in skipped_naughty_strings] # This is our base test case, but we'll reuse it to send naughty strings as both keys and values. @@ -34,7 +68,7 @@ def send_naughty_tx(asset, metadata): # ## Set up a connection to Planetmint # Check [test_basic.py](./test_basic.html) to get some more details # about the endpoint. - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm = hosts.get_connection() # Here's Alice. 
@@ -42,15 +76,11 @@ def send_naughty_tx(asset, metadata): # Alice is in a naughty mood today, so she creates a tx with some naughty strings prepared_transaction = pm.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset=asset, - metadata=metadata) + operation="CREATE", signers=alice.public_key, asset=asset, metadata=metadata + ) # She fulfills the transaction - fulfilled_transaction = pm.transactions.fulfill( - prepared_transaction, - private_keys=alice.private_key) + fulfilled_transaction = pm.transactions.fulfill(prepared_transaction, private_keys=alice.private_key) # The fulfilled tx gets sent to the pm network try: @@ -59,23 +89,24 @@ def send_naughty_tx(asset, metadata): sent_transaction = e # If her key contained a '.', began with a '$', or contained a NUL character - regex = r'.*\..*|\$.*|.*\x00.*' + regex = r".*\..*|\$.*|.*\x00.*" key = next(iter(metadata)) if re.match(regex, key): # Then she expects a nicely formatted error code status_code = sent_transaction.status_code error = sent_transaction.error regex = ( - r'\{\s*\n*' + r"\{\s*\n*" r'\s*"message":\s*"Invalid transaction \(ValidationError\):\s*' - r'Invalid key name.*The key name cannot contain characters.*\n*' + r"Invalid key name.*The key name cannot contain characters.*\n*" r'\s*"status":\s*400\n*' - r'\s*\}\n*') + r"\s*\}\n*" + ) assert status_code == 400 assert re.fullmatch(regex, error), sent_transaction # Otherwise, she expects to see her transaction in the database - elif 'id' in sent_transaction.keys(): - tx_id = sent_transaction['id'] + elif "id" in sent_transaction.keys(): + tx_id = sent_transaction["id"] assert pm.transactions.retrieve(tx_id) # If neither condition was true, then something weird happened... 
else: @@ -85,8 +116,8 @@ def send_naughty_tx(asset, metadata): @pytest.mark.parametrize("naughty_string", naughty_strings, ids=naughty_strings) def test_naughty_keys(naughty_string): - asset = {'data': {naughty_string: 'nice_value'}} - metadata = {naughty_string: 'nice_value'} + asset = {"data": {naughty_string: "nice_value"}} + metadata = {naughty_string: "nice_value"} send_naughty_tx(asset, metadata) @@ -94,7 +125,7 @@ def test_naughty_keys(naughty_string): @pytest.mark.parametrize("naughty_string", naughty_strings, ids=naughty_strings) def test_naughty_values(naughty_string): - asset = {'data': {'nice_key': naughty_string}} - metadata = {'nice_key': naughty_string} + asset = {"data": {"nice_key": naughty_string}} + metadata = {"nice_key": naughty_string} send_naughty_tx(asset, metadata) diff --git a/integration/python/src/test_stream.py b/integration/python/src/test_stream.py index c93d5c6..076ba08 100644 --- a/integration/python/src/test_stream.py +++ b/integration/python/src/test_stream.py @@ -35,11 +35,11 @@ def test_stream(): # ## Set up the test # We use the env variable `BICHAINDB_ENDPOINT` to know where to connect. # Check [test_basic.py](./test_basic.html) for more information. - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm = hosts.get_connection() # *That's pretty bad, but let's do like this for now.* - WS_ENDPOINT = 'ws://{}:9985/api/v1/streams/valid_transactions'.format(hosts.hostnames[0]) + WS_ENDPOINT = "ws://{}:9985/api/v1/streams/valid_transactions".format(hosts.hostnames[0]) # Hello to Alice again, she is pretty active in those tests, good job # Alice! @@ -89,11 +89,11 @@ def test_stream(): # random `uuid`. 
for _ in range(10): tx = pm.transactions.fulfill( - pm.transactions.prepare( - operation='CREATE', - signers=alice.public_key, - asset={'data': {'uuid': str(uuid4())}}), - private_keys=alice.private_key) + pm.transactions.prepare( + operation="CREATE", signers=alice.public_key, asset={"data": {"uuid": str(uuid4())}} + ), + private_keys=alice.private_key, + ) # We don't want to wait for each transaction to be in a block. By using # `async` mode, we make sure that the driver returns as soon as the # transaction is pushed to the Planetmint API. Remember: we expect all @@ -103,7 +103,7 @@ def test_stream(): pm.transactions.send_async(tx) # The `id` of every sent transaction is then stored in a list. - sent.append(tx['id']) + sent.append(tx["id"]) # ## Check the valid transactions coming from Planetmint # Now we are ready to check if Planetmint did its job. A simple way to @@ -117,9 +117,9 @@ def test_stream(): # the timeout, then game over ¯\\\_(ツ)\_/¯ try: event = received.get(timeout=5) - txid = json.loads(event)['transaction_id'] + txid = json.loads(event)["transaction_id"] except queue.Empty: - assert False, 'Did not receive all expected transactions' + assert False, "Did not receive all expected transactions" # Last thing is to try to remove the `txid` from the set of sent # transactions. 
If this test is running in parallel with others, we diff --git a/integration/python/src/test_threshold.py b/integration/python/src/test_threshold.py index f118651..5465915 100644 --- a/integration/python/src/test_threshold.py +++ b/integration/python/src/test_threshold.py @@ -18,27 +18,22 @@ from .helper.hosts import Hosts def prepare_condition_details(condition: ThresholdSha256): - condition_details = { - 'subconditions': [], - 'threshold': condition.threshold, - 'type': condition.TYPE_NAME - } + condition_details = {"subconditions": [], "threshold": condition.threshold, "type": condition.TYPE_NAME} for s in condition.subconditions: - if (s['type'] == 'fulfillment' and s['body'].TYPE_NAME == 'ed25519-sha-256'): - condition_details['subconditions'].append({ - 'type': s['body'].TYPE_NAME, - 'public_key': base58.b58encode(s['body'].public_key).decode() - }) + if s["type"] == "fulfillment" and s["body"].TYPE_NAME == "ed25519-sha-256": + condition_details["subconditions"].append( + {"type": s["body"].TYPE_NAME, "public_key": base58.b58encode(s["body"].public_key).decode()} + ) else: - condition_details['subconditions'].append(prepare_condition_details(s['body'])) + condition_details["subconditions"].append(prepare_condition_details(s["body"])) return condition_details def test_threshold(): # Setup connection to test nodes - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm = hosts.get_connection() # Generate Keypars for Alice, Bob an Carol! @@ -49,13 +44,7 @@ def test_threshold(): # high rents anymore. Bob suggests to get a dish washer for the # kitchen. Alice agrees and here they go, creating the asset for their # dish washer. 
- dw_asset = { - 'data': { - 'dish washer': { - 'serial_number': 1337 - } - } - } + dw_asset = {"data": {"dish washer": {"serial_number": 1337}}} # Create subfulfillments alice_ed25519 = Ed25519Sha256(public_key=base58.b58decode(alice.public_key)) @@ -74,37 +63,37 @@ def test_threshold(): # Assemble output and input for the handcrafted tx output = { - 'amount': '1', - 'condition': { - 'details': condition_details, - 'uri': condition_uri, + "amount": "1", + "condition": { + "details": condition_details, + "uri": condition_uri, }, - 'public_keys': (alice.public_key, bob.public_key, carol.public_key), + "public_keys": (alice.public_key, bob.public_key, carol.public_key), } # The yet to be fulfilled input: input_ = { - 'fulfillment': None, - 'fulfills': None, - 'owners_before': (alice.public_key, bob.public_key), + "fulfillment": None, + "fulfills": None, + "owners_before": (alice.public_key, bob.public_key), } # Assemble the handcrafted transaction handcrafted_dw_tx = { - 'operation': 'CREATE', - 'asset': dw_asset, - 'metadata': None, - 'outputs': (output,), - 'inputs': (input_,), - 'version': '2.0', - 'id': None, + "operation": "CREATE", + "asset": dw_asset, + "metadata": None, + "outputs": (output,), + "inputs": (input_,), + "version": "2.0", + "id": None, } # Create sha3-256 of message to sign message = json.dumps( handcrafted_dw_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) message = sha3.sha3_256(message.encode()) @@ -121,19 +110,19 @@ def test_threshold(): fulfillment_uri = fulfillment_threshold.serialize_uri() - handcrafted_dw_tx['inputs'][0]['fulfillment'] = fulfillment_uri + handcrafted_dw_tx["inputs"][0]["fulfillment"] = fulfillment_uri # Create tx_id for handcrafted_dw_tx and send tx commit json_str_tx = json.dumps( handcrafted_dw_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) dw_creation_txid = sha3.sha3_256(json_str_tx.encode()).hexdigest() - handcrafted_dw_tx['id'] 
= dw_creation_txid + handcrafted_dw_tx["id"] = dw_creation_txid pm.transactions.send_commit(handcrafted_dw_tx) @@ -144,18 +133,12 @@ def test_threshold(): def test_weighted_threshold(): - hosts = Hosts('/shared/hostnames') + hosts = Hosts("/shared/hostnames") pm = hosts.get_connection() alice, bob, carol = generate_keypair(), generate_keypair(), generate_keypair() - asset = { - 'data': { - 'trashcan': { - 'animals': ['racoon_1', 'racoon_2'] - } - } - } + asset = {"data": {"trashcan": {"animals": ["racoon_1", "racoon_2"]}}} alice_ed25519 = Ed25519Sha256(public_key=base58.b58decode(alice.public_key)) bob_ed25519 = Ed25519Sha256(public_key=base58.b58decode(bob.public_key)) @@ -175,37 +158,37 @@ def test_weighted_threshold(): # Assemble output and input for the handcrafted tx output = { - 'amount': '1', - 'condition': { - 'details': condition_details, - 'uri': condition_uri, + "amount": "1", + "condition": { + "details": condition_details, + "uri": condition_uri, }, - 'public_keys': (alice.public_key, bob.public_key, carol.public_key), + "public_keys": (alice.public_key, bob.public_key, carol.public_key), } # The yet to be fulfilled input: input_ = { - 'fulfillment': None, - 'fulfills': None, - 'owners_before': (alice.public_key, bob.public_key), + "fulfillment": None, + "fulfills": None, + "owners_before": (alice.public_key, bob.public_key), } # Assemble the handcrafted transaction handcrafted_tx = { - 'operation': 'CREATE', - 'asset': asset, - 'metadata': None, - 'outputs': (output,), - 'inputs': (input_,), - 'version': '2.0', - 'id': None, + "operation": "CREATE", + "asset": asset, + "metadata": None, + "outputs": (output,), + "inputs": (input_,), + "version": "2.0", + "id": None, } # Create sha3-256 of message to sign message = json.dumps( handcrafted_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) message = sha3.sha3_256(message.encode()) @@ -224,19 +207,19 @@ def test_weighted_threshold(): fulfillment_uri = 
fulfillment_threshold.serialize_uri() - handcrafted_tx['inputs'][0]['fulfillment'] = fulfillment_uri + handcrafted_tx["inputs"][0]["fulfillment"] = fulfillment_uri # Create tx_id for handcrafted_dw_tx and send tx commit json_str_tx = json.dumps( handcrafted_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) creation_tx_id = sha3.sha3_256(json_str_tx.encode()).hexdigest() - handcrafted_tx['id'] = creation_tx_id + handcrafted_tx["id"] = creation_tx_id pm.transactions.send_commit(handcrafted_tx) @@ -254,50 +237,50 @@ def test_weighted_threshold(): # Assemble output and input for the handcrafted tx transfer_output = { - 'amount': '1', - 'condition': { - 'details': { - 'type': alice_transfer_ed25519.TYPE_NAME, - 'public_key': base58.b58encode(alice_transfer_ed25519.public_key).decode() + "amount": "1", + "condition": { + "details": { + "type": alice_transfer_ed25519.TYPE_NAME, + "public_key": base58.b58encode(alice_transfer_ed25519.public_key).decode(), }, - 'uri': transfer_condition_uri, + "uri": transfer_condition_uri, }, - 'public_keys': (alice.public_key,), + "public_keys": (alice.public_key,), } # The yet to be fulfilled input: transfer_input_ = { - 'fulfillment': None, - 'fulfills': { - 'transaction_id': creation_tx_id, - 'output_index': 0 - }, - 'owners_before': (alice.public_key, bob.public_key, carol.public_key), + "fulfillment": None, + "fulfills": {"transaction_id": creation_tx_id, "output_index": 0}, + "owners_before": (alice.public_key, bob.public_key, carol.public_key), } # Assemble the handcrafted transaction handcrafted_transfer_tx = { - 'operation': 'TRANSFER', - 'asset': {'id': creation_tx_id}, - 'metadata': None, - 'outputs': (transfer_output,), - 'inputs': (transfer_input_,), - 'version': '2.0', - 'id': None, + "operation": "TRANSFER", + "asset": {"id": creation_tx_id}, + "metadata": None, + "outputs": (transfer_output,), + "inputs": (transfer_input_,), + "version": "2.0", + "id": None, } # Create sha3-256 of 
message to sign message = json.dumps( handcrafted_transfer_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) message = sha3.sha3_256(message.encode()) - message.update('{}{}'.format( - handcrafted_transfer_tx['inputs'][0]['fulfills']['transaction_id'], - handcrafted_transfer_tx['inputs'][0]['fulfills']['output_index']).encode()) + message.update( + "{}{}".format( + handcrafted_transfer_tx["inputs"][0]["fulfills"]["transaction_id"], + handcrafted_transfer_tx["inputs"][0]["fulfills"]["output_index"], + ).encode() + ) # Sign message with Alice's und Bob's private key bob_transfer_ed25519.sign(message.digest(), base58.b58decode(bob.private_key)) @@ -314,19 +297,19 @@ def test_weighted_threshold(): fulfillment_uri = fulfillment_threshold.serialize_uri() - handcrafted_transfer_tx['inputs'][0]['fulfillment'] = fulfillment_uri + handcrafted_transfer_tx["inputs"][0]["fulfillment"] = fulfillment_uri # Create tx_id for handcrafted_dw_tx and send tx commit json_str_tx = json.dumps( handcrafted_transfer_tx, sort_keys=True, - separators=(',', ':'), + separators=(",", ":"), ensure_ascii=False, ) transfer_tx_id = sha3.sha3_256(json_str_tx.encode()).hexdigest() - handcrafted_transfer_tx['id'] = transfer_tx_id + handcrafted_transfer_tx["id"] = transfer_tx_id pm.transactions.send_commit(handcrafted_transfer_tx) diff --git a/integration/python/src/test_zenroom.py b/integration/python/src/test_zenroom.py index cce592e..7d8f860 100644 --- a/integration/python/src/test_zenroom.py +++ b/integration/python/src/test_zenroom.py @@ -38,9 +38,7 @@ def test_zenroom_signing( ) ) - zenroomscpt = ZenroomSha256( - script=fulfill_script_zencode, data=zenroom_data, keys=zen_public_keys - ) + zenroomscpt = ZenroomSha256(script=fulfill_script_zencode, data=zenroom_data, keys=zen_public_keys) print(f"zenroom is: {zenroomscpt.script}") # CRYPTO-CONDITIONS: generate the condition uri diff --git a/integration/scripts/genesis.py b/integration/scripts/genesis.py 
index 3593f34..8f21446 100755 --- a/integration/scripts/genesis.py +++ b/integration/scripts/genesis.py @@ -15,19 +15,19 @@ def edit_genesis() -> None: for file_name in file_names: file = open(file_name) genesis = json.load(file) - validators.extend(genesis['validators']) + validators.extend(genesis["validators"]) file.close() genesis_file = open(file_names[0]) genesis_json = json.load(genesis_file) - genesis_json['validators'] = validators + genesis_json["validators"] = validators genesis_file.close() - with open('/shared/genesis.json', 'w') as f: + with open("/shared/genesis.json", "w") as f: json.dump(genesis_json, f, indent=True) return None -if __name__ == '__main__': +if __name__ == "__main__": edit_genesis() diff --git a/k8s/logging-and-monitoring/analyze.py b/k8s/logging-and-monitoring/analyze.py index a3ca68f..d50a877 100644 --- a/k8s/logging-and-monitoring/analyze.py +++ b/k8s/logging-and-monitoring/analyze.py @@ -31,25 +31,27 @@ import re from dateutil.parser import parse -lineformat = re.compile(r'(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) - - ' - r'\[(?P\d{2}\/[a-z]{3}\/\d{4}:\d{2}:\d{2}:\d{2} ' - r'(\+|\-)\d{4})\] ((\"(GET|POST) )(?P.+)(http\/1\.1")) ' - r'(?P\d{3}) ' - r'(?P\d+) ' - r'(["](?P(\-)|(.+))["]) ' - r'(["](?P.+)["])', - re.IGNORECASE) +lineformat = re.compile( + r"(?P\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}) - - " + r"\[(?P\d{2}\/[a-z]{3}\/\d{4}:\d{2}:\d{2}:\d{2} " + r'(\+|\-)\d{4})\] ((\"(GET|POST) )(?P.+)(http\/1\.1")) ' + r"(?P\d{3}) " + r"(?P\d+) " + r'(["](?P(\-)|(.+))["]) ' + r'(["](?P.+)["])', + re.IGNORECASE, +) filepath = sys.argv[1] logline_list = [] with open(filepath) as csvfile: - csvreader = csv.reader(csvfile, delimiter=',') + csvreader = csv.reader(csvfile, delimiter=",") for row in csvreader: - if row and (row[8] != 'LogEntry'): + if row and (row[8] != "LogEntry"): # because the first line is just the column headers, such as 'LogEntry' logline = row[8] - print(logline + '\n') + print(logline + "\n") logline_data = 
re.search(lineformat, logline) if logline_data: logline_dict = logline_data.groupdict() @@ -63,20 +65,19 @@ total_bytes_sent = 0 tstamp_list = [] for lldict in logline_list: - total_bytes_sent += int(lldict['bytessent']) - dt = lldict['dateandtime'] + total_bytes_sent += int(lldict["bytessent"]) + dt = lldict["dateandtime"] # https://tinyurl.com/lqjnhot dtime = parse(dt[:11] + " " + dt[12:]) tstamp_list.append(dtime.timestamp()) -print('Number of log lines seen: {}'.format(len(logline_list))) +print("Number of log lines seen: {}".format(len(logline_list))) # Time range trange_sec = max(tstamp_list) - min(tstamp_list) trange_days = trange_sec / 60.0 / 60.0 / 24.0 -print('Time range seen (days): {}'.format(trange_days)) +print("Time range seen (days): {}".format(trange_days)) -print('Total bytes sent: {}'.format(total_bytes_sent)) +print("Total bytes sent: {}".format(total_bytes_sent)) -print('Average bytes sent per day (out via GET): {}'. - format(total_bytes_sent / trange_days)) +print("Average bytes sent per day (out via GET): {}".format(total_bytes_sent / trange_days)) diff --git a/planetmint/__init__.py b/planetmint/__init__.py index 5d8d7e0..4b3e8bd 100644 --- a/planetmint/__init__.py +++ b/planetmint/__init__.py @@ -6,7 +6,7 @@ from planetmint.transactions.common.transaction import Transaction # noqa from planetmint import models # noqa from planetmint.upsert_validator import ValidatorElection # noqa -from planetmint.transactions.types.elections.vote import Vote # noqa +from planetmint.transactions.types.elections.vote import Vote # noqa from planetmint.transactions.types.elections.chain_migration_election import ChainMigrationElection from planetmint.lib import Planetmint from planetmint.core import App diff --git a/planetmint/backend/connection.py b/planetmint/backend/connection.py index e9da39b..57e5d35 100644 --- a/planetmint/backend/connection.py +++ b/planetmint/backend/connection.py @@ -14,15 +14,16 @@ from planetmint.backend.exceptions import 
ConnectionError from planetmint.transactions.common.exceptions import ConfigurationError BACKENDS = { - 'tarantool_db': 'planetmint.backend.tarantool.connection.TarantoolDBConnection', - 'localmongodb': 'planetmint.backend.localmongodb.connection.LocalMongoDBConnection' + "tarantool_db": "planetmint.backend.tarantool.connection.TarantoolDBConnection", + "localmongodb": "planetmint.backend.localmongodb.connection.LocalMongoDBConnection", } logger = logging.getLogger(__name__) -def connect(host: str = None, port: int = None, login: str = None, password: str = None, backend: str = None, - **kwargs): +def connect( + host: str = None, port: int = None, login: str = None, password: str = None, backend: str = None, **kwargs +): try: backend = backend if not backend and kwargs and kwargs.get("backend"): @@ -37,40 +38,57 @@ def connect(host: str = None, port: int = None, login: str = None, password: str raise ConfigurationError host = host or Config().get()["database"]["host"] if not kwargs.get("host") else kwargs["host"] - port = port or Config().get()['database']['port'] if not kwargs.get("port") else kwargs["port"] + port = port or Config().get()["database"]["port"] if not kwargs.get("port") else kwargs["port"] login = login or Config().get()["database"]["login"] if not kwargs.get("login") else kwargs["login"] password = password or Config().get()["database"]["password"] try: if backend == "tarantool_db": - modulepath, _, class_name = BACKENDS[backend].rpartition('.') + modulepath, _, class_name = BACKENDS[backend].rpartition(".") Class = getattr(import_module(modulepath), class_name) return Class(host=host, port=port, user=login, password=password, kwargs=kwargs) elif backend == "localmongodb": - modulepath, _, class_name = BACKENDS[backend].rpartition('.') + modulepath, _, class_name = BACKENDS[backend].rpartition(".") Class = getattr(import_module(modulepath), class_name) - dbname = _kwargs_parser(key="name", kwargs=kwargs) or Config().get()['database']['name'] - 
replicaset = _kwargs_parser(key="replicaset", kwargs=kwargs) or Config().get()['database']['replicaset'] - ssl = _kwargs_parser(key="ssl", kwargs=kwargs) or Config().get()['database']['ssl'] - login = login or Config().get()['database']['login'] if _kwargs_parser(key="login", - kwargs=kwargs) is None else _kwargs_parser( # noqa: E501 - key="login", kwargs=kwargs) - password = password or Config().get()['database']['password'] if _kwargs_parser(key="password", - kwargs=kwargs) is None else _kwargs_parser( # noqa: E501 - key="password", kwargs=kwargs) - ca_cert = _kwargs_parser(key="ca_cert", kwargs=kwargs) or Config().get()['database']['ca_cert'] - certfile = _kwargs_parser(key="certfile", kwargs=kwargs) or Config().get()['database']['certfile'] - keyfile = _kwargs_parser(key="keyfile", kwargs=kwargs) or Config().get()['database']['keyfile'] - keyfile_passphrase = _kwargs_parser(key="keyfile_passphrase", kwargs=kwargs) or Config().get()['database'][ - 'keyfile_passphrase'] - crlfile = _kwargs_parser(key="crlfile", kwargs=kwargs) or Config().get()['database']['crlfile'] + dbname = _kwargs_parser(key="name", kwargs=kwargs) or Config().get()["database"]["name"] + replicaset = _kwargs_parser(key="replicaset", kwargs=kwargs) or Config().get()["database"]["replicaset"] + ssl = _kwargs_parser(key="ssl", kwargs=kwargs) or Config().get()["database"]["ssl"] + login = ( + login or Config().get()["database"]["login"] + if _kwargs_parser(key="login", kwargs=kwargs) is None + else _kwargs_parser(key="login", kwargs=kwargs) # noqa: E501 + ) + password = ( + password or Config().get()["database"]["password"] + if _kwargs_parser(key="password", kwargs=kwargs) is None + else _kwargs_parser(key="password", kwargs=kwargs) # noqa: E501 + ) + ca_cert = _kwargs_parser(key="ca_cert", kwargs=kwargs) or Config().get()["database"]["ca_cert"] + certfile = _kwargs_parser(key="certfile", kwargs=kwargs) or Config().get()["database"]["certfile"] + keyfile = _kwargs_parser(key="keyfile", 
kwargs=kwargs) or Config().get()["database"]["keyfile"] + keyfile_passphrase = ( + _kwargs_parser(key="keyfile_passphrase", kwargs=kwargs) + or Config().get()["database"]["keyfile_passphrase"] + ) + crlfile = _kwargs_parser(key="crlfile", kwargs=kwargs) or Config().get()["database"]["crlfile"] max_tries = _kwargs_parser(key="max_tries", kwargs=kwargs) connection_timeout = _kwargs_parser(key="connection_timeout", kwargs=kwargs) - return Class(host=host, port=port, dbname=dbname, - max_tries=max_tries, connection_timeout=connection_timeout, - replicaset=replicaset, ssl=ssl, login=login, password=password, - ca_cert=ca_cert, certfile=certfile, keyfile=keyfile, - keyfile_passphrase=keyfile_passphrase, crlfile=crlfile) + return Class( + host=host, + port=port, + dbname=dbname, + max_tries=max_tries, + connection_timeout=connection_timeout, + replicaset=replicaset, + ssl=ssl, + login=login, + password=password, + ca_cert=ca_cert, + certfile=certfile, + keyfile=keyfile, + keyfile_passphrase=keyfile_passphrase, + crlfile=crlfile, + ) except tarantool.error.NetworkError as network_err: print(f"Host {host}:{port} can't be reached.\n{network_err}") raise network_err @@ -81,15 +99,14 @@ def _kwargs_parser(key, kwargs): return kwargs[key] return None + class Connection: """Connection class interface. All backend implementations should provide a connection class that inherits from and implements this class. """ - def __init__(self, host=None, port=None, dbname=None, - connection_timeout=None, max_tries=None, - **kwargs): + def __init__(self, host=None, port=None, dbname=None, connection_timeout=None, max_tries=None, **kwargs): """Create a new :class:`~.Connection` instance. Args: host (str): the host to connect to. 
@@ -104,14 +121,15 @@ class Connection: configuration's ``database`` settings """ - dbconf = Config().get()['database'] + dbconf = Config().get()["database"] - self.host = host or dbconf['host'] - self.port = port or dbconf['port'] - self.dbname = dbname or dbconf['name'] - self.connection_timeout = connection_timeout if connection_timeout is not None \ - else dbconf['connection_timeout'] - self.max_tries = max_tries if max_tries is not None else dbconf['max_tries'] + self.host = host or dbconf["host"] + self.port = port or dbconf["port"] + self.dbname = dbname or dbconf["name"] + self.connection_timeout = ( + connection_timeout if connection_timeout is not None else dbconf["connection_timeout"] + ) + self.max_tries = max_tries if max_tries is not None else dbconf["max_tries"] self.max_tries_counter = range(self.max_tries) if self.max_tries != 0 else repeat(0) self._conn = None @@ -149,11 +167,16 @@ class Connection: try: self._conn = self._connect() except ConnectionError as exc: - logger.warning('Attempt %s/%s. Connection to %s:%s failed after %sms.', - attempt, self.max_tries if self.max_tries != 0 else '∞', - self.host, self.port, self.connection_timeout) + logger.warning( + "Attempt %s/%s. Connection to %s:%s failed after %sms.", + attempt, + self.max_tries if self.max_tries != 0 else "∞", + self.host, + self.port, + self.connection_timeout, + ) if attempt == self.max_tries: - logger.critical('Cannot connect to the Database. Giving up.') + logger.critical("Cannot connect to the Database. Giving up.") raise ConnectionError() from exc else: break diff --git a/planetmint/backend/localmongodb/__init__.py b/planetmint/backend/localmongodb/__init__.py index 48719c7..97b45cd 100644 --- a/planetmint/backend/localmongodb/__init__.py +++ b/planetmint/backend/localmongodb/__init__.py @@ -22,7 +22,7 @@ generic backend interfaces to the implementations in this module. """ # Register the single dispatched modules on import. 
-from planetmint.backend.localmongodb import schema, query, convert # noqa +from planetmint.backend.localmongodb import schema, query, convert # noqa # MongoDBConnection should always be accessed via # ``planetmint.backend.connect()``. diff --git a/planetmint/backend/localmongodb/connection.py b/planetmint/backend/localmongodb/connection.py index 1216010..1851f2e 100644 --- a/planetmint/backend/localmongodb/connection.py +++ b/planetmint/backend/localmongodb/connection.py @@ -8,20 +8,28 @@ from ssl import CERT_REQUIRED import pymongo from planetmint.config import Config -from planetmint.backend.exceptions import (DuplicateKeyError, - OperationError, - ConnectionError) +from planetmint.backend.exceptions import DuplicateKeyError, OperationError, ConnectionError from planetmint.transactions.common.exceptions import ConfigurationError from planetmint.utils import Lazy from planetmint.backend.connection import Connection logger = logging.getLogger(__name__) -class LocalMongoDBConnection(Connection): - def __init__(self, replicaset=None, ssl=None, login=None, password=None, - ca_cert=None, certfile=None, keyfile=None, - keyfile_passphrase=None, crlfile=None, **kwargs): +class LocalMongoDBConnection(Connection): + def __init__( + self, + replicaset=None, + ssl=None, + login=None, + password=None, + ca_cert=None, + certfile=None, + keyfile=None, + keyfile_passphrase=None, + crlfile=None, + **kwargs, + ): """Create a new Connection instance. 
Args: @@ -32,15 +40,15 @@ class LocalMongoDBConnection(Connection): """ super().__init__(**kwargs) - self.replicaset = replicaset or Config().get()['database']['replicaset'] - self.ssl = ssl if ssl is not None else Config().get()['database']['ssl'] - self.login = login or Config().get()['database']['login'] - self.password = password or Config().get()['database']['password'] - self.ca_cert = ca_cert or Config().get()['database']['ca_cert'] - self.certfile = certfile or Config().get()['database']['certfile'] - self.keyfile = keyfile or Config().get()['database']['keyfile'] - self.keyfile_passphrase = keyfile_passphrase or Config().get()['database']['keyfile_passphrase'] - self.crlfile = crlfile or Config().get()['database']['crlfile'] + self.replicaset = replicaset or Config().get()["database"]["replicaset"] + self.ssl = ssl if ssl is not None else Config().get()["database"]["ssl"] + self.login = login or Config().get()["database"]["login"] + self.password = password or Config().get()["database"]["password"] + self.ca_cert = ca_cert or Config().get()["database"]["ca_cert"] + self.certfile = certfile or Config().get()["database"]["certfile"] + self.keyfile = keyfile or Config().get()["database"]["keyfile"] + self.keyfile_passphrase = keyfile_passphrase or Config().get()["database"]["keyfile_passphrase"] + self.crlfile = crlfile or Config().get()["database"]["crlfile"] if not self.ssl: self.ssl = False if not self.keyfile_passphrase: @@ -66,15 +74,14 @@ class LocalMongoDBConnection(Connection): try: return query.run(self.conn) except pymongo.errors.AutoReconnect: - logger.warning('Lost connection to the database, ' - 'retrying query.') + logger.warning("Lost connection to the database, " "retrying query.") return query.run(self.conn) except pymongo.errors.AutoReconnect as exc: raise ConnectionError from exc except pymongo.errors.DuplicateKeyError as exc: raise DuplicateKeyError from exc except pymongo.errors.OperationFailure as exc: - print(f'DETAILS: {exc.details}') 
+ print(f"DETAILS: {exc.details}") raise OperationError from exc def _connect(self): @@ -95,44 +102,45 @@ class LocalMongoDBConnection(Connection): # `ConnectionFailure`. # The presence of ca_cert, certfile, keyfile, crlfile implies the # use of certificates for TLS connectivity. - if self.ca_cert is None or self.certfile is None or \ - self.keyfile is None or self.crlfile is None: - client = pymongo.MongoClient(self.host, - self.port, - replicaset=self.replicaset, - serverselectiontimeoutms=self.connection_timeout, - ssl=self.ssl, - **MONGO_OPTS) + if self.ca_cert is None or self.certfile is None or self.keyfile is None or self.crlfile is None: + client = pymongo.MongoClient( + self.host, + self.port, + replicaset=self.replicaset, + serverselectiontimeoutms=self.connection_timeout, + ssl=self.ssl, + **MONGO_OPTS, + ) if self.login is not None and self.password is not None: client[self.dbname].authenticate(self.login, self.password) else: - logger.info('Connecting to MongoDB over TLS/SSL...') - client = pymongo.MongoClient(self.host, - self.port, - replicaset=self.replicaset, - serverselectiontimeoutms=self.connection_timeout, - ssl=self.ssl, - ssl_ca_certs=self.ca_cert, - ssl_certfile=self.certfile, - ssl_keyfile=self.keyfile, - ssl_pem_passphrase=self.keyfile_passphrase, - ssl_crlfile=self.crlfile, - ssl_cert_reqs=CERT_REQUIRED, - **MONGO_OPTS) + logger.info("Connecting to MongoDB over TLS/SSL...") + client = pymongo.MongoClient( + self.host, + self.port, + replicaset=self.replicaset, + serverselectiontimeoutms=self.connection_timeout, + ssl=self.ssl, + ssl_ca_certs=self.ca_cert, + ssl_certfile=self.certfile, + ssl_keyfile=self.keyfile, + ssl_pem_passphrase=self.keyfile_passphrase, + ssl_crlfile=self.crlfile, + ssl_cert_reqs=CERT_REQUIRED, + **MONGO_OPTS, + ) if self.login is not None: - client[self.dbname].authenticate(self.login, - mechanism='MONGODB-X509') + client[self.dbname].authenticate(self.login, mechanism="MONGODB-X509") return client - except 
(pymongo.errors.ConnectionFailure, - pymongo.errors.OperationFailure) as exc: - logger.info('Exception in _connect(): {}'.format(exc)) + except (pymongo.errors.ConnectionFailure, pymongo.errors.OperationFailure) as exc: + logger.info("Exception in _connect(): {}".format(exc)) raise ConnectionError(str(exc)) from exc except pymongo.errors.ConfigurationError as exc: raise ConfigurationError from exc MONGO_OPTS = { - 'socketTimeoutMS': 20000, + "socketTimeoutMS": 20000, } diff --git a/planetmint/backend/localmongodb/convert.py b/planetmint/backend/localmongodb/convert.py index 5f0e04b..d1e3f74 100644 --- a/planetmint/backend/localmongodb/convert.py +++ b/planetmint/backend/localmongodb/convert.py @@ -15,11 +15,10 @@ register_query = module_dispatch_registrar(convert) @register_query(LocalMongoDBConnection) def prepare_asset(connection, transaction_type, transaction_id, filter_operation, asset): if transaction_type == filter_operation: - asset['id'] = transaction_id + asset["id"] = transaction_id return asset @register_query(LocalMongoDBConnection) def prepare_metadata(connection, transaction_id, metadata): - return {'id': transaction_id, - 'metadata': metadata} + return {"id": transaction_id, "metadata": metadata} diff --git a/planetmint/backend/localmongodb/query.py b/planetmint/backend/localmongodb/query.py index d8bc464..355fc1e 100644 --- a/planetmint/backend/localmongodb/query.py +++ b/planetmint/backend/localmongodb/query.py @@ -1,4 +1,5 @@ from functools import singledispatch + # Copyright © 2020 Interplanetary Database Association e.V., # Planetmint and IPDB software contributors. 
# SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) @@ -19,104 +20,80 @@ register_query = module_dispatch_registrar(backend.query) @register_query(LocalMongoDBConnection) def store_transactions(conn, signed_transactions): - return conn.run(conn.collection('transactions') - .insert_many(signed_transactions)) + return conn.run(conn.collection("transactions").insert_many(signed_transactions)) @register_query(LocalMongoDBConnection) def get_transaction(conn, transaction_id): - return conn.run( - conn.collection('transactions') - .find_one({'id': transaction_id}, {'_id': 0})) + return conn.run(conn.collection("transactions").find_one({"id": transaction_id}, {"_id": 0})) @register_query(LocalMongoDBConnection) def get_transactions(conn, transaction_ids): try: return conn.run( - conn.collection('transactions') - .find({'id': {'$in': transaction_ids}}, - projection={'_id': False})) + conn.collection("transactions").find({"id": {"$in": transaction_ids}}, projection={"_id": False}) + ) except IndexError: pass @register_query(LocalMongoDBConnection) def store_metadatas(conn, metadata): - return conn.run( - conn.collection('metadata') - .insert_many(metadata, ordered=False)) + return conn.run(conn.collection("metadata").insert_many(metadata, ordered=False)) @register_query(LocalMongoDBConnection) def get_metadata(conn, transaction_ids): - return conn.run( - conn.collection('metadata') - .find({'id': {'$in': transaction_ids}}, - projection={'_id': False})) + return conn.run(conn.collection("metadata").find({"id": {"$in": transaction_ids}}, projection={"_id": False})) @register_query(LocalMongoDBConnection) def store_asset(conn, asset): try: - return conn.run( - conn.collection('assets') - .insert_one(asset)) + return conn.run(conn.collection("assets").insert_one(asset)) except DuplicateKeyError: pass @register_query(LocalMongoDBConnection) def store_assets(conn, assets): - return conn.run( - conn.collection('assets') - .insert_many(assets, ordered=False)) + return 
conn.run(conn.collection("assets").insert_many(assets, ordered=False)) @register_query(LocalMongoDBConnection) def get_asset(conn, asset_id): try: - return conn.run( - conn.collection('assets') - .find_one({'id': asset_id}, {'_id': 0, 'id': 0})) + return conn.run(conn.collection("assets").find_one({"id": asset_id}, {"_id": 0, "id": 0})) except IndexError: pass @register_query(LocalMongoDBConnection) def get_assets(conn, asset_ids): - return conn.run( - conn.collection('assets') - .find({'id': {'$in': asset_ids}}, - projection={'_id': False})) + return conn.run(conn.collection("assets").find({"id": {"$in": asset_ids}}, projection={"_id": False})) @register_query(LocalMongoDBConnection) def get_spent(conn, transaction_id, output): - query = {'inputs': - {'$elemMatch': - {'$and': [{'fulfills.transaction_id': transaction_id}, - {'fulfills.output_index': output}]}}} + query = { + "inputs": { + "$elemMatch": {"$and": [{"fulfills.transaction_id": transaction_id}, {"fulfills.output_index": output}]} + } + } - return conn.run( - conn.collection('transactions') - .find(query, {'_id': 0})) + return conn.run(conn.collection("transactions").find(query, {"_id": 0})) @register_query(LocalMongoDBConnection) def get_latest_block(conn): - return conn.run( - conn.collection('blocks') - .find_one(projection={'_id': False}, - sort=[('height', DESCENDING)])) + return conn.run(conn.collection("blocks").find_one(projection={"_id": False}, sort=[("height", DESCENDING)])) @register_query(LocalMongoDBConnection) def store_block(conn, block): try: - return conn.run( - conn.collection('blocks') - .insert_one(block)) + return conn.run(conn.collection("blocks").insert_one(block)) except DuplicateKeyError: pass @@ -125,32 +102,47 @@ def store_block(conn, block): def get_txids_filtered(conn, asset_id, operation=None, last_tx=None): match = { - Transaction.CREATE: {'operation': 'CREATE', 'id': asset_id}, - Transaction.TRANSFER: {'operation': 'TRANSFER', 'asset.id': asset_id}, - None: {'$or': 
[{'asset.id': asset_id}, {'id': asset_id}]}, + Transaction.CREATE: {"operation": "CREATE", "id": asset_id}, + Transaction.TRANSFER: {"operation": "TRANSFER", "asset.id": asset_id}, + None: {"$or": [{"asset.id": asset_id}, {"id": asset_id}]}, }[operation] - cursor = conn.run(conn.collection('transactions').find(match)) + cursor = conn.run(conn.collection("transactions").find(match)) if last_tx: - cursor = cursor.sort([('$natural', DESCENDING)]).limit(1) + cursor = cursor.sort([("$natural", DESCENDING)]).limit(1) - return (elem['id'] for elem in cursor) + return (elem["id"] for elem in cursor) @register_query(LocalMongoDBConnection) -def text_search(conn, search, *, language='english', case_sensitive=False, - diacritic_sensitive=False, text_score=False, limit=0, table='assets'): +def text_search( + conn, + search, + *, + language="english", + case_sensitive=False, + diacritic_sensitive=False, + text_score=False, + limit=0, + table="assets" +): cursor = conn.run( conn.collection(table) - .find({'$text': { - '$search': search, - '$language': language, - '$caseSensitive': case_sensitive, - '$diacriticSensitive': diacritic_sensitive}}, - {'score': {'$meta': 'textScore'}, '_id': False}) - .sort([('score', {'$meta': 'textScore'})]) - .limit(limit)) + .find( + { + "$text": { + "$search": search, + "$language": language, + "$caseSensitive": case_sensitive, + "$diacriticSensitive": diacritic_sensitive, + } + }, + {"score": {"$meta": "textScore"}, "_id": False}, + ) + .sort([("score", {"$meta": "textScore"})]) + .limit(limit) + ) if text_score: return cursor @@ -159,58 +151,54 @@ def text_search(conn, search, *, language='english', case_sensitive=False, def _remove_text_score(asset): - asset.pop('score', None) + asset.pop("score", None) return asset @register_query(LocalMongoDBConnection) def get_owned_ids(conn, owner): cursor = conn.run( - conn.collection('transactions').aggregate([ - {'$match': {'outputs.public_keys': owner}}, - {'$project': {'_id': False}} - ])) + 
conn.collection("transactions").aggregate( + [{"$match": {"outputs.public_keys": owner}}, {"$project": {"_id": False}}] + ) + ) return cursor @register_query(LocalMongoDBConnection) def get_spending_transactions(conn, inputs): - transaction_ids = [i['transaction_id'] for i in inputs] - output_indexes = [i['output_index'] for i in inputs] - query = {'inputs': - {'$elemMatch': - {'$and': - [ - {'fulfills.transaction_id': {'$in': transaction_ids}}, - {'fulfills.output_index': {'$in': output_indexes}} - ]}}} + transaction_ids = [i["transaction_id"] for i in inputs] + output_indexes = [i["output_index"] for i in inputs] + query = { + "inputs": { + "$elemMatch": { + "$and": [ + {"fulfills.transaction_id": {"$in": transaction_ids}}, + {"fulfills.output_index": {"$in": output_indexes}}, + ] + } + } + } - cursor = conn.run( - conn.collection('transactions').find(query, {'_id': False})) + cursor = conn.run(conn.collection("transactions").find(query, {"_id": False})) return cursor @register_query(LocalMongoDBConnection) def get_block(conn, block_id): - return conn.run( - conn.collection('blocks') - .find_one({'height': block_id}, - projection={'_id': False})) + return conn.run(conn.collection("blocks").find_one({"height": block_id}, projection={"_id": False})) @register_query(LocalMongoDBConnection) def get_block_with_transaction(conn, txid): - return conn.run( - conn.collection('blocks') - .find({'transactions': txid}, - projection={'_id': False, 'height': True})) + return conn.run(conn.collection("blocks").find({"transactions": txid}, projection={"_id": False, "height": True})) @register_query(LocalMongoDBConnection) def delete_transactions(conn, txn_ids): - conn.run(conn.collection('assets').delete_many({'id': {'$in': txn_ids}})) - conn.run(conn.collection('metadata').delete_many({'id': {'$in': txn_ids}})) - conn.run(conn.collection('transactions').delete_many({'id': {'$in': txn_ids}})) + conn.run(conn.collection("assets").delete_many({"id": {"$in": txn_ids}})) + 
conn.run(conn.collection("metadata").delete_many({"id": {"$in": txn_ids}})) + conn.run(conn.collection("transactions").delete_many({"id": {"$in": txn_ids}})) @register_query(LocalMongoDBConnection) @@ -218,7 +206,7 @@ def store_unspent_outputs(conn, *unspent_outputs): if unspent_outputs: try: return conn.run( - conn.collection('utxos').insert_many( + conn.collection("utxos").insert_many( unspent_outputs, ordered=False, ) @@ -232,14 +220,19 @@ def store_unspent_outputs(conn, *unspent_outputs): def delete_unspent_outputs(conn, *unspent_outputs): if unspent_outputs: return conn.run( - conn.collection('utxos').delete_many({ - '$or': [{ - '$and': [ - {'transaction_id': unspent_output['transaction_id']}, - {'output_index': unspent_output['output_index']}, - ], - } for unspent_output in unspent_outputs] - }) + conn.collection("utxos").delete_many( + { + "$or": [ + { + "$and": [ + {"transaction_id": unspent_output["transaction_id"]}, + {"output_index": unspent_output["output_index"]}, + ], + } + for unspent_output in unspent_outputs + ] + } + ) ) @@ -247,51 +240,36 @@ def delete_unspent_outputs(conn, *unspent_outputs): def get_unspent_outputs(conn, *, query=None): if query is None: query = {} - return conn.run(conn.collection('utxos').find(query, - projection={'_id': False})) + return conn.run(conn.collection("utxos").find(query, projection={"_id": False})) @register_query(LocalMongoDBConnection) def store_pre_commit_state(conn, state): - return conn.run( - conn.collection('pre_commit') - .replace_one({}, state, upsert=True) - ) + return conn.run(conn.collection("pre_commit").replace_one({}, state, upsert=True)) @register_query(LocalMongoDBConnection) def get_pre_commit_state(connection): - return connection.run(connection.collection('pre_commit').find_one()) + return connection.run(connection.collection("pre_commit").find_one()) @register_query(LocalMongoDBConnection) def store_validator_set(conn, validators_update): - height = validators_update['height'] - return 
conn.run( - conn.collection('validators').replace_one( - {'height': height}, - validators_update, - upsert=True - ) - ) + height = validators_update["height"] + return conn.run(conn.collection("validators").replace_one({"height": height}, validators_update, upsert=True)) @register_query(LocalMongoDBConnection) def delete_validator_set(conn, height): - return conn.run( - conn.collection('validators').delete_many({'height': height}) - ) + return conn.run(conn.collection("validators").delete_many({"height": height})) @register_query(LocalMongoDBConnection) def store_election(conn, election_id, height, is_concluded): return conn.run( - conn.collection('elections').replace_one( - {'election_id': election_id, - 'height': height}, - {'election_id': election_id, - 'height': height, - 'is_concluded': is_concluded}, + conn.collection("elections").replace_one( + {"election_id": election_id, "height": height}, + {"election_id": election_id, "height": height, "is_concluded": is_concluded}, upsert=True, ) ) @@ -299,29 +277,22 @@ def store_election(conn, election_id, height, is_concluded): @register_query(LocalMongoDBConnection) def store_elections(conn, elections): - return conn.run( - conn.collection('elections').insert_many(elections) - ) + return conn.run(conn.collection("elections").insert_many(elections)) @register_query(LocalMongoDBConnection) def delete_elections(conn, height): - return conn.run( - conn.collection('elections').delete_many({'height': height}) - ) + return conn.run(conn.collection("elections").delete_many({"height": height})) @register_query(LocalMongoDBConnection) def get_validator_set(conn, height=None): query = {} if height is not None: - query = {'height': {'$lte': height}} + query = {"height": {"$lte": height}} cursor = conn.run( - conn.collection('validators') - .find(query, projection={'_id': False}) - .sort([('height', DESCENDING)]) - .limit(1) + conn.collection("validators").find(query, projection={"_id": False}).sort([("height", 
DESCENDING)]).limit(1) ) return next(cursor, None) @@ -329,35 +300,27 @@ def get_validator_set(conn, height=None): @register_query(LocalMongoDBConnection) def get_election(conn, election_id): - query = {'election_id': election_id} + query = {"election_id": election_id} return conn.run( - conn.collection('elections') - .find_one(query, projection={'_id': False}, - sort=[('height', DESCENDING)]) + conn.collection("elections").find_one(query, projection={"_id": False}, sort=[("height", DESCENDING)]) ) @register_query(LocalMongoDBConnection) def get_asset_tokens_for_public_key(conn, asset_id, public_key): - query = {'outputs.public_keys': [public_key], - 'asset.id': asset_id} + query = {"outputs.public_keys": [public_key], "asset.id": asset_id} - cursor = conn.run( - conn.collection('transactions').aggregate([ - {'$match': query}, - {'$project': {'_id': False}} - ])) + cursor = conn.run(conn.collection("transactions").aggregate([{"$match": query}, {"$project": {"_id": False}}])) return cursor @register_query(LocalMongoDBConnection) def store_abci_chain(conn, height, chain_id, is_synced=True): return conn.run( - conn.collection('abci_chains').replace_one( - {'height': height}, - {'height': height, 'chain_id': chain_id, - 'is_synced': is_synced}, + conn.collection("abci_chains").replace_one( + {"height": height}, + {"height": height, "chain_id": chain_id, "is_synced": is_synced}, upsert=True, ) ) @@ -365,14 +328,9 @@ def store_abci_chain(conn, height, chain_id, is_synced=True): @register_query(LocalMongoDBConnection) def delete_abci_chain(conn, height): - return conn.run( - conn.collection('abci_chains').delete_many({'height': height}) - ) + return conn.run(conn.collection("abci_chains").delete_many({"height": height})) @register_query(LocalMongoDBConnection) def get_latest_abci_chain(conn): - return conn.run( - conn.collection('abci_chains') - .find_one(projection={'_id': False}, sort=[('height', DESCENDING)]) - ) + return 
conn.run(conn.collection("abci_chains").find_one(projection={"_id": False}, sort=[("height", DESCENDING)])) diff --git a/planetmint/backend/localmongodb/schema.py b/planetmint/backend/localmongodb/schema.py index d92d6d4..b8fd6a0 100644 --- a/planetmint/backend/localmongodb/schema.py +++ b/planetmint/backend/localmongodb/schema.py @@ -20,48 +20,48 @@ register_schema = module_dispatch_registrar(backend.schema) INDEXES = { - 'transactions': [ - ('id', dict(unique=True, name='transaction_id')), - ('asset.id', dict(name='asset_id')), - ('outputs.public_keys', dict(name='outputs')), - ([('inputs.fulfills.transaction_id', ASCENDING), - ('inputs.fulfills.output_index', ASCENDING)], dict(name='inputs')), + "transactions": [ + ("id", dict(unique=True, name="transaction_id")), + ("asset.id", dict(name="asset_id")), + ("outputs.public_keys", dict(name="outputs")), + ( + [("inputs.fulfills.transaction_id", ASCENDING), ("inputs.fulfills.output_index", ASCENDING)], + dict(name="inputs"), + ), ], - 'assets': [ - ('id', dict(name='asset_id', unique=True)), - ([('$**', TEXT)], dict(name='text')), + "assets": [ + ("id", dict(name="asset_id", unique=True)), + ([("$**", TEXT)], dict(name="text")), ], - 'blocks': [ - ([('height', DESCENDING)], dict(name='height', unique=True)), + "blocks": [ + ([("height", DESCENDING)], dict(name="height", unique=True)), ], - 'metadata': [ - ('id', dict(name='transaction_id', unique=True)), - ([('$**', TEXT)], dict(name='text')), + "metadata": [ + ("id", dict(name="transaction_id", unique=True)), + ([("$**", TEXT)], dict(name="text")), ], - 'utxos': [ - ([('transaction_id', ASCENDING), - ('output_index', ASCENDING)], dict(name='utxo', unique=True)), + "utxos": [ + ([("transaction_id", ASCENDING), ("output_index", ASCENDING)], dict(name="utxo", unique=True)), ], - 'pre_commit': [ - ('height', dict(name='height', unique=True)), + "pre_commit": [ + ("height", dict(name="height", unique=True)), ], - 'elections': [ - ([('height', DESCENDING), 
('election_id', ASCENDING)], - dict(name='election_id_height', unique=True)), + "elections": [ + ([("height", DESCENDING), ("election_id", ASCENDING)], dict(name="election_id_height", unique=True)), ], - 'validators': [ - ('height', dict(name='height', unique=True)), + "validators": [ + ("height", dict(name="height", unique=True)), ], - 'abci_chains': [ - ('height', dict(name='height', unique=True)), - ('chain_id', dict(name='chain_id', unique=True)), + "abci_chains": [ + ("height", dict(name="height", unique=True)), + ("chain_id", dict(name="chain_id", unique=True)), ], } @register_schema(LocalMongoDBConnection) def create_database(conn, dbname): - logger.info('Create database `%s`.', dbname) + logger.info("Create database `%s`.", dbname) # TODO: read and write concerns can be declared here conn.conn.get_database(dbname) @@ -72,15 +72,15 @@ def create_tables(conn, dbname): # create the table # TODO: read and write concerns can be declared here try: - logger.info(f'Create `{table_name}` table.') + logger.info(f"Create `{table_name}` table.") conn.conn[dbname].create_collection(table_name) except CollectionInvalid: - logger.info(f'Collection {table_name} already exists.') + logger.info(f"Collection {table_name} already exists.") create_indexes(conn, dbname, table_name, INDEXES[table_name]) def create_indexes(conn, dbname, collection, indexes): - logger.info(f'Ensure secondary indexes for `{collection}`.') + logger.info(f"Ensure secondary indexes for `{collection}`.") for fields, kwargs in indexes: conn.conn[dbname][collection].create_index(fields, **kwargs) diff --git a/planetmint/backend/query.py b/planetmint/backend/query.py index 0f4d044..41f1fac 100644 --- a/planetmint/backend/query.py +++ b/planetmint/backend/query.py @@ -27,12 +27,12 @@ def store_asset(asset: dict, connection): @singledispatch def store_assets(assets: list, connection): """Write a list of assets to the assets table. -backend - Args: - assets (list): a list of assets to write. 
+ backend + Args: + assets (list): a list of assets to write. - Returns: - The database response. + Returns: + The database response. """ raise NotImplementedError @@ -215,8 +215,17 @@ def get_txids_filtered(connection, asset_id, operation=None): @singledispatch -def text_search(conn, search, *, language='english', case_sensitive=False, - diacritic_sensitive=False, text_score=False, limit=0, table=None): +def text_search( + conn, + search, + *, + language="english", + case_sensitive=False, + diacritic_sensitive=False, + text_score=False, + limit=0, + table=None +): """Return all the assets that match the text search. The results are sorted by text score. @@ -243,8 +252,7 @@ def text_search(conn, search, *, language='english', case_sensitive=False, OperationError: If the backend does not support text search """ - raise OperationError('This query is only supported when running ' - 'Planetmint with MongoDB as the backend.') + raise OperationError("This query is only supported when running " "Planetmint with MongoDB as the backend.") @singledispatch @@ -384,8 +392,7 @@ def get_validator_set(conn, height): @singledispatch def get_election(conn, election_id): - """Return the election record - """ + """Return the election record""" raise NotImplementedError @@ -432,6 +439,5 @@ def get_latest_abci_chain(conn): @singledispatch def _group_transaction_by_ids(txids: list, connection): - """Returns the transactions object (JSON TYPE), from list of ids. 
- """ + """Returns the transactions object (JSON TYPE), from list of ids.""" raise NotImplementedError diff --git a/planetmint/backend/schema.py b/planetmint/backend/schema.py index 7204ea8..9d10e57 100644 --- a/planetmint/backend/schema.py +++ b/planetmint/backend/schema.py @@ -12,23 +12,74 @@ from planetmint.config import Config from planetmint.backend.connection import connect from planetmint.transactions.common.exceptions import ValidationError from planetmint.transactions.common.utils import ( - validate_all_values_for_key_in_obj, validate_all_values_for_key_in_list) + validate_all_values_for_key_in_obj, + validate_all_values_for_key_in_list, +) logger = logging.getLogger(__name__) # Tables/collections that every backend database must create -TABLES = ('transactions', 'blocks', 'assets', 'metadata', - 'validators', 'elections', 'pre_commit', 'utxos', 'abci_chains') +TABLES = ( + "transactions", + "blocks", + "assets", + "metadata", + "validators", + "elections", + "pre_commit", + "utxos", + "abci_chains", +) -SPACE_NAMES = ("abci_chains", "assets", "blocks", "blocks_tx", - "elections", "meta_data", "pre_commits", "validators", - "transactions", "inputs", "outputs", "keys", "utxos") +SPACE_NAMES = ( + "abci_chains", + "assets", + "blocks", + "blocks_tx", + "elections", + "meta_data", + "pre_commits", + "validators", + "transactions", + "inputs", + "outputs", + "keys", + "utxos", +) -VALID_LANGUAGES = ('danish', 'dutch', 'english', 'finnish', 'french', 'german', - 'hungarian', 'italian', 'norwegian', 'portuguese', 'romanian', - 'russian', 'spanish', 'swedish', 'turkish', 'none', - 'da', 'nl', 'en', 'fi', 'fr', 'de', 'hu', 'it', 'nb', 'pt', - 'ro', 'ru', 'es', 'sv', 'tr') +VALID_LANGUAGES = ( + "danish", + "dutch", + "english", + "finnish", + "french", + "german", + "hungarian", + "italian", + "norwegian", + "portuguese", + "romanian", + "russian", + "spanish", + "swedish", + "turkish", + "none", + "da", + "nl", + "en", + "fi", + "fr", + "de", + "hu", + "it", + 
"nb", + "pt", + "ro", + "ru", + "es", + "sv", + "tr", +) @singledispatch @@ -84,7 +135,7 @@ def init_database(connection=None, dbname=None): """ connection = connection or connect() - dbname = dbname or Config().get()['database']['name'] + dbname = dbname or Config().get()["database"]["name"] create_database(connection, dbname) create_tables(connection, dbname) @@ -93,41 +144,43 @@ def init_database(connection=None, dbname=None): def validate_language_key(obj, key): """Validate all nested "language" key in `obj`. - Args: - obj (dict): dictionary whose "language" key is to be validated. + Args: + obj (dict): dictionary whose "language" key is to be validated. - Returns: - None: validation successful + Returns: + None: validation successful - Raises: - ValidationError: will raise exception in case language is not valid. + Raises: + ValidationError: will raise exception in case language is not valid. """ - backend = Config().get()['database']['backend'] + backend = Config().get()["database"]["backend"] - if backend == 'localmongodb': + if backend == "localmongodb": data = obj.get(key, {}) if isinstance(data, dict): - validate_all_values_for_key_in_obj(data, 'language', validate_language) + validate_all_values_for_key_in_obj(data, "language", validate_language) elif isinstance(data, list): - validate_all_values_for_key_in_list(data, 'language', validate_language) + validate_all_values_for_key_in_list(data, "language", validate_language) def validate_language(value): """Check if `value` is a valid language. - https://docs.mongodb.com/manual/reference/text-search-languages/ + https://docs.mongodb.com/manual/reference/text-search-languages/ - Args: - value (str): language to validated + Args: + value (str): language to validated - Returns: - None: validation successful + Returns: + None: validation successful - Raises: - ValidationError: will raise exception in case language is not valid. + Raises: + ValidationError: will raise exception in case language is not valid. 
""" if value not in VALID_LANGUAGES: - error_str = ('MongoDB does not support text search for the ' - 'language "{}". If you do not understand this error ' - 'message then please rename key/field "language" to ' - 'something else like "lang".').format(value) + error_str = ( + "MongoDB does not support text search for the " + 'language "{}". If you do not understand this error ' + 'message then please rename key/field "language" to ' + 'something else like "lang".' + ).format(value) raise ValidationError(error_str) diff --git a/planetmint/backend/tarantool/__init__.py b/planetmint/backend/tarantool/__init__.py index 4ee14e1..1e667c0 100644 --- a/planetmint/backend/tarantool/__init__.py +++ b/planetmint/backend/tarantool/__init__.py @@ -1,5 +1,5 @@ # Register the single dispatched modules on import. -from planetmint.backend.tarantool import query, connection, schema, convert # noqa +from planetmint.backend.tarantool import query, connection, schema, convert # noqa # MongoDBConnection should always be accessed via # ``planetmint.backend.connect()``. 
diff --git a/planetmint/backend/tarantool/convert.py b/planetmint/backend/tarantool/convert.py index b58ec87..a7475e5 100644 --- a/planetmint/backend/tarantool/convert.py +++ b/planetmint/backend/tarantool/convert.py @@ -16,11 +16,10 @@ register_query = module_dispatch_registrar(convert) def prepare_asset(connection, transaction_type, transaction_id, filter_operation, asset): asset_id = transaction_id if transaction_type != filter_operation: - asset_id = asset['id'] + asset_id = asset["id"] return tuple([asset, transaction_id, asset_id]) @register_query(TarantoolDBConnection) def prepare_metadata(connection, transaction_id, metadata): - return {'id': transaction_id, - 'metadata': metadata} + return {"id": transaction_id, "metadata": metadata} diff --git a/planetmint/backend/tarantool/query.py b/planetmint/backend/tarantool/query.py index a172db3..f269202 100644 --- a/planetmint/backend/tarantool/query.py +++ b/planetmint/backend/tarantool/query.py @@ -57,40 +57,22 @@ def store_transactions(connection, signed_transactions: list): txprepare = TransactionDecompose(transaction) txtuples = txprepare.convert_to_tuple() try: - connection.run( - connection.space("transactions").insert(txtuples["transactions"]), - only_data=False - ) + connection.run(connection.space("transactions").insert(txtuples["transactions"]), only_data=False) except: # This is used for omitting duplicate error in database for test -> test_bigchain_api::test_double_inclusion # noqa: E501, E722 continue for _in in txtuples["inputs"]: - connection.run( - connection.space("inputs").insert(_in), - only_data=False - ) + connection.run(connection.space("inputs").insert(_in), only_data=False) for _out in txtuples["outputs"]: - connection.run( - connection.space("outputs").insert(_out), - only_data=False - ) + connection.run(connection.space("outputs").insert(_out), only_data=False) for _key in txtuples["keys"]: - connection.run( - connection.space("keys").insert(_key), - only_data=False - ) + 
connection.run(connection.space("keys").insert(_key), only_data=False) if txtuples["metadata"] is not None: - connection.run( - connection.space("meta_data").insert(txtuples["metadata"]), - only_data=False - ) + connection.run(connection.space("meta_data").insert(txtuples["metadata"]), only_data=False) if txtuples["asset"] is not None: - connection.run( - connection.space("assets").insert(txtuples["asset"]), - only_data=False - ) + connection.run(connection.space("assets").insert(txtuples["asset"]), only_data=False) @register_query(TarantoolDBConnection) @@ -110,7 +92,8 @@ def store_metadatas(connection, metadata: list): for meta in metadata: connection.run( connection.space("meta_data").insert( - (meta["id"], json.dumps(meta["data"] if not "metadata" in meta else meta["metadata"]))) # noqa: E713 + (meta["id"], json.dumps(meta["data"] if not "metadata" in meta else meta["metadata"])) + ) # noqa: E713 ) @@ -118,9 +101,7 @@ def store_metadatas(connection, metadata: list): def get_metadata(connection, transaction_ids: list): _returned_data = [] for _id in transaction_ids: - metadata = connection.run( - connection.space("meta_data").select(_id, index="id_search") - ) + metadata = connection.run(connection.space("meta_data").select(_id, index="id_search")) if metadata is not None: if len(metadata) > 0: metadata[0] = list(metadata[0]) @@ -139,14 +120,13 @@ def store_asset(connection, asset): return tuple(obj) else: return (json.dumps(obj), obj["id"], obj["id"]) + try: - return connection.run( - connection.space("assets").insert(convert(asset)), - only_data=False - ) + return connection.run(connection.space("assets").insert(convert(asset)), only_data=False) except DatabaseError: pass + @register_query(TarantoolDBConnection) def store_assets(connection, assets: list): for asset in assets: @@ -155,9 +135,7 @@ def store_assets(connection, assets: list): @register_query(TarantoolDBConnection) def get_asset(connection, asset_id: str): - _data = connection.run( - 
connection.space("assets").select(asset_id, index="txid_search") - ) + _data = connection.run(connection.space("assets").select(asset_id, index="txid_search")) return json.loads(_data[0][0]) if len(_data) > 0 else [] @@ -166,9 +144,7 @@ def get_asset(connection, asset_id: str): def get_assets(connection, assets_ids: list) -> list: _returned_data = [] for _id in list(set(assets_ids)): - res = connection.run( - connection.space("assets").select(_id, index="txid_search") - ) + res = connection.run(connection.space("assets").select(_id, index="txid_search")) _returned_data.append(res[0]) sorted_assets = sorted(_returned_data, key=lambda k: k[1], reverse=False) @@ -186,17 +162,13 @@ def get_spent(connection, fullfil_transaction_id: str, fullfil_output_index: str @register_query(TarantoolDBConnection) def get_latest_block(connection): # TODO Here is used DESCENDING OPERATOR - _all_blocks = connection.run( - connection.space("blocks").select() - ) - block = {"app_hash": '', "height": 0, "transactions": []} + _all_blocks = connection.run(connection.space("blocks").select()) + block = {"app_hash": "", "height": 0, "transactions": []} if _all_blocks is not None: if len(_all_blocks) > 0: _block = sorted(_all_blocks, key=itemgetter(1), reverse=True)[0] - _txids = connection.run( - connection.space("blocks_tx").select(_block[2], index="block_search") - ) + _txids = connection.run(connection.space("blocks_tx").select(_block[2], index="block_search")) block["app_hash"] = _block[0] block["height"] = _block[1] block["transactions"] = [tx[0] for tx in _txids] @@ -209,27 +181,22 @@ def get_latest_block(connection): # TODO Here is used DESCENDING OPERATOR def store_block(connection, block: dict): block_unique_id = token_hex(8) connection.run( - connection.space("blocks").insert((block["app_hash"], - block["height"], - block_unique_id)), - only_data=False + connection.space("blocks").insert((block["app_hash"], block["height"], block_unique_id)), only_data=False ) for txid in 
block["transactions"]: - connection.run( - connection.space("blocks_tx").insert((txid, block_unique_id)), - only_data=False - ) + connection.run(connection.space("blocks_tx").insert((txid, block_unique_id)), only_data=False) @register_query(TarantoolDBConnection) -def get_txids_filtered(connection, asset_id: str, operation: str = None, - last_tx: any = None): # TODO here is used 'OR' operator +def get_txids_filtered( + connection, asset_id: str, operation: str = None, last_tx: any = None +): # TODO here is used 'OR' operator actions = { "CREATE": {"sets": ["CREATE", asset_id], "index": "transaction_search"}, # 1 - operation, 2 - id (only in transactions) + "TRANSFER": {"sets": ["TRANSFER", asset_id], "index": "transaction_search"}, # 1 - operation, 2 - asset.id (linked mode) + OPERATOR OR - None: {"sets": [asset_id, asset_id]} + None: {"sets": [asset_id, asset_id]}, }[operation] _transactions = [] if actions["sets"][0] == "CREATE": # + @@ -237,9 +204,7 @@ def get_txids_filtered(connection, asset_id: str, operation: str = None, connection.space("transactions").select([operation, asset_id], index=actions["index"]) ) elif actions["sets"][0] == "TRANSFER": # + - _assets = connection.run( - connection.space("assets").select([asset_id], index="only_asset_search") - ) + _assets = connection.run(connection.space("assets").select([asset_id], index="only_asset_search")) for asset in _assets: _txid = asset[1] _transactions = connection.run( @@ -248,12 +213,8 @@ def get_txids_filtered(connection, asset_id: str, operation: str = None, if len(_transactions) != 0: break else: - _tx_ids = connection.run( - connection.space("transactions").select([asset_id], index="id_search") - ) - _assets_ids = connection.run( - connection.space("assets").select([asset_id], index="only_asset_search") - ) + _tx_ids = connection.run(connection.space("transactions").select([asset_id], index="id_search")) + _assets_ids = connection.run(connection.space("assets").select([asset_id], 
index="only_asset_search")) return tuple(set([sublist[1] for sublist in _assets_ids] + [sublist[0] for sublist in _tx_ids])) if last_tx: @@ -261,43 +222,34 @@ def get_txids_filtered(connection, asset_id: str, operation: str = None, return tuple([elem[0] for elem in _transactions]) + @register_query(TarantoolDBConnection) -def text_search(conn, search, table='assets', limit=0): +def text_search(conn, search, table="assets", limit=0): pattern = ".{}.".format(search) - field_no = 1 if table == 'assets' else 2 # 2 for meta_data - res = conn.run( - conn.space(table).call('indexed_pattern_search', (table, field_no, pattern)) - ) + field_no = 1 if table == "assets" else 2 # 2 for meta_data + res = conn.run(conn.space(table).call("indexed_pattern_search", (table, field_no, pattern))) to_return = [] if len(res[0]): # NEEDS BEAUTIFICATION - if table == 'assets': + if table == "assets": for result in res[0]: - to_return.append({ - 'data': json.loads(result[0])['data'], - 'id': result[1] - }) + to_return.append({"data": json.loads(result[0])["data"], "id": result[1]}) else: for result in res[0]: - to_return.append({ - 'metadata': json.loads(result[1]), - 'id': result[0] - }) + to_return.append({"metadata": json.loads(result[1]), "id": result[0]}) return to_return if limit == 0 else to_return[:limit] def _remove_text_score(asset): - asset.pop('score', None) + asset.pop("score", None) return asset @register_query(TarantoolDBConnection) def get_owned_ids(connection, owner: str): - _keys = connection.run( - connection.space("keys").select(owner, index="keys_search") - ) + _keys = connection.run(connection.space("keys").select(owner, index="keys_search")) if _keys is None or len(_keys) == 0: return [] _transactionids = list(set([key[1] for key in _keys])) @@ -310,9 +262,11 @@ def get_spending_transactions(connection, inputs): _transactions = [] for inp in inputs: - _trans_list = get_spent(fullfil_transaction_id=inp["transaction_id"], - fullfil_output_index=inp["output_index"], - 
connection=connection) + _trans_list = get_spent( + fullfil_transaction_id=inp["transaction_id"], + fullfil_output_index=inp["output_index"], + connection=connection, + ) _transactions.extend(_trans_list) return _transactions @@ -320,28 +274,20 @@ def get_spending_transactions(connection, inputs): @register_query(TarantoolDBConnection) def get_block(connection, block_id=[]): - _block = connection.run( - connection.space("blocks").select(block_id, index="block_search", limit=1) - ) + _block = connection.run(connection.space("blocks").select(block_id, index="block_search", limit=1)) if _block is None or len(_block) == 0: return [] _block = _block[0] - _txblock = connection.run( - connection.space("blocks_tx").select(_block[2], index="block_search") - ) + _txblock = connection.run(connection.space("blocks_tx").select(_block[2], index="block_search")) return {"app_hash": _block[0], "height": _block[1], "transactions": [_tx[0] for _tx in _txblock]} @register_query(TarantoolDBConnection) def get_block_with_transaction(connection, txid: str): - _all_blocks_tx = connection.run( - connection.space("blocks_tx").select(txid, index="id_search") - ) + _all_blocks_tx = connection.run(connection.space("blocks_tx").select(txid, index="id_search")) if _all_blocks_tx is None or len(_all_blocks_tx) == 0: return [] - _block = connection.run( - connection.space("blocks").select(_all_blocks_tx[0][1], index="block_id_search") - ) + _block = connection.run(connection.space("blocks").select(_all_blocks_tx[0][1], index="block_id_search")) return [{"height": _height[1]} for _height in _block] @@ -373,7 +319,7 @@ def store_unspent_outputs(connection, *unspent_outputs: list): if unspent_outputs: for utxo in unspent_outputs: output = connection.run( - connection.space("utxos").insert((utxo['transaction_id'], utxo['output_index'], dumps(utxo))) + connection.space("utxos").insert((utxo["transaction_id"], utxo["output_index"], dumps(utxo))) ) result.append(output) return result @@ -384,42 +330,36 
@@ def delete_unspent_outputs(connection, *unspent_outputs: list): result = [] if unspent_outputs: for utxo in unspent_outputs: - output = connection.run( - connection.space("utxos").delete((utxo['transaction_id'], utxo['output_index'])) - ) + output = connection.run(connection.space("utxos").delete((utxo["transaction_id"], utxo["output_index"]))) result.append(output) return result @register_query(TarantoolDBConnection) def get_unspent_outputs(connection, query=None): # for now we don't have implementation for 'query'. - _utxos = connection.run( - connection.space("utxos").select([]) - ) + _utxos = connection.run(connection.space("utxos").select([])) return [loads(utx[2]) for utx in _utxos] @register_query(TarantoolDBConnection) def store_pre_commit_state(connection, state: dict): - _precommit = connection.run( - connection.space("pre_commits").select([], limit=1) + _precommit = connection.run(connection.space("pre_commits").select([], limit=1)) + _precommitTuple = ( + (token_hex(8), state["height"], state["transactions"]) + if _precommit is None or len(_precommit) == 0 + else _precommit[0] ) - _precommitTuple = (token_hex(8), state["height"], state["transactions"]) if _precommit is None or len( - _precommit) == 0 else _precommit[0] connection.run( - connection.space("pre_commits").upsert(_precommitTuple, - op_list=[('=', 1, state["height"]), - ('=', 2, state["transactions"])], - limit=1), - only_data=False + connection.space("pre_commits").upsert( + _precommitTuple, op_list=[("=", 1, state["height"]), ("=", 2, state["transactions"])], limit=1 + ), + only_data=False, ) @register_query(TarantoolDBConnection) def get_pre_commit_state(connection): - _commit = connection.run( - connection.space("pre_commits").select([], index="id_search") - ) + _commit = connection.run(connection.space("pre_commits").select([], index="id_search")) if _commit is None or len(_commit) == 0: return None _commit = sorted(_commit, key=itemgetter(1), reverse=False)[0] @@ -428,39 +368,32 @@ 
def get_pre_commit_state(connection): @register_query(TarantoolDBConnection) def store_validator_set(conn, validators_update: dict): - _validator = conn.run( - conn.space("validators").select(validators_update["height"], index="height_search", limit=1) - ) + _validator = conn.run(conn.space("validators").select(validators_update["height"], index="height_search", limit=1)) unique_id = token_hex(8) if _validator is None or len(_validator) == 0 else _validator[0][0] conn.run( - conn.space("validators").upsert((unique_id, validators_update["height"], validators_update["validators"]), - op_list=[('=', 1, validators_update["height"]), - ('=', 2, validators_update["validators"])], - limit=1), - only_data=False + conn.space("validators").upsert( + (unique_id, validators_update["height"], validators_update["validators"]), + op_list=[("=", 1, validators_update["height"]), ("=", 2, validators_update["validators"])], + limit=1, + ), + only_data=False, ) @register_query(TarantoolDBConnection) def delete_validator_set(connection, height: int): - _validators = connection.run( - connection.space("validators").select(height, index="height_search") - ) + _validators = connection.run(connection.space("validators").select(height, index="height_search")) for _valid in _validators: - connection.run( - connection.space("validators").delete(_valid[0]), - only_data=False - ) + connection.run(connection.space("validators").delete(_valid[0]), only_data=False) @register_query(TarantoolDBConnection) def store_election(connection, election_id: str, height: int, is_concluded: bool): connection.run( - connection.space("elections").upsert((election_id, height, is_concluded), - op_list=[('=', 1, height), - ('=', 2, is_concluded)], - limit=1), - only_data=False + connection.space("elections").upsert( + (election_id, height, is_concluded), op_list=[("=", 1, height), ("=", 2, is_concluded)], limit=1 + ), + only_data=False, ) @@ -468,33 +401,27 @@ def store_election(connection, election_id: str, 
height: int, is_concluded: bool def store_elections(connection, elections: list): for election in elections: _election = connection.run( # noqa: F841 - connection.space("elections").insert((election["election_id"], - election["height"], - election["is_concluded"])), - only_data=False + connection.space("elections").insert( + (election["election_id"], election["height"], election["is_concluded"]) + ), + only_data=False, ) @register_query(TarantoolDBConnection) def delete_elections(connection, height: int): - _elections = connection.run( - connection.space("elections").select(height, index="height_search") - ) + _elections = connection.run(connection.space("elections").select(height, index="height_search")) for _elec in _elections: - connection.run( - connection.space("elections").delete(_elec[0]), - only_data=False - ) + connection.run(connection.space("elections").delete(_elec[0]), only_data=False) @register_query(TarantoolDBConnection) def get_validator_set(connection, height: int = None): - _validators = connection.run( - connection.space("validators").select() - ) + _validators = connection.run(connection.space("validators").select()) if height is not None and _validators is not None: - _validators = [{"height": validator[1], "validators": validator[2]} for validator in _validators if - validator[1] <= height] + _validators = [ + {"height": validator[1], "validators": validator[2]} for validator in _validators if validator[1] <= height + ] return next(iter(sorted(_validators, key=lambda k: k["height"], reverse=True)), None) elif _validators is not None: _validators = [{"height": validator[1], "validators": validator[2]} for validator in _validators] @@ -504,9 +431,7 @@ def get_validator_set(connection, height: int = None): @register_query(TarantoolDBConnection) def get_election(connection, election_id: str): - _elections = connection.run( - connection.space("elections").select(election_id, index="id_search") - ) + _elections = 
connection.run(connection.space("elections").select(election_id, index="id_search")) if _elections is None or len(_elections) == 0: return None _election = sorted(_elections, key=itemgetter(0), reverse=True)[0] @@ -514,13 +439,12 @@ def get_election(connection, election_id: str): @register_query(TarantoolDBConnection) -def get_asset_tokens_for_public_key(connection, asset_id: str, - public_key: str): # FIXME Something can be wrong with this function ! (public_key) is not used # noqa: E501 +def get_asset_tokens_for_public_key( + connection, asset_id: str, public_key: str +): # FIXME Something can be wrong with this function ! (public_key) is not used # noqa: E501 # space = connection.space("keys") # _keys = space.select([public_key], index="keys_search") - _transactions = connection.run( - connection.space("assets").select([asset_id], index="assetid_search") - ) + _transactions = connection.run(connection.space("assets").select([asset_id], index="assetid_search")) # _transactions = _transactions # _keys = _keys.data _grouped_transactions = _group_transaction_by_ids(connection=connection, txids=[_tx[1] for _tx in _transactions]) @@ -531,30 +455,23 @@ def get_asset_tokens_for_public_key(connection, asset_id: str, def store_abci_chain(connection, height: int, chain_id: str, is_synced: bool = True): hash_id_primarykey = sha256(dumps(obj={"height": height}).encode()).hexdigest() connection.run( - connection.space("abci_chains").upsert((height, is_synced, chain_id, hash_id_primarykey), - op_list=[ - ('=', 0, height), - ('=', 1, is_synced), - ('=', 2, chain_id) - ]), - only_data=False + connection.space("abci_chains").upsert( + (height, is_synced, chain_id, hash_id_primarykey), + op_list=[("=", 0, height), ("=", 1, is_synced), ("=", 2, chain_id)], + ), + only_data=False, ) @register_query(TarantoolDBConnection) def delete_abci_chain(connection, height: int): hash_id_primarykey = sha256(dumps(obj={"height": height}).encode()).hexdigest() - connection.run( - 
connection.space("abci_chains").delete(hash_id_primarykey), - only_data=False - ) + connection.run(connection.space("abci_chains").delete(hash_id_primarykey), only_data=False) @register_query(TarantoolDBConnection) def get_latest_abci_chain(connection): - _all_chains = connection.run( - connection.space("abci_chains").select() - ) + _all_chains = connection.run(connection.space("abci_chains").select()) if _all_chains is None or len(_all_chains) == 0: return None _chain = sorted(_all_chains, key=itemgetter(0), reverse=True)[0] diff --git a/planetmint/backend/tarantool/schema.py b/planetmint/backend/tarantool/schema.py index 80cc833..401a96b 100644 --- a/planetmint/backend/tarantool/schema.py +++ b/planetmint/backend/tarantool/schema.py @@ -9,9 +9,21 @@ from planetmint.backend.tarantool.connection import TarantoolDBConnection logger = logging.getLogger(__name__) register_schema = module_dispatch_registrar(backend.schema) -SPACE_NAMES = ("abci_chains", "assets", "blocks", "blocks_tx", - "elections", "meta_data", "pre_commits", "validators", - "transactions", "inputs", "outputs", "keys", "utxos") +SPACE_NAMES = ( + "abci_chains", + "assets", + "blocks", + "blocks_tx", + "elections", + "meta_data", + "pre_commits", + "validators", + "transactions", + "inputs", + "outputs", + "keys", + "utxos", +) SPACE_COMMANDS = { "abci_chains": "abci_chains = box.schema.space.create('abci_chains', {engine='memtx', is_sync = false})", @@ -26,110 +38,86 @@ SPACE_COMMANDS = { "inputs": "inputs = box.schema.space.create('inputs')", "outputs": "outputs = box.schema.space.create('outputs')", "keys": "keys = box.schema.space.create('keys')", - "utxos": "utxos = box.schema.space.create('utxos', {engine = 'memtx' , is_sync = false})" + "utxos": "utxos = box.schema.space.create('utxos', {engine = 'memtx' , is_sync = false})", } INDEX_COMMANDS = { - "abci_chains": - { - "id_search": "abci_chains:create_index('id_search' ,{type='hash', parts={'id'}})", - "height_search": 
"abci_chains:create_index('height_search' ,{type='tree', unique=false, parts={'height'}})" - }, - "assets": - { - "txid_search": "assets:create_index('txid_search', {type='hash', parts={'tx_id'}})", - "assetid_search": "assets:create_index('assetid_search', {type='tree',unique=false, parts={'asset_id', 'tx_id'}})", # noqa: E501 - "only_asset_search": "assets:create_index('only_asset_search', {type='tree', unique=false, parts={'asset_id'}})", # noqa: E501 - "text_search": "assets:create_index('secondary', {unique=false,parts={1,'string'}})" - }, - "blocks": - { - "id_search": "blocks:create_index('id_search' , {type='hash' , parts={'block_id'}})", - "block_search": "blocks:create_index('block_search' , {type='tree', unique = false, parts={'height'}})", - "block_id_search": "blocks:create_index('block_id_search', {type = 'hash', parts ={'block_id'}})" - }, - "blocks_tx": - { - "id_search": "blocks_tx:create_index('id_search',{ type = 'hash', parts={'transaction_id'}})", - "block_search": "blocks_tx:create_index('block_search', {type = 'tree',unique=false, parts={'block_id'}})" - }, - "elections": - { - "id_search": "elections:create_index('id_search' , {type='hash', parts={'election_id'}})", - "height_search": "elections:create_index('height_search' , {type='tree',unique=false, parts={'height'}})", - "update_search": "elections:create_index('update_search', {type='tree', unique=false, parts={'election_id', 'height'}})" # noqa: E501 - }, - "meta_data": - { - "id_search": "meta_datas:create_index('id_search', { type='hash' , parts={'transaction_id'}})", - "text_search": "meta_datas:create_index('secondary', {unique=false,parts={2,'string'}})" - }, - "pre_commits": - { - "id_search": "pre_commits:create_index('id_search', {type ='hash' , parts={'commit_id'}})", - "height_search": "pre_commits:create_index('height_search', {type ='tree',unique=true, parts={'height'}})" - }, - "validators": - { - "id_search": "validators:create_index('id_search' , {type='hash' , 
parts={'validator_id'}})", - "height_search": "validators:create_index('height_search' , {type='tree', unique=true, parts={'height'}})" - }, - "transactions": - { - "id_search": "transactions:create_index('id_search' , {type = 'hash' , parts={'transaction_id'}})", - "transaction_search": "transactions:create_index('transaction_search' , {type = 'tree',unique=false, parts={'operation', 'transaction_id'}})" # noqa: E501 - }, - "inputs": - { - "delete_search": "inputs:create_index('delete_search' , {type = 'hash', parts={'input_id'}})", - "spent_search": "inputs:create_index('spent_search' , {type = 'tree', unique=false, parts={'fulfills_transaction_id', 'fulfills_output_index'}})", # noqa: E501 - "id_search": "inputs:create_index('id_search', {type = 'tree', unique=false, parts = {'transaction_id'}})" - }, - "outputs": - { - "unique_search": "outputs:create_index('unique_search' ,{type='hash', parts={'output_id'}})", - "id_search": "outputs:create_index('id_search' ,{type='tree', unique=false, parts={'transaction_id'}})" - }, - "keys": - { - "id_search": "keys:create_index('id_search', {type = 'hash', parts={'id'}})", - "keys_search": "keys:create_index('keys_search', {type = 'tree', unique=false, parts={'public_key'}})", - "txid_search": "keys:create_index('txid_search', {type = 'tree', unique=false, parts={'transaction_id'}})", - "output_search": "keys:create_index('output_search', {type = 'tree', unique=false, parts={'output_id'}})" - }, - "utxos": - { - "id_search": "utxos:create_index('id_search', {type='hash' , parts={'transaction_id', 'output_index'}})", - "transaction_search": "utxos:create_index('transaction_search', {type='tree', unique=false, parts={'transaction_id'}})", # noqa: E501 - "index_Search": "utxos:create_index('index_search', {type='tree', unique=false, parts={'output_index'}})" - } + "abci_chains": { + "id_search": "abci_chains:create_index('id_search' ,{type='hash', parts={'id'}})", + "height_search": "abci_chains:create_index('height_search' 
,{type='tree', unique=false, parts={'height'}})", + }, + "assets": { + "txid_search": "assets:create_index('txid_search', {type='hash', parts={'tx_id'}})", + "assetid_search": "assets:create_index('assetid_search', {type='tree',unique=false, parts={'asset_id', 'tx_id'}})", # noqa: E501 + "only_asset_search": "assets:create_index('only_asset_search', {type='tree', unique=false, parts={'asset_id'}})", # noqa: E501 + "text_search": "assets:create_index('secondary', {unique=false,parts={1,'string'}})", + }, + "blocks": { + "id_search": "blocks:create_index('id_search' , {type='hash' , parts={'block_id'}})", + "block_search": "blocks:create_index('block_search' , {type='tree', unique = false, parts={'height'}})", + "block_id_search": "blocks:create_index('block_id_search', {type = 'hash', parts ={'block_id'}})", + }, + "blocks_tx": { + "id_search": "blocks_tx:create_index('id_search',{ type = 'hash', parts={'transaction_id'}})", + "block_search": "blocks_tx:create_index('block_search', {type = 'tree',unique=false, parts={'block_id'}})", + }, + "elections": { + "id_search": "elections:create_index('id_search' , {type='hash', parts={'election_id'}})", + "height_search": "elections:create_index('height_search' , {type='tree',unique=false, parts={'height'}})", + "update_search": "elections:create_index('update_search', {type='tree', unique=false, parts={'election_id', 'height'}})", # noqa: E501 + }, + "meta_data": { + "id_search": "meta_datas:create_index('id_search', { type='hash' , parts={'transaction_id'}})", + "text_search": "meta_datas:create_index('secondary', {unique=false,parts={2,'string'}})", + }, + "pre_commits": { + "id_search": "pre_commits:create_index('id_search', {type ='hash' , parts={'commit_id'}})", + "height_search": "pre_commits:create_index('height_search', {type ='tree',unique=true, parts={'height'}})", + }, + "validators": { + "id_search": "validators:create_index('id_search' , {type='hash' , parts={'validator_id'}})", + "height_search": 
"validators:create_index('height_search' , {type='tree', unique=true, parts={'height'}})", + }, + "transactions": { + "id_search": "transactions:create_index('id_search' , {type = 'hash' , parts={'transaction_id'}})", + "transaction_search": "transactions:create_index('transaction_search' , {type = 'tree',unique=false, parts={'operation', 'transaction_id'}})", # noqa: E501 + }, + "inputs": { + "delete_search": "inputs:create_index('delete_search' , {type = 'hash', parts={'input_id'}})", + "spent_search": "inputs:create_index('spent_search' , {type = 'tree', unique=false, parts={'fulfills_transaction_id', 'fulfills_output_index'}})", # noqa: E501 + "id_search": "inputs:create_index('id_search', {type = 'tree', unique=false, parts = {'transaction_id'}})", + }, + "outputs": { + "unique_search": "outputs:create_index('unique_search' ,{type='hash', parts={'output_id'}})", + "id_search": "outputs:create_index('id_search' ,{type='tree', unique=false, parts={'transaction_id'}})", + }, + "keys": { + "id_search": "keys:create_index('id_search', {type = 'hash', parts={'id'}})", + "keys_search": "keys:create_index('keys_search', {type = 'tree', unique=false, parts={'public_key'}})", + "txid_search": "keys:create_index('txid_search', {type = 'tree', unique=false, parts={'transaction_id'}})", + "output_search": "keys:create_index('output_search', {type = 'tree', unique=false, parts={'output_id'}})", + }, + "utxos": { + "id_search": "utxos:create_index('id_search', {type='hash' , parts={'transaction_id', 'output_index'}})", + "transaction_search": "utxos:create_index('transaction_search', {type='tree', unique=false, parts={'transaction_id'}})", # noqa: E501 + "index_Search": "utxos:create_index('index_search', {type='tree', unique=false, parts={'output_index'}})", + }, } SCHEMA_COMMANDS = { - "abci_chains": - "abci_chains:format({{name='height' , type='integer'},{name='is_synched' , type='boolean'},{name='chain_id',type='string'}, {name='id', type='string'}})", # noqa: E501 - 
"assets": - "assets:format({{name='data' , type='string'}, {name='tx_id', type='string'}, {name='asset_id', type='string'}})", # noqa: E501 - "blocks": - "blocks:format{{name='app_hash',type='string'},{name='height' , type='integer'},{name='block_id' , type='string'}}", # noqa: E501 + "abci_chains": "abci_chains:format({{name='height' , type='integer'},{name='is_synched' , type='boolean'},{name='chain_id',type='string'}, {name='id', type='string'}})", # noqa: E501 + "assets": "assets:format({{name='data' , type='string'}, {name='tx_id', type='string'}, {name='asset_id', type='string'}})", # noqa: E501 + "blocks": "blocks:format{{name='app_hash',type='string'},{name='height' , type='integer'},{name='block_id' , type='string'}}", # noqa: E501 "blocks_tx": "blocks_tx:format{{name='transaction_id', type = 'string'}, {name = 'block_id', type = 'string'}}", - "elections": - "elections:format({{name='election_id' , type='string'},{name='height' , type='integer'}, {name='is_concluded' , type='boolean'}})", # noqa: E501 + "elections": "elections:format({{name='election_id' , type='string'},{name='height' , type='integer'}, {name='is_concluded' , type='boolean'}})", # noqa: E501 "meta_data": "meta_datas:format({{name='transaction_id' , type='string'}, {name='meta_data' , type='string'}})", # noqa: E501 - "pre_commits": - "pre_commits:format({{name='commit_id', type='string'}, {name='height',type='integer'}, {name='transactions',type=any}})", # noqa: E501 - "validators": - "validators:format({{name='validator_id' , type='string'},{name='height',type='integer'},{name='validators' , type='any'}})", # noqa: E501 - "transactions": - "transactions:format({{name='transaction_id' , type='string'}, {name='operation' , type='string'}, {name='version' ,type='string'}, {name='dict_map', type='any'}})", # noqa: E501 - "inputs": - "inputs:format({{name='transaction_id' , type='string'}, {name='fulfillment' , type='any'}, {name='owners_before' , type='array'}, 
{name='fulfills_transaction_id', type = 'string'}, {name='fulfills_output_index', type = 'string'}, {name='input_id', type='string'}, {name='input_index', type='number'}})", # noqa: E501 - "outputs": - "outputs:format({{name='transaction_id' , type='string'}, {name='amount' , type='string'}, {name='uri', type='string'}, {name='details_type', type='string'}, {name='details_public_key', type='any'}, {name = 'output_id', type = 'string'}, {name='treshold', type='any'}, {name='subconditions', type='any'}, {name='output_index', type='number'}})", # noqa: E501 - "keys": - "keys:format({{name = 'id', type='string'}, {name = 'transaction_id', type = 'string'} ,{name = 'output_id', type = 'string'}, {name = 'public_key', type = 'string'}, {name = 'key_index', type = 'integer'}})", # noqa: E501 - "utxos": - "utxos:format({{name='transaction_id' , type='string'}, {name='output_index' , type='integer'}, {name='utxo_dict', type='string'}})" # noqa: E501 + "pre_commits": "pre_commits:format({{name='commit_id', type='string'}, {name='height',type='integer'}, {name='transactions',type=any}})", # noqa: E501 + "validators": "validators:format({{name='validator_id' , type='string'},{name='height',type='integer'},{name='validators' , type='any'}})", # noqa: E501 + "transactions": "transactions:format({{name='transaction_id' , type='string'}, {name='operation' , type='string'}, {name='version' ,type='string'}, {name='dict_map', type='any'}})", # noqa: E501 + "inputs": "inputs:format({{name='transaction_id' , type='string'}, {name='fulfillment' , type='any'}, {name='owners_before' , type='array'}, {name='fulfills_transaction_id', type = 'string'}, {name='fulfills_output_index', type = 'string'}, {name='input_id', type='string'}, {name='input_index', type='number'}})", # noqa: E501 + "outputs": "outputs:format({{name='transaction_id' , type='string'}, {name='amount' , type='string'}, {name='uri', type='string'}, {name='details_type', type='string'}, {name='details_public_key', 
type='any'}, {name = 'output_id', type = 'string'}, {name='treshold', type='any'}, {name='subconditions', type='any'}, {name='output_index', type='number'}})", # noqa: E501 + "keys": "keys:format({{name = 'id', type='string'}, {name = 'transaction_id', type = 'string'} ,{name = 'output_id', type = 'string'}, {name = 'public_key', type = 'string'}, {name = 'key_index', type = 'integer'}})", # noqa: E501 + "utxos": "utxos:format({{name='transaction_id' , type='string'}, {name='output_index' , type='integer'}, {name='utxo_dict', type='string'}})", # noqa: E501 } SCHEMA_DROP_COMMANDS = { @@ -145,7 +133,7 @@ SCHEMA_DROP_COMMANDS = { "inputs": "box.space.inputs:drop()", "outputs": "box.space.outputs:drop()", "keys": "box.space.keys:drop()", - "utxos": "box.space.utxos:drop()" + "utxos": "box.space.utxos:drop()", } @@ -159,24 +147,24 @@ def drop_database(connection, not_used=None): except Exception: print(f"Unexpected error while trying to drop space '{_space}'") + @register_schema(TarantoolDBConnection) def create_database(connection, dbname): - ''' + """ For tarantool implementation, this function runs create_tables, to initiate spaces, schema and indexes. 
- ''' - logger.info('Create database `%s`.', dbname) + """ + logger.info("Create database `%s`.", dbname) create_tables(connection, dbname) def run_command_with_output(command): from subprocess import run + host_port = "%s:%s" % (Config().get()["database"]["host"], Config().get()["database"]["port"]) - output = run(["tarantoolctl", "connect", host_port], - input=command, - capture_output=True).stderr + output = run(["tarantoolctl", "connect", host_port], input=command, capture_output=True).stderr output = output.decode() return output diff --git a/planetmint/backend/tarantool/transaction/tools.py b/planetmint/backend/tarantool/transaction/tools.py index 998a742..4ba2533 100644 --- a/planetmint/backend/tarantool/transaction/tools.py +++ b/planetmint/backend/tarantool/transaction/tools.py @@ -41,13 +41,16 @@ class TransactionDecompose: "outputs": [], "keys": [], "metadata": None, - "asset": None + "asset": None, } def get_map(self, dictionary: dict = None): - return _save_keys_order(dictionary=dictionary) if dictionary is not None else _save_keys_order( - dictionary=self._transaction) + return ( + _save_keys_order(dictionary=dictionary) + if dictionary is not None + else _save_keys_order(dictionary=self._transaction) + ) def __create_hash(self, n: int): return token_hex(n) @@ -71,13 +74,17 @@ class TransactionDecompose: input_index = 0 for _input in self._transaction["inputs"]: - _inputs.append((self._transaction["id"], - _input["fulfillment"], - _input["owners_before"], - _input["fulfills"]["transaction_id"] if _input["fulfills"] is not None else "", - str(_input["fulfills"]["output_index"]) if _input["fulfills"] is not None else "", - self.__create_hash(7), - input_index)) + _inputs.append( + ( + self._transaction["id"], + _input["fulfillment"], + _input["owners_before"], + _input["fulfills"]["transaction_id"] if _input["fulfills"] is not None else "", + str(_input["fulfills"]["output_index"]) if _input["fulfills"] is not None else "", + self.__create_hash(7), + 
input_index, + ) + ) input_index = input_index + 1 return _inputs @@ -88,27 +95,29 @@ class TransactionDecompose: for _output in self._transaction["outputs"]: output_id = self.__create_hash(7) if _output["condition"]["details"].get("subconditions") is None: - tmp_output = (self._transaction["id"], - _output["amount"], - _output["condition"]["uri"], - _output["condition"]["details"]["type"], - _output["condition"]["details"]["public_key"], - output_id, - None, - None, - output_index - ) + tmp_output = ( + self._transaction["id"], + _output["amount"], + _output["condition"]["uri"], + _output["condition"]["details"]["type"], + _output["condition"]["details"]["public_key"], + output_id, + None, + None, + output_index, + ) else: - tmp_output = (self._transaction["id"], - _output["amount"], - _output["condition"]["uri"], - _output["condition"]["details"]["type"], - None, - output_id, - _output["condition"]["details"]["threshold"], - _output["condition"]["details"]["subconditions"], - output_index - ) + tmp_output = ( + self._transaction["id"], + _output["amount"], + _output["condition"]["uri"], + _output["condition"]["details"]["type"], + None, + output_id, + _output["condition"]["details"]["threshold"], + _output["condition"]["details"]["subconditions"], + output_index, + ) _outputs.append(tmp_output) output_index = output_index + 1 @@ -121,10 +130,7 @@ class TransactionDecompose: def __prepare_transaction(self): _map = self.get_map() - return (self._transaction["id"], - self._transaction["operation"], - self._transaction["version"], - _map) + return (self._transaction["id"], self._transaction["operation"], self._transaction["version"], _map) def convert_to_tuple(self): self._metadata_check() @@ -138,7 +144,6 @@ class TransactionDecompose: class TransactionCompose: - def __init__(self, db_results): self.db_results = db_results self._map = self.db_results["transaction"][3] diff --git a/planetmint/backend/tarantool/utils.py b/planetmint/backend/tarantool/utils.py index 
88b9b99..d5f8fc4 100644 --- a/planetmint/backend/tarantool/utils.py +++ b/planetmint/backend/tarantool/utils.py @@ -1,11 +1,13 @@ import subprocess + def run_cmd(commands: list, config: dict): ret = subprocess.Popen( - ['%s %s:%s < %s' % ("tarantoolctl connect", "localhost", "3303", "planetmint/backend/tarantool/init.lua")], + ["%s %s:%s < %s" % ("tarantoolctl connect", "localhost", "3303", "planetmint/backend/tarantool/init.lua")], stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=True, bufsize=0, - shell=True) + shell=True, + ) return True if ret >= 0 else False diff --git a/planetmint/backend/utils.py b/planetmint/backend/utils.py index c8d12c4..4b76642 100644 --- a/planetmint/backend/utils.py +++ b/planetmint/backend/utils.py @@ -19,10 +19,12 @@ def module_dispatch_registrar(module): return dispatch_registrar.register(obj_type)(func) except AttributeError as ex: raise ModuleDispatchRegistrationError( - ('`{module}` does not contain a single-dispatchable ' - 'function named `{func}`. The module being registered ' - 'was not implemented correctly!').format( - func=func_name, module=module.__name__)) from ex + ( + "`{module}` does not contain a single-dispatchable " + "function named `{func}`. The module being registered " + "was not implemented correctly!" + ).format(func=func_name, module=module.__name__) + ) from ex return wrapper diff --git a/planetmint/commands/election_types.py b/planetmint/commands/election_types.py index cfa58b9..b6f4282 100644 --- a/planetmint/commands/election_types.py +++ b/planetmint/commands/election_types.py @@ -1,31 +1,28 @@ elections = { - 'upsert-validator': { - 'help': 'Propose a change to the validator set', - 'args': { - 'public_key': { - 'help': 'Public key of the validator to be added/updated/removed.' 
+ "upsert-validator": { + "help": "Propose a change to the validator set", + "args": { + "public_key": {"help": "Public key of the validator to be added/updated/removed."}, + "power": { + "type": int, + "help": "The proposed power for the validator. Setting to 0 will remove the validator.", }, - 'power': { - 'type': int, - 'help': 'The proposed power for the validator. Setting to 0 will remove the validator.'}, - 'node_id': { - 'help': 'The node_id of the validator.' + "node_id": {"help": "The node_id of the validator."}, + "--private-key": { + "dest": "sk", + "required": True, + "help": "Path to the private key of the election initiator.", }, - '--private-key': { - 'dest': 'sk', - 'required': True, - 'help': 'Path to the private key of the election initiator.' - } - } + }, }, - 'chain-migration': { - 'help': 'Call for a halt to block production to allow for a version change across breaking changes.', - 'args': { - '--private-key': { - 'dest': 'sk', - 'required': True, - 'help': 'Path to the private key of the election initiator.' 
+ "chain-migration": { + "help": "Call for a halt to block production to allow for a version change across breaking changes.", + "args": { + "--private-key": { + "dest": "sk", + "required": True, + "help": "Path to the private key of the election initiator.", } - } - } + }, + }, } diff --git a/planetmint/commands/planetmint.py b/planetmint/commands/planetmint.py index f3da72d..f2928e8 100644 --- a/planetmint/commands/planetmint.py +++ b/planetmint/commands/planetmint.py @@ -18,18 +18,15 @@ from planetmint.backend.tarantool.connection import TarantoolDBConnection from planetmint.core import rollback from planetmint.utils import load_node_key from planetmint.transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT -from planetmint.transactions.common.exceptions import ( - DatabaseDoesNotExist, ValidationError) +from planetmint.transactions.common.exceptions import DatabaseDoesNotExist, ValidationError from planetmint.transactions.types.elections.vote import Vote from planetmint.transactions.types.elections.chain_migration_election import ChainMigrationElection import planetmint -from planetmint import (backend, ValidatorElection, - Planetmint) +from planetmint import backend, ValidatorElection, Planetmint from planetmint.backend import schema from planetmint.backend import tarantool from planetmint.commands import utils -from planetmint.commands.utils import (configure_planetmint, - input_on_stderr) +from planetmint.commands.utils import configure_planetmint, input_on_stderr from planetmint.log import setup_logging from planetmint.tendermint_utils import public_key_from_base64 from planetmint.commands.election_types import elections @@ -53,7 +50,7 @@ def run_show_config(args): # the system needs to be configured, then display information on how to # configure the system. 
_config = Config().get() - del _config['CONFIGURED'] + del _config["CONFIGURED"] print(json.dumps(_config, indent=4, sort_keys=True)) @@ -64,47 +61,47 @@ def run_configure(args): config_file_exists = False # if the config path is `-` then it's stdout - if config_path != '-': + if config_path != "-": config_file_exists = os.path.exists(config_path) if config_file_exists and not args.yes: - want = input_on_stderr('Config file `{}` exists, do you want to ' - 'override it? (cannot be undone) [y/N]: '.format(config_path)) - if want != 'y': + want = input_on_stderr( + "Config file `{}` exists, do you want to " "override it? (cannot be undone) [y/N]: ".format(config_path) + ) + if want != "y": return Config().init_config(args.backend) conf = Config().get() # select the correct config defaults based on the backend - print('Generating default configuration for backend {}' - .format(args.backend), file=sys.stderr) + print("Generating default configuration for backend {}".format(args.backend), file=sys.stderr) database_keys = Config().get_db_key_map(args.backend) if not args.yes: - for key in ('bind',): - val = conf['server'][key] - conf['server'][key] = input_on_stderr('API Server {}? (default `{}`): '.format(key, val), val) + for key in ("bind",): + val = conf["server"][key] + conf["server"][key] = input_on_stderr("API Server {}? (default `{}`): ".format(key, val), val) - for key in ('scheme', 'host', 'port'): - val = conf['wsserver'][key] - conf['wsserver'][key] = input_on_stderr('WebSocket Server {}? (default `{}`): '.format(key, val), val) + for key in ("scheme", "host", "port"): + val = conf["wsserver"][key] + conf["wsserver"][key] = input_on_stderr("WebSocket Server {}? (default `{}`): ".format(key, val), val) for key in database_keys: - val = conf['database'][key] - conf['database'][key] = input_on_stderr('Database {}? (default `{}`): '.format(key, val), val) + val = conf["database"][key] + conf["database"][key] = input_on_stderr("Database {}? 
(default `{}`): ".format(key, val), val) - for key in ('host', 'port'): - val = conf['tendermint'][key] - conf['tendermint'][key] = input_on_stderr('Tendermint {}? (default `{}`)'.format(key, val), val) + for key in ("host", "port"): + val = conf["tendermint"][key] + conf["tendermint"][key] = input_on_stderr("Tendermint {}? (default `{}`)".format(key, val), val) - if config_path != '-': + if config_path != "-": planetmint.config_utils.write_config(conf, config_path) else: print(json.dumps(conf, indent=4, sort_keys=True)) Config().set(conf) - print('Configuration written to {}'.format(config_path), file=sys.stderr) - print('Ready to go!', file=sys.stderr) + print("Configuration written to {}".format(config_path), file=sys.stderr) + print("Ready to go!", file=sys.stderr) @configure_planetmint @@ -114,21 +111,19 @@ def run_election(args): b = Planetmint() # Call the function specified by args.action, as defined above - globals()[f'run_election_{args.action}'](args, b) + globals()[f"run_election_{args.action}"](args, b) def run_election_new(args, planet): - election_type = args.election_type.replace('-', '_') - globals()[f'run_election_new_{election_type}'](args, planet) + election_type = args.election_type.replace("-", "_") + globals()[f"run_election_new_{election_type}"](args, planet) def create_new_election(sk, planet, election_class, data): try: key = load_node_key(sk) voters = election_class.recipients(planet) - election = election_class.generate([key.public_key], - voters, - data, None).sign([key.private_key]) + election = election_class.generate([key.public_key], voters, data, None).sign([key.private_key]) election.validate(planet) except ValidationError as e: logger.error(e) @@ -138,11 +133,11 @@ def create_new_election(sk, planet, election_class, data): return False resp = planet.write_transaction(election, BROADCAST_TX_COMMIT) - if resp == (202, ''): - logger.info('[SUCCESS] Submitted proposal with id: {}'.format(election.id)) + if resp == (202, ""): + 
logger.info("[SUCCESS] Submitted proposal with id: {}".format(election.id)) return election.id else: - logger.error('Failed to commit election proposal') + logger.error("Failed to commit election proposal") return False @@ -161,10 +156,9 @@ def run_election_new_upsert_validator(args, planet): """ new_validator = { - 'public_key': {'value': public_key_from_base64(args.public_key), - 'type': 'ed25519-base16'}, - 'power': args.power, - 'node_id': args.node_id + "public_key": {"value": public_key_from_base64(args.public_key), "type": "ed25519-base16"}, + "power": args.power, + "node_id": args.node_id, } return create_new_election(args.sk, planet, ValidatorElection, new_validator) @@ -202,23 +196,21 @@ def run_election_approve(args, planet): if len(voting_powers) > 0: voting_power = voting_powers[0] else: - logger.error('The key you provided does not match any of the eligible voters in this election.') + logger.error("The key you provided does not match any of the eligible voters in this election.") return False inputs = [i for i in tx.to_inputs() if key.public_key in i.owners_before] election_pub_key = ValidatorElection.to_public_key(tx.id) - approval = Vote.generate(inputs, - [([election_pub_key], voting_power)], - tx.id).sign([key.private_key]) + approval = Vote.generate(inputs, [([election_pub_key], voting_power)], tx.id).sign([key.private_key]) approval.validate(planet) resp = planet.write_transaction(approval, BROADCAST_TX_COMMIT) - if resp == (202, ''): - logger.info('[SUCCESS] Your vote has been submitted') + if resp == (202, ""): + logger.info("[SUCCESS] Your vote has been submitted") return approval.id else: - logger.error('Failed to commit vote') + logger.error("Failed to commit vote") return False @@ -234,7 +226,7 @@ def run_election_show(args, planet): election = planet.get_transaction(args.election_id) if not election: - logger.error(f'No election found with election_id {args.election_id}') + logger.error(f"No election found with election_id 
{args.election_id}") return response = election.show_election(planet) @@ -260,11 +252,12 @@ def run_drop(args): """Drop the database""" if not args.yes: - response = input_on_stderr('Do you want to drop `{}` database? [y/n]: ') - if response != 'y': + response = input_on_stderr("Do you want to drop `{}` database? [y/n]: ") + if response != "y": return from planetmint.backend.connection import connect + conn = connect() try: schema.drop_database(conn) @@ -284,115 +277,103 @@ def run_start(args): setup_logging() if not args.skip_initialize_database: - logger.info('Initializing database') + logger.info("Initializing database") _run_init() - logger.info('Planetmint Version %s', planetmint.version.__version__) + logger.info("Planetmint Version %s", planetmint.version.__version__) run_recover(planetmint.lib.Planetmint()) - logger.info('Starting Planetmint main process.') + logger.info("Starting Planetmint main process.") from planetmint.start import start + start(args) def run_tendermint_version(args): """Show the supported Tendermint version(s)""" supported_tm_ver = { - 'description': 'Planetmint supports the following Tendermint version(s)', - 'tendermint': __tm_supported_versions__, + "description": "Planetmint supports the following Tendermint version(s)", + "tendermint": __tm_supported_versions__, } print(json.dumps(supported_tm_ver, indent=4, sort_keys=True)) def create_parser(): - parser = argparse.ArgumentParser( - description='Control your Planetmint node.', - parents=[utils.base_parser]) + parser = argparse.ArgumentParser(description="Control your Planetmint node.", parents=[utils.base_parser]) # all the commands are contained in the subparsers object, # the command selected by the user will be stored in `args.command` # that is used by the `main` function to select which other # function to call. 
- subparsers = parser.add_subparsers(title='Commands', - dest='command') + subparsers = parser.add_subparsers(title="Commands", dest="command") # parser for writing a config file - config_parser = subparsers.add_parser('configure', - help='Prepare the config file.') + config_parser = subparsers.add_parser("configure", help="Prepare the config file.") - config_parser.add_argument('backend', - choices=['tarantool_db', 'localmongodb'], - default='tarantool_db', - const='tarantool_db', - nargs='?', - help='The backend to use. It can only be ' - '"tarantool_db", currently.') + config_parser.add_argument( + "backend", + choices=["tarantool_db", "localmongodb"], + default="tarantool_db", + const="tarantool_db", + nargs="?", + help="The backend to use. It can only be " '"tarantool_db", currently.', + ) # parser for managing elections - election_parser = subparsers.add_parser('election', - help='Manage elections.') + election_parser = subparsers.add_parser("election", help="Manage elections.") - election_subparser = election_parser.add_subparsers(title='Action', - dest='action') + election_subparser = election_parser.add_subparsers(title="Action", dest="action") - new_election_parser = election_subparser.add_parser('new', - help='Calls a new election.') + new_election_parser = election_subparser.add_parser("new", help="Calls a new election.") - new_election_subparser = new_election_parser.add_subparsers(title='Election_Type', - dest='election_type') + new_election_subparser = new_election_parser.add_subparsers(title="Election_Type", dest="election_type") # Parser factory for each type of new election, so we get a bunch of commands that look like this: # election new ... 
for name, data in elections.items(): - args = data['args'] - generic_parser = new_election_subparser.add_parser(name, help=data['help']) + args = data["args"] + generic_parser = new_election_subparser.add_parser(name, help=data["help"]) for arg, kwargs in args.items(): generic_parser.add_argument(arg, **kwargs) - approve_election_parser = election_subparser.add_parser('approve', - help='Approve the election.') - approve_election_parser.add_argument('election_id', - help='The election_id of the election.') - approve_election_parser.add_argument('--private-key', - dest='sk', - required=True, - help='Path to the private key of the election initiator.') + approve_election_parser = election_subparser.add_parser("approve", help="Approve the election.") + approve_election_parser.add_argument("election_id", help="The election_id of the election.") + approve_election_parser.add_argument( + "--private-key", dest="sk", required=True, help="Path to the private key of the election initiator." + ) - show_election_parser = election_subparser.add_parser('show', - help='Provides information about an election.') + show_election_parser = election_subparser.add_parser("show", help="Provides information about an election.") - show_election_parser.add_argument('election_id', - help='The transaction id of the election you wish to query.') + show_election_parser.add_argument("election_id", help="The transaction id of the election you wish to query.") # parsers for showing/exporting config values - subparsers.add_parser('show-config', - help='Show the current configuration') + subparsers.add_parser("show-config", help="Show the current configuration") # parser for database-level commands - subparsers.add_parser('init', - help='Init the database') + subparsers.add_parser("init", help="Init the database") - subparsers.add_parser('drop', - help='Drop the database') + subparsers.add_parser("drop", help="Drop the database") # parser for starting Planetmint - start_parser = 
subparsers.add_parser('start', - help='Start Planetmint') + start_parser = subparsers.add_parser("start", help="Start Planetmint") - start_parser.add_argument('--no-init', - dest='skip_initialize_database', - default=False, - action='store_true', - help='Skip database initialization') + start_parser.add_argument( + "--no-init", + dest="skip_initialize_database", + default=False, + action="store_true", + help="Skip database initialization", + ) - subparsers.add_parser('tendermint-version', - help='Show the Tendermint supported versions') + subparsers.add_parser("tendermint-version", help="Show the Tendermint supported versions") - start_parser.add_argument('--experimental-parallel-validation', - dest='experimental_parallel_validation', - default=False, - action='store_true', - help='💀 EXPERIMENTAL: parallelize validation for better throughput 💀') + start_parser.add_argument( + "--experimental-parallel-validation", + dest="experimental_parallel_validation", + default=False, + action="store_true", + help="💀 EXPERIMENTAL: parallelize validation for better throughput 💀", + ) return parser diff --git a/planetmint/commands/utils.py b/planetmint/commands/utils.py index 5c8a105..0295120 100644 --- a/planetmint/commands/utils.py +++ b/planetmint/commands/utils.py @@ -30,22 +30,22 @@ def configure_planetmint(command): The command wrapper function. 
""" + @functools.wraps(command) def configure(args): config_from_cmdline = None try: if args.log_level is not None: config_from_cmdline = { - 'log': { - 'level_console': args.log_level, - 'level_logfile': args.log_level, + "log": { + "level_console": args.log_level, + "level_logfile": args.log_level, }, - 'server': {'loglevel': args.log_level}, + "server": {"loglevel": args.log_level}, } except AttributeError: pass - planetmint.config_utils.autoconfigure( - filename=args.config, config=config_from_cmdline, force=True) + planetmint.config_utils.autoconfigure(filename=args.config, config=config_from_cmdline, force=True) command(args) return configure @@ -53,13 +53,13 @@ def configure_planetmint(command): def _convert(value, default=None, convert=None): def convert_bool(value): - if value.lower() in ('true', 't', 'yes', 'y'): + if value.lower() in ("true", "t", "yes", "y"): return True - if value.lower() in ('false', 'f', 'no', 'n'): + if value.lower() in ("false", "f", "no", "n"): return False - raise ValueError('{} cannot be converted to bool'.format(value)) + raise ValueError("{} cannot be converted to bool".format(value)) - if value == '': + if value == "": value = None if convert is None: @@ -80,7 +80,7 @@ def _convert(value, default=None, convert=None): # We need this because `input` always prints on stdout, while it should print # to stderr. It's a very old bug, check it out here: # - https://bugs.python.org/issue1927 -def input_on_stderr(prompt='', default=None, convert=None): +def input_on_stderr(prompt="", default=None, convert=None): """Output a string to stderr and wait for input. Args: @@ -92,7 +92,7 @@ def input_on_stderr(prompt='', default=None, convert=None): ``default`` will be used. 
""" - print(prompt, end='', file=sys.stderr) + print(prompt, end="", file=sys.stderr) value = builtins.input() return _convert(value, default, convert) @@ -121,14 +121,13 @@ def start(parser, argv, scope): # look up in the current scope for a function called 'run_' # replacing all the dashes '-' with the lowercase character '_' - func = scope.get('run_' + args.command.replace('-', '_')) + func = scope.get("run_" + args.command.replace("-", "_")) # if no command has been found, raise a `NotImplementedError` if not func: - raise NotImplementedError('Command `{}` not yet implemented'. - format(args.command)) + raise NotImplementedError("Command `{}` not yet implemented".format(args.command)) - args.multiprocess = getattr(args, 'multiprocess', False) + args.multiprocess = getattr(args, "multiprocess", False) if args.multiprocess is False: args.multiprocess = 1 @@ -138,24 +137,28 @@ def start(parser, argv, scope): return func(args) -base_parser = argparse.ArgumentParser(add_help=False, prog='planetmint') +base_parser = argparse.ArgumentParser(add_help=False, prog="planetmint") -base_parser.add_argument('-c', '--config', - help='Specify the location of the configuration file ' - '(use "-" for stdout)') +base_parser.add_argument( + "-c", "--config", help="Specify the location of the configuration file " '(use "-" for stdout)' +) # NOTE: this flag should not have any default value because that will override # the environment variables provided to configure the logger. 
-base_parser.add_argument('-l', '--log-level', - type=str.upper, # convert to uppercase for comparison to choices - choices=['DEBUG', 'BENCHMARK', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], - help='Log level') +base_parser.add_argument( + "-l", + "--log-level", + type=str.upper, # convert to uppercase for comparison to choices + choices=["DEBUG", "BENCHMARK", "INFO", "WARNING", "ERROR", "CRITICAL"], + help="Log level", +) -base_parser.add_argument('-y', '--yes', '--yes-please', - action='store_true', - help='Assume "yes" as answer to all prompts and run ' - 'non-interactively') +base_parser.add_argument( + "-y", + "--yes", + "--yes-please", + action="store_true", + help='Assume "yes" as answer to all prompts and run ' "non-interactively", +) -base_parser.add_argument('-v', '--version', - action='version', - version='%(prog)s {}'.format(__version__)) +base_parser.add_argument("-v", "--version", action="version", version="%(prog)s {}".format(__version__)) diff --git a/planetmint/config.py b/planetmint/config.py index 079fc4f..f9c89e2 100644 --- a/planetmint/config.py +++ b/planetmint/config.py @@ -1,6 +1,7 @@ import copy import logging import os + # from planetmint.log import DEFAULT_LOGGING_CONFIG as log_config from planetmint.version import __version__ # noqa @@ -15,7 +16,6 @@ class Singleton(type): class Config(metaclass=Singleton): - def __init__(self): # from functools import reduce # PORT_NUMBER = reduce(lambda x, y: x * y, map(ord, 'Planetmint')) % 2**16 @@ -26,27 +26,27 @@ class Config(metaclass=Singleton): # _base_database_localmongodb.keys() because dicts are unordered. 
# I tried to configure self.log_config = DEFAULT_LOGGING_CONFIG - db = 'tarantool_db' + db = "tarantool_db" self.__private_database_keys_map = { # TODO Check if it is working after removing 'name' field - 'tarantool_db': ('host', 'port'), - 'localmongodb': ('host', 'port', 'name') + "tarantool_db": ("host", "port"), + "localmongodb": ("host", "port", "name"), } self.__private_database_localmongodb = { - 'backend': 'localmongodb', - 'host': 'localhost', - 'port': 27017, - 'name': 'bigchain', - 'replicaset': None, - 'login': None, - 'password': None, - 'connection_timeout': 5000, - 'max_tries': 3, - 'ssl': False, - 'ca_cert': None, - 'certfile': None, - 'keyfile': None, - 'keyfile_passphrase': None, - 'crlfile': None + "backend": "localmongodb", + "host": "localhost", + "port": 27017, + "name": "bigchain", + "replicaset": None, + "login": None, + "password": None, + "connection_timeout": 5000, + "max_tries": 3, + "ssl": False, + "ca_cert": None, + "certfile": None, + "keyfile": None, + "keyfile_passphrase": None, + "crlfile": None, } self.__private_init_config = { "absolute_path": os.path.dirname(os.path.abspath(__file__)) + "/backend/tarantool/init.lua" @@ -56,71 +56,68 @@ class Config(metaclass=Singleton): "absolute_path": os.path.dirname(os.path.abspath(__file__)) + "/backend/tarantool/drop.lua" } self.__private_database_tarantool = { - 'backend': 'tarantool_db', - 'connection_timeout': 5000, - 'max_tries': 3, - 'name': 'universe', + "backend": "tarantool_db", + "connection_timeout": 5000, + "max_tries": 3, + "name": "universe", "reconnect_delay": 0.5, - 'host': 'localhost', - 'port': 3303, + "host": "localhost", + "port": 3303, "connect_now": True, "encoding": "utf-8", "login": "guest", - 'password': "", + "password": "", "service": "tarantoolctl connect", "init_config": self.__private_init_config, "drop_config": self.__private_drop_config, } self.__private_database_map = { - 'tarantool_db': self.__private_database_tarantool, - 'localmongodb': 
self.__private_database_localmongodb + "tarantool_db": self.__private_database_tarantool, + "localmongodb": self.__private_database_localmongodb, } self.__private_config = { - 'server': { + "server": { # Note: this section supports all the Gunicorn settings: # - http://docs.gunicorn.org/en/stable/settings.html - 'bind': 'localhost:9984', - 'loglevel': logging.getLevelName( - self.log_config['handlers']['console']['level']).lower(), - 'workers': None, # if None, the value will be cpu_count * 2 + 1 + "bind": "localhost:9984", + "loglevel": logging.getLevelName(self.log_config["handlers"]["console"]["level"]).lower(), + "workers": None, # if None, the value will be cpu_count * 2 + 1 }, - 'wsserver': { - 'scheme': 'ws', - 'host': 'localhost', - 'port': 9985, - 'advertised_scheme': 'ws', - 'advertised_host': 'localhost', - 'advertised_port': 9985, + "wsserver": { + "scheme": "ws", + "host": "localhost", + "port": 9985, + "advertised_scheme": "ws", + "advertised_host": "localhost", + "advertised_port": 9985, }, - 'tendermint': { - 'host': 'localhost', - 'port': 26657, - 'version': 'v0.31.5', # look for __tm_supported_versions__ + "tendermint": { + "host": "localhost", + "port": 26657, + "version": "v0.31.5", # look for __tm_supported_versions__ }, - 'database': self.__private_database_map, - 'log': { - 'file': self.log_config['handlers']['file']['filename'], - 'error_file': self.log_config['handlers']['errors']['filename'], - 'level_console': logging.getLevelName( - self.log_config['handlers']['console']['level']).lower(), - 'level_logfile': logging.getLevelName( - self.log_config['handlers']['file']['level']).lower(), - 'datefmt_console': self.log_config['formatters']['console']['datefmt'], - 'datefmt_logfile': self.log_config['formatters']['file']['datefmt'], - 'fmt_console': self.log_config['formatters']['console']['format'], - 'fmt_logfile': self.log_config['formatters']['file']['format'], - 'granular_levels': {}, + "database": self.__private_database_map, + "log": { 
+ "file": self.log_config["handlers"]["file"]["filename"], + "error_file": self.log_config["handlers"]["errors"]["filename"], + "level_console": logging.getLevelName(self.log_config["handlers"]["console"]["level"]).lower(), + "level_logfile": logging.getLevelName(self.log_config["handlers"]["file"]["level"]).lower(), + "datefmt_console": self.log_config["formatters"]["console"]["datefmt"], + "datefmt_logfile": self.log_config["formatters"]["file"]["datefmt"], + "fmt_console": self.log_config["formatters"]["console"]["format"], + "fmt_logfile": self.log_config["formatters"]["file"]["format"], + "granular_levels": {}, }, } self._private_real_config = copy.deepcopy(self.__private_config) # select the correct config defaults based on the backend - self._private_real_config['database'] = self.__private_database_map[db] + self._private_real_config["database"] = self.__private_database_map[db] def init_config(self, db): self._private_real_config = copy.deepcopy(self.__private_config) # select the correct config defaults based on the backend - self._private_real_config['database'] = self.__private_database_map[db] + self._private_real_config["database"] = self.__private_database_map[db] return self._private_real_config def get(self): @@ -135,52 +132,55 @@ class Config(metaclass=Singleton): def get_db_map(sefl, db): return sefl.__private_database_map[db] + DEFAULT_LOG_DIR = os.getcwd() DEFAULT_LOGGING_CONFIG = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'console': { - 'class': 'logging.Formatter', - 'format': ('[%(asctime)s] [%(levelname)s] (%(name)s) ' - '%(message)s (%(processName)-10s - pid: %(process)d)'), - 'datefmt': '%Y-%m-%d %H:%M:%S', + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "console": { + "class": "logging.Formatter", + "format": ( + "[%(asctime)s] [%(levelname)s] (%(name)s) " "%(message)s (%(processName)-10s - pid: %(process)d)" + ), + "datefmt": "%Y-%m-%d %H:%M:%S", + }, + "file": { + "class": 
"logging.Formatter", + "format": ( + "[%(asctime)s] [%(levelname)s] (%(name)s) " "%(message)s (%(processName)-10s - pid: %(process)d)" + ), + "datefmt": "%Y-%m-%d %H:%M:%S", }, - 'file': { - 'class': 'logging.Formatter', - 'format': ('[%(asctime)s] [%(levelname)s] (%(name)s) ' - '%(message)s (%(processName)-10s - pid: %(process)d)'), - 'datefmt': '%Y-%m-%d %H:%M:%S', - } }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'console', - 'level': logging.INFO, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "console", + "level": logging.INFO, }, - 'file': { - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': os.path.join(DEFAULT_LOG_DIR, 'planetmint.log'), - 'mode': 'w', - 'maxBytes': 209715200, - 'backupCount': 5, - 'formatter': 'file', - 'level': logging.INFO, + "file": { + "class": "logging.handlers.RotatingFileHandler", + "filename": os.path.join(DEFAULT_LOG_DIR, "planetmint.log"), + "mode": "w", + "maxBytes": 209715200, + "backupCount": 5, + "formatter": "file", + "level": logging.INFO, + }, + "errors": { + "class": "logging.handlers.RotatingFileHandler", + "filename": os.path.join(DEFAULT_LOG_DIR, "planetmint-errors.log"), + "mode": "w", + "maxBytes": 209715200, + "backupCount": 5, + "formatter": "file", + "level": logging.ERROR, }, - 'errors': { - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': os.path.join(DEFAULT_LOG_DIR, 'planetmint-errors.log'), - 'mode': 'w', - 'maxBytes': 209715200, - 'backupCount': 5, - 'formatter': 'file', - 'level': logging.ERROR, - } }, - 'loggers': {}, - 'root': { - 'level': logging.DEBUG, - 'handlers': ['console', 'file', 'errors'], + "loggers": {}, + "root": { + "level": logging.DEBUG, + "handlers": ["console", "file", "errors"], }, } diff --git a/planetmint/config_utils.py b/planetmint/config_utils.py index 23e783c..9fb288c 100644 --- a/planetmint/config_utils.py +++ b/planetmint/config_utils.py @@ -29,16 +29,16 @@ from 
planetmint.transactions.common import exceptions from planetmint.validation import BaseValidationRules # TODO: move this to a proper configuration file for logging -logging.getLogger('requests').setLevel(logging.WARNING) +logging.getLogger("requests").setLevel(logging.WARNING) logger = logging.getLogger(__name__) CONFIG_DEFAULT_PATH = os.environ.setdefault( - 'PLANETMINT_CONFIG_PATH', - os.path.join(os.path.expanduser('~'), '.planetmint'), + "PLANETMINT_CONFIG_PATH", + os.path.join(os.path.expanduser("~"), ".planetmint"), ) -CONFIG_PREFIX = 'PLANETMINT' -CONFIG_SEP = '_' +CONFIG_PREFIX = "PLANETMINT" +CONFIG_SEP = "_" def map_leafs(func, mapping): @@ -96,21 +96,21 @@ def file_config(filename=None): dict: The config values in the specified config file (or the file at CONFIG_DEFAULT_PATH, if filename == None) """ - logger.debug('On entry into file_config(), filename = {}'.format(filename)) + logger.debug("On entry into file_config(), filename = {}".format(filename)) if filename is None: filename = CONFIG_DEFAULT_PATH - logger.debug('file_config() will try to open `{}`'.format(filename)) + logger.debug("file_config() will try to open `{}`".format(filename)) with open(filename) as f: try: config = json.load(f) except ValueError as err: raise exceptions.ConfigurationError( - 'Failed to parse the JSON configuration from `{}`, {}'.format(filename, err) + "Failed to parse the JSON configuration from `{}`, {}".format(filename, err) ) - logger.info('Configuration loaded from `{}`'.format(filename)) + logger.info("Configuration loaded from `{}`".format(filename)) return config @@ -136,7 +136,7 @@ def env_config(config): return map_leafs(load_from_env, config) -def update_types(config, reference, list_sep=':'): +def update_types(config, reference, list_sep=":"): """Return a new configuration where all the values types are aligned with the ones in the default configuration """ @@ -192,7 +192,7 @@ def set_config(config): _config = Config().get() # Update the default config with 
whatever is in the passed config update(_config, update_types(config, _config)) - _config['CONFIGURED'] = True + _config["CONFIGURED"] = True Config().set(_config) @@ -208,7 +208,7 @@ def update_config(config): _config = Config().get() # Update the default config with whatever is in the passed config update(_config, update_types(config, _config)) - _config['CONFIGURED'] = True + _config["CONFIGURED"] = True Config().set(_config) @@ -223,12 +223,12 @@ def write_config(config, filename=None): if not filename: filename = CONFIG_DEFAULT_PATH - with open(filename, 'w') as f: + with open(filename, "w") as f: json.dump(config, f, indent=4) def is_configured(): - return bool(Config().get().get('CONFIGURED')) + return bool(Config().get().get("CONFIGURED")) def autoconfigure(filename=None, config=None, force=False): @@ -236,7 +236,7 @@ def autoconfigure(filename=None, config=None, force=False): been initialized. """ if not force and is_configured(): - logger.debug('System already configured, skipping autoconfiguration') + logger.debug("System already configured, skipping autoconfiguration") return # start with the current configuration @@ -249,7 +249,7 @@ def autoconfigure(filename=None, config=None, force=False): if filename: raise else: - logger.info('Cannot find config file `%s`.' % e.filename) + logger.info("Cannot find config file `%s`." % e.filename) # override configuration with env variables newconfig = env_config(newconfig) @@ -277,20 +277,20 @@ def load_validation_plugin(name=None): # We should probably support Requirements specs in the config, e.g. # validation_plugin: 'my-plugin-package==0.0.1;default' plugin = None - for entry_point in iter_entry_points('planetmint.validation', name): + for entry_point in iter_entry_points("planetmint.validation", name): plugin = entry_point.load() # No matching entry_point found if not plugin: - raise ResolutionError( - 'No plugin found in group `planetmint.validation` with name `{}`'. 
- format(name)) + raise ResolutionError("No plugin found in group `planetmint.validation` with name `{}`".format(name)) # Is this strictness desireable? # It will probably reduce developer headaches in the wild. if not issubclass(plugin, (BaseValidationRules,)): - raise TypeError('object of type "{}" does not implement `planetmint.' - 'validation.BaseValidationRules`'.format(type(plugin))) + raise TypeError( + 'object of type "{}" does not implement `planetmint.' + "validation.BaseValidationRules`".format(type(plugin)) + ) return plugin @@ -302,7 +302,7 @@ def load_events_plugins(names=None): return plugins for name in names: - for entry_point in iter_entry_points('planetmint.events', name): + for entry_point in iter_entry_points("planetmint.events", name): plugins.append((name, entry_point.load())) return plugins diff --git a/planetmint/core.py b/planetmint/core.py index 3452f70..9fcfc31 100644 --- a/planetmint/core.py +++ b/planetmint/core.py @@ -18,12 +18,11 @@ from tendermint.abci.types_pb2 import ( ResponseDeliverTx, ResponseBeginBlock, ResponseEndBlock, - ResponseCommit + ResponseCommit, ) from planetmint import Planetmint from planetmint.transactions.types.elections.election import Election -from planetmint.tendermint_utils import (decode_transaction, - calculate_hash) +from planetmint.tendermint_utils import decode_transaction, calculate_hash from planetmint.lib import Block import planetmint.upsert_validator.validator_utils as vutils from planetmint.events import EventTypes, Event @@ -42,40 +41,41 @@ class App(BaseApplication): def __init__(self, planetmint_node=None, events_queue=None): # super().__init__(abci) - logger.debug('Checking values of types') + logger.debug("Checking values of types") logger.debug(dir(types_pb2)) self.events_queue = events_queue self.planetmint_node = planetmint_node or Planetmint() self.block_txn_ids = [] - self.block_txn_hash = '' + self.block_txn_hash = "" self.block_transactions = [] self.validators = None self.new_height = 
None self.chain = self.planetmint_node.get_latest_abci_chain() def log_abci_migration_error(self, chain_id, validators): - logger.error('An ABCI chain migration is in process. ' - 'Download theself.planetmint_node.get_latest_abci_chain new ABCI client and configure it with ' - f'chain_id={chain_id} and validators={validators}.') + logger.error( + "An ABCI chain migration is in process. " + "Download theself.planetmint_node.get_latest_abci_chain new ABCI client and configure it with " + f"chain_id={chain_id} and validators={validators}." + ) def abort_if_abci_chain_is_not_synced(self): - if self.chain is None or self.chain['is_synced']: + if self.chain is None or self.chain["is_synced"]: return validators = self.planetmint_node.get_validators() - self.log_abci_migration_error(self.chain['chain_id'], validators) + self.log_abci_migration_error(self.chain["chain_id"], validators) sys.exit(1) def init_chain(self, genesis): """Initialize chain upon genesis or a migration""" - app_hash = '' + app_hash = "" height = 0 known_chain = self.planetmint_node.get_latest_abci_chain() if known_chain is not None: - chain_id = known_chain['chain_id'] + chain_id = known_chain["chain_id"] - if known_chain['is_synced']: - msg = (f'Got invalid InitChain ABCI request ({genesis}) - ' - f'the chain {chain_id} is already synced.') + if known_chain["is_synced"]: + msg = f"Got invalid InitChain ABCI request ({genesis}) - " f"the chain {chain_id} is already synced." 
logger.error(msg) sys.exit(1) if chain_id != genesis.chain_id: @@ -84,22 +84,19 @@ class App(BaseApplication): sys.exit(1) # set migration values for app hash and height block = self.planetmint_node.get_latest_block() - app_hash = '' if block is None else block['app_hash'] - height = 0 if block is None else block['height'] + 1 + app_hash = "" if block is None else block["app_hash"] + height = 0 if block is None else block["height"] + 1 known_validators = self.planetmint_node.get_validators() - validator_set = [vutils.decode_validator(v) - for v in genesis.validators] + validator_set = [vutils.decode_validator(v) for v in genesis.validators] if known_validators and known_validators != validator_set: - self.log_abci_migration_error(known_chain['chain_id'], - known_validators) + self.log_abci_migration_error(known_chain["chain_id"], known_validators) sys.exit(1) block = Block(app_hash=app_hash, height=height, transactions=[]) self.planetmint_node.store_block(block._asdict()) self.planetmint_node.store_validator_set(height + 1, validator_set) - abci_chain_height = 0 if known_chain is None else known_chain['height'] + abci_chain_height = 0 if known_chain is None else known_chain["height"] self.planetmint_node.store_abci_chain(abci_chain_height, genesis.chain_id, True) - self.chain = {'height': abci_chain_height, 'is_synced': True, - 'chain_id': genesis.chain_id} + self.chain = {"height": abci_chain_height, "is_synced": True, "chain_id": genesis.chain_id} return ResponseInitChain() def info(self, request): @@ -118,12 +115,12 @@ class App(BaseApplication): r = ResponseInfo() block = self.planetmint_node.get_latest_block() if block: - chain_shift = 0 if self.chain is None else self.chain['height'] - r.last_block_height = block['height'] - chain_shift - r.last_block_app_hash = block['app_hash'].encode('utf-8') + chain_shift = 0 if self.chain is None else self.chain["height"] + r.last_block_height = block["height"] - chain_shift + r.last_block_app_hash = 
block["app_hash"].encode("utf-8") else: r.last_block_height = 0 - r.last_block_app_hash = b'' + r.last_block_app_hash = b"" return r def check_tx(self, raw_transaction): @@ -136,13 +133,13 @@ class App(BaseApplication): self.abort_if_abci_chain_is_not_synced() - logger.debug('check_tx: %s', raw_transaction) + logger.debug("check_tx: %s", raw_transaction) transaction = decode_transaction(raw_transaction) if self.planetmint_node.is_valid_transaction(transaction): - logger.debug('check_tx: VALID') + logger.debug("check_tx: VALID") return ResponseCheckTx(code=OkCode) else: - logger.debug('check_tx: INVALID') + logger.debug("check_tx: INVALID") return ResponseCheckTx(code=CodeTypeError) def begin_block(self, req_begin_block): @@ -153,10 +150,9 @@ class App(BaseApplication): """ self.abort_if_abci_chain_is_not_synced() - chain_shift = 0 if self.chain is None else self.chain['height'] + chain_shift = 0 if self.chain is None else self.chain["height"] # req_begin_block.header.num_txs not found, so removing it. 
- logger.debug('BEGIN BLOCK, height:%s', - req_begin_block.header.height + chain_shift) + logger.debug("BEGIN BLOCK, height:%s", req_begin_block.header.height + chain_shift) self.block_txn_ids = [] self.block_transactions = [] @@ -171,15 +167,16 @@ class App(BaseApplication): self.abort_if_abci_chain_is_not_synced() - logger.debug('deliver_tx: %s', raw_transaction) + logger.debug("deliver_tx: %s", raw_transaction) transaction = self.planetmint_node.is_valid_transaction( - decode_transaction(raw_transaction), self.block_transactions) + decode_transaction(raw_transaction), self.block_transactions + ) if not transaction: - logger.debug('deliver_tx: INVALID') + logger.debug("deliver_tx: INVALID") return ResponseDeliverTx(code=CodeTypeError) else: - logger.debug('storing tx') + logger.debug("storing tx") self.block_txn_ids.append(transaction.id) self.block_transactions.append(transaction) return ResponseDeliverTx(code=OkCode) @@ -194,28 +191,25 @@ class App(BaseApplication): self.abort_if_abci_chain_is_not_synced() - chain_shift = 0 if self.chain is None else self.chain['height'] + chain_shift = 0 if self.chain is None else self.chain["height"] height = request_end_block.height + chain_shift self.new_height = height # store pre-commit state to recover in case there is a crash during # `end_block` or `commit` - logger.debug(f'Updating pre-commit state: {self.new_height}') - pre_commit_state = dict(height=self.new_height, - transactions=self.block_txn_ids) + logger.debug(f"Updating pre-commit state: {self.new_height}") + pre_commit_state = dict(height=self.new_height, transactions=self.block_txn_ids) self.planetmint_node.store_pre_commit_state(pre_commit_state) block_txn_hash = calculate_hash(self.block_txn_ids) block = self.planetmint_node.get_latest_block() if self.block_txn_ids: - self.block_txn_hash = calculate_hash([block['app_hash'], block_txn_hash]) + self.block_txn_hash = calculate_hash([block["app_hash"], block_txn_hash]) else: - self.block_txn_hash = 
block['app_hash'] + self.block_txn_hash = block["app_hash"] - validator_update = Election.process_block(self.planetmint_node, - self.new_height, - self.block_transactions) + validator_update = Election.process_block(self.planetmint_node, self.new_height, self.block_transactions) return ResponseEndBlock(validator_updates=validator_update) @@ -224,29 +218,29 @@ class App(BaseApplication): self.abort_if_abci_chain_is_not_synced() - data = self.block_txn_hash.encode('utf-8') + data = self.block_txn_hash.encode("utf-8") # register a new block only when new transactions are received if self.block_txn_ids: self.planetmint_node.store_bulk_transactions(self.block_transactions) - block = Block(app_hash=self.block_txn_hash, - height=self.new_height, - transactions=self.block_txn_ids) + block = Block(app_hash=self.block_txn_hash, height=self.new_height, transactions=self.block_txn_ids) # NOTE: storing the block should be the last operation during commit # this effects crash recovery. Refer BEP#8 for details self.planetmint_node.store_block(block._asdict()) - logger.debug('Commit-ing new block with hash: apphash=%s ,' - 'height=%s, txn ids=%s', data, self.new_height, - self.block_txn_ids) + logger.debug( + "Commit-ing new block with hash: apphash=%s ," "height=%s, txn ids=%s", + data, + self.new_height, + self.block_txn_ids, + ) if self.events_queue: - event = Event(EventTypes.BLOCK_VALID, { - 'height': self.new_height, - 'hash': self.block_txn_hash, - 'transactions': self.block_transactions - }) + event = Event( + EventTypes.BLOCK_VALID, + {"height": self.new_height, "hash": self.block_txn_hash, "transactions": self.block_transactions}, + ) self.events_queue.put(event) return ResponseCommit(data=data) @@ -266,10 +260,10 @@ def rollback(b): latest_block = b.get_latest_block() if latest_block is None: - logger.error('Found precommit state but no blocks!') + logger.error("Found precommit state but no blocks!") sys.exit(1) # NOTE: the pre-commit state is always at most 1 block 
ahead of the commited state - if latest_block['height'] < pre_commit['height']: - Election.rollback(b, pre_commit['height'], pre_commit['transactions']) - b.delete_transactions(pre_commit['transactions']) + if latest_block["height"] < pre_commit["height"]: + Election.rollback(b, pre_commit["height"], pre_commit["transactions"]) + b.delete_transactions(pre_commit["transactions"]) diff --git a/planetmint/events.py b/planetmint/events.py index b702b4a..6157138 100644 --- a/planetmint/events.py +++ b/planetmint/events.py @@ -8,7 +8,7 @@ from collections import defaultdict from multiprocessing import Queue -POISON_PILL = 'POISON_PILL' +POISON_PILL = "POISON_PILL" class EventTypes: @@ -73,7 +73,7 @@ class Exchange: try: self.started_queue.get(timeout=1) - raise RuntimeError('Cannot create a new subscriber queue while Exchange is running.') + raise RuntimeError("Cannot create a new subscriber queue while Exchange is running.") except Empty: pass @@ -99,7 +99,7 @@ class Exchange: def run(self): """Start the exchange""" - self.started_queue.put('STARTED') + self.started_queue.put("STARTED") while True: event = self.publisher_queue.get() diff --git a/planetmint/fastquery.py b/planetmint/fastquery.py index bfbb6a8..59f0056 100644 --- a/planetmint/fastquery.py +++ b/planetmint/fastquery.py @@ -8,7 +8,7 @@ from planetmint.backend import query from planetmint.transactions.common.transaction import TransactionLink -class FastQuery(): +class FastQuery: """Database queries that join on block results from a single node.""" def __init__(self, connection): @@ -17,11 +17,12 @@ class FastQuery(): def get_outputs_by_public_key(self, public_key): """Get outputs for a public key""" txs = list(query.get_owned_ids(self.connection, public_key)) - return [TransactionLink(tx['id'], index) - for tx in txs - for index, output in enumerate(tx['outputs']) - if condition_details_has_owner(output['condition']['details'], - public_key)] + return [ + TransactionLink(tx["id"], index) + for tx in txs + 
for index, output in enumerate(tx["outputs"]) + if condition_details_has_owner(output["condition"]["details"], public_key) + ] def filter_spent_outputs(self, outputs): """Remove outputs that have been spent @@ -31,9 +32,7 @@ class FastQuery(): """ links = [o.to_dict() for o in outputs] txs = list(query.get_spending_transactions(self.connection, links)) - spends = {TransactionLink.from_dict(input_['fulfills']) - for tx in txs - for input_ in tx['inputs']} + spends = {TransactionLink.from_dict(input_["fulfills"]) for tx in txs for input_ in tx["inputs"]} return [ff for ff in outputs if ff not in spends] def filter_unspent_outputs(self, outputs): @@ -44,7 +43,5 @@ class FastQuery(): """ links = [o.to_dict() for o in outputs] txs = list(query.get_spending_transactions(self.connection, links)) - spends = {TransactionLink.from_dict(input_['fulfills']) - for tx in txs - for input_ in tx['inputs']} + spends = {TransactionLink.from_dict(input_["fulfills"]) for tx in txs for input_ in tx["inputs"]} return [ff for ff in outputs if ff in spends] diff --git a/planetmint/lib.py b/planetmint/lib.py index 612f567..b44953a 100644 --- a/planetmint/lib.py +++ b/planetmint/lib.py @@ -25,10 +25,12 @@ import planetmint from planetmint.config import Config from planetmint import backend, config_utils, fastquery from planetmint.models import Transaction -from planetmint.transactions.common.exceptions import ( - SchemaValidationError, ValidationError, DoubleSpend) +from planetmint.transactions.common.exceptions import SchemaValidationError, ValidationError, DoubleSpend from planetmint.transactions.common.transaction_mode_types import ( - BROADCAST_TX_COMMIT, BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC) + BROADCAST_TX_COMMIT, + BROADCAST_TX_ASYNC, + BROADCAST_TX_SYNC, +) from planetmint.tendermint_utils import encode_transaction, merkleroot from planetmint import exceptions as core_exceptions from planetmint.validation import BaseValidationRules @@ -60,14 +62,12 @@ class Planetmint(object): """ 
config_utils.autoconfigure() self.mode_commit = BROADCAST_TX_COMMIT - self.mode_list = (BROADCAST_TX_ASYNC, - BROADCAST_TX_SYNC, - self.mode_commit) - self.tendermint_host = Config().get()['tendermint']['host'] - self.tendermint_port = Config().get()['tendermint']['port'] - self.endpoint = 'http://{}:{}/'.format(self.tendermint_host, self.tendermint_port) + self.mode_list = (BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC, self.mode_commit) + self.tendermint_host = Config().get()["tendermint"]["host"] + self.tendermint_port = Config().get()["tendermint"]["port"] + self.endpoint = "http://{}:{}/".format(self.tendermint_host, self.tendermint_port) - validationPlugin = Config().get().get('validation_plugin') + validationPlugin = Config().get().get("validation_plugin") if validationPlugin: self.validation = config_utils.load_validation_plugin(validationPlugin) @@ -78,16 +78,10 @@ class Planetmint(object): def post_transaction(self, transaction, mode): """Submit a valid transaction to the mempool.""" if not mode or mode not in self.mode_list: - raise ValidationError('Mode must be one of the following {}.' - .format(', '.join(self.mode_list))) + raise ValidationError("Mode must be one of the following {}.".format(", ".join(self.mode_list))) tx_dict = transaction.tx_dict if transaction.tx_dict else transaction.to_dict() - payload = { - 'method': mode, - 'jsonrpc': '2.0', - 'params': [encode_transaction(tx_dict)], - 'id': str(uuid4()) - } + payload = {"method": mode, "jsonrpc": "2.0", "params": [encode_transaction(tx_dict)], "id": str(uuid4())} # TODO: handle connection errors! 
return requests.post(self.endpoint, json=payload) @@ -100,29 +94,29 @@ class Planetmint(object): def _process_post_response(self, response, mode): logger.debug(response) - error = response.get('error') + error = response.get("error") if error: status_code = 500 - message = error.get('message', 'Internal Error') - data = error.get('data', '') + message = error.get("message", "Internal Error") + data = error.get("data", "") - if 'Tx already exists in cache' in data: + if "Tx already exists in cache" in data: status_code = 400 - return (status_code, message + ' - ' + data) + return (status_code, message + " - " + data) - result = response['result'] + result = response["result"] if mode == self.mode_commit: - check_tx_code = result.get('check_tx', {}).get('code', 0) - deliver_tx_code = result.get('deliver_tx', {}).get('code', 0) + check_tx_code = result.get("check_tx", {}).get("code", 0) + deliver_tx_code = result.get("deliver_tx", {}).get("code", 0) error_code = check_tx_code or deliver_tx_code else: - error_code = result.get('code', 0) + error_code = result.get("code", 0) if error_code: - return (500, 'Transaction validation failed') + return (500, "Transaction validation failed") - return (202, '') + return (202, "") def store_bulk_transactions(self, transactions): txns = [] @@ -132,18 +126,20 @@ class Planetmint(object): for t in transactions: transaction = t.tx_dict if t.tx_dict else rapidjson.loads(rapidjson.dumps(t.to_dict())) - asset = transaction.pop('asset') - metadata = transaction.pop('metadata') + asset = transaction.pop("asset") + metadata = transaction.pop("metadata") - asset = backend.convert.prepare_asset(self.connection, - transaction_type=transaction["operation"], - transaction_id=transaction["id"], - filter_operation=t.CREATE, - asset=asset) + asset = backend.convert.prepare_asset( + self.connection, + transaction_type=transaction["operation"], + transaction_id=transaction["id"], + filter_operation=t.CREATE, + asset=asset, + ) - metadata = 
backend.convert.prepare_metadata(self.connection, - transaction_id=transaction["id"], - metadata=metadata) + metadata = backend.convert.prepare_metadata( + self.connection, transaction_id=transaction["id"], metadata=metadata + ) txn_metadatas.append(metadata) assets.append(asset) @@ -167,14 +163,10 @@ class Planetmint(object): transaction incoming into the system for which the UTXOF set needs to be updated. """ - spent_outputs = [ - spent_output for spent_output in transaction.spent_outputs - ] + spent_outputs = [spent_output for spent_output in transaction.spent_outputs] if spent_outputs: self.delete_unspent_outputs(*spent_outputs) - self.store_unspent_outputs( - *[utxo._asdict() for utxo in transaction.unspent_outputs] - ) + self.store_unspent_outputs(*[utxo._asdict() for utxo in transaction.unspent_outputs]) def store_unspent_outputs(self, *unspent_outputs): """Store the given ``unspent_outputs`` (utxos). @@ -184,8 +176,7 @@ class Planetmint(object): length tuple or list of unspent outputs. """ if unspent_outputs: - return backend.query.store_unspent_outputs( - self.connection, *unspent_outputs) + return backend.query.store_unspent_outputs(self.connection, *unspent_outputs) def get_utxoset_merkle_root(self): """Returns the merkle root of the utxoset. This implies that @@ -214,9 +205,7 @@ class Planetmint(object): # TODO Once ready, use the already pre-computed utxo_hash field. # See common/transactions.py for details. hashes = [ - sha3_256( - '{}{}'.format(utxo['transaction_id'], utxo['output_index']).encode() - ).digest() for utxo in utxoset + sha3_256("{}{}".format(utxo["transaction_id"], utxo["output_index"]).encode()).digest() for utxo in utxoset ] # TODO Notice the sorted call! return merkleroot(sorted(hashes)) @@ -238,8 +227,7 @@ class Planetmint(object): length tuple or list of unspent outputs. 
""" if unspent_outputs: - return backend.query.delete_unspent_outputs( - self.connection, *unspent_outputs) + return backend.query.delete_unspent_outputs(self.connection, *unspent_outputs) def is_committed(self, transaction_id): transaction = backend.query.get_transaction(self.connection, transaction_id) @@ -251,14 +239,14 @@ class Planetmint(object): asset = backend.query.get_asset(self.connection, transaction_id) metadata = backend.query.get_metadata(self.connection, [transaction_id]) if asset: - transaction['asset'] = asset + transaction["asset"] = asset - if 'metadata' not in transaction: + if "metadata" not in transaction: metadata = metadata[0] if metadata else None if metadata: - metadata = metadata.get('metadata') + metadata = metadata.get("metadata") - transaction.update({'metadata': metadata}) + transaction.update({"metadata": metadata}) transaction = Transaction.from_dict(transaction) @@ -268,10 +256,8 @@ class Planetmint(object): return backend.query.get_transactions(self.connection, txn_ids) def get_transactions_filtered(self, asset_id, operation=None, last_tx=None): - """Get a list of transactions filtered on some criteria - """ - txids = backend.query.get_txids_filtered(self.connection, asset_id, - operation, last_tx) + """Get a list of transactions filtered on some criteria""" + txids = backend.query.get_txids_filtered(self.connection, asset_id, operation, last_tx) for txid in txids: yield self.get_transaction(txid) @@ -297,27 +283,24 @@ class Planetmint(object): return self.fastquery.filter_spent_outputs(outputs) def get_spent(self, txid, output, current_transactions=[]): - transactions = backend.query.get_spent(self.connection, txid, - output) + transactions = backend.query.get_spent(self.connection, txid, output) transactions = list(transactions) if transactions else [] if len(transactions) > 1: raise core_exceptions.CriticalDoubleSpend( - '`{}` was spent more than once. 
There is a problem' - ' with the chain'.format(txid)) + "`{}` was spent more than once. There is a problem" " with the chain".format(txid) + ) current_spent_transactions = [] for ctxn in current_transactions: for ctxn_input in ctxn.inputs: - if ctxn_input.fulfills and \ - ctxn_input.fulfills.txid == txid and \ - ctxn_input.fulfills.output == output: + if ctxn_input.fulfills and ctxn_input.fulfills.txid == txid and ctxn_input.fulfills.output == output: current_spent_transactions.append(ctxn) transaction = None if len(transactions) + len(current_spent_transactions) > 1: raise DoubleSpend('tx "{}" spends inputs twice'.format(txid)) elif transactions: - transaction = backend.query.get_transactions(self.connection, [transactions[0]['id']]) + transaction = backend.query.get_transactions(self.connection, [transactions[0]["id"]]) transaction = Transaction.from_dict(transaction[0]) elif current_spent_transactions: transaction = current_spent_transactions[0] @@ -346,17 +329,16 @@ class Planetmint(object): block = backend.query.get_block(self.connection, block_id) latest_block = self.get_latest_block() - latest_block_height = latest_block['height'] if latest_block else 0 + latest_block_height = latest_block["height"] if latest_block else 0 if not block and block_id > latest_block_height: return - result = {'height': block_id, - 'transactions': []} + result = {"height": block_id, "transactions": []} if block: - transactions = backend.query.get_transactions(self.connection, block['transactions']) - result['transactions'] = [t.to_dict() for t in Transaction.from_db(self, transactions)] + transactions = backend.query.get_transactions(self.connection, block["transactions"]) + result["transactions"] = [t.to_dict() for t in Transaction.from_db(self, transactions)] return result @@ -372,9 +354,9 @@ class Planetmint(object): """ blocks = list(backend.query.get_block_with_transaction(self.connection, txid)) if len(blocks) > 1: - logger.critical('Transaction id %s exists in multiple 
blocks', txid) + logger.critical("Transaction id %s exists in multiple blocks", txid) - return [block['height'] for block in blocks] + return [block["height"] for block in blocks] def validate_transaction(self, tx, current_transactions=[]): """Validate a transaction against the current status of the database.""" @@ -388,10 +370,10 @@ class Planetmint(object): try: transaction = Transaction.from_dict(tx) except SchemaValidationError as e: - logger.warning('Invalid transaction schema: %s', e.__cause__.message) + logger.warning("Invalid transaction schema: %s", e.__cause__.message) return False except ValidationError as e: - logger.warning('Invalid transaction (%s): %s', type(e).__name__, e) + logger.warning("Invalid transaction (%s): %s", type(e).__name__, e) return False return transaction.validate(self, current_transactions) @@ -401,10 +383,10 @@ class Planetmint(object): try: return self.validate_transaction(tx, current_transactions) except ValidationError as e: - logger.warning('Invalid transaction (%s): %s', type(e).__name__, e) + logger.warning("Invalid transaction (%s): %s", type(e).__name__, e) return False - def text_search(self, search, *, limit=0, table='assets'): + def text_search(self, search, *, limit=0, table="assets"): """Return an iterator of assets that match the text search Args: @@ -414,8 +396,7 @@ class Planetmint(object): Returns: iter: An iterator of assets that match the text search. 
""" - return backend.query.text_search(self.connection, search, limit=limit, - table=table) + return backend.query.text_search(self.connection, search, limit=limit, table=table) def get_assets(self, asset_ids): """Return a list of assets that match the asset_ids @@ -450,7 +431,7 @@ class Planetmint(object): def get_validators(self, height=None): result = self.get_validator_change(height) - return [] if result is None else result['validators'] + return [] if result is None else result["validators"] def get_election(self, election_id): return backend.query.get_election(self.connection, election_id) @@ -463,18 +444,16 @@ class Planetmint(object): def store_validator_set(self, height, validators): """Store validator set at a given `height`. - NOTE: If the validator set already exists at that `height` then an - exception will be raised. + NOTE: If the validator set already exists at that `height` then an + exception will be raised. """ - return backend.query.store_validator_set(self.connection, {'height': height, - 'validators': validators}) + return backend.query.store_validator_set(self.connection, {"height": height, "validators": validators}) def delete_validator_set(self, height): return backend.query.delete_validator_set(self.connection, height) def store_abci_chain(self, height, chain_id, is_synced=True): - return backend.query.store_abci_chain(self.connection, height, - chain_id, is_synced) + return backend.query.store_abci_chain(self.connection, height, chain_id, is_synced) def delete_abci_chain(self, height): return backend.query.delete_abci_chain(self.connection, height) @@ -499,16 +478,15 @@ class Planetmint(object): block = self.get_latest_block() - suffix = '-migrated-at-height-' - chain_id = latest_chain['chain_id'] - block_height_str = str(block['height']) + suffix = "-migrated-at-height-" + chain_id = latest_chain["chain_id"] + block_height_str = str(block["height"]) new_chain_id = chain_id.split(suffix)[0] + suffix + block_height_str - 
self.store_abci_chain(block['height'] + 1, new_chain_id, False) + self.store_abci_chain(block["height"] + 1, new_chain_id, False) def store_election(self, election_id, height, is_concluded): - return backend.query.store_election(self.connection, election_id, - height, is_concluded) + return backend.query.store_election(self.connection, election_id, height, is_concluded) def store_elections(self, elections): return backend.query.store_elections(self.connection, elections) @@ -517,4 +495,4 @@ class Planetmint(object): return backend.query.delete_elections(self.connection, height) -Block = namedtuple('Block', ('app_hash', 'height', 'transactions')) +Block = namedtuple("Block", ("app_hash", "height", "transactions")) diff --git a/planetmint/log.py b/planetmint/log.py index 093acab..31bd150 100644 --- a/planetmint/log.py +++ b/planetmint/log.py @@ -11,11 +11,12 @@ from logging.config import dictConfig as set_logging_config from planetmint.config import Config, DEFAULT_LOGGING_CONFIG import os + def _normalize_log_level(level): try: return level.upper() except AttributeError as exc: - raise ConfigurationError('Log level must be a string!') from exc + raise ConfigurationError("Log level must be a string!") from exc def setup_logging(): @@ -32,47 +33,47 @@ def setup_logging(): """ logging_configs = DEFAULT_LOGGING_CONFIG - new_logging_configs = Config().get()['log'] + new_logging_configs = Config().get()["log"] - if 'file' in new_logging_configs: - filename = new_logging_configs['file'] - logging_configs['handlers']['file']['filename'] = filename + if "file" in new_logging_configs: + filename = new_logging_configs["file"] + logging_configs["handlers"]["file"]["filename"] = filename - if 'error_file' in new_logging_configs: - error_filename = new_logging_configs['error_file'] - logging_configs['handlers']['errors']['filename'] = error_filename + if "error_file" in new_logging_configs: + error_filename = new_logging_configs["error_file"] + 
logging_configs["handlers"]["errors"]["filename"] = error_filename - if 'level_console' in new_logging_configs: - level = _normalize_log_level(new_logging_configs['level_console']) - logging_configs['handlers']['console']['level'] = level + if "level_console" in new_logging_configs: + level = _normalize_log_level(new_logging_configs["level_console"]) + logging_configs["handlers"]["console"]["level"] = level - if 'level_logfile' in new_logging_configs: - level = _normalize_log_level(new_logging_configs['level_logfile']) - logging_configs['handlers']['file']['level'] = level + if "level_logfile" in new_logging_configs: + level = _normalize_log_level(new_logging_configs["level_logfile"]) + logging_configs["handlers"]["file"]["level"] = level - if 'fmt_console' in new_logging_configs: - fmt = new_logging_configs['fmt_console'] - logging_configs['formatters']['console']['format'] = fmt + if "fmt_console" in new_logging_configs: + fmt = new_logging_configs["fmt_console"] + logging_configs["formatters"]["console"]["format"] = fmt - if 'fmt_logfile' in new_logging_configs: - fmt = new_logging_configs['fmt_logfile'] - logging_configs['formatters']['file']['format'] = fmt + if "fmt_logfile" in new_logging_configs: + fmt = new_logging_configs["fmt_logfile"] + logging_configs["formatters"]["file"]["format"] = fmt - if 'datefmt_console' in new_logging_configs: - fmt = new_logging_configs['datefmt_console'] - logging_configs['formatters']['console']['datefmt'] = fmt + if "datefmt_console" in new_logging_configs: + fmt = new_logging_configs["datefmt_console"] + logging_configs["formatters"]["console"]["datefmt"] = fmt - if 'datefmt_logfile' in new_logging_configs: - fmt = new_logging_configs['datefmt_logfile'] - logging_configs['formatters']['file']['datefmt'] = fmt + if "datefmt_logfile" in new_logging_configs: + fmt = new_logging_configs["datefmt_logfile"] + logging_configs["formatters"]["file"]["datefmt"] = fmt - log_levels = new_logging_configs.get('granular_levels', {}) + 
log_levels = new_logging_configs.get("granular_levels", {}) for logger_name, level in log_levels.items(): level = _normalize_log_level(level) try: - logging_configs['loggers'][logger_name]['level'] = level + logging_configs["loggers"][logger_name]["level"] = level except KeyError: - logging_configs['loggers'][logger_name] = {'level': level} + logging_configs["loggers"][logger_name] = {"level": level} set_logging_config(logging_configs) diff --git a/planetmint/models.py b/planetmint/models.py index 419cb44..04e534c 100644 --- a/planetmint/models.py +++ b/planetmint/models.py @@ -4,16 +4,16 @@ # Code is Apache-2.0 and docs are CC-BY-4.0 from planetmint.backend.schema import validate_language_key -from planetmint.transactions.common.exceptions import (InvalidSignature, DuplicateTransaction) +from planetmint.transactions.common.exceptions import InvalidSignature, DuplicateTransaction from planetmint.transactions.common.schema import validate_transaction_schema from planetmint.transactions.common.transaction import Transaction -from planetmint.transactions.common.utils import (validate_txn_obj, validate_key) +from planetmint.transactions.common.utils import validate_txn_obj, validate_key class Transaction(Transaction): - ASSET = 'asset' - METADATA = 'metadata' - DATA = 'data' + ASSET = "asset" + METADATA = "metadata" + DATA = "data" def validate(self, planet, current_transactions=[]): """Validate transaction spend @@ -31,11 +31,10 @@ class Transaction(Transaction): if self.operation == Transaction.CREATE: duplicates = any(txn for txn in current_transactions if txn.id == self.id) if planet.is_committed(self.id) or duplicates: - raise DuplicateTransaction('transaction `{}` already exists' - .format(self.id)) + raise DuplicateTransaction("transaction `{}` already exists".format(self.id)) if not self.inputs_valid(input_conditions): - raise InvalidSignature('Transaction signature is invalid.') + raise InvalidSignature("Transaction signature is invalid.") elif self.operation 
== Transaction.TRANSFER: self.validate_transfer_inputs(planet, current_transactions) @@ -68,7 +67,7 @@ class FastTransaction: @property def id(self): - return self.data['id'] + return self.data["id"] def to_dict(self): return self.data diff --git a/planetmint/parallel_validation.py b/planetmint/parallel_validation.py index 77c4a02..844db0e 100644 --- a/planetmint/parallel_validation.py +++ b/planetmint/parallel_validation.py @@ -39,8 +39,8 @@ class ParallelValidationApp(App): return super().end_block(request_end_block) -RESET = 'reset' -EXIT = 'exit' +RESET = "reset" +EXIT = "exit" class ParallelValidator: @@ -64,7 +64,7 @@ class ParallelValidator: def validate(self, raw_transaction): dict_transaction = decode_transaction(raw_transaction) - index = int(dict_transaction['id'], 16) % self.number_of_workers + index = int(dict_transaction["id"], 16) % self.number_of_workers self.routing_queues[index].put((self.transaction_index, dict_transaction)) self.transaction_index += 1 @@ -105,13 +105,11 @@ class ValidationWorker: def validate(self, dict_transaction): try: - asset_id = dict_transaction['asset']['id'] + asset_id = dict_transaction["asset"]["id"] except KeyError: - asset_id = dict_transaction['id'] + asset_id = dict_transaction["id"] - transaction = self.planetmint.is_valid_transaction( - dict_transaction, - self.validated_transactions[asset_id]) + transaction = self.planetmint.is_valid_transaction(dict_transaction, self.validated_transactions[asset_id]) if transaction: self.validated_transactions[asset_id].append(transaction) diff --git a/planetmint/start.py b/planetmint/start.py index 2944a1e..c911c66 100644 --- a/planetmint/start.py +++ b/planetmint/start.py @@ -40,13 +40,12 @@ def start(args): exchange = Exchange() # start the web api app_server = server.create_server( - settings=Config().get()['server'], - log_config=Config().get()['log'], - planetmint_factory=Planetmint) - p_webapi = Process(name='planetmint_webapi', target=app_server.run, daemon=True) + 
settings=Config().get()["server"], log_config=Config().get()["log"], planetmint_factory=Planetmint + ) + p_webapi = Process(name="planetmint_webapi", target=app_server.run, daemon=True) p_webapi.start() - logger.info(BANNER.format(Config().get()['server']['bind'])) + logger.info(BANNER.format(Config().get()["server"]["bind"])) # start websocket server p_websocket_server = Process( diff --git a/planetmint/tendermint_utils.py b/planetmint/tendermint_utils.py index 84d967e..a71103e 100644 --- a/planetmint/tendermint_utils.py +++ b/planetmint/tendermint_utils.py @@ -17,28 +17,28 @@ except ImportError: def encode_transaction(value): """Encode a transaction (dict) to Base64.""" - return base64.b64encode(json.dumps(value).encode('utf8')).decode('utf8') + return base64.b64encode(json.dumps(value).encode("utf8")).decode("utf8") def decode_transaction(raw): """Decode a transaction from bytes to a dict.""" - return json.loads(raw.decode('utf8')) + return json.loads(raw.decode("utf8")) def decode_transaction_base64(value): """Decode a transaction from Base64.""" - return json.loads(base64.b64decode(value.encode('utf8')).decode('utf8')) + return json.loads(base64.b64decode(value.encode("utf8")).decode("utf8")) def calculate_hash(key_list): if not key_list: - return '' + return "" full_hash = sha3_256() for key in key_list: - full_hash.update(key.encode('utf8')) + full_hash.update(key.encode("utf8")) return full_hash.hexdigest() @@ -59,16 +59,13 @@ def merkleroot(hashes): # i.e. an empty list, then the hash of the empty string is returned. # This seems too easy but maybe that is good enough? TO REVIEW! if not hashes: - return sha3_256(b'').hexdigest() + return sha3_256(b"").hexdigest() # XXX END TEMPORARY -- MUST REVIEW ... 
if len(hashes) == 1: return hexlify(hashes[0]).decode() if len(hashes) % 2 == 1: hashes.append(hashes[-1]) - parent_hashes = [ - sha3_256(hashes[i] + hashes[i + 1]).digest() - for i in range(0, len(hashes) - 1, 2) - ] + parent_hashes = [sha3_256(hashes[i] + hashes[i + 1]).digest() for i in range(0, len(hashes) - 1, 2)] return merkleroot(parent_hashes) @@ -76,7 +73,7 @@ def public_key64_to_address(base64_public_key): """Note this only compatible with Tendermint 0.19.x""" ed25519_public_key = public_key_from_base64(base64_public_key) encoded_public_key = amino_encoded_public_key(ed25519_public_key) - return hashlib.new('ripemd160', encoded_public_key).hexdigest().upper() + return hashlib.new("ripemd160", encoded_public_key).hexdigest().upper() def public_key_from_base64(base64_public_key): @@ -93,8 +90,8 @@ def public_key_to_base64(ed25519_public_key): def key_to_base64(ed25519_key): ed25519_key = bytes.fromhex(ed25519_key) - return base64.b64encode(ed25519_key).decode('utf-8') + return base64.b64encode(ed25519_key).decode("utf-8") def amino_encoded_public_key(ed25519_public_key): - return bytes.fromhex('1624DE6220{}'.format(ed25519_public_key)) + return bytes.fromhex("1624DE6220{}".format(ed25519_public_key)) diff --git a/planetmint/transactions/common/crypto.py b/planetmint/transactions/common/crypto.py index 9205c27..0812018 100644 --- a/planetmint/transactions/common/crypto.py +++ b/planetmint/transactions/common/crypto.py @@ -14,7 +14,7 @@ except ImportError: from cryptoconditions import crypto -CryptoKeypair = namedtuple('CryptoKeypair', ('private_key', 'public_key')) +CryptoKeypair = namedtuple("CryptoKeypair", ("private_key", "public_key")) def hash_data(data): @@ -33,8 +33,7 @@ def generate_key_pair(): """ # TODO FOR CC: Adjust interface so that this function becomes unnecessary - return CryptoKeypair( - *(k.decode() for k in crypto.ed25519_generate_key_pair())) + return CryptoKeypair(*(k.decode() for k in crypto.ed25519_generate_key_pair())) PrivateKey = 
crypto.Ed25519SigningKey @@ -43,13 +42,15 @@ PublicKey = crypto.Ed25519VerifyingKey def key_pair_from_ed25519_key(hex_private_key): """Generate base58 encode public-private key pair from a hex encoded private key""" - priv_key = crypto.Ed25519SigningKey(bytes.fromhex(hex_private_key)[:32], encoding='bytes') + priv_key = crypto.Ed25519SigningKey(bytes.fromhex(hex_private_key)[:32], encoding="bytes") public_key = priv_key.get_verifying_key() - return CryptoKeypair(private_key=priv_key.encode(encoding='base58').decode('utf-8'), - public_key=public_key.encode(encoding='base58').decode('utf-8')) + return CryptoKeypair( + private_key=priv_key.encode(encoding="base58").decode("utf-8"), + public_key=public_key.encode(encoding="base58").decode("utf-8"), + ) def public_key_from_ed25519_key(hex_public_key): """Generate base58 public key from hex encoded public key""" - public_key = crypto.Ed25519VerifyingKey(bytes.fromhex(hex_public_key), encoding='bytes') - return public_key.encode(encoding='base58').decode('utf-8') + public_key = crypto.Ed25519VerifyingKey(bytes.fromhex(hex_public_key), encoding="bytes") + return public_key.encode(encoding="base58").decode("utf-8") diff --git a/planetmint/transactions/common/input.py b/planetmint/transactions/common/input.py index e20a915..6508cc0 100644 --- a/planetmint/transactions/common/input.py +++ b/planetmint/transactions/common/input.py @@ -30,19 +30,19 @@ class Input(object): def __init__(self, fulfillment, owners_before, fulfills=None): """Create an instance of an :class:`~.Input`. - Args: - fulfillment (:class:`cryptoconditions.Fulfillment`): A - Fulfillment to be signed with a private key. - owners_before (:obj:`list` of :obj:`str`): A list of owners - after a Transaction was confirmed. - fulfills (:class:`~planetmint.transactions.common.transaction. - TransactionLink`, optional): A link representing the input - of a `TRANSFER` Transaction. 
+ Args: + fulfillment (:class:`cryptoconditions.Fulfillment`): A + Fulfillment to be signed with a private key. + owners_before (:obj:`list` of :obj:`str`): A list of owners + after a Transaction was confirmed. + fulfills (:class:`~planetmint.transactions.common.transaction. + TransactionLink`, optional): A link representing the input + of a `TRANSFER` Transaction. """ if fulfills is not None and not isinstance(fulfills, TransactionLink): - raise TypeError('`fulfills` must be a TransactionLink instance') + raise TypeError("`fulfills` must be a TransactionLink instance") if not isinstance(owners_before, list): - raise TypeError('`owners_before` must be a list instance') + raise TypeError("`owners_before` must be a list instance") self.fulfillment = fulfillment self.fulfills = fulfills @@ -60,12 +60,12 @@ class Input(object): def to_dict(self): """Transforms the object to a Python dictionary. - Note: - If an Input hasn't been signed yet, this method returns a - dictionary representation. + Note: + If an Input hasn't been signed yet, this method returns a + dictionary representation. - Returns: - dict: The Input as an alternative serialization format. + Returns: + dict: The Input as an alternative serialization format. """ try: fulfillment = self.fulfillment.serialize_uri() @@ -79,9 +79,9 @@ class Input(object): fulfills = None input_ = { - 'owners_before': self.owners_before, - 'fulfills': fulfills, - 'fulfillment': fulfillment, + "owners_before": self.owners_before, + "fulfills": fulfills, + "fulfillment": fulfillment, } return input_ @@ -97,23 +97,23 @@ class Input(object): def from_dict(cls, data): """Transforms a Python dictionary to an Input object. - Note: - Optionally, this method can also serialize a Cryptoconditions- - Fulfillment that is not yet signed. + Note: + Optionally, this method can also serialize a Cryptoconditions- + Fulfillment that is not yet signed. - Args: - data (dict): The Input to be transformed. 
+ Args: + data (dict): The Input to be transformed. - Returns: - :class:`~planetmint.transactions.common.transaction.Input` + Returns: + :class:`~planetmint.transactions.common.transaction.Input` - Raises: - InvalidSignature: If an Input's URI couldn't be parsed. + Raises: + InvalidSignature: If an Input's URI couldn't be parsed. """ - fulfillment = data['fulfillment'] + fulfillment = data["fulfillment"] if not isinstance(fulfillment, (Fulfillment, type(None))): try: - fulfillment = Fulfillment.from_uri(data['fulfillment']) + fulfillment = Fulfillment.from_uri(data["fulfillment"]) except ASN1DecodeError: # TODO Remove as it is legacy code, and simply fall back on # ASN1DecodeError @@ -121,6 +121,6 @@ class Input(object): except TypeError: # NOTE: See comment about this special case in # `Input.to_dict` - fulfillment = _fulfillment_from_details(data['fulfillment']) - fulfills = TransactionLink.from_dict(data['fulfills']) - return cls(fulfillment, data['owners_before'], fulfills) + fulfillment = _fulfillment_from_details(data["fulfillment"]) + fulfills = TransactionLink.from_dict(data["fulfills"]) + return cls(fulfillment, data["owners_before"], fulfills) diff --git a/planetmint/transactions/common/memoize.py b/planetmint/transactions/common/memoize.py index 0ac1908..f5df0de 100644 --- a/planetmint/transactions/common/memoize.py +++ b/planetmint/transactions/common/memoize.py @@ -5,7 +5,7 @@ from functools import lru_cache class HDict(dict): def __hash__(self): - return hash(codecs.decode(self['id'], 'hex')) + return hash(codecs.decode(self["id"], "hex")) @lru_cache(maxsize=16384) @@ -14,12 +14,11 @@ def from_dict(func, *args, **kwargs): def memoize_from_dict(func): - @functools.wraps(func) def memoized_func(*args, **kwargs): if args[1] is None: return None - elif args[1].get('id', None): + elif args[1].get("id", None): args = list(args) args[1] = HDict(args[1]) new_args = tuple(args) @@ -30,7 +29,7 @@ def memoize_from_dict(func): return memoized_func -class 
ToDictWrapper(): +class ToDictWrapper: def __init__(self, tx): self.tx = tx @@ -47,7 +46,6 @@ def to_dict(func, tx_wrapped): def memoize_to_dict(func): - @functools.wraps(func) def memoized_func(*args, **kwargs): diff --git a/planetmint/transactions/common/schema/__init__.py b/planetmint/transactions/common/schema/__init__.py index 51e092c..df644ef 100644 --- a/planetmint/transactions/common/schema/__init__.py +++ b/planetmint/transactions/common/schema/__init__.py @@ -19,7 +19,7 @@ logger = logging.getLogger(__name__) def _load_schema(name, version, path=__file__): """Load a schema from disk""" - path = os.path.join(os.path.dirname(path), version, name + '.yaml') + path = os.path.join(os.path.dirname(path), version, name + ".yaml") with open(path) as handle: schema = yaml.safe_load(handle) fast_schema = rapidjson.Validator(rapidjson.dumps(schema)) @@ -27,22 +27,17 @@ def _load_schema(name, version, path=__file__): # TODO: make this an env var from a config file -TX_SCHEMA_VERSION = 'v2.0' +TX_SCHEMA_VERSION = "v2.0" -TX_SCHEMA_PATH, TX_SCHEMA_COMMON = _load_schema('transaction', - TX_SCHEMA_VERSION) -_, TX_SCHEMA_CREATE = _load_schema('transaction_create', - TX_SCHEMA_VERSION) -_, TX_SCHEMA_TRANSFER = _load_schema('transaction_transfer', - TX_SCHEMA_VERSION) +TX_SCHEMA_PATH, TX_SCHEMA_COMMON = _load_schema("transaction", TX_SCHEMA_VERSION) +_, TX_SCHEMA_CREATE = _load_schema("transaction_create", TX_SCHEMA_VERSION) +_, TX_SCHEMA_TRANSFER = _load_schema("transaction_transfer", TX_SCHEMA_VERSION) -_, TX_SCHEMA_VALIDATOR_ELECTION = _load_schema('transaction_validator_election', - TX_SCHEMA_VERSION) +_, TX_SCHEMA_VALIDATOR_ELECTION = _load_schema("transaction_validator_election", TX_SCHEMA_VERSION) -_, TX_SCHEMA_CHAIN_MIGRATION_ELECTION = _load_schema('transaction_chain_migration_election', - TX_SCHEMA_VERSION) +_, TX_SCHEMA_CHAIN_MIGRATION_ELECTION = _load_schema("transaction_chain_migration_election", TX_SCHEMA_VERSION) -_, TX_SCHEMA_VOTE = 
_load_schema('transaction_vote', TX_SCHEMA_VERSION) +_, TX_SCHEMA_VOTE = _load_schema("transaction_vote", TX_SCHEMA_VERSION) def _validate_schema(schema, body): @@ -66,7 +61,7 @@ def _validate_schema(schema, body): jsonschema.validate(body, schema[0]) except jsonschema.ValidationError as exc2: raise SchemaValidationError(str(exc2)) from exc2 - logger.warning('code problem: jsonschema did not raise an exception, wheras rapidjson raised %s', exc) + logger.warning("code problem: jsonschema did not raise an exception, wheras rapidjson raised %s", exc) raise SchemaValidationError(str(exc)) from exc @@ -77,7 +72,7 @@ def validate_transaction_schema(tx): transaction. TX_SCHEMA_[TRANSFER|CREATE] add additional constraints on top. """ _validate_schema(TX_SCHEMA_COMMON, tx) - if tx['operation'] == 'TRANSFER': + if tx["operation"] == "TRANSFER": _validate_schema(TX_SCHEMA_TRANSFER, tx) else: _validate_schema(TX_SCHEMA_CREATE, tx) diff --git a/planetmint/transactions/common/transaction.py b/planetmint/transactions/common/transaction.py index 3d7c081..74146e8 100644 --- a/planetmint/transactions/common/transaction.py +++ b/planetmint/transactions/common/transaction.py @@ -120,26 +120,15 @@ class Transaction(object): # Asset payloads for 'CREATE' operations must be None or # dicts holding a `data` property. Asset payloads for 'TRANSFER' # operations must be dicts holding an `id` property. 
- if ( - operation == self.CREATE - and asset is not None - and not (isinstance(asset, dict) and "data" in asset) - ): + if operation == self.CREATE and asset is not None and not (isinstance(asset, dict) and "data" in asset): raise TypeError( ( "`asset` must be None or a dict holding a `data` " " property instance for '{}' Transactions".format(operation) ) ) - elif operation == self.TRANSFER and not ( - isinstance(asset, dict) and "id" in asset - ): - raise TypeError( - ( - "`asset` must be a dict holding an `id` property " - "for 'TRANSFER' Transactions" - ) - ) + elif operation == self.TRANSFER and not (isinstance(asset, dict) and "id" in asset): + raise TypeError(("`asset` must be a dict holding an `id` property " "for 'TRANSFER' Transactions")) if outputs and not isinstance(outputs, list): raise TypeError("`outputs` must be a list instance or None") @@ -298,10 +287,7 @@ class Transaction(object): # to decode to convert the bytestring into a python str return public_key.decode() - key_pairs = { - gen_public_key(PrivateKey(private_key)): PrivateKey(private_key) - for private_key in private_keys - } + key_pairs = {gen_public_key(PrivateKey(private_key)): PrivateKey(private_key) for private_key in private_keys} tx_dict = self.to_dict() tx_dict = Transaction._remove_signatures(tx_dict) @@ -336,10 +322,7 @@ class Transaction(object): elif isinstance(input_.fulfillment, ZenroomSha256): return cls._sign_threshold_signature_fulfillment(input_, message, key_pairs) else: - raise ValueError( - "Fulfillment couldn't be matched to " - "Cryptocondition fulfillment type." 
- ) + raise ValueError("Fulfillment couldn't be matched to " "Cryptocondition fulfillment type.") @classmethod def _sign_zenroom_fulfillment(cls, input_, message, key_pairs): @@ -359,20 +342,15 @@ class Transaction(object): public_key = input_.owners_before[0] message = sha3_256(message.encode()) if input_.fulfills: - message.update( - "{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode() - ) + message.update("{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode()) try: # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. It only accepts bytestrings - input_.fulfillment.sign( - message.digest(), base58.b58decode(key_pairs[public_key].encode()) - ) + input_.fulfillment.sign(message.digest(), base58.b58decode(key_pairs[public_key].encode())) except KeyError: raise KeypairMismatchException( - "Public key {} is not a pair to " - "any of the private keys".format(public_key) + "Public key {} is not a pair to " "any of the private keys".format(public_key) ) return input_ @@ -394,20 +372,15 @@ class Transaction(object): public_key = input_.owners_before[0] message = sha3_256(message.encode()) if input_.fulfills: - message.update( - "{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode() - ) + message.update("{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode()) try: # cryptoconditions makes no assumptions of the encoding of the # message to sign or verify. 
It only accepts bytestrings - input_.fulfillment.sign( - message.digest(), base58.b58decode(key_pairs[public_key].encode()) - ) + input_.fulfillment.sign(message.digest(), base58.b58decode(key_pairs[public_key].encode())) except KeyError: raise KeypairMismatchException( - "Public key {} is not a pair to " - "any of the private keys".format(public_key) + "Public key {} is not a pair to " "any of the private keys".format(public_key) ) return input_ @@ -424,9 +397,7 @@ class Transaction(object): input_ = deepcopy(input_) message = sha3_256(message.encode()) if input_.fulfills: - message.update( - "{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode() - ) + message.update("{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode()) for owner_before in set(input_.owners_before): # TODO: CC should throw a KeypairMismatchException, instead of @@ -442,15 +413,13 @@ class Transaction(object): subffills = ccffill.get_subcondition_from_vk(base58.b58decode(owner_before)) if not subffills: raise KeypairMismatchException( - "Public key {} cannot be found " - "in the fulfillment".format(owner_before) + "Public key {} cannot be found " "in the fulfillment".format(owner_before) ) try: private_key = key_pairs[owner_before] except KeyError: raise KeypairMismatchException( - "Public key {} is not a pair " - "to any of the private keys".format(owner_before) + "Public key {} is not a pair " "to any of the private keys".format(owner_before) ) # cryptoconditions makes no assumptions of the encoding of the @@ -483,9 +452,7 @@ class Transaction(object): # greatly, as we do not have to check against `None` values. 
return self._inputs_valid(["dummyvalue" for _ in self.inputs]) elif self.operation == self.TRANSFER: - return self._inputs_valid( - [output.fulfillment.condition_uri for output in outputs] - ) + return self._inputs_valid([output.fulfillment.condition_uri for output in outputs]) else: allowed_ops = ", ".join(self.__class__.ALLOWED_OPERATIONS) raise TypeError("`operation` must be one of {}".format(allowed_ops)) @@ -506,9 +473,7 @@ class Transaction(object): """ if len(self.inputs) != len(output_condition_uris): - raise ValueError( - "Inputs and " "output_condition_uris must have the same count" - ) + raise ValueError("Inputs and " "output_condition_uris must have the same count") tx_dict = self.tx_dict if self.tx_dict else self.to_dict() tx_dict = Transaction._remove_signatures(tx_dict) @@ -517,9 +482,7 @@ class Transaction(object): def validate(i, output_condition_uri=None): """Validate input against output condition URI""" - return self._input_valid( - self.inputs[i], self.operation, tx_serialized, output_condition_uri - ) + return self._input_valid(self.inputs[i], self.operation, tx_serialized, output_condition_uri) return all(validate(i, cond) for i, cond in enumerate(output_condition_uris)) @@ -574,9 +537,7 @@ class Transaction(object): else: message = sha3_256(message.encode()) if input_.fulfills: - message.update( - "{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode() - ) + message.update("{}{}".format(input_.fulfills.txid, input_.fulfills.output).encode()) # NOTE: We pass a timestamp to `.validate`, as in case of a timeout # condition we'll have to validate against it @@ -676,19 +637,11 @@ class Transaction(object): transactions = [transactions] # create a set of the transactions' asset ids - asset_ids = { - tx.id if tx.operation == tx.CREATE else tx.asset["id"] - for tx in transactions - } + asset_ids = {tx.id if tx.operation == tx.CREATE else tx.asset["id"] for tx in transactions} # check that all the transasctions have the same asset id if 
len(asset_ids) > 1: - raise AssetIdMismatch( - ( - "All inputs of all transactions passed" - " need to have the same asset id" - ) - ) + raise AssetIdMismatch(("All inputs of all transactions passed" " need to have the same asset id")) return asset_ids.pop() @staticmethod @@ -712,10 +665,7 @@ class Transaction(object): tx_body_serialized = Transaction._to_str(tx_body) valid_tx_id = Transaction._to_hash(tx_body_serialized) if proposed_tx_id != valid_tx_id: - err_msg = ( - "The transaction's id '{}' isn't equal to " - "the hash of its body, i.e. it's not valid." - ) + err_msg = "The transaction's id '{}' isn't equal to " "the hash of its body, i.e. it's not valid." raise InvalidHash(err_msg.format(proposed_tx_id)) @classmethod @@ -729,27 +679,25 @@ class Transaction(object): Returns: :class:`~planetmint.transactions.common.transaction.Transaction` """ - operation = ( - tx.get("operation", Transaction.CREATE) - if isinstance(tx, dict) - else Transaction.CREATE - ) + operation = tx.get("operation", Transaction.CREATE) if isinstance(tx, dict) else Transaction.CREATE cls = Transaction.resolve_class(operation) id = None try: - id = tx['id'] + id = tx["id"] except KeyError: id = None # tx['asset'] = tx['asset'][0] if isinstance( tx['asset'], list) or isinstance( tx['asset'], tuple) else tx['asset'], # noqa: E501 local_dict = { - 'inputs': tx['inputs'], - 'outputs': tx['outputs'], - 'operation': operation, - 'metadata': tx['metadata'], - 'asset': tx['asset'], # [0] if isinstance( tx['asset'], list) or isinstance( tx['asset'], tuple) else tx['asset'], # noqa: E501 - 'version': tx['version'], - 'id': id + "inputs": tx["inputs"], + "outputs": tx["outputs"], + "operation": operation, + "metadata": tx["metadata"], + "asset": tx[ + "asset" + ], # [0] if isinstance( tx['asset'], list) or isinstance( tx['asset'], tuple) else tx['asset'], # noqa: E501 + "version": tx["version"], + "id": id, } if not skip_schema_validation: @@ -802,14 +750,14 @@ class Transaction(object): if asset is 
not None: # This is tarantool specific behaviour needs to be addressed tx = tx_map[asset[1]] - tx['asset'] = asset[0] + tx["asset"] = asset[0] tx_ids = list(tx_map.keys()) metadata_list = list(planet.get_metadata(tx_ids)) for metadata in metadata_list: - if 'id' in metadata: - tx = tx_map[metadata['id']] - tx.update({'metadata': metadata.get('metadata')}) + if "id" in metadata: + tx = tx_map[metadata["id"]] + tx.update({"metadata": metadata.get("metadata")}) if return_list: tx_list = [] @@ -851,9 +799,7 @@ class Transaction(object): if input_tx is None: raise InputDoesNotExist("input `{}` doesn't exist".format(input_txid)) - spent = planet.get_spent( - input_txid, input_.fulfills.output, current_transactions - ) + spent = planet.get_spent(input_txid, input_.fulfills.output, current_transactions) if spent: raise DoubleSpend("input `{}` was already spent".format(input_txid)) @@ -869,27 +815,15 @@ class Transaction(object): # validate asset id asset_id = self.get_asset_id(input_txs) if asset_id != self.asset["id"]: - raise AssetIdMismatch( - ( - "The asset id of the input does not" - " match the asset id of the" - " transaction" - ) - ) + raise AssetIdMismatch(("The asset id of the input does not" " match the asset id of the" " transaction")) - input_amount = sum( - [input_condition.amount for input_condition in input_conditions] - ) - output_amount = sum( - [output_condition.amount for output_condition in self.outputs] - ) + input_amount = sum([input_condition.amount for input_condition in input_conditions]) + output_amount = sum([output_condition.amount for output_condition in self.outputs]) if output_amount != input_amount: raise AmountError( ( - "The amount used in the inputs `{}`" - " needs to be same as the amount used" - " in the outputs `{}`" + "The amount used in the inputs `{}`" " needs to be same as the amount used" " in the outputs `{}`" ).format(input_amount, output_amount) ) diff --git a/planetmint/transactions/common/transaction_link.py 
b/planetmint/transactions/common/transaction_link.py index fcdbeb1..2a93ec4 100644 --- a/planetmint/transactions/common/transaction_link.py +++ b/planetmint/transactions/common/transaction_link.py @@ -3,29 +3,30 @@ # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) # Code is Apache-2.0 and docs are CC-BY-4.0 + class TransactionLink(object): """An object for unidirectional linking to a Transaction's Output. - Attributes: - txid (str, optional): A Transaction to link to. - output (int, optional): An output's index in a Transaction with id - `txid`. + Attributes: + txid (str, optional): A Transaction to link to. + output (int, optional): An output's index in a Transaction with id + `txid`. """ def __init__(self, txid=None, output=None): """Create an instance of a :class:`~.TransactionLink`. - Note: - In an IPLD implementation, this class is not necessary anymore, - as an IPLD link can simply point to an object, as well as an - objects properties. So instead of having a (de)serializable - class, we can have a simple IPLD link of the form: - `//transaction/outputs//`. + Note: + In an IPLD implementation, this class is not necessary anymore, + as an IPLD link can simply point to an object, as well as an + objects properties. So instead of having a (de)serializable + class, we can have a simple IPLD link of the form: + `//transaction/outputs//`. - Args: - txid (str, optional): A Transaction to link to. - output (int, optional): An Outputs's index in a Transaction with - id `txid`. + Args: + txid (str, optional): A Transaction to link to. + output (int, optional): An Outputs's index in a Transaction with + id `txid`. """ self.txid = txid self.output = output @@ -44,33 +45,32 @@ class TransactionLink(object): def from_dict(cls, link): """Transforms a Python dictionary to a TransactionLink object. - Args: - link (dict): The link to be transformed. + Args: + link (dict): The link to be transformed. 
- Returns: - :class:`~planetmint.transactions.common.transaction.TransactionLink` + Returns: + :class:`~planetmint.transactions.common.transaction.TransactionLink` """ try: - return cls(link['transaction_id'], link['output_index']) + return cls(link["transaction_id"], link["output_index"]) except TypeError: return cls() def to_dict(self): """Transforms the object to a Python dictionary. - Returns: - (dict|None): The link as an alternative serialization format. + Returns: + (dict|None): The link as an alternative serialization format. """ if self.txid is None and self.output is None: return None else: return { - 'transaction_id': self.txid, - 'output_index': self.output, + "transaction_id": self.txid, + "output_index": self.output, } - def to_uri(self, path=''): + def to_uri(self, path=""): if self.txid is None and self.output is None: return None - return '{}/transactions/{}/outputs/{}'.format(path, self.txid, - self.output) + return "{}/transactions/{}/outputs/{}".format(path, self.txid, self.output) diff --git a/planetmint/transactions/common/transaction_mode_types.py b/planetmint/transactions/common/transaction_mode_types.py index 840dff7..5821d36 100644 --- a/planetmint/transactions/common/transaction_mode_types.py +++ b/planetmint/transactions/common/transaction_mode_types.py @@ -3,6 +3,6 @@ # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) # Code is Apache-2.0 and docs are CC-BY-4.0 -BROADCAST_TX_COMMIT = 'broadcast_tx_commit' -BROADCAST_TX_ASYNC = 'broadcast_tx_async' -BROADCAST_TX_SYNC = 'broadcast_tx_sync' +BROADCAST_TX_COMMIT = "broadcast_tx_commit" +BROADCAST_TX_ASYNC = "broadcast_tx_async" +BROADCAST_TX_SYNC = "broadcast_tx_sync" diff --git a/planetmint/transactions/common/utils.py b/planetmint/transactions/common/utils.py index 94cc37a..e1b4a4f 100644 --- a/planetmint/transactions/common/utils.py +++ b/planetmint/transactions/common/utils.py @@ -75,7 +75,7 @@ def validate_txn_obj(obj_name, obj, key, validation_fun): Raises: ValidationError: 
`validation_fun` will raise exception on failure """ - backend = Config().get()['database']['backend'] + backend = Config().get()["database"]["backend"] if backend == "localmongodb": data = obj.get(key, {}) @@ -184,9 +184,7 @@ def _fulfillment_to_details(fulfillment): } if fulfillment.type_name == "threshold-sha-256": - subconditions = [ - _fulfillment_to_details(cond["body"]) for cond in fulfillment.subconditions - ] + subconditions = [_fulfillment_to_details(cond["body"]) for cond in fulfillment.subconditions] return { "type": "threshold-sha-256", "threshold": fulfillment.threshold, diff --git a/planetmint/transactions/types/assets/create.py b/planetmint/transactions/types/assets/create.py index 5cce7fa..e878186 100644 --- a/planetmint/transactions/types/assets/create.py +++ b/planetmint/transactions/types/assets/create.py @@ -10,23 +10,23 @@ from planetmint.transactions.common.output import Output class Create(Transaction): - OPERATION = 'CREATE' + OPERATION = "CREATE" ALLOWED_OPERATIONS = (OPERATION,) @classmethod def validate_create(self, tx_signers, recipients, asset, metadata): if not isinstance(tx_signers, list): - raise TypeError('`tx_signers` must be a list instance') + raise TypeError("`tx_signers` must be a list instance") if not isinstance(recipients, list): - raise TypeError('`recipients` must be a list instance') + raise TypeError("`recipients` must be a list instance") if len(tx_signers) == 0: - raise ValueError('`tx_signers` list cannot be empty') + raise ValueError("`tx_signers` list cannot be empty") if len(recipients) == 0: - raise ValueError('`recipients` list cannot be empty') + raise ValueError("`recipients` list cannot be empty") if not (asset is None or isinstance(asset, dict)): - raise TypeError('`asset` must be a dict or None') + raise TypeError("`asset` must be a dict or None") if not (metadata is None or isinstance(metadata, dict)): - raise TypeError('`metadata` must be a dict or None') + raise TypeError("`metadata` must be a dict or 
None") inputs = [] outputs = [] @@ -34,9 +34,9 @@ class Create(Transaction): # generate_outputs for recipient in recipients: if not isinstance(recipient, tuple) or len(recipient) != 2: - raise ValueError(('Each `recipient` in the list must be a' - ' tuple of `([],' - ' )`')) + raise ValueError( + ("Each `recipient` in the list must be a" " tuple of `([]," " )`") + ) pub_keys, amount = recipient outputs.append(Output.generate(pub_keys, amount)) @@ -49,30 +49,30 @@ class Create(Transaction): def generate(cls, tx_signers, recipients, metadata=None, asset=None): """A simple way to generate a `CREATE` transaction. - Note: - This method currently supports the following Cryptoconditions - use cases: - - Ed25519 - - ThresholdSha256 + Note: + This method currently supports the following Cryptoconditions + use cases: + - Ed25519 + - ThresholdSha256 - Additionally, it provides support for the following Planetmint - use cases: - - Multiple inputs and outputs. + Additionally, it provides support for the following Planetmint + use cases: + - Multiple inputs and outputs. - Args: - tx_signers (:obj:`list` of :obj:`str`): A list of keys that - represent the signers of the CREATE Transaction. - recipients (:obj:`list` of :obj:`tuple`): A list of - ([keys],amount) that represent the recipients of this - Transaction. - metadata (dict): The metadata to be stored along with the - Transaction. - asset (dict): The metadata associated with the asset that will - be created in this Transaction. + Args: + tx_signers (:obj:`list` of :obj:`str`): A list of keys that + represent the signers of the CREATE Transaction. + recipients (:obj:`list` of :obj:`tuple`): A list of + ([keys],amount) that represent the recipients of this + Transaction. + metadata (dict): The metadata to be stored along with the + Transaction. + asset (dict): The metadata associated with the asset that will + be created in this Transaction. 
- Returns: - :class:`~planetmint.common.transaction.Transaction` + Returns: + :class:`~planetmint.common.transaction.Transaction` """ (inputs, outputs) = cls.validate_create(tx_signers, recipients, asset, metadata) - return cls(cls.OPERATION, {'data': asset}, inputs, outputs, metadata) + return cls(cls.OPERATION, {"data": asset}, inputs, outputs, metadata) diff --git a/planetmint/transactions/types/assets/transfer.py b/planetmint/transactions/types/assets/transfer.py index 91a1a1e..057150a 100644 --- a/planetmint/transactions/types/assets/transfer.py +++ b/planetmint/transactions/types/assets/transfer.py @@ -10,31 +10,31 @@ from copy import deepcopy class Transfer(Transaction): - OPERATION = 'TRANSFER' + OPERATION = "TRANSFER" ALLOWED_OPERATIONS = (OPERATION,) @classmethod def validate_transfer(cls, inputs, recipients, asset_id, metadata): if not isinstance(inputs, list): - raise TypeError('`inputs` must be a list instance') + raise TypeError("`inputs` must be a list instance") if len(inputs) == 0: - raise ValueError('`inputs` must contain at least one item') + raise ValueError("`inputs` must contain at least one item") if not isinstance(recipients, list): - raise TypeError('`recipients` must be a list instance') + raise TypeError("`recipients` must be a list instance") if len(recipients) == 0: - raise ValueError('`recipients` list cannot be empty') + raise ValueError("`recipients` list cannot be empty") outputs = [] for recipient in recipients: if not isinstance(recipient, tuple) or len(recipient) != 2: - raise ValueError(('Each `recipient` in the list must be a' - ' tuple of `([],' - ' )`')) + raise ValueError( + ("Each `recipient` in the list must be a" " tuple of `([]," " )`") + ) pub_keys, amount = recipient outputs.append(Output.generate(pub_keys, amount)) if not isinstance(asset_id, str): - raise TypeError('`asset_id` must be a string') + raise TypeError("`asset_id` must be a string") return (deepcopy(inputs), outputs) @@ -42,40 +42,40 @@ class 
Transfer(Transaction): def generate(cls, inputs, recipients, asset_id, metadata=None): """A simple way to generate a `TRANSFER` transaction. - Note: - Different cases for threshold conditions: + Note: + Different cases for threshold conditions: - Combining multiple `inputs` with an arbitrary number of - `recipients` can yield interesting cases for the creation of - threshold conditions we'd like to support. The following - notation is proposed: + Combining multiple `inputs` with an arbitrary number of + `recipients` can yield interesting cases for the creation of + threshold conditions we'd like to support. The following + notation is proposed: - 1. The index of a `recipient` corresponds to the index of - an input: - e.g. `transfer([input1], [a])`, means `input1` would now be - owned by user `a`. + 1. The index of a `recipient` corresponds to the index of + an input: + e.g. `transfer([input1], [a])`, means `input1` would now be + owned by user `a`. - 2. `recipients` can (almost) get arbitrary deeply nested, - creating various complex threshold conditions: - e.g. `transfer([inp1, inp2], [[a, [b, c]], d])`, means - `a`'s signature would have a 50% weight on `inp1` - compared to `b` and `c` that share 25% of the leftover - weight respectively. `inp2` is owned completely by `d`. + 2. `recipients` can (almost) get arbitrary deeply nested, + creating various complex threshold conditions: + e.g. `transfer([inp1, inp2], [[a, [b, c]], d])`, means + `a`'s signature would have a 50% weight on `inp1` + compared to `b` and `c` that share 25% of the leftover + weight respectively. `inp2` is owned completely by `d`. - Args: - inputs (:obj:`list` of :class:`~planetmint.common.transaction. - Input`): Converted `Output`s, intended to - be used as inputs in the transfer to generate. - recipients (:obj:`list` of :obj:`tuple`): A list of - ([keys],amount) that represent the recipients of this - Transaction. 
- asset_id (str): The asset ID of the asset to be transferred in - this Transaction. - metadata (dict): Python dictionary to be stored along with the - Transaction. + Args: + inputs (:obj:`list` of :class:`~planetmint.common.transaction. + Input`): Converted `Output`s, intended to + be used as inputs in the transfer to generate. + recipients (:obj:`list` of :obj:`tuple`): A list of + ([keys],amount) that represent the recipients of this + Transaction. + asset_id (str): The asset ID of the asset to be transferred in + this Transaction. + metadata (dict): Python dictionary to be stored along with the + Transaction. - Returns: - :class:`~planetmint.common.transaction.Transaction` + Returns: + :class:`~planetmint.common.transaction.Transaction` """ (inputs, outputs) = cls.validate_transfer(inputs, recipients, asset_id, metadata) - return cls(cls.OPERATION, {'id': asset_id}, inputs, outputs, metadata) + return cls(cls.OPERATION, {"id": asset_id}, inputs, outputs, metadata) diff --git a/planetmint/transactions/types/elections/chain_migration_election.py b/planetmint/transactions/types/elections/chain_migration_election.py index 5e23e40..60be48f 100644 --- a/planetmint/transactions/types/elections/chain_migration_election.py +++ b/planetmint/transactions/types/elections/chain_migration_election.py @@ -6,14 +6,14 @@ from planetmint.transactions.types.elections.election import Election class ChainMigrationElection(Election): - OPERATION = 'CHAIN_MIGRATION_ELECTION' + OPERATION = "CHAIN_MIGRATION_ELECTION" CREATE = OPERATION ALLOWED_OPERATIONS = (OPERATION,) TX_SCHEMA_CUSTOM = TX_SCHEMA_CHAIN_MIGRATION_ELECTION def has_concluded(self, planetmint, *args, **kwargs): chain = planetmint.get_latest_abci_chain() - if chain is not None and not chain['is_synced']: + if chain is not None and not chain["is_synced"]: # do not conclude the migration election if # there is another migration in progress return False @@ -26,7 +26,7 @@ class ChainMigrationElection(Election): def 
show_election(self, planet): output = super().show_election(planet) chain = planet.get_latest_abci_chain() - if chain is None or chain['is_synced']: + if chain is None or chain["is_synced"]: return output output += f'\nchain_id={chain["chain_id"]}' @@ -34,14 +34,15 @@ class ChainMigrationElection(Election): output += f'\napp_hash={block["app_hash"]}' validators = [ { - 'pub_key': { - 'type': 'tendermint/PubKeyEd25519', - 'value': k, + "pub_key": { + "type": "tendermint/PubKeyEd25519", + "value": k, }, - 'power': v, - } for k, v in self.get_validators(planet).items() + "power": v, + } + for k, v in self.get_validators(planet).items() ] - output += f'\nvalidators={json.dumps(validators, indent=4)}' + output += f"\nvalidators={json.dumps(validators, indent=4)}" return output def on_rollback(self, planet, new_height): diff --git a/planetmint/transactions/types/elections/election.py b/planetmint/transactions/types/elections/election.py index 984d179..62b3b88 100644 --- a/planetmint/transactions/types/elections/election.py +++ b/planetmint/transactions/types/elections/election.py @@ -12,30 +12,33 @@ from planetmint.transactions.types.assets.create import Create from planetmint.transactions.types.assets.transfer import Transfer from planetmint.transactions.types.elections.vote import Vote from planetmint.transactions.common.exceptions import ( - InvalidSignature, MultipleInputsError, InvalidProposer, - UnequalValidatorSet, DuplicateTransaction) + InvalidSignature, + MultipleInputsError, + InvalidProposer, + UnequalValidatorSet, + DuplicateTransaction, +) from planetmint.tendermint_utils import key_from_base64, public_key_to_base64 -from planetmint.transactions.common.crypto import (public_key_from_ed25519_key) +from planetmint.transactions.common.crypto import public_key_from_ed25519_key from planetmint.transactions.common.transaction import Transaction -from planetmint.transactions.common.schema import ( - _validate_schema, TX_SCHEMA_COMMON, TX_SCHEMA_CREATE) +from 
planetmint.transactions.common.schema import _validate_schema, TX_SCHEMA_COMMON, TX_SCHEMA_CREATE class Election(Transaction): """Represents election transactions. - To implement a custom election, create a class deriving from this one - with OPERATION set to the election operation, ALLOWED_OPERATIONS - set to (OPERATION,), CREATE set to OPERATION. + To implement a custom election, create a class deriving from this one + with OPERATION set to the election operation, ALLOWED_OPERATIONS + set to (OPERATION,), CREATE set to OPERATION. """ OPERATION = None # Custom validation schema TX_SCHEMA_CUSTOM = None # Election Statuses: - ONGOING = 'ongoing' - CONCLUDED = 'concluded' - INCONCLUSIVE = 'inconclusive' + ONGOING = "ongoing" + CONCLUDED = "concluded" + INCONCLUSIVE = "inconclusive" # Vote ratio to approve an election ELECTION_THRESHOLD = 2 / 3 @@ -51,18 +54,18 @@ class Election(Transaction): latest_block = planet.get_latest_block() if latest_block is None: return None - return planet.get_validator_change(latest_block['height']) + return planet.get_validator_change(latest_block["height"]) @classmethod def get_validators(cls, planet, height=None): """Return a dictionary of validators with key as `public_key` and - value as the `voting_power` + value as the `voting_power` """ validators = {} for validator in planet.get_validators(height): # NOTE: we assume that Tendermint encodes public key in base64 - public_key = public_key_from_ed25519_key(key_from_base64(validator['public_key']['value'])) - validators[public_key] = validator['voting_power'] + public_key = public_key_from_ed25519_key(key_from_base64(validator["public_key"]["value"])) + validators[public_key] = validator["voting_power"] return validators @@ -114,26 +117,25 @@ class Election(Transaction): duplicates = any(txn for txn in current_transactions if txn.id == self.id) if planet.is_committed(self.id) or duplicates: - raise DuplicateTransaction('transaction `{}` already exists' - .format(self.id)) + raise 
DuplicateTransaction("transaction `{}` already exists".format(self.id)) if not self.inputs_valid(input_conditions): - raise InvalidSignature('Transaction signature is invalid.') + raise InvalidSignature("Transaction signature is invalid.") current_validators = self.get_validators(planet) # NOTE: Proposer should be a single node if len(self.inputs) != 1 or len(self.inputs[0].owners_before) != 1: - raise MultipleInputsError('`tx_signers` must be a list instance of length one') + raise MultipleInputsError("`tx_signers` must be a list instance of length one") # NOTE: Check if the proposer is a validator. [election_initiator_node_pub_key] = self.inputs[0].owners_before if election_initiator_node_pub_key not in current_validators.keys(): - raise InvalidProposer('Public key is not a part of the validator set') + raise InvalidProposer("Public key is not a part of the validator set") # NOTE: Check if all validators have been assigned votes equal to their voting power if not self.is_same_topology(current_validators, self.outputs): - raise UnequalValidatorSet('Validator set much be exactly same to the outputs of election') + raise UnequalValidatorSet("Validator set much be exactly same to the outputs of election") return self @@ -141,10 +143,10 @@ class Election(Transaction): def generate(cls, initiator, voters, election_data, metadata=None): # Break symmetry in case we need to call an election with the same properties twice uuid = uuid4() - election_data['seed'] = str(uuid) + election_data["seed"] = str(uuid) (inputs, outputs) = Create.validate_create(initiator, voters, election_data, metadata) - election = cls(cls.OPERATION, {'data': election_data}, inputs, outputs, metadata) + election = cls(cls.OPERATION, {"data": election_data}, inputs, outputs, metadata) cls.validate_schema(election.to_dict()) return election @@ -174,21 +176,19 @@ class Election(Transaction): def count_votes(cls, election_pk, transactions, getter=getattr): votes = 0 for txn in transactions: - if 
getter(txn, 'operation') == Vote.OPERATION: - for output in getter(txn, 'outputs'): + if getter(txn, "operation") == Vote.OPERATION: + for output in getter(txn, "outputs"): # NOTE: We enforce that a valid vote to election id will have only # election_pk in the output public keys, including any other public key # along with election_pk will lead to vote being not considered valid. - if len(getter(output, 'public_keys')) == 1 and [election_pk] == getter(output, 'public_keys'): - votes = votes + int(getter(output, 'amount')) + if len(getter(output, "public_keys")) == 1 and [election_pk] == getter(output, "public_keys"): + votes = votes + int(getter(output, "amount")) return votes def get_commited_votes(self, planet, election_pk=None): if election_pk is None: election_pk = self.to_public_key(self.id) - txns = list(backend.query.get_asset_tokens_for_public_key(planet.connection, - self.id, - election_pk)) + txns = list(backend.query.get_asset_tokens_for_public_key(planet.connection, self.id, election_pk)) return self.count_votes(election_pk, txns, dict.get) def has_concluded(self, planet, current_votes=[]): @@ -208,15 +208,14 @@ class Election(Transaction): votes_current = self.count_votes(election_pk, current_votes) total_votes = sum(output.amount for output in self.outputs) - if (votes_committed < (2 / 3) * total_votes) and \ - (votes_committed + votes_current >= (2 / 3) * total_votes): + if (votes_committed < (2 / 3) * total_votes) and (votes_committed + votes_current >= (2 / 3) * total_votes): return True return False def get_status(self, planet): election = self.get_election(self.id, planet) - if election and election['is_concluded']: + if election and election["is_concluded"]: return self.CONCLUDED return self.INCONCLUSIVE if self.has_validator_set_changed(planet) else self.ONGOING @@ -226,11 +225,11 @@ class Election(Transaction): if latest_change is None: return False - latest_change_height = latest_change['height'] + latest_change_height = 
latest_change["height"] election = self.get_election(self.id, planet) - return latest_change_height > election['height'] + return latest_change_height > election["height"] def get_election(self, election_id, planet): return planet.get_election(election_id) @@ -239,14 +238,14 @@ class Election(Transaction): planet.store_election(self.id, height, is_concluded) def show_election(self, planet): - data = self.asset['data'] - if 'public_key' in data.keys(): - data['public_key'] = public_key_to_base64(data['public_key']['value']) - response = '' + data = self.asset["data"] + if "public_key" in data.keys(): + data["public_key"] = public_key_to_base64(data["public_key"]["value"]) + response = "" for k, v in data.items(): - if k != 'seed': - response += f'{k}={v}\n' - response += f'status={self.get_status(planet)}' + if k != "seed": + response += f"{k}={v}\n" + response += f"status={self.get_status(planet)}" return response @@ -257,8 +256,7 @@ class Election(Transaction): if not isinstance(tx, Election): continue - elections.append({'election_id': tx.id, 'height': height, - 'is_concluded': False}) + elections.append({"election_id": tx.id, "height": height, "is_concluded": False}) return elections @classmethod @@ -268,7 +266,7 @@ class Election(Transaction): if not isinstance(tx, Vote): continue - election_id = tx.asset['id'] + election_id = tx.asset["id"] if election_id not in elections: elections[election_id] = [] elections[election_id].append(tx) @@ -277,26 +275,26 @@ class Election(Transaction): @classmethod def process_block(cls, planet, new_height, txns): """Looks for election and vote transactions inside the block, records - and processes elections. + and processes elections. - Every election is recorded in the database. + Every election is recorded in the database. - Every vote has a chance to conclude the corresponding election. When - an election is concluded, the corresponding database record is - marked as such. 
+ Every vote has a chance to conclude the corresponding election. When + an election is concluded, the corresponding database record is + marked as such. - Elections and votes are processed in the order in which they - appear in the block. Elections are concluded in the order of - appearance of their first votes in the block. + Elections and votes are processed in the order in which they + appear in the block. Elections are concluded in the order of + appearance of their first votes in the block. - For every election concluded in the block, calls its `on_approval` - method. The returned value of the last `on_approval`, if any, - is a validator set update to be applied in one of the following blocks. + For every election concluded in the block, calls its `on_approval` + method. The returned value of the last `on_approval`, if any, + is a validator set update to be applied in one of the following blocks. - `on_approval` methods are implemented by elections of particular type. - The method may contain side effects but should be idempotent. To account - for other concluded elections, if it requires so, the method should - rely on the database state. + `on_approval` methods are implemented by elections of particular type. + The method may contain side effects but should be idempotent. To account + for other concluded elections, if it requires so, the method should + rely on the database state. """ # elections initiated in this block initiated_elections = cls._get_initiated_elections(new_height, txns) @@ -324,9 +322,9 @@ class Election(Transaction): @classmethod def rollback(cls, planet, new_height, txn_ids): """Looks for election and vote transactions inside the block and - cleans up the database artifacts possibly created in `process_blocks`. + cleans up the database artifacts possibly created in `process_blocks`. - Part of the `end_block`/`commit` crash recovery. + Part of the `end_block`/`commit` crash recovery. 
""" # delete election records for elections initiated at this height and @@ -342,13 +340,13 @@ class Election(Transaction): def on_approval(self, planet, new_height): """Override to update the database state according to the - election rules. Consider the current database state to account for - other concluded elections, if required. + election rules. Consider the current database state to account for + other concluded elections, if required. """ raise NotImplementedError def on_rollback(self, planet, new_height): """Override to clean up the database artifacts possibly created - in `on_approval`. Part of the `end_block`/`commit` crash recovery. + in `on_approval`. Part of the `end_block`/`commit` crash recovery. """ raise NotImplementedError diff --git a/planetmint/transactions/types/elections/vote.py b/planetmint/transactions/types/elections/vote.py index c102a8c..3430168 100644 --- a/planetmint/transactions/types/elections/vote.py +++ b/planetmint/transactions/types/elections/vote.py @@ -6,12 +6,16 @@ from planetmint.transactions.types.assets.create import Create from planetmint.transactions.types.assets.transfer import Transfer from planetmint.transactions.common.schema import ( - _validate_schema, TX_SCHEMA_COMMON, TX_SCHEMA_TRANSFER, TX_SCHEMA_VOTE) + _validate_schema, + TX_SCHEMA_COMMON, + TX_SCHEMA_TRANSFER, + TX_SCHEMA_VOTE, +) class Vote(Transfer): - OPERATION = 'VOTE' + OPERATION = "VOTE" # NOTE: This class inherits TRANSFER txn type. 
The `TRANSFER` property is # overriden to re-use methods from parent class TRANSFER = OPERATION @@ -41,14 +45,14 @@ class Vote(Transfer): @classmethod def generate(cls, inputs, recipients, election_id, metadata=None): (inputs, outputs) = cls.validate_transfer(inputs, recipients, election_id, metadata) - election_vote = cls(cls.OPERATION, {'id': election_id}, inputs, outputs, metadata) + election_vote = cls(cls.OPERATION, {"id": election_id}, inputs, outputs, metadata) cls.validate_schema(election_vote.to_dict()) return election_vote @classmethod def validate_schema(cls, tx): """Validate the validator election vote transaction. Since `VOTE` extends `TRANSFER` - transaction, all the validations for `CREATE` transaction should be inherited + transaction, all the validations for `CREATE` transaction should be inherited """ _validate_schema(TX_SCHEMA_COMMON, tx) _validate_schema(TX_SCHEMA_TRANSFER, tx) diff --git a/planetmint/upsert_validator/__init__.py b/planetmint/upsert_validator/__init__.py index 8c004d2..f233a0e 100644 --- a/planetmint/upsert_validator/__init__.py +++ b/planetmint/upsert_validator/__init__.py @@ -4,4 +4,4 @@ # Code is Apache-2.0 and docs are CC-BY-4.0 -from planetmint.upsert_validator.validator_election import ValidatorElection # noqa +from planetmint.upsert_validator.validator_election import ValidatorElection # noqa diff --git a/planetmint/upsert_validator/validator_election.py b/planetmint/upsert_validator/validator_election.py index 31e4161..6ef73a0 100644 --- a/planetmint/upsert_validator/validator_election.py +++ b/planetmint/upsert_validator/validator_election.py @@ -6,12 +6,12 @@ from planetmint.transactions.common.exceptions import InvalidPowerChange from planetmint.transactions.types.elections.election import Election from planetmint.transactions.common.schema import TX_SCHEMA_VALIDATOR_ELECTION -from .validator_utils import (new_validator_set, encode_validator, validate_asset_public_key) +from .validator_utils import new_validator_set, 
encode_validator, validate_asset_public_key class ValidatorElection(Election): - OPERATION = 'VALIDATOR_ELECTION' + OPERATION = "VALIDATOR_ELECTION" # NOTE: this transaction class extends create so the operation inheritence is achieved # by renaming CREATE to VALIDATOR_ELECTION CREATE = OPERATION @@ -19,29 +19,28 @@ class ValidatorElection(Election): TX_SCHEMA_CUSTOM = TX_SCHEMA_VALIDATOR_ELECTION def validate(self, planet, current_transactions=[]): - """For more details refer BEP-21: https://github.com/planetmint/BEPs/tree/master/21 - """ + """For more details refer BEP-21: https://github.com/planetmint/BEPs/tree/master/21""" current_validators = self.get_validators(planet) super(ValidatorElection, self).validate(planet, current_transactions=current_transactions) # NOTE: change more than 1/3 of the current power is not allowed - if self.asset['data']['power'] >= (1 / 3) * sum(current_validators.values()): - raise InvalidPowerChange('`power` change must be less than 1/3 of total power') + if self.asset["data"]["power"] >= (1 / 3) * sum(current_validators.values()): + raise InvalidPowerChange("`power` change must be less than 1/3 of total power") return self @classmethod def validate_schema(cls, tx): super(ValidatorElection, cls).validate_schema(tx) - validate_asset_public_key(tx['asset']['data']['public_key']) + validate_asset_public_key(tx["asset"]["data"]["public_key"]) def has_concluded(self, planet, *args, **kwargs): latest_block = planet.get_latest_block() if latest_block is not None: - latest_block_height = latest_block['height'] - latest_validator_change = planet.get_validator_change()['height'] + latest_block_height = latest_block["height"] + latest_validator_change = planet.get_validator_change()["height"] # TODO change to `latest_block_height + 3` when upgrading to Tendermint 0.24.0. 
if latest_validator_change == latest_block_height + 2: @@ -51,17 +50,15 @@ class ValidatorElection(Election): return super().has_concluded(planet, *args, **kwargs) def on_approval(self, planet, new_height): - validator_updates = [self.asset['data']] + validator_updates = [self.asset["data"]] curr_validator_set = planet.get_validators(new_height) - updated_validator_set = new_validator_set(curr_validator_set, - validator_updates) + updated_validator_set = new_validator_set(curr_validator_set, validator_updates) - updated_validator_set = [v for v in updated_validator_set - if v['voting_power'] > 0] + updated_validator_set = [v for v in updated_validator_set if v["voting_power"] > 0] # TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0. planet.store_validator_set(new_height + 1, updated_validator_set) - return encode_validator(self.asset['data']) + return encode_validator(self.asset["data"]) def on_rollback(self, planetmint, new_height): # TODO change to `new_height + 2` when upgrading to Tendermint 0.24.0. 
diff --git a/planetmint/upsert_validator/validator_utils.py b/planetmint/upsert_validator/validator_utils.py index c515f85..f797860 100644 --- a/planetmint/upsert_validator/validator_utils.py +++ b/planetmint/upsert_validator/validator_utils.py @@ -8,67 +8,72 @@ from planetmint.transactions.common.exceptions import InvalidPublicKey def encode_validator(v): - ed25519_public_key = v['public_key']['value'] + ed25519_public_key = v["public_key"]["value"] pub_key = keys_pb2.PublicKey(ed25519=bytes.fromhex(ed25519_public_key)) - return types_pb2.ValidatorUpdate(pub_key=pub_key, power=v['power']) + return types_pb2.ValidatorUpdate(pub_key=pub_key, power=v["power"]) def decode_validator(v): - return {'public_key': {'type': 'ed25519-base64', - 'value': codecs.encode(v.pub_key.ed25519, 'base64').decode().rstrip('\n')}, - 'voting_power': v.power} + return { + "public_key": { + "type": "ed25519-base64", + "value": codecs.encode(v.pub_key.ed25519, "base64").decode().rstrip("\n"), + }, + "voting_power": v.power, + } def new_validator_set(validators, updates): validators_dict = {} for v in validators: - validators_dict[v['public_key']['value']] = v + validators_dict[v["public_key"]["value"]] = v updates_dict = {} for u in updates: - decoder = get_public_key_decoder(u['public_key']) - public_key64 = base64.b64encode(decoder(u['public_key']['value'])).decode('utf-8') - updates_dict[public_key64] = {'public_key': {'type': 'ed25519-base64', - 'value': public_key64}, - 'voting_power': u['power']} + decoder = get_public_key_decoder(u["public_key"]) + public_key64 = base64.b64encode(decoder(u["public_key"]["value"])).decode("utf-8") + updates_dict[public_key64] = { + "public_key": {"type": "ed25519-base64", "value": public_key64}, + "voting_power": u["power"], + } new_validators_dict = {**validators_dict, **updates_dict} return list(new_validators_dict.values()) def encode_pk_to_base16(validator): - pk = validator['public_key'] + pk = validator["public_key"] decoder = 
get_public_key_decoder(pk) - public_key16 = base64.b16encode(decoder(pk['value'])).decode('utf-8') + public_key16 = base64.b16encode(decoder(pk["value"])).decode("utf-8") - validator['public_key']['value'] = public_key16 + validator["public_key"]["value"] = public_key16 return validator def validate_asset_public_key(pk): - pk_binary = pk['value'].encode('utf-8') + pk_binary = pk["value"].encode("utf-8") decoder = get_public_key_decoder(pk) try: pk_decoded = decoder(pk_binary) if len(pk_decoded) != 32: - raise InvalidPublicKey('Public key should be of size 32 bytes') + raise InvalidPublicKey("Public key should be of size 32 bytes") except binascii.Error: - raise InvalidPublicKey('Invalid `type` specified for public key `value`') + raise InvalidPublicKey("Invalid `type` specified for public key `value`") def get_public_key_decoder(pk): - encoding = pk['type'] + encoding = pk["type"] decoder = base64.b64decode - if encoding == 'ed25519-base16': + if encoding == "ed25519-base16": decoder = base64.b16decode - elif encoding == 'ed25519-base32': + elif encoding == "ed25519-base32": decoder = base64.b32decode - elif encoding == 'ed25519-base64': + elif encoding == "ed25519-base64": decoder = base64.b64decode else: - raise InvalidPublicKey('Invalid `type` specified for public key `value`') + raise InvalidPublicKey("Invalid `type` specified for public key `value`") return decoder diff --git a/planetmint/utils.py b/planetmint/utils.py index 25dbc82..eff4c25 100644 --- a/planetmint/utils.py +++ b/planetmint/utils.py @@ -17,9 +17,7 @@ from planetmint.transactions.common.crypto import key_pair_from_ed25519_key class ProcessGroup(object): - - def __init__(self, concurrency=None, group=None, target=None, name=None, - args=None, kwargs=None, daemon=None): + def __init__(self, concurrency=None, group=None, target=None, name=None, args=None, kwargs=None, daemon=None): self.concurrency = concurrency or mp.cpu_count() self.group = group self.target = target @@ -31,9 +29,14 @@ class 
ProcessGroup(object): def start(self): for i in range(self.concurrency): - proc = mp.Process(group=self.group, target=self.target, - name=self.name, args=self.args, - kwargs=self.kwargs, daemon=self.daemon) + proc = mp.Process( + group=self.group, + target=self.target, + name=self.name, + args=self.args, + kwargs=self.kwargs, + daemon=self.daemon, + ) proc.start() self.processes.append(proc) @@ -117,8 +120,8 @@ def condition_details_has_owner(condition_details, owner): bool: True if the public key is found in the condition details, False otherwise """ - if 'subconditions' in condition_details: - result = condition_details_has_owner(condition_details['subconditions'], owner) + if "subconditions" in condition_details: + result = condition_details_has_owner(condition_details["subconditions"], owner) if result: return True @@ -128,8 +131,7 @@ def condition_details_has_owner(condition_details, owner): if result: return True else: - if 'public_key' in condition_details \ - and owner == condition_details['public_key']: + if "public_key" in condition_details and owner == condition_details["public_key"]: return True return False @@ -157,7 +159,7 @@ class Lazy: return self def __getitem__(self, key): - self.stack.append('__getitem__') + self.stack.append("__getitem__") self.stack.append(([key], {})) return self @@ -184,7 +186,7 @@ class Lazy: def load_node_key(path): with open(path) as json_data: priv_validator = json.load(json_data) - priv_key = priv_validator['priv_key']['value'] + priv_key = priv_validator["priv_key"]["value"] hex_private_key = key_from_base64(priv_key) return key_pair_from_ed25519_key(hex_private_key) @@ -200,7 +202,7 @@ def tendermint_version_is_compatible(running_tm_ver): """ # Splitting because version can look like this e.g. 
0.22.8-40d6dc2e - tm_ver = running_tm_ver.split('-') + tm_ver = running_tm_ver.split("-") if not tm_ver: return False for ver in __tm_supported_versions__: diff --git a/planetmint/validation.py b/planetmint/validation.py index 4d85bb3..e8d4282 100644 --- a/planetmint/validation.py +++ b/planetmint/validation.py @@ -4,7 +4,7 @@ # Code is Apache-2.0 and docs are CC-BY-4.0 -class BaseValidationRules(): +class BaseValidationRules: """Base validation rules for Planetmint. A validation plugin must expose a class inheriting from this one via an entry_point. diff --git a/planetmint/web/routes.py b/planetmint/web/routes.py index 2c650c0..3579c2e 100644 --- a/planetmint/web/routes.py +++ b/planetmint/web/routes.py @@ -21,7 +21,7 @@ def add_routes(app): for (prefix, routes) in API_SECTIONS: api = Api(app, prefix=prefix) for ((pattern, resource, *args), kwargs) in routes: - kwargs.setdefault('strict_slashes', False) + kwargs.setdefault("strict_slashes", False) api.add_resource(resource, pattern, *args, **kwargs) @@ -30,20 +30,20 @@ def r(*args, **kwargs): ROUTES_API_V1 = [ - r('/', info.ApiV1Index), - r('assets/', assets.AssetListApi), - r('metadata/', metadata.MetadataApi), - r('blocks/', blocks.BlockApi), - r('blocks/latest', blocks.LatestBlock), - r('blocks/', blocks.BlockListApi), - r('transactions/', tx.TransactionApi), - r('transactions', tx.TransactionListApi), - r('outputs/', outputs.OutputListApi), - r('validators/', validators.ValidatorsApi), + r("/", info.ApiV1Index), + r("assets/", assets.AssetListApi), + r("metadata/", metadata.MetadataApi), + r("blocks/", blocks.BlockApi), + r("blocks/latest", blocks.LatestBlock), + r("blocks/", blocks.BlockListApi), + r("transactions/", tx.TransactionApi), + r("transactions", tx.TransactionListApi), + r("outputs/", outputs.OutputListApi), + r("validators/", validators.ValidatorsApi), ] API_SECTIONS = [ - (None, [r('/', info.RootIndex)]), - ('/api/v1/', ROUTES_API_V1), + (None, [r("/", info.RootIndex)]), + ("/api/v1/", 
ROUTES_API_V1), ] diff --git a/planetmint/web/server.py b/planetmint/web/server.py index e0c4519..fd1f2c6 100644 --- a/planetmint/web/server.py +++ b/planetmint/web/server.py @@ -44,13 +44,14 @@ class StandaloneApplication(gunicorn.app.base.BaseApplication): def load_config(self): # find a better way to pass this such that # the custom logger class can access it. - custom_log_config = self.options.get('custom_log_config') - self.cfg.env_orig['custom_log_config'] = custom_log_config + custom_log_config = self.options.get("custom_log_config") + self.cfg.env_orig["custom_log_config"] = custom_log_config - config = dict((key, value) for key, value in self.options.items() - if key in self.cfg.settings and value is not None) + config = dict( + (key, value) for key, value in self.options.items() if key in self.cfg.settings and value is not None + ) - config['default_proc_name'] = 'planetmint_gunicorn' + config["default_proc_name"] = "planetmint_gunicorn" for key, value in config.items(): # not sure if we need the `key.lower` here, will just keep # keep it for now. @@ -81,7 +82,7 @@ def create_app(*, debug=False, threads=1, planetmint_factory=None): app.debug = debug - app.config['bigchain_pool'] = utils.pool(planetmint_factory, size=threads) + app.config["bigchain_pool"] = utils.pool(planetmint_factory, size=threads) add_routes(app) @@ -101,18 +102,18 @@ def create_server(settings, log_config=None, planetmint_factory=None): settings = copy.deepcopy(settings) - if not settings.get('workers'): - settings['workers'] = (multiprocessing.cpu_count() * 2) + 1 + if not settings.get("workers"): + settings["workers"] = (multiprocessing.cpu_count() * 2) + 1 - if not settings.get('threads'): + if not settings.get("threads"): # Note: Threading is not recommended currently, as the frontend workload # is largely CPU bound and parallisation across Python threads makes it # slower. 
- settings['threads'] = 1 + settings["threads"] = 1 - settings['custom_log_config'] = log_config - app = create_app(debug=settings.get('debug', False), - threads=settings['threads'], - planetmint_factory=planetmint_factory) + settings["custom_log_config"] = log_config + app = create_app( + debug=settings.get("debug", False), threads=settings["threads"], planetmint_factory=planetmint_factory + ) standalone = StandaloneApplication(app, options=settings) return standalone diff --git a/planetmint/web/strip_content_type_middleware.py b/planetmint/web/strip_content_type_middleware.py index 026b96b..86a564d 100644 --- a/planetmint/web/strip_content_type_middleware.py +++ b/planetmint/web/strip_content_type_middleware.py @@ -22,9 +22,9 @@ class StripContentTypeMiddleware: def __call__(self, environ, start_response): """Run the middleware and then call the original WSGI application.""" - if environ['REQUEST_METHOD'] == 'GET': + if environ["REQUEST_METHOD"] == "GET": try: - del environ['CONTENT_TYPE'] + del environ["CONTENT_TYPE"] except KeyError: pass else: diff --git a/planetmint/web/views/assets.py b/planetmint/web/views/assets.py index e15d639..14fa52a 100644 --- a/planetmint/web/views/assets.py +++ b/planetmint/web/views/assets.py @@ -30,17 +30,17 @@ class AssetListApi(Resource): A list of assets that match the query. 
""" parser = reqparse.RequestParser() - parser.add_argument('search', type=str, required=True) - parser.add_argument('limit', type=int) + parser.add_argument("search", type=str, required=True) + parser.add_argument("limit", type=int) args = parser.parse_args() - if not args['search']: - return make_error(400, 'text_search cannot be empty') - if not args['limit']: + if not args["search"]: + return make_error(400, "text_search cannot be empty") + if not args["limit"]: # if the limit is not specified do not pass None to `text_search` - del args['limit'] + del args["limit"] - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: assets = planet.text_search(**args) @@ -49,7 +49,4 @@ class AssetListApi(Resource): # This only works with MongoDB as the backend return list(assets) except OperationError as e: - return make_error( - 400, - '({}): {}'.format(type(e).__name__, e) - ) + return make_error(400, "({}): {}".format(type(e).__name__, e)) diff --git a/planetmint/web/views/base.py b/planetmint/web/views/base.py index 5d84c59..cea665f 100644 --- a/planetmint/web/views/base.py +++ b/planetmint/web/views/base.py @@ -17,13 +17,13 @@ logger = logging.getLogger(__name__) def make_error(status_code, message=None): if status_code == 404 and message is None: - message = 'Not found' + message = "Not found" - response_content = {'status': status_code, 'message': message} - request_info = {'method': request.method, 'path': request.path} + response_content = {"status": status_code, "message": message} + request_info = {"method": request.method, "path": request.path} request_info.update(response_content) - logger.error('HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s', request_info) + logger.error("HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s", request_info) response = jsonify(response_content) response.status_code = status_code @@ -37,10 +37,10 @@ def base_ws_uri(): customized (typically when 
running behind NAT, firewall, etc.) """ - config_wsserver = Config().get()['wsserver'] + config_wsserver = Config().get()["wsserver"] - scheme = config_wsserver['advertised_scheme'] - host = config_wsserver['advertised_host'] - port = config_wsserver['advertised_port'] + scheme = config_wsserver["advertised_scheme"] + host = config_wsserver["advertised_host"] + port = config_wsserver["advertised_port"] - return '{}://{}:{}'.format(scheme, host, port) + return "{}://{}:{}".format(scheme, host, port) diff --git a/planetmint/web/views/blocks.py b/planetmint/web/views/blocks.py index 4db821e..07c59aa 100644 --- a/planetmint/web/views/blocks.py +++ b/planetmint/web/views/blocks.py @@ -21,7 +21,7 @@ class LatestBlock(Resource): A JSON string containing the data about the block. """ - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: block = planet.get_latest_block() @@ -43,7 +43,7 @@ class BlockApi(Resource): A JSON string containing the data about the block. """ - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: block = planet.get_block(block_id=block_id) @@ -64,12 +64,12 @@ class BlockListApi(Resource): "valid", "invalid", "undecided". 
""" parser = reqparse.RequestParser() - parser.add_argument('transaction_id', type=str, required=True) + parser.add_argument("transaction_id", type=str, required=True) args = parser.parse_args(strict=True) - tx_id = args['transaction_id'] + tx_id = args["transaction_id"] - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: blocks = planet.get_block_containing_tx(tx_id) diff --git a/planetmint/web/views/info.py b/planetmint/web/views/info.py index 42835b2..e2bcfa4 100644 --- a/planetmint/web/views/info.py +++ b/planetmint/web/views/info.py @@ -15,23 +15,20 @@ from planetmint.web.websocket_server import EVENTS_ENDPOINT, EVENTS_ENDPOINT_BLO class RootIndex(Resource): def get(self): - docs_url = [ - 'https://docs.planetmint.io/projects/server/en/v', - version.__version__ + '/' - ] - return flask.jsonify({ - 'api': { - 'v1': get_api_v1_info('/api/v1/') - }, - 'docs': ''.join(docs_url), - 'software': 'Planetmint', - 'version': version.__version__, - }) + docs_url = ["https://docs.planetmint.io/projects/server/en/v", version.__version__ + "/"] + return flask.jsonify( + { + "api": {"v1": get_api_v1_info("/api/v1/")}, + "docs": "".join(docs_url), + "software": "Planetmint", + "version": version.__version__, + } + ) class ApiV1Index(Resource): def get(self): - return flask.jsonify(get_api_v1_info('/')) + return flask.jsonify(get_api_v1_info("/")) def get_api_v1_info(api_prefix): @@ -41,19 +38,19 @@ def get_api_v1_info(api_prefix): websocket_root_tx = base_ws_uri() + EVENTS_ENDPOINT websocket_root_block = base_ws_uri() + EVENTS_ENDPOINT_BLOCKS docs_url = [ - 'https://docs.planetmint.io/projects/server/en/v', + "https://docs.planetmint.io/projects/server/en/v", version.__version__, - '/http-client-server-api.html', + "/http-client-server-api.html", ] return { - 'docs': ''.join(docs_url), - 'transactions': '{}transactions/'.format(api_prefix), - 'blocks': '{}blocks/'.format(api_prefix), - 'assets': 
'{}assets/'.format(api_prefix), - 'outputs': '{}outputs/'.format(api_prefix), - 'streams': websocket_root_tx, - 'streamedblocks': websocket_root_block, - 'metadata': '{}metadata/'.format(api_prefix), - 'validators': '{}validators'.format(api_prefix), + "docs": "".join(docs_url), + "transactions": "{}transactions/".format(api_prefix), + "blocks": "{}blocks/".format(api_prefix), + "assets": "{}assets/".format(api_prefix), + "outputs": "{}outputs/".format(api_prefix), + "streams": websocket_root_tx, + "streamedblocks": websocket_root_block, + "metadata": "{}metadata/".format(api_prefix), + "validators": "{}validators".format(api_prefix), } diff --git a/planetmint/web/views/metadata.py b/planetmint/web/views/metadata.py index d9ee588..c122f55 100644 --- a/planetmint/web/views/metadata.py +++ b/planetmint/web/views/metadata.py @@ -30,25 +30,22 @@ class MetadataApi(Resource): A list of metadata that match the query. """ parser = reqparse.RequestParser() - parser.add_argument('search', type=str, required=True) - parser.add_argument('limit', type=int) + parser.add_argument("search", type=str, required=True) + parser.add_argument("limit", type=int) args = parser.parse_args() - if not args['search']: - return make_error(400, 'text_search cannot be empty') - if not args['limit']: - del args['limit'] + if not args["search"]: + return make_error(400, "text_search cannot be empty") + if not args["limit"]: + del args["limit"] - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: - args['table'] = 'meta_data' + args["table"] = "meta_data" metadata = planet.text_search(**args) try: return list(metadata) except OperationError as e: - return make_error( - 400, - '({}): {}'.format(type(e).__name__, e) - ) + return make_error(400, "({}): {}".format(type(e).__name__, e)) diff --git a/planetmint/web/views/outputs.py b/planetmint/web/views/outputs.py index b4ff6da..1e2c342 100644 --- a/planetmint/web/views/outputs.py +++ 
b/planetmint/web/views/outputs.py @@ -18,14 +18,11 @@ class OutputListApi(Resource): A :obj:`list` of :cls:`str` of links to outputs. """ parser = reqparse.RequestParser() - parser.add_argument('public_key', type=parameters.valid_ed25519, - required=True) - parser.add_argument('spent', type=parameters.valid_bool) + parser.add_argument("public_key", type=parameters.valid_ed25519, required=True) + parser.add_argument("spent", type=parameters.valid_bool) args = parser.parse_args(strict=True) - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: - outputs = planet.get_outputs_filtered(args['public_key'], - args['spent']) - return [{'transaction_id': output.txid, 'output_index': output.output} - for output in outputs] + outputs = planet.get_outputs_filtered(args["public_key"], args["spent"]) + return [{"transaction_id": output.txid, "output_index": output.output} for output in outputs] diff --git a/planetmint/web/views/parameters.py b/planetmint/web/views/parameters.py index 8b4024f..931d34a 100644 --- a/planetmint/web/views/parameters.py +++ b/planetmint/web/views/parameters.py @@ -6,45 +6,47 @@ import re from planetmint.transactions.common.transaction_mode_types import ( - BROADCAST_TX_COMMIT, BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC) + BROADCAST_TX_COMMIT, + BROADCAST_TX_ASYNC, + BROADCAST_TX_SYNC, +) def valid_txid(txid): - if re.match('^[a-fA-F0-9]{64}$', txid): + if re.match("^[a-fA-F0-9]{64}$", txid): return txid.lower() - raise ValueError('Invalid hash') + raise ValueError("Invalid hash") def valid_bool(val): val = val.lower() - if val == 'true': + if val == "true": return True - if val == 'false': + if val == "false": return False raise ValueError('Boolean value must be "true" or "false" (lowercase)') def valid_ed25519(key): - if (re.match('^[1-9a-zA-Z]{43,44}$', key) and not - re.match('.*[Il0O]', key)): + if re.match("^[1-9a-zA-Z]{43,44}$", key) and not re.match(".*[Il0O]", key): return key - raise 
ValueError('Invalid base58 ed25519 key') + raise ValueError("Invalid base58 ed25519 key") def valid_operation(op): op = op.upper() - if op == 'CREATE': - return 'CREATE' - if op == 'TRANSFER': - return 'TRANSFER' + if op == "CREATE": + return "CREATE" + if op == "TRANSFER": + return "TRANSFER" raise ValueError('Operation must be "CREATE" or "TRANSFER"') def valid_mode(mode): - if mode == 'async': + if mode == "async": return BROADCAST_TX_ASYNC - if mode == 'sync': + if mode == "sync": return BROADCAST_TX_SYNC - if mode == 'commit': + if mode == "commit": return BROADCAST_TX_COMMIT raise ValueError('Mode must be "async", "sync" or "commit"') diff --git a/planetmint/web/views/transactions.py b/planetmint/web/views/transactions.py index 16f0ceb..4fb8482 100644 --- a/planetmint/web/views/transactions.py +++ b/planetmint/web/views/transactions.py @@ -65,9 +65,7 @@ class TransactionListApi(Resource): A ``dict`` containing the data about the transaction. """ parser = reqparse.RequestParser() - parser.add_argument( - "mode", type=parameters.valid_mode, default=BROADCAST_TX_ASYNC - ) + parser.add_argument("mode", type=parameters.valid_mode, default=BROADCAST_TX_ASYNC) args = parser.parse_args() mode = str(args["mode"]) @@ -85,21 +83,15 @@ class TransactionListApi(Resource): message="Invalid transaction schema: {}".format(e.__cause__.message), ) except KeyError as e: - return make_error( - 400, "Invalid transaction ({}): {}".format(type(e).__name__, e) - ) + return make_error(400, "Invalid transaction ({}): {}".format(type(e).__name__, e)) except ValidationError as e: - return make_error( - 400, "Invalid transaction ({}): {}".format(type(e).__name__, e) - ) + return make_error(400, "Invalid transaction ({}): {}".format(type(e).__name__, e)) with pool() as planet: try: planet.validate_transaction(tx_obj) except ValidationError as e: - return make_error( - 400, "Invalid transaction ({}): {}".format(type(e).__name__, e) - ) + return make_error(400, "Invalid transaction ({}): 
{}".format(type(e).__name__, e)) else: status_code, message = planet.write_transaction(tx_obj, mode) diff --git a/planetmint/web/views/validators.py b/planetmint/web/views/validators.py index 4b0efd6..2f41eb9 100644 --- a/planetmint/web/views/validators.py +++ b/planetmint/web/views/validators.py @@ -15,7 +15,7 @@ class ValidatorsApi(Resource): A JSON string containing the validator set of the current node. """ - pool = current_app.config['bigchain_pool'] + pool = current_app.config["bigchain_pool"] with pool() as planet: validators = planet.get_validators() diff --git a/planetmint/web/websocket_dispatcher.py b/planetmint/web/websocket_dispatcher.py index fa53945..65e9940 100644 --- a/planetmint/web/websocket_dispatcher.py +++ b/planetmint/web/websocket_dispatcher.py @@ -15,7 +15,7 @@ class Dispatcher: This class implements a simple publish/subscribe pattern. """ - def __init__(self, event_source, type='tx'): + def __init__(self, event_source, type="tx"): """Create a new instance. Args: @@ -49,20 +49,18 @@ class Dispatcher: @staticmethod def simplified_block(block): txids = [] - for tx in block['transactions']: + for tx in block["transactions"]: txids.append(tx.id) - return {'height': block['height'], 'hash': block['hash'], 'transaction_ids': txids} + return {"height": block["height"], "hash": block["hash"], "transaction_ids": txids} @staticmethod def eventify_block(block): - for tx in block['transactions']: + for tx in block["transactions"]: if tx.asset: - asset_id = tx.asset.get('id', tx.id) + asset_id = tx.asset.get("id", tx.id) else: asset_id = tx.id - yield {'height': block['height'], - 'asset_id': asset_id, - 'transaction_id': tx.id} + yield {"height": block["height"], "asset_id": asset_id, "transaction_id": tx.id} async def publish(self): """Publish new events to the subscribers.""" @@ -77,9 +75,9 @@ class Dispatcher: if isinstance(event, str): str_buffer.append(event) elif event.type == EventTypes.BLOCK_VALID: - if self.type == 'tx': + if self.type == "tx": 
str_buffer = map(json.dumps, self.eventify_block(event.data)) - elif self.type == 'blk': + elif self.type == "blk": str_buffer = [json.dumps(self.simplified_block(event.data))] else: return diff --git a/planetmint/web/websocket_server.py b/planetmint/web/websocket_server.py index 029c2f0..3e09a59 100644 --- a/planetmint/web/websocket_server.py +++ b/planetmint/web/websocket_server.py @@ -29,8 +29,8 @@ from planetmint.web.websocket_dispatcher import Dispatcher logger = logging.getLogger(__name__) -EVENTS_ENDPOINT = '/api/v1/streams/valid_transactions' -EVENTS_ENDPOINT_BLOCKS = '/api/v1/streams/valid_blocks' +EVENTS_ENDPOINT = "/api/v1/streams/valid_transactions" +EVENTS_ENDPOINT_BLOCKS = "/api/v1/streams/valid_blocks" def _multiprocessing_to_asyncio(in_queue, out_queue1, out_queue2, loop): @@ -51,60 +51,60 @@ def _multiprocessing_to_asyncio(in_queue, out_queue1, out_queue2, loop): async def websocket_tx_handler(request): """Handle a new socket connection.""" - logger.debug('New TX websocket connection.') + logger.debug("New TX websocket connection.") websocket = aiohttp.web.WebSocketResponse() await websocket.prepare(request) uuid = uuid4() - request.app['tx_dispatcher'].subscribe(uuid, websocket) + request.app["tx_dispatcher"].subscribe(uuid, websocket) while True: # Consume input buffer try: msg = await websocket.receive() except RuntimeError as e: - logger.debug('Websocket exception: %s', str(e)) + logger.debug("Websocket exception: %s", str(e)) break except CancelledError: - logger.debug('Websocket closed') + logger.debug("Websocket closed") break if msg.type == aiohttp.WSMsgType.CLOSED: - logger.debug('Websocket closed') + logger.debug("Websocket closed") break elif msg.type == aiohttp.WSMsgType.ERROR: - logger.debug('Websocket exception: %s', websocket.exception()) + logger.debug("Websocket exception: %s", websocket.exception()) break - request.app['tx_dispatcher'].unsubscribe(uuid) + request.app["tx_dispatcher"].unsubscribe(uuid) return websocket async def 
websocket_blk_handler(request): """Handle a new socket connection.""" - logger.debug('New BLK websocket connection.') + logger.debug("New BLK websocket connection.") websocket = aiohttp.web.WebSocketResponse() await websocket.prepare(request) uuid = uuid4() - request.app['blk_dispatcher'].subscribe(uuid, websocket) + request.app["blk_dispatcher"].subscribe(uuid, websocket) while True: # Consume input buffer try: msg = await websocket.receive() except RuntimeError as e: - logger.debug('Websocket exception: %s', str(e)) + logger.debug("Websocket exception: %s", str(e)) break except CancelledError: - logger.debug('Websocket closed') + logger.debug("Websocket closed") break if msg.type == aiohttp.WSMsgType.CLOSED: - logger.debug('Websocket closed') + logger.debug("Websocket closed") break elif msg.type == aiohttp.WSMsgType.ERROR: - logger.debug('Websocket exception: %s', websocket.exception()) + logger.debug("Websocket exception: %s", websocket.exception()) break - request.app['blk_dispatcher'].unsubscribe(uuid) + request.app["blk_dispatcher"].unsubscribe(uuid) return websocket @@ -115,16 +115,16 @@ def init_app(tx_source, blk_source, *, loop=None): An aiohttp application. 
""" - blk_dispatcher = Dispatcher(blk_source, 'blk') - tx_dispatcher = Dispatcher(tx_source, 'tx') + blk_dispatcher = Dispatcher(blk_source, "blk") + tx_dispatcher = Dispatcher(tx_source, "tx") # Schedule the dispatcher - loop.create_task(blk_dispatcher.publish(), name='blk') - loop.create_task(tx_dispatcher.publish(), name='tx') + loop.create_task(blk_dispatcher.publish(), name="blk") + loop.create_task(tx_dispatcher.publish(), name="tx") app = aiohttp.web.Application(loop=loop) - app['tx_dispatcher'] = tx_dispatcher - app['blk_dispatcher'] = blk_dispatcher + app["tx_dispatcher"] = tx_dispatcher + app["blk_dispatcher"] = blk_dispatcher app.router.add_get(EVENTS_ENDPOINT, websocket_tx_handler) app.router.add_get(EVENTS_ENDPOINT_BLOCKS, websocket_blk_handler) return app @@ -139,13 +139,12 @@ def start(sync_event_source, loop=None): tx_source = asyncio.Queue(loop=loop) blk_source = asyncio.Queue(loop=loop) - bridge = threading.Thread(target=_multiprocessing_to_asyncio, - args=(sync_event_source, tx_source, blk_source, loop), - daemon=True) + bridge = threading.Thread( + target=_multiprocessing_to_asyncio, args=(sync_event_source, tx_source, blk_source, loop), daemon=True + ) bridge.start() app = init_app(tx_source, blk_source, loop=loop) - aiohttp.web.run_app(app, - host=Config().get()['wsserver']['host'], - port=Config().get()['wsserver']['port'], - loop=loop) + aiohttp.web.run_app( + app, host=Config().get()["wsserver"]["host"], port=Config().get()["wsserver"]["port"], loop=loop + ) diff --git a/.ci/entrypoint.sh b/scripts/entrypoint.sh similarity index 100% rename from .ci/entrypoint.sh rename to scripts/entrypoint.sh diff --git a/setup.cfg b/setup.cfg index cdec23f..0eb958c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -4,6 +4,3 @@ test=pytest [coverage:run] source = . 
omit = *test* - -[flake8] -max_line_length = 119 diff --git a/setup.py b/setup.py index 87a9455..a42a6d5 100644 --- a/setup.py +++ b/setup.py @@ -89,21 +89,12 @@ docs_require = [ check_setuptools_features() -dev_require = [ - "ipdb", - "ipython", - "watchdog", - "logging_tree", - "pre-commit", - "twine", - "ptvsd" -] +dev_require = ["ipdb", "ipython", "watchdog", "logging_tree", "pre-commit", "twine", "ptvsd"] tests_require = [ "coverage", "pep8", - "flake8", - "flake8-quotes==0.8.1", + "black", "hypothesis>=5.3.0", "pytest>=3.0.0", "pytest-cov==2.8.1", @@ -116,27 +107,27 @@ tests_require = [ ] + docs_require install_requires = [ - 'chardet==3.0.4', - 'aiohttp==3.8.1', - 'abci==0.8.3', - 'planetmint-cryptoconditions>=0.9.9', - 'flask-cors==3.0.10', - 'flask-restful==0.3.9', - 'flask==2.1.2', - 'gunicorn==20.1.0', - 'jsonschema==3.2.0', - 'logstats==0.3.0', - 'packaging>=20.9', + "chardet==3.0.4", + "aiohttp==3.8.1", + "abci==0.8.3", + "planetmint-cryptoconditions>=0.9.9", + "flask-cors==3.0.10", + "flask-restful==0.3.9", + "flask==2.1.2", + "gunicorn==20.1.0", + "jsonschema==3.2.0", + "logstats==0.3.0", + "packaging>=20.9", # TODO Consider not installing the db drivers, or putting them in extras. 
- 'pymongo==3.11.4', - 'tarantool==0.7.1', - 'python-rapidjson==1.0', - 'pyyaml==5.4.1', - 'requests==2.25.1', - 'setproctitle==1.2.2', - 'werkzeug==2.0.3', - 'nest-asyncio==1.5.5', - 'protobuf==3.20.1' + "pymongo==3.11.4", + "tarantool==0.7.1", + "python-rapidjson==1.0", + "pyyaml==5.4.1", + "requests==2.25.1", + "setproctitle==1.2.2", + "werkzeug==2.0.3", + "nest-asyncio==1.5.5", + "protobuf==3.20.1", ] setup( diff --git a/tests/assets/test_digital_assets.py b/tests/assets/test_digital_assets.py index 3a5b88b..cce224d 100644 --- a/tests/assets/test_digital_assets.py +++ b/tests/assets/test_digital_assets.py @@ -8,23 +8,22 @@ import random from planetmint.transactions.types.assets.create import Create from planetmint.transactions.types.assets.transfer import Transfer + def test_asset_transfer(b, signed_create_tx, user_pk, user_sk): - tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], - signed_create_tx.id) + tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], signed_create_tx.id) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([signed_create_tx]) assert tx_transfer_signed.validate(b) == tx_transfer_signed - assert tx_transfer_signed.asset['id'] == signed_create_tx.id + assert tx_transfer_signed.asset["id"] == signed_create_tx.id def test_validate_transfer_asset_id_mismatch(b, signed_create_tx, user_pk, user_sk): from planetmint.transactions.common.exceptions import AssetIdMismatch - tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], - signed_create_tx.id) - tx_transfer.asset['id'] = 'a' * 64 + tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], signed_create_tx.id) + tx_transfer.asset["id"] = "a" * 64 tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([signed_create_tx]) @@ -35,6 +34,7 @@ def test_validate_transfer_asset_id_mismatch(b, signed_create_tx, user_pk, user_ def 
test_get_asset_id_create_transaction(alice, user_pk): from planetmint.models import Transaction + tx_create = Create.generate([alice.public_key], [([user_pk], 1)]) assert Transaction.get_asset_id(tx_create) == tx_create.id @@ -42,21 +42,18 @@ def test_get_asset_id_create_transaction(alice, user_pk): def test_get_asset_id_transfer_transaction(b, signed_create_tx, user_pk): from planetmint.models import Transaction - tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], - signed_create_tx.id) + tx_transfer = Transfer.generate(signed_create_tx.to_inputs(), [([user_pk], 1)], signed_create_tx.id) asset_id = Transaction.get_asset_id(tx_transfer) - assert asset_id == tx_transfer.asset['id'] + assert asset_id == tx_transfer.asset["id"] def test_asset_id_mismatch(alice, user_pk): from planetmint.models import Transaction from planetmint.transactions.common.exceptions import AssetIdMismatch - tx1 = Create.generate([alice.public_key], [([user_pk], 1)], - metadata={'msg': random.random()}) + tx1 = Create.generate([alice.public_key], [([user_pk], 1)], metadata={"msg": random.random()}) tx1.sign([alice.private_key]) - tx2 = Create.generate([alice.public_key], [([user_pk], 1)], - metadata={'msg': random.random()}) + tx2 = Create.generate([alice.public_key], [([user_pk], 1)], metadata={"msg": random.random()}) tx2.sign([alice.private_key]) with pytest.raises(AssetIdMismatch): diff --git a/tests/assets/test_divisible_assets.py b/tests/assets/test_divisible_assets.py index 5919025..cd8e374 100644 --- a/tests/assets/test_divisible_assets.py +++ b/tests/assets/test_divisible_assets.py @@ -19,7 +19,7 @@ from planetmint.transactions.common.exceptions import DoubleSpend # Single owners_after def test_single_in_single_own_single_out_single_own_create(alice, user_pk, b): - tx = Create.generate([alice.public_key], [([user_pk], 100)], asset={'name': random.random()}) + tx = Create.generate([alice.public_key], [([user_pk], 100)], asset={"name": random.random()}) 
tx_signed = tx.sign([alice.private_key]) assert tx_signed.validate(b) == tx_signed @@ -35,8 +35,7 @@ def test_single_in_single_own_single_out_single_own_create(alice, user_pk, b): # Single owners_after per output def test_single_in_single_own_multiple_out_single_own_create(alice, user_pk, b): - tx = Create.generate([alice.public_key], [([user_pk], 50), ([user_pk], 50)], - asset={'name': random.random()}) + tx = Create.generate([alice.public_key], [([user_pk], 50), ([user_pk], 50)], asset={"name": random.random()}) tx_signed = tx.sign([alice.private_key]) assert tx_signed.validate(b) == tx_signed @@ -53,7 +52,7 @@ def test_single_in_single_own_multiple_out_single_own_create(alice, user_pk, b): # Multiple owners_after def test_single_in_single_own_single_out_multiple_own_create(alice, user_pk, b): - tx = Create.generate([alice.public_key], [([user_pk, user_pk], 100)], asset={'name': random.random()}) + tx = Create.generate([alice.public_key], [([user_pk, user_pk], 100)], asset={"name": random.random()}) tx_signed = tx.sign([alice.private_key]) assert tx_signed.validate(b) == tx_signed @@ -61,8 +60,8 @@ def test_single_in_single_own_single_out_multiple_own_create(alice, user_pk, b): assert tx_signed.outputs[0].amount == 100 output = tx_signed.outputs[0].to_dict() - assert 'subconditions' in output['condition']['details'] - assert len(output['condition']['details']['subconditions']) == 2 + assert "subconditions" in output["condition"]["details"] + assert len(output["condition"]["details"]["subconditions"]) == 2 assert len(tx_signed.inputs) == 1 @@ -75,8 +74,9 @@ def test_single_in_single_own_single_out_multiple_own_create(alice, user_pk, b): # owners_after def test_single_in_single_own_multiple_out_mix_own_create(alice, user_pk, b): - tx = Create.generate([alice.public_key], [([user_pk], 50), ([user_pk, user_pk], 50)], - asset={'name': random.random()}) + tx = Create.generate( + [alice.public_key], [([user_pk], 50), ([user_pk, user_pk], 50)], asset={"name": 
random.random()} + ) tx_signed = tx.sign([alice.private_key]) assert tx_signed.validate(b) == tx_signed @@ -85,8 +85,8 @@ def test_single_in_single_own_multiple_out_mix_own_create(alice, user_pk, b): assert tx_signed.outputs[1].amount == 50 output_cid1 = tx_signed.outputs[1].to_dict() - assert 'subconditions' in output_cid1['condition']['details'] - assert len(output_cid1['condition']['details']['subconditions']) == 2 + assert "subconditions" in output_cid1["condition"]["details"] + assert len(output_cid1["condition"]["details"]["subconditions"]) == 2 assert len(tx_signed.inputs) == 1 @@ -95,11 +95,10 @@ def test_single_in_single_own_multiple_out_mix_own_create(alice, user_pk, b): # Single input # Multiple owners_before # Output combinations already tested above -def test_single_in_multiple_own_single_out_single_own_create(alice, b, user_pk, - user_sk): +def test_single_in_multiple_own_single_out_single_own_create(alice, b, user_pk, user_sk): from planetmint.transactions.common.utils import _fulfillment_to_details - tx = Create.generate([alice.public_key, user_pk], [([user_pk], 100)], asset={'name': random.random()}) + tx = Create.generate([alice.public_key, user_pk], [([user_pk], 100)], asset={"name": random.random()}) tx_signed = tx.sign([alice.private_key, user_sk]) assert tx_signed.validate(b) == tx_signed assert len(tx_signed.outputs) == 1 @@ -107,8 +106,8 @@ def test_single_in_multiple_own_single_out_single_own_create(alice, b, user_pk, assert len(tx_signed.inputs) == 1 ffill = _fulfillment_to_details(tx_signed.inputs[0].fulfillment) - assert 'subconditions' in ffill - assert len(ffill['subconditions']) == 2 + assert "subconditions" in ffill + assert len(ffill["subconditions"]) == 2 # TRANSFER divisible asset @@ -116,16 +115,14 @@ def test_single_in_multiple_own_single_out_single_own_create(alice, b, user_pk, # Single owners_before # Single output # Single owners_after -def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk, - 
user_sk): +def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk, user_sk): # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -141,17 +138,16 @@ def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk, # Single owners_before # Multiple output # Single owners_after -def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk, - user_sk): +def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk, user_sk): # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), - [([alice.public_key], 50), ([alice.public_key], 50)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs(), [([alice.public_key], 50), ([alice.public_key], 50)], asset_id=tx_create.id + ) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -168,17 +164,16 @@ def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk # Single owners_before # Single output # Multiple owners_after -def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk, - user_sk): 
+def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk, user_sk): # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), - [([alice.public_key, alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs(), [([alice.public_key, alice.public_key], 100)], asset_id=tx_create.id + ) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -188,8 +183,8 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk assert tx_transfer_signed.outputs[0].amount == 100 condition = tx_transfer_signed.outputs[0].to_dict() - assert 'subconditions' in condition['condition']['details'] - assert len(condition['condition']['details']['subconditions']) == 2 + assert "subconditions" in condition["condition"]["details"] + assert len(condition["condition"]["details"]["subconditions"]) == 2 assert len(tx_transfer_signed.inputs) == 1 b.store_bulk_transactions([tx_transfer_signed]) @@ -203,17 +198,18 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk # Multiple outputs # Mix: one output with a single owners_after, one output with multiple # owners_after -def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, - user_sk): +def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, user_sk): # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - 
tx_transfer = Transfer.generate(tx_create.to_inputs(), - [([alice.public_key], 50), ([alice.public_key, alice.public_key], 50)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs(), + [([alice.public_key], 50), ([alice.public_key, alice.public_key], 50)], + asset_id=tx_create.id, + ) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -224,8 +220,8 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, assert tx_transfer_signed.outputs[1].amount == 50 output_cid1 = tx_transfer_signed.outputs[1].to_dict() - assert 'subconditions' in output_cid1['condition']['details'] - assert len(output_cid1['condition']['details']['subconditions']) == 2 + assert "subconditions" in output_cid1["condition"]["details"] + assert len(output_cid1["condition"]["details"]["subconditions"]) == 2 assert len(tx_transfer_signed.inputs) == 1 @@ -239,18 +235,17 @@ def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, # Multiple owners_before # Single output # Single owners_after -def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk, - user_sk): +def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk, user_sk): from planetmint.transactions.common.utils import _fulfillment_to_details # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([alice.public_key, user_pk], 100)], - asset={'name': random.random()}) + tx_create = Create.generate( + [alice.public_key], [([alice.public_key, user_pk], 100)], asset={"name": random.random()} + ) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk]) 
b.store_bulk_transactions([tx_create_signed]) @@ -261,8 +256,8 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk assert len(tx_transfer_signed.inputs) == 1 ffill = _fulfillment_to_details(tx_transfer_signed.inputs[0].fulfillment) - assert 'subconditions' in ffill - assert len(ffill['subconditions']) == 2 + assert "subconditions" in ffill + assert len(ffill["subconditions"]) == 2 b.store_bulk_transactions([tx_transfer_signed]) with pytest.raises(DoubleSpend): @@ -274,16 +269,15 @@ def test_single_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk # Single owners_before per input # Single output # Single owners_after -def test_multiple_in_single_own_single_out_single_own_transfer(alice, b, user_pk, - user_sk): +def test_multiple_in_single_own_single_out_single_own_transfer(alice, b, user_pk, user_sk): # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 50), ([user_pk], 50)], - asset={'name': random.random()}) + tx_create = Create.generate( + [alice.public_key], [([user_pk], 50), ([user_pk], 50)], asset={"name": random.random()} + ) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -303,19 +297,19 @@ def test_multiple_in_single_own_single_out_single_own_transfer(alice, b, user_pk # Multiple owners_before per input # Single output # Single owners_after -def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk, - user_sk): +def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_pk, user_sk): from planetmint.transactions.common.utils import _fulfillment_to_details # CREATE divisible asset - tx_create = 
Create.generate([alice.public_key], [([user_pk, alice.public_key], 50), - ([user_pk, alice.public_key], 50)], - asset={'name': random.random()}) + tx_create = Create.generate( + [alice.public_key], + [([user_pk, alice.public_key], 50), ([user_pk, alice.public_key], 50)], + asset={"name": random.random()}, + ) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -327,10 +321,10 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_ ffill_fid0 = _fulfillment_to_details(tx_transfer_signed.inputs[0].fulfillment) ffill_fid1 = _fulfillment_to_details(tx_transfer_signed.inputs[1].fulfillment) - assert 'subconditions' in ffill_fid0 - assert 'subconditions' in ffill_fid1 - assert len(ffill_fid0['subconditions']) == 2 - assert len(ffill_fid1['subconditions']) == 2 + assert "subconditions" in ffill_fid0 + assert "subconditions" in ffill_fid1 + assert len(ffill_fid0["subconditions"]) == 2 + assert len(ffill_fid1["subconditions"]) == 2 b.store_bulk_transactions([tx_transfer_signed]) with pytest.raises(DoubleSpend): @@ -343,18 +337,17 @@ def test_multiple_in_multiple_own_single_out_single_own_transfer(alice, b, user_ # owners_before # Single output # Single owners_after -def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk, - user_sk): +def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk, user_sk): from planetmint.transactions.common.utils import _fulfillment_to_details # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 50), ([user_pk, alice.public_key], 50)], - asset={'name': random.random()}) + tx_create = 
Create.generate( + [alice.public_key], [([user_pk], 50), ([user_pk, alice.public_key], 50)], asset={"name": random.random()} + ) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -365,9 +358,9 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk ffill_fid0 = _fulfillment_to_details(tx_transfer_signed.inputs[0].fulfillment) ffill_fid1 = _fulfillment_to_details(tx_transfer_signed.inputs[1].fulfillment) - assert 'subconditions' not in ffill_fid0 - assert 'subconditions' in ffill_fid1 - assert len(ffill_fid1['subconditions']) == 2 + assert "subconditions" not in ffill_fid0 + assert "subconditions" in ffill_fid1 + assert len(ffill_fid1["subconditions"]) == 2 b.store_bulk_transactions([tx_transfer_signed]) with pytest.raises(DoubleSpend): @@ -381,18 +374,18 @@ def test_muiltiple_in_mix_own_multiple_out_single_own_transfer(alice, b, user_pk # Multiple outputs # Mix: one output with a single owners_after, one output with multiple # owners_after -def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, - user_sk): +def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, user_sk): from planetmint.transactions.common.utils import _fulfillment_to_details # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 50), ([user_pk, alice.public_key], 50)], - asset={'name': random.random()}) + tx_create = Create.generate( + [alice.public_key], [([user_pk], 50), ([user_pk, alice.public_key], 50)], asset={"name": random.random()} + ) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = 
Transfer.generate(tx_create.to_inputs(), - [([alice.public_key], 50), ([alice.public_key, user_pk], 50)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs(), [([alice.public_key], 50), ([alice.public_key, user_pk], 50)], asset_id=tx_create.id + ) tx_transfer_signed = tx_transfer.sign([alice.private_key, user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -404,15 +397,15 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, cond_cid0 = tx_transfer_signed.outputs[0].to_dict() cond_cid1 = tx_transfer_signed.outputs[1].to_dict() - assert 'subconditions' not in cond_cid0['condition']['details'] - assert 'subconditions' in cond_cid1['condition']['details'] - assert len(cond_cid1['condition']['details']['subconditions']) == 2 + assert "subconditions" not in cond_cid0["condition"]["details"] + assert "subconditions" in cond_cid1["condition"]["details"] + assert len(cond_cid1["condition"]["details"]["subconditions"]) == 2 ffill_fid0 = _fulfillment_to_details(tx_transfer_signed.inputs[0].fulfillment) ffill_fid1 = _fulfillment_to_details(tx_transfer_signed.inputs[1].fulfillment) - assert 'subconditions' not in ffill_fid0 - assert 'subconditions' in ffill_fid1 - assert len(ffill_fid1['subconditions']) == 2 + assert "subconditions" not in ffill_fid0 + assert "subconditions" in ffill_fid1 + assert len(ffill_fid1["subconditions"]) == 2 b.store_bulk_transactions([tx_transfer_signed]) with pytest.raises(DoubleSpend): @@ -429,26 +422,24 @@ def test_multiple_in_different_transactions(alice, b, user_pk, user_sk): # CREATE divisible asset # `b` creates a divisible asset and assigns 50 shares to `b` and # 50 shares to `user_pk` - tx_create = Create.generate([alice.public_key], [([user_pk], 50), ([alice.public_key], 50)], - asset={'name': random.random()}) + tx_create = Create.generate( + [alice.public_key], [([user_pk], 50), ([alice.public_key], 50)], asset={"name": random.random()} + ) tx_create_signed = 
tx_create.sign([alice.private_key]) # TRANSFER divisible asset # `b` transfers its 50 shares to `user_pk` # after this transaction `user_pk` will have a total of 100 shares # split across two different transactions - tx_transfer1 = Transfer.generate(tx_create.to_inputs([1]), - [([user_pk], 50)], - asset_id=tx_create.id) + tx_transfer1 = Transfer.generate(tx_create.to_inputs([1]), [([user_pk], 50)], asset_id=tx_create.id) tx_transfer1_signed = tx_transfer1.sign([alice.private_key]) # TRANSFER # `user_pk` combines two different transaction with 50 shares each and # transfers a total of 100 shares back to `b` - tx_transfer2 = Transfer.generate(tx_create.to_inputs([0]) + - tx_transfer1.to_inputs([0]), - [([alice.private_key], 100)], - asset_id=tx_create.id) + tx_transfer2 = Transfer.generate( + tx_create.to_inputs([0]) + tx_transfer1.to_inputs([0]), [([alice.private_key], 100)], asset_id=tx_create.id + ) tx_transfer2_signed = tx_transfer2.sign([user_sk]) b.store_bulk_transactions([tx_create_signed, tx_transfer1_signed]) @@ -471,15 +462,14 @@ def test_amount_error_transfer(alice, b, user_pk, user_sk): from planetmint.transactions.common.exceptions import AmountError # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk], 100)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) b.store_bulk_transactions([tx_create_signed]) # TRANSFER # output amount less than input amount - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 50)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 50)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([user_sk]) with pytest.raises(AmountError): @@ -487,8 +477,7 @@ def test_amount_error_transfer(alice, b, user_pk, user_sk): # TRANSFER # output amount greater than input amount - 
tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 101)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 101)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([user_sk]) with pytest.raises(AmountError): @@ -504,13 +493,11 @@ def test_threshold_same_public_key(alice, b, user_pk, user_sk): # that does not mean that the code shouldn't work. # CREATE divisible asset - tx_create = Create.generate([alice.public_key], [([user_pk, user_pk], 100)], - asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk, user_pk], 100)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) # TRANSFER - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 100)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([user_sk, user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -524,14 +511,14 @@ def test_threshold_same_public_key(alice, b, user_pk, user_sk): def test_sum_amount(alice, b, user_pk, user_sk): # CREATE divisible asset with 3 outputs with amount 1 - tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1), ([user_pk], 1)], - asset={'name': random.random()}) + tx_create = Create.generate( + [alice.public_key], [([user_pk], 1), ([user_pk], 1), ([user_pk], 1)], asset={"name": random.random()} + ) tx_create_signed = tx_create.sign([alice.private_key]) # create a transfer transaction with one output and check if the amount # is 3 - tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 3)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(tx_create.to_inputs(), [([alice.public_key], 3)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) @@ -548,14 
+535,16 @@ def test_sum_amount(alice, b, user_pk, user_sk): def test_divide(alice, b, user_pk, user_sk): # CREATE divisible asset with 1 output with amount 3 - tx_create = Create.generate([alice.public_key], [([user_pk], 3)], asset={'name': random.random()}) + tx_create = Create.generate([alice.public_key], [([user_pk], 3)], asset={"name": random.random()}) tx_create_signed = tx_create.sign([alice.private_key]) # create a transfer transaction with 3 outputs and check if the amount # of each output is 1 - tx_transfer = Transfer.generate(tx_create.to_inputs(), - [([alice.public_key], 1), ([alice.public_key], 1), ([alice.public_key], 1)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs(), + [([alice.public_key], 1), ([alice.public_key], 1), ([alice.public_key], 1)], + asset_id=tx_create.id, + ) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_create_signed]) diff --git a/tests/assets/test_zenroom_signing.py b/tests/assets/test_zenroom_signing.py index 0a9dc8e..0b1a9f3 100644 --- a/tests/assets/test_zenroom_signing.py +++ b/tests/assets/test_zenroom_signing.py @@ -63,22 +63,10 @@ def test_zenroom_signing(): alice = json.loads(zencode_exec(GENERATE_KEYPAIR).output)["keyring"] bob = json.loads(zencode_exec(GENERATE_KEYPAIR).output)["keyring"] - zen_public_keys = json.loads( - zencode_exec( - SK_TO_PK.format("Alice"), keys=json.dumps({"keyring": alice}) - ).output - ) - zen_public_keys.update( - json.loads( - zencode_exec( - SK_TO_PK.format("Bob"), keys=json.dumps({"keyring": bob}) - ).output - ) - ) + zen_public_keys = json.loads(zencode_exec(SK_TO_PK.format("Alice"), keys=json.dumps({"keyring": alice})).output) + zen_public_keys.update(json.loads(zencode_exec(SK_TO_PK.format("Bob"), keys=json.dumps({"keyring": bob})).output)) - zenroomscpt = ZenroomSha256( - script=FULFILL_SCRIPT, data=ZENROOM_DATA, keys=zen_public_keys - ) + zenroomscpt = ZenroomSha256(script=FULFILL_SCRIPT, data=ZENROOM_DATA, 
keys=zen_public_keys) print(f"zenroom is: {zenroomscpt.script}") # CRYPTO-CONDITIONS: generate the condition uri @@ -107,11 +95,7 @@ def test_zenroom_signing(): biolabs.public_key, ], } - metadata = { - "result": { - "output": ["ok"] - } - } + metadata = {"result": {"output": ["ok"]}} token_creation_tx = { "operation": "CREATE", "asset": HOUSE_ASSETS, diff --git a/tests/backend/tarantool/test_queries.py b/tests/backend/tarantool/test_queries.py index 609efef..f613640 100644 --- a/tests/backend/tarantool/test_queries.py +++ b/tests/backend/tarantool/test_queries.py @@ -44,11 +44,11 @@ def test_write_assets(db_conn): from planetmint.backend.tarantool import query assets = [ - {'id': '1', 'data': '1'}, - {'id': '2', 'data': '2'}, - {'id': '3', 'data': '3'}, + {"id": "1", "data": "1"}, + {"id": "2", "data": "2"}, + {"id": "3", "data": "3"}, # Duplicated id. Should not be written to the database - {'id': '1', 'data': '1'}, + {"id": "1", "data": "1"}, ] # write the assets @@ -66,9 +66,9 @@ def test_get_assets(db_conn): from planetmint.backend.tarantool import query assets = [ - ("1", '1', '1'), - ("2", '2', '2'), - ("3", '3', '3'), + ("1", "1", "1"), + ("2", "2", "2"), + ("3", "3", "3"), ] query.store_assets(assets=assets, connection=db_conn) @@ -77,7 +77,7 @@ def test_get_assets(db_conn): assert query.get_asset(asset_id=asset[2], connection=db_conn) -@pytest.mark.parametrize('table', ['assets', 'metadata']) +@pytest.mark.parametrize("table", ["assets", "metadata"]) def test_text_search(table): assert "PASS FOR NOW" @@ -164,11 +164,7 @@ def test_text_search(table): def test_write_metadata(db_conn): from planetmint.backend.tarantool import query - metadata = [ - {'id': "1", 'data': '1'}, - {'id': "2", 'data': '2'}, - {'id': "3", 'data': '3'} - ] + metadata = [{"id": "1", "data": "1"}, {"id": "2", "data": "2"}, {"id": "3", "data": "3"}] # write the assets query.store_metadatas(connection=db_conn, metadata=metadata) @@ -188,8 +184,8 @@ def test_get_metadata(db_conn): from 
planetmint.backend.tarantool import query metadata = [ - {'id': "dd86682db39e4b424df0eec1413cfad65488fd48712097c5d865ca8e8e059b64", 'metadata': None}, - {'id': "55a2303e3bcd653e4b5bd7118d39c0e2d48ee2f18e22fbcf64e906439bdeb45d", 'metadata': {'key': 'value'}}, + {"id": "dd86682db39e4b424df0eec1413cfad65488fd48712097c5d865ca8e8e059b64", "metadata": None}, + {"id": "55a2303e3bcd653e4b5bd7118d39c0e2d48ee2f18e22fbcf64e906439bdeb45d", "metadata": {"key": "value"}}, ] # conn.db.metadata.insert_many(deepcopy(metadata), ordered=False) @@ -243,42 +239,35 @@ def test_get_spending_transactions_multiple_inputs(db_conn): tx1 = Create.generate([alice_pk], out).sign([alice_sk]) inputs1 = tx1.to_inputs() - tx2 = Transfer.generate([inputs1[0]], - [([alice_pk], 6), ([bob_pk], 3)], - tx1.id).sign([alice_sk]) + tx2 = Transfer.generate([inputs1[0]], [([alice_pk], 6), ([bob_pk], 3)], tx1.id).sign([alice_sk]) inputs2 = tx2.to_inputs() - tx3 = Transfer.generate([inputs2[0]], - [([bob_pk], 3), ([carol_pk], 3)], - tx1.id).sign([alice_sk]) + tx3 = Transfer.generate([inputs2[0]], [([bob_pk], 3), ([carol_pk], 3)], tx1.id).sign([alice_sk]) inputs3 = tx3.to_inputs() - tx4 = Transfer.generate([inputs2[1], inputs3[0]], - [([carol_pk], 6)], - tx1.id).sign([bob_sk]) + tx4 = Transfer.generate([inputs2[1], inputs3[0]], [([carol_pk], 6)], tx1.id).sign([bob_sk]) txns = [deepcopy(tx.to_dict()) for tx in [tx1, tx2, tx3, tx4]] query.store_transactions(signed_transactions=txns, connection=db_conn) links = [ - ({'transaction_id': tx2.id, 'output_index': 0}, 1, [tx3.id]), - ({'transaction_id': tx2.id, 'output_index': 1}, 1, [tx4.id]), - ({'transaction_id': tx3.id, 'output_index': 0}, 1, [tx4.id]), - ({'transaction_id': tx3.id, 'output_index': 1}, 0, None), + ({"transaction_id": tx2.id, "output_index": 0}, 1, [tx3.id]), + ({"transaction_id": tx2.id, "output_index": 1}, 1, [tx4.id]), + ({"transaction_id": tx3.id, "output_index": 0}, 1, [tx4.id]), + ({"transaction_id": tx3.id, "output_index": 1}, 0, None), ] for 
li, num, match in links: txns = list(query.get_spending_transactions(connection=db_conn, inputs=[li])) assert len(txns) == num if len(txns): - assert [tx['id'] for tx in txns] == match + assert [tx["id"] for tx in txns] == match def test_store_block(db_conn): from planetmint.lib import Block from planetmint.backend.tarantool import query - block = Block(app_hash='random_utxo', - height=3, - transactions=[]) + + block = Block(app_hash="random_utxo", height=3, transactions=[]) query.store_block(connection=db_conn, block=block._asdict()) # block = query.get_block(connection=db_conn) blocks = db_conn.run(db_conn.space("blocks").select([])) @@ -289,14 +278,12 @@ def test_get_block(db_conn): from planetmint.lib import Block from planetmint.backend.tarantool import query - block = Block(app_hash='random_utxo', - height=3, - transactions=[]) + block = Block(app_hash="random_utxo", height=3, transactions=[]) query.store_block(connection=db_conn, block=block._asdict()) block = dict(query.get_block(connection=db_conn, block_id=3)) - assert block['height'] == 3 + assert block["height"] == 3 # def test_delete_zero_unspent_outputs(db_context, utxoset): @@ -428,7 +415,7 @@ def test_validator_update(db_conn): from planetmint.backend.tarantool import query def gen_validator_update(height): - return {'validators': [], 'height': height, 'election_id': f'election_id_at_height_{height}'} + return {"validators": [], "height": height, "election_id": f"election_id_at_height_{height}"} # return {'data': 'somedata', 'height': height, 'election_id': f'election_id_at_height_{height}'} for i in range(1, 100, 10): @@ -436,53 +423,56 @@ def test_validator_update(db_conn): query.store_validator_set(conn=db_conn, validators_update=value) v1 = query.get_validator_set(connection=db_conn, height=8) - assert v1['height'] == 1 + assert v1["height"] == 1 v41 = query.get_validator_set(connection=db_conn, height=50) - assert v41['height'] == 41 + assert v41["height"] == 41 v91 = 
query.get_validator_set(connection=db_conn) - assert v91['height'] == 91 + assert v91["height"] == 91 -@pytest.mark.parametrize('description,stores,expected', [ - ( - 'Query empty database.', +@pytest.mark.parametrize( + "description,stores,expected", + [ + ( + "Query empty database.", [], None, - ), - ( - 'Store one chain with the default value for `is_synced`.', + ), + ( + "Store one chain with the default value for `is_synced`.", [ - {'height': 0, 'chain_id': 'some-id'}, + {"height": 0, "chain_id": "some-id"}, ], - {'height': 0, 'chain_id': 'some-id', 'is_synced': True}, - ), - ( - 'Store one chain with a custom value for `is_synced`.', + {"height": 0, "chain_id": "some-id", "is_synced": True}, + ), + ( + "Store one chain with a custom value for `is_synced`.", [ - {'height': 0, 'chain_id': 'some-id', 'is_synced': False}, + {"height": 0, "chain_id": "some-id", "is_synced": False}, ], - {'height': 0, 'chain_id': 'some-id', 'is_synced': False}, - ), - ( - 'Store one chain, then update it.', + {"height": 0, "chain_id": "some-id", "is_synced": False}, + ), + ( + "Store one chain, then update it.", [ - {'height': 0, 'chain_id': 'some-id', 'is_synced': True}, - {'height': 0, 'chain_id': 'new-id', 'is_synced': False}, + {"height": 0, "chain_id": "some-id", "is_synced": True}, + {"height": 0, "chain_id": "new-id", "is_synced": False}, ], - {'height': 0, 'chain_id': 'new-id', 'is_synced': False}, - ), - ( - 'Store a chain, update it, store another chain.', + {"height": 0, "chain_id": "new-id", "is_synced": False}, + ), + ( + "Store a chain, update it, store another chain.", [ - {'height': 0, 'chain_id': 'some-id', 'is_synced': True}, - {'height': 0, 'chain_id': 'some-id', 'is_synced': False}, - {'height': 10, 'chain_id': 'another-id', 'is_synced': True}, + {"height": 0, "chain_id": "some-id", "is_synced": True}, + {"height": 0, "chain_id": "some-id", "is_synced": False}, + {"height": 10, "chain_id": "another-id", "is_synced": True}, ], - {'height': 10, 'chain_id': 
'another-id', 'is_synced': True}, - ), -]) + {"height": 10, "chain_id": "another-id", "is_synced": True}, + ), + ], +) def test_store_abci_chain(description, stores, expected, db_conn): from planetmint.backend.tarantool import query diff --git a/tests/backend/test_connection.py b/tests/backend/test_connection.py index e2d8a85..471b42c 100644 --- a/tests/backend/test_connection.py +++ b/tests/backend/test_connection.py @@ -9,13 +9,14 @@ import pytest def test_get_connection_raises_a_configuration_error(monkeypatch): from planetmint.transactions.common.exceptions import ConfigurationError from planetmint.backend.connection import connect + with pytest.raises(ConfigurationError): - connect('localhost', '1337', 'mydb', 'password', 'msaccess') + connect("localhost", "1337", "mydb", "password", "msaccess") with pytest.raises(ConfigurationError): # We need to force a misconfiguration here - monkeypatch.setattr('planetmint.backend.connection.BACKENDS', - {'catsandra': - 'planetmint.backend.meowmeow.Catsandra'}) + monkeypatch.setattr( + "planetmint.backend.connection.BACKENDS", {"catsandra": "planetmint.backend.meowmeow.Catsandra"} + ) - connect('localhost', '1337', 'mydb', 'password', 'catsandra') + connect("localhost", "1337", "mydb", "password", "catsandra") diff --git a/tests/backend/test_generics.py b/tests/backend/test_generics.py index da964ef..0613fd8 100644 --- a/tests/backend/test_generics.py +++ b/tests/backend/test_generics.py @@ -6,32 +6,40 @@ from pytest import mark, raises -@mark.parametrize('schema_func_name,args_qty', ( - ('create_database', 1), - ('create_tables', 1), - ('drop_database', 1), -)) +@mark.parametrize( + "schema_func_name,args_qty", + ( + ("create_database", 1), + ("create_tables", 1), + ("drop_database", 1), + ), +) def test_schema(schema_func_name, args_qty): from planetmint.backend import schema + schema_func = getattr(schema, schema_func_name) with raises(NotImplementedError): schema_func(None, *range(args_qty)) 
-@mark.parametrize('query_func_name,args_qty', ( - ('delete_transactions', 1), - ('get_txids_filtered', 1), - ('get_owned_ids', 1), - ('get_block', 1), - ('get_spent', 2), - ('get_spending_transactions', 1), - ('store_assets', 1), - ('get_asset', 1), - ('store_metadatas', 1), - ('get_metadata', 1), -)) +@mark.parametrize( + "query_func_name,args_qty", + ( + ("delete_transactions", 1), + ("get_txids_filtered", 1), + ("get_owned_ids", 1), + ("get_block", 1), + ("get_spent", 2), + ("get_spending_transactions", 1), + ("store_assets", 1), + ("get_asset", 1), + ("store_metadatas", 1), + ("get_metadata", 1), + ), +) def test_query(query_func_name, args_qty): from planetmint.backend import query + query_func = getattr(query, query_func_name) with raises(NotImplementedError): query_func(None, *range(args_qty)) diff --git a/tests/backend/test_utils.py b/tests/backend/test_utils.py index 93dcd79..7786165 100644 --- a/tests/backend/test_utils.py +++ b/tests/backend/test_utils.py @@ -11,7 +11,7 @@ import pytest @pytest.fixture def mock_module(): - return ModuleType('mock_module') + return ModuleType("mock_module") def test_module_dispatch_registers(mock_module): @@ -20,6 +20,7 @@ def test_module_dispatch_registers(mock_module): @singledispatch def dispatcher(t): pass + mock_module.dispatched = dispatcher mock_dispatch = module_dispatch_registrar(mock_module) @@ -36,6 +37,7 @@ def test_module_dispatch_dispatches(mock_module): @singledispatch def dispatcher(t): return False + mock_module.dispatched = dispatcher mock_dispatch = module_dispatch_registrar(mock_module) @@ -44,7 +46,7 @@ def test_module_dispatch_dispatches(mock_module): return True assert mock_module.dispatched(1) is False # Goes to dispatcher() - assert mock_module.dispatched('1') is True # Goes to dispatched() + assert mock_module.dispatched("1") is True # Goes to dispatched() def test_module_dispatch_errors_on_missing_func(mock_module): @@ -52,9 +54,11 @@ def 
test_module_dispatch_errors_on_missing_func(mock_module): module_dispatch_registrar, ModuleDispatchRegistrationError, ) + mock_dispatch = module_dispatch_registrar(mock_module) with pytest.raises(ModuleDispatchRegistrationError): + @mock_dispatch(str) def dispatched(): pass @@ -68,10 +72,12 @@ def test_module_dispatch_errors_on_non_dispatchable_func(mock_module): def dispatcher(): pass + mock_module.dispatched = dispatcher mock_dispatch = module_dispatch_registrar(mock_module) with pytest.raises(ModuleDispatchRegistrationError): + @mock_dispatch(str) def dispatched(): pass diff --git a/tests/commands/conftest.py b/tests/commands/conftest.py index 3746d06..2d72cd6 100644 --- a/tests/commands/conftest.py +++ b/tests/commands/conftest.py @@ -8,57 +8,62 @@ import pytest from planetmint.config import Config + @pytest.fixture def mock_run_configure(monkeypatch): from planetmint.commands import planetmint - monkeypatch.setattr(planetmint, 'run_configure', lambda *args, **kwargs: None) + + monkeypatch.setattr(planetmint, "run_configure", lambda *args, **kwargs: None) @pytest.fixture def mock_write_config(monkeypatch): from planetmint import config_utils - monkeypatch.setattr(config_utils, 'write_config', lambda *args: None) + + monkeypatch.setattr(config_utils, "write_config", lambda *args: None) @pytest.fixture def mock_db_init_with_existing_db(monkeypatch): from planetmint.commands import planetmint - monkeypatch.setattr(planetmint, '_run_init', lambda: None) + + monkeypatch.setattr(planetmint, "_run_init", lambda: None) @pytest.fixture def mock_processes_start(monkeypatch): from planetmint import start - monkeypatch.setattr(start, 'start', lambda *args: None) + + monkeypatch.setattr(start, "start", lambda *args: None) @pytest.fixture def mock_generate_key_pair(monkeypatch): - monkeypatch.setattr('planetmint.transactions.common.crypto.generate_key_pair', lambda: ('privkey', 'pubkey')) + monkeypatch.setattr("planetmint.transactions.common.crypto.generate_key_pair", 
lambda: ("privkey", "pubkey")) @pytest.fixture def mock_planetmint_backup_config(monkeypatch): _config = Config().get() - _config['database']['host'] = 'host' - _config['database']['port'] = 12345 - _config['database']['name'] = 'adbname' + _config["database"]["host"] = "host" + _config["database"]["port"] = 12345 + _config["database"]["name"] = "adbname" Config().set(_config) @pytest.fixture def run_start_args(request): - param = getattr(request, 'param', {}) + param = getattr(request, "param", {}) return Namespace( - config=param.get('config'), - skip_initialize_database=param.get('skip_initialize_database', False), + config=param.get("config"), + skip_initialize_database=param.get("skip_initialize_database", False), ) @pytest.fixture def mocked_setup_logging(mocker): return mocker.patch( - 'planetmint.log.setup_logging', + "planetmint.log.setup_logging", autospec=True, spec_set=True, ) diff --git a/tests/commands/test_commands.py b/tests/commands/test_commands.py index e3c4563..60df73c 100644 --- a/tests/commands/test_commands.py +++ b/tests/commands/test_commands.py @@ -20,43 +20,56 @@ from planetmint.transactions.types.elections.chain_migration_election import Cha from tests.utils import generate_election, generate_validators + def test_make_sure_we_dont_remove_any_command(): # thanks to: http://stackoverflow.com/a/18161115/597097 from planetmint.commands.planetmint import create_parser parser = create_parser() - assert parser.parse_args(['configure', 'tarantool_db']).command - assert parser.parse_args(['show-config']).command - assert parser.parse_args(['init']).command - assert parser.parse_args(['drop']).command - assert parser.parse_args(['start']).command - assert parser.parse_args(['election', 'new', 'upsert-validator', 'TEMP_PUB_KEYPAIR', '10', 'TEMP_NODE_ID', - '--private-key', 'TEMP_PATH_TO_PRIVATE_KEY']).command - assert parser.parse_args(['election', 'new', 'chain-migration', - '--private-key', 'TEMP_PATH_TO_PRIVATE_KEY']).command - assert 
parser.parse_args(['election', 'approve', 'ELECTION_ID', '--private-key', - 'TEMP_PATH_TO_PRIVATE_KEY']).command - assert parser.parse_args(['election', 'show', 'ELECTION_ID']).command - assert parser.parse_args(['tendermint-version']).command + assert parser.parse_args(["configure", "tarantool_db"]).command + assert parser.parse_args(["show-config"]).command + assert parser.parse_args(["init"]).command + assert parser.parse_args(["drop"]).command + assert parser.parse_args(["start"]).command + assert parser.parse_args( + [ + "election", + "new", + "upsert-validator", + "TEMP_PUB_KEYPAIR", + "10", + "TEMP_NODE_ID", + "--private-key", + "TEMP_PATH_TO_PRIVATE_KEY", + ] + ).command + assert parser.parse_args( + ["election", "new", "chain-migration", "--private-key", "TEMP_PATH_TO_PRIVATE_KEY"] + ).command + assert parser.parse_args( + ["election", "approve", "ELECTION_ID", "--private-key", "TEMP_PATH_TO_PRIVATE_KEY"] + ).command + assert parser.parse_args(["election", "show", "ELECTION_ID"]).command + assert parser.parse_args(["tendermint-version"]).command -@patch('planetmint.commands.utils.start') +@patch("planetmint.commands.utils.start") def test_main_entrypoint(mock_start): from planetmint.commands.planetmint import main + main() assert mock_start.called -@patch('planetmint.log.setup_logging') -@patch('planetmint.commands.planetmint._run_init') -@patch('planetmint.config_utils.autoconfigure') -def test_bigchain_run_start(mock_setup_logging, mock_run_init, - mock_autoconfigure, mock_processes_start): +@patch("planetmint.log.setup_logging") +@patch("planetmint.commands.planetmint._run_init") +@patch("planetmint.config_utils.autoconfigure") +def test_bigchain_run_start(mock_setup_logging, mock_run_init, mock_autoconfigure, mock_processes_start): from planetmint.commands.planetmint import run_start - args = Namespace(config=None, yes=True, - skip_initialize_database=False) + + args = Namespace(config=None, yes=True, skip_initialize_database=False) run_start(args) 
assert mock_setup_logging.called @@ -64,7 +77,7 @@ def test_bigchain_run_start(mock_setup_logging, mock_run_init, # TODO Please beware, that if debugging, the "-s" switch for pytest will # interfere with capsys. # See related issue: https://github.com/pytest-dev/pytest/issues/128 -@pytest.mark.usefixtures('ignore_local_config_file') +@pytest.mark.usefixtures("ignore_local_config_file") def test_bigchain_show_config(capsys): from planetmint.commands.planetmint import run_show_config @@ -82,6 +95,7 @@ def test_bigchain_show_config(capsys): # dict returned is different that what is expected after run_show_config # and run_show_config updates the planetmint.config from planetmint.config import Config + _config = Config().get() sorted_config = json.dumps(_config, indent=4, sort_keys=True) print(f"_config : {sorted_config}") @@ -90,8 +104,7 @@ def test_bigchain_show_config(capsys): def test__run_init(mocker): - init_db_mock = mocker.patch( - 'planetmint.backend.tarantool.connection.TarantoolDBConnection.init_database') + init_db_mock = mocker.patch("planetmint.backend.tarantool.connection.TarantoolDBConnection.init_database") from planetmint.backend.connection import connect @@ -101,27 +114,28 @@ def test__run_init(mocker): init_db_mock.assert_called_once_with() -@patch('planetmint.backend.schema.drop_database') +@patch("planetmint.backend.schema.drop_database") def test_drop_db_when_assumed_yes(mock_db_drop): from planetmint.commands.planetmint import run_drop + args = Namespace(config=None, yes=True) run_drop(args) assert mock_db_drop.called -@patch('planetmint.backend.schema.drop_database') +@patch("planetmint.backend.schema.drop_database") def test_drop_db_when_interactive_yes(mock_db_drop, monkeypatch): from planetmint.commands.planetmint import run_drop + args = Namespace(config=None, yes=False) - monkeypatch.setattr( - 'planetmint.commands.planetmint.input_on_stderr', lambda x: 'y') + monkeypatch.setattr("planetmint.commands.planetmint.input_on_stderr", lambda x: 
"y") run_drop(args) assert mock_db_drop.called -@patch('planetmint.backend.schema.drop_database') +@patch("planetmint.backend.schema.drop_database") def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys): from planetmint.transactions.common.exceptions import DatabaseDoesNotExist from planetmint.commands.planetmint import run_drop @@ -136,12 +150,12 @@ def test_drop_db_when_db_does_not_exist(mock_db_drop, capsys): # name=Config().get()['database']['name']) -@patch('planetmint.backend.schema.drop_database') +@patch("planetmint.backend.schema.drop_database") def test_drop_db_does_not_drop_when_interactive_no(mock_db_drop, monkeypatch): from planetmint.commands.planetmint import run_drop + args = Namespace(config=None, yes=False) - monkeypatch.setattr( - 'planetmint.commands.planetmint.input_on_stderr', lambda x: 'n') + monkeypatch.setattr("planetmint.commands.planetmint.input_on_stderr", lambda x: "n") run_drop(args) assert not mock_db_drop.called @@ -150,32 +164,31 @@ def test_drop_db_does_not_drop_when_interactive_no(mock_db_drop, monkeypatch): # TODO Beware if you are putting breakpoints in there, and using the '-s' # switch with pytest. It will just hang. Seems related to the monkeypatching of # input_on_stderr. 
-def test_run_configure_when_config_does_not_exist(monkeypatch, - mock_write_config, - mock_generate_key_pair, - mock_planetmint_backup_config): +def test_run_configure_when_config_does_not_exist( + monkeypatch, mock_write_config, mock_generate_key_pair, mock_planetmint_backup_config +): from planetmint.commands.planetmint import run_configure - monkeypatch.setattr('os.path.exists', lambda path: False) - monkeypatch.setattr('builtins.input', lambda: '\n') - args = Namespace(config=None, backend='localmongodb', yes=True) + + monkeypatch.setattr("os.path.exists", lambda path: False) + monkeypatch.setattr("builtins.input", lambda: "\n") + args = Namespace(config=None, backend="localmongodb", yes=True) return_value = run_configure(args) assert return_value is None -def test_run_configure_when_config_does_exist(monkeypatch, - mock_write_config, - mock_generate_key_pair, - mock_planetmint_backup_config): +def test_run_configure_when_config_does_exist( + monkeypatch, mock_write_config, mock_generate_key_pair, mock_planetmint_backup_config +): value = {} def mock_write_config(newconfig): - value['return'] = newconfig + value["return"] = newconfig from planetmint.commands.planetmint import run_configure - monkeypatch.setattr('os.path.exists', lambda path: True) - monkeypatch.setattr('builtins.input', lambda: '\n') - monkeypatch.setattr( - 'planetmint.config_utils.write_config', mock_write_config) + + monkeypatch.setattr("os.path.exists", lambda path: True) + monkeypatch.setattr("builtins.input", lambda: "\n") + monkeypatch.setattr("planetmint.config_utils.write_config", mock_write_config) args = Namespace(config=None, yes=None) run_configure(args) @@ -183,9 +196,7 @@ def test_run_configure_when_config_does_exist(monkeypatch, @pytest.mark.skip -@pytest.mark.parametrize('backend', ( - 'localmongodb', -)) +@pytest.mark.parametrize("backend", ("localmongodb",)) def test_run_configure_with_backend(backend, monkeypatch, mock_write_config): import planetmint from 
planetmint.commands.planetmint import run_configure @@ -193,26 +204,24 @@ def test_run_configure_with_backend(backend, monkeypatch, mock_write_config): value = {} def mock_write_config(new_config, filename=None): - value['return'] = new_config + value["return"] = new_config - monkeypatch.setattr('os.path.exists', lambda path: False) - monkeypatch.setattr('builtins.input', lambda: '\n') - monkeypatch.setattr('planetmint.config_utils.write_config', - mock_write_config) + monkeypatch.setattr("os.path.exists", lambda path: False) + monkeypatch.setattr("builtins.input", lambda: "\n") + monkeypatch.setattr("planetmint.config_utils.write_config", mock_write_config) args = Namespace(config=None, backend=backend, yes=True) expected_config = Config().get() run_configure(args) # update the expected config with the correct backend and keypair - backend_conf = getattr(planetmint, '_database_' + backend) - expected_config.update({'database': backend_conf, - 'keypair': value['return']['keypair']}) + backend_conf = getattr(planetmint, "_database_" + backend) + expected_config.update({"database": backend_conf, "keypair": value["return"]["keypair"]}) - assert value['return'] == expected_config + assert value["return"] == expected_config -@patch('planetmint.commands.utils.start') +@patch("planetmint.commands.utils.start") def test_calling_main(start_mock, monkeypatch): from planetmint.commands.planetmint import main @@ -223,36 +232,28 @@ def test_calling_main(start_mock, monkeypatch): subparsers.add_parser.return_value = subsubparsers parser.add_subparsers.return_value = subparsers argparser_mock.return_value = parser - monkeypatch.setattr('argparse.ArgumentParser', argparser_mock) + monkeypatch.setattr("argparse.ArgumentParser", argparser_mock) main() assert argparser_mock.called is True - parser.add_subparsers.assert_called_with(title='Commands', - dest='command') - subparsers.add_parser.assert_any_call('configure', - help='Prepare the config file.') - 
subparsers.add_parser.assert_any_call('show-config', - help='Show the current ' - 'configuration') - subparsers.add_parser.assert_any_call('init', help='Init the database') - subparsers.add_parser.assert_any_call('drop', help='Drop the database') + parser.add_subparsers.assert_called_with(title="Commands", dest="command") + subparsers.add_parser.assert_any_call("configure", help="Prepare the config file.") + subparsers.add_parser.assert_any_call("show-config", help="Show the current " "configuration") + subparsers.add_parser.assert_any_call("init", help="Init the database") + subparsers.add_parser.assert_any_call("drop", help="Drop the database") - subparsers.add_parser.assert_any_call('start', help='Start Planetmint') - subparsers.add_parser.assert_any_call('tendermint-version', - help='Show the Tendermint supported ' - 'versions') + subparsers.add_parser.assert_any_call("start", help="Start Planetmint") + subparsers.add_parser.assert_any_call("tendermint-version", help="Show the Tendermint supported " "versions") assert start_mock.called is True -@patch('planetmint.commands.planetmint.run_recover') -@patch('planetmint.start.start') -def test_recover_db_on_start(mock_run_recover, - mock_start, - mocked_setup_logging): +@patch("planetmint.commands.planetmint.run_recover") +@patch("planetmint.start.start") +def test_recover_db_on_start(mock_run_recover, mock_start, mocked_setup_logging): from planetmint.commands.planetmint import run_start - args = Namespace(config=None, yes=True, - skip_initialize_database=False) + + args = Namespace(config=None, yes=True, skip_initialize_database=False) run_start(args) assert mock_run_recover.called @@ -266,29 +267,23 @@ def test_run_recover(b, alice, bob): from planetmint.lib import Block from planetmint.backend import query - tx1 = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset={'cycle': 'hero'}, - metadata={'name': 'hohenheim'}) \ - .sign([alice.private_key]) - tx2 = Create.generate([bob.public_key], - 
[([bob.public_key], 1)], - asset={'cycle': 'hero'}, - metadata={'name': 'hohenheim'}) \ - .sign([bob.private_key]) + tx1 = Create.generate( + [alice.public_key], [([alice.public_key], 1)], asset={"cycle": "hero"}, metadata={"name": "hohenheim"} + ).sign([alice.private_key]) + tx2 = Create.generate( + [bob.public_key], [([bob.public_key], 1)], asset={"cycle": "hero"}, metadata={"name": "hohenheim"} + ).sign([bob.private_key]) print(tx1.id) print(tx2.id) # store the transactions b.store_bulk_transactions([tx1, tx2]) # create a random block - block8 = Block(app_hash='random_app_hash1', height=8, - transactions=['txid_doesnt_matter'])._asdict() + block8 = Block(app_hash="random_app_hash1", height=8, transactions=["txid_doesnt_matter"])._asdict() b.store_block(block8) # create the next block - block9 = Block(app_hash='random_app_hash1', height=9, - transactions=[tx1.id])._asdict() + block9 = Block(app_hash="random_app_hash1", height=9, transactions=[tx1.id])._asdict() b.store_block(block9) # create a pre_commit state which is ahead of the commit state @@ -301,26 +296,27 @@ def test_run_recover(b, alice, bob): # Helper -class MockResponse(): - +class MockResponse: def __init__(self, height): self.height = height def json(self): - return {'result': {'latest_block_height': self.height}} + return {"result": {"latest_block_height": self.height}} @pytest.mark.abci def test_election_new_upsert_validator_with_tendermint(b, priv_validator_path, user_sk, validators): from planetmint.commands.planetmint import run_election_new_upsert_validator - new_args = Namespace(action='new', - election_type='upsert-validator', - public_key='HHG0IQRybpT6nJMIWWFWhMczCLHt6xcm7eP52GnGuPY=', - power=1, - node_id='unique_node_id_for_test_upsert_validator_new_with_tendermint', - sk=priv_validator_path, - config={}) + new_args = Namespace( + action="new", + election_type="upsert-validator", + public_key="HHG0IQRybpT6nJMIWWFWhMczCLHt6xcm7eP52GnGuPY=", + power=1, + 
node_id="unique_node_id_for_test_upsert_validator_new_with_tendermint", + sk=priv_validator_path, + config={}, + ) election_id = run_election_new_upsert_validator(new_args, b) @@ -333,22 +329,24 @@ def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_valida def mock_write(tx, mode): b.store_bulk_transactions([tx]) - return (202, '') + return (202, "") b.get_validators = mock_get_validators b.write_transaction = mock_write - args = Namespace(action='new', - election_type='upsert-validator', - public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=', - power=1, - node_id='fb7140f03a4ffad899fabbbf655b97e0321add66', - sk=priv_validator_path, - config={}) + args = Namespace( + action="new", + election_type="upsert-validator", + public_key="CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=", + power=1, + node_id="fb7140f03a4ffad899fabbbf655b97e0321add66", + sk=priv_validator_path, + config={}, + ) with caplog.at_level(logging.INFO): election_id = run_election_new_upsert_validator(args, b) - assert caplog.records[0].msg == '[SUCCESS] Submitted proposal with id: ' + election_id + assert caplog.records[0].msg == "[SUCCESS] Submitted proposal with id: " + election_id assert b.get_transaction(election_id) @@ -356,10 +354,7 @@ def test_election_new_upsert_validator_without_tendermint(caplog, b, priv_valida def test_election_new_chain_migration_with_tendermint(b, priv_validator_path, user_sk, validators): from planetmint.commands.planetmint import run_election_new_chain_migration - new_args = Namespace(action='new', - election_type='migration', - sk=priv_validator_path, - config={}) + new_args = Namespace(action="new", election_type="migration", sk=priv_validator_path, config={}) election_id = run_election_new_chain_migration(new_args, b) @@ -372,19 +367,16 @@ def test_election_new_chain_migration_without_tendermint(caplog, b, priv_validat def mock_write(tx, mode): b.store_bulk_transactions([tx]) - return (202, '') + return (202, "") b.get_validators = 
mock_get_validators b.write_transaction = mock_write - args = Namespace(action='new', - election_type='migration', - sk=priv_validator_path, - config={}) + args = Namespace(action="new", election_type="migration", sk=priv_validator_path, config={}) with caplog.at_level(logging.INFO): election_id = run_election_new_chain_migration(args, b) - assert caplog.records[0].msg == '[SUCCESS] Submitted proposal with id: ' + election_id + assert caplog.records[0].msg == "[SUCCESS] Submitted proposal with id: " + election_id assert b.get_transaction(election_id) @@ -392,13 +384,15 @@ def test_election_new_chain_migration_without_tendermint(caplog, b, priv_validat def test_election_new_upsert_validator_invalid_election(caplog, b, priv_validator_path, user_sk): from planetmint.commands.planetmint import run_election_new_upsert_validator - args = Namespace(action='new', - election_type='upsert-validator', - public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=', - power=10, - node_id='fb7140f03a4ffad899fabbbf655b97e0321add66', - sk='/tmp/invalid/path/key.json', - config={}) + args = Namespace( + action="new", + election_type="upsert-validator", + public_key="CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=", + power=10, + node_id="fb7140f03a4ffad899fabbbf655b97e0321add66", + sk="/tmp/invalid/path/key.json", + config={}, + ) with caplog.at_level(logging.ERROR): assert not run_election_new_upsert_validator(args, b) @@ -412,17 +406,19 @@ def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_p def mock_write(tx, mode): b.store_bulk_transactions([tx]) - return (400, '') + return (400, "") b.write_transaction = mock_write b.get_validators = mock_get_validators - args = Namespace(action='new', - election_type='upsert-validator', - public_key='CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=', - power=10, - node_id='fb7140f03a4ffad899fabbbf655b97e0321add66', - sk=priv_validator_path, - config={}) + args = Namespace( + action="new", + election_type="upsert-validator", 
+ public_key="CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=", + power=10, + node_id="fb7140f03a4ffad899fabbbf655b97e0321add66", + sk=priv_validator_path, + config={}, + ) with caplog.at_level(logging.ERROR): assert not run_election_new_upsert_validator(args, b) @@ -431,25 +427,23 @@ def test_election_new_upsert_validator_invalid_power(caplog, b, priv_validator_p @pytest.mark.abci def test_election_approve_with_tendermint(b, priv_validator_path, user_sk, validators): - from planetmint.commands.planetmint import (run_election_new_upsert_validator, - run_election_approve) + from planetmint.commands.planetmint import run_election_new_upsert_validator, run_election_approve - public_key = 'CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=' - new_args = Namespace(action='new', - election_type='upsert-validator', - public_key=public_key, - power=1, - node_id='fb7140f03a4ffad899fabbbf655b97e0321add66', - sk=priv_validator_path, - config={}) + public_key = "CJxdItf4lz2PwEf4SmYNAu/c/VpmX39JEgC5YpH7fxg=" + new_args = Namespace( + action="new", + election_type="upsert-validator", + public_key=public_key, + power=1, + node_id="fb7140f03a4ffad899fabbbf655b97e0321add66", + sk=priv_validator_path, + config={}, + ) election_id = run_election_new_upsert_validator(new_args, b) assert election_id - args = Namespace(action='approve', - election_id=election_id, - sk=priv_validator_path, - config={}) + args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={}) approve = run_election_approve(args, b) assert b.get_transaction(approve) @@ -463,15 +457,12 @@ def test_election_approve_without_tendermint(caplog, b, priv_validator_path, new b, election_id = call_election(b, new_validator, node_key) # call run_election_approve with args that point to the election - args = Namespace(action='approve', - election_id=election_id, - sk=priv_validator_path, - config={}) + args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={}) # 
assert returned id is in the db with caplog.at_level(logging.INFO): approval_id = run_election_approve(args, b) - assert caplog.records[0].msg == '[SUCCESS] Your vote has been submitted' + assert caplog.records[0].msg == "[SUCCESS] Your vote has been submitted" assert b.get_transaction(approval_id) @@ -484,19 +475,16 @@ def test_election_approve_failure(caplog, b, priv_validator_path, new_validator, def mock_write(tx, mode): b.store_bulk_transactions([tx]) - return (400, '') + return (400, "") b.write_transaction = mock_write # call run_upsert_validator_approve with args that point to the election - args = Namespace(action='approve', - election_id=election_id, - sk=priv_validator_path, - config={}) + args = Namespace(action="approve", election_id=election_id, sk=priv_validator_path, config={}) with caplog.at_level(logging.ERROR): assert not run_election_approve(args, b) - assert caplog.records[0].msg == 'Failed to commit vote' + assert caplog.records[0].msg == "Failed to commit vote" @pytest.mark.bdb @@ -507,84 +495,70 @@ def test_election_approve_called_with_bad_key(caplog, b, bad_validator_path, new b, election_id = call_election(b, new_validator, node_key) # call run_upsert_validator_approve with args that point to the election, but a bad signing key - args = Namespace(action='approve', - election_id=election_id, - sk=bad_validator_path, - config={}) + args = Namespace(action="approve", election_id=election_id, sk=bad_validator_path, config={}) with caplog.at_level(logging.ERROR): assert not run_election_approve(args, b) - assert caplog.records[0].msg == 'The key you provided does not match any of ' \ - 'the eligible voters in this election.' + assert ( + caplog.records[0].msg == "The key you provided does not match any of " + "the eligible voters in this election." 
+ ) @pytest.mark.bdb def test_chain_migration_election_show_shows_inconclusive(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + b.store_validator_set(1, [v["storage"] for v in validators]) - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) assert not run_election_show(Namespace(election_id=election.id), b) Election.process_block(b, 1, [election]) b.store_bulk_transactions([election]) - assert run_election_show(Namespace(election_id=election.id), b) == \ - 'status=ongoing' + assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing" - b.store_block(Block(height=1, transactions=[], app_hash='')._asdict()) - b.store_validator_set(2, [v['storage'] for v in validators]) + b.store_block(Block(height=1, transactions=[], app_hash="")._asdict()) + b.store_validator_set(2, [v["storage"] for v in validators]) - assert run_election_show(Namespace(election_id=election.id), b) == \ - 'status=ongoing' + assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing" - b.store_block(Block(height=2, transactions=[], app_hash='')._asdict()) + b.store_block(Block(height=2, transactions=[], app_hash="")._asdict()) # TODO insert yet another block here when upgrading to Tendermint 0.22.4. 
- assert run_election_show(Namespace(election_id=election.id), b) == \ - 'status=inconclusive' + assert run_election_show(Namespace(election_id=election.id), b) == "status=inconclusive" @pytest.mark.bdb def test_chain_migration_election_show_shows_concluded(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + b.store_validator_set(1, [v["storage"] for v in validators]) - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) assert not run_election_show(Namespace(election_id=election.id), b) b.store_bulk_transactions([election]) Election.process_block(b, 1, [election]) - assert run_election_show(Namespace(election_id=election.id), b) == \ - 'status=ongoing' + assert run_election_show(Namespace(election_id=election.id), b) == "status=ongoing" - b.store_abci_chain(1, 'chain-X') - b.store_block(Block(height=1, - transactions=[v.id for v in votes], - app_hash='last_app_hash')._asdict()) + b.store_abci_chain(1, "chain-X") + b.store_block(Block(height=1, transactions=[v.id for v in votes], app_hash="last_app_hash")._asdict()) Election.process_block(b, 2, votes) - assert run_election_show(Namespace(election_id=election.id), b) == \ - f'''status=concluded + assert ( + run_election_show(Namespace(election_id=election.id), b) + == f'''status=concluded chain_id=chain-X-migrated-at-height-1 app_hash=last_app_hash validators=[{''.join([f""" @@ -596,6 +570,7 @@ validators=[{''.join([f""" "power": {v['storage']['voting_power']} }}{',' if i + 1 != len(validators) else ''}""" for i, v 
in enumerate(validators)])} ]''' + ) def test_bigchain_tendermint_version(capsys): @@ -606,22 +581,24 @@ def test_bigchain_tendermint_version(capsys): run_tendermint_version(args) output_config = json.loads(capsys.readouterr()[0]) from planetmint.version import __tm_supported_versions__ + assert len(output_config["tendermint"]) == len(__tm_supported_versions__) assert sorted(output_config["tendermint"]) == sorted(__tm_supported_versions__) def mock_get_validators(height): return [ - {'public_key': {'value': "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=", - 'type': 'ed25519-base64'}, - 'voting_power': 10} + { + "public_key": {"value": "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=", "type": "ed25519-base64"}, + "voting_power": 10, + } ] def call_election(b, new_validator, node_key): def mock_write(tx, mode): b.store_bulk_transactions([tx]) - return (202, '') + return (202, "") # patch the validator set. We now have one validator with power 10 b.get_validators = mock_get_validators @@ -631,9 +608,7 @@ def call_election(b, new_validator, node_key): voters = ValidatorElection.recipients(b) # and our voter is the public key from the voter list voter = node_key.public_key - valid_election = ValidatorElection.generate([voter], - voters, - new_validator, None).sign([node_key.private_key]) + valid_election = ValidatorElection.generate([voter], voters, new_validator, None).sign([node_key.private_key]) # patch in an election with a vote issued to the user election_id = valid_election.id diff --git a/tests/commands/test_utils.py b/tests/commands/test_utils.py index 0018568..a731f66 100644 --- a/tests/commands/test_utils.py +++ b/tests/commands/test_utils.py @@ -14,40 +14,41 @@ from unittest.mock import patch @pytest.fixture def reset_planetmint_config(monkeypatch): - monkeypatch.setattr('planetmint.config', Config().init_config('tarantool_db')) + monkeypatch.setattr("planetmint.config", Config().init_config("tarantool_db")) def test_input_on_stderr(): from 
planetmint.commands.utils import input_on_stderr, _convert - with patch('builtins.input', return_value='I love cats'): - assert input_on_stderr() == 'I love cats' + with patch("builtins.input", return_value="I love cats"): + assert input_on_stderr() == "I love cats" # input_on_stderr uses `_convert` internally, from now on we will # just use that function - assert _convert('hack the planet') == 'hack the planet' - assert _convert('42') == '42' - assert _convert('42', default=10) == 42 - assert _convert('', default=10) == 10 - assert _convert('42', convert=int) == 42 - assert _convert('True', convert=bool) is True - assert _convert('False', convert=bool) is False - assert _convert('t', convert=bool) is True - assert _convert('3.14', default=1.0) == 3.14 - assert _convert('TrUe', default=False) is True + assert _convert("hack the planet") == "hack the planet" + assert _convert("42") == "42" + assert _convert("42", default=10) == 42 + assert _convert("", default=10) == 10 + assert _convert("42", convert=int) == 42 + assert _convert("True", convert=bool) is True + assert _convert("False", convert=bool) is False + assert _convert("t", convert=bool) is True + assert _convert("3.14", default=1.0) == 3.14 + assert _convert("TrUe", default=False) is True with pytest.raises(ValueError): - assert _convert('TRVE', default=False) + assert _convert("TRVE", default=False) with pytest.raises(ValueError): - assert _convert('ಠ_ಠ', convert=int) + assert _convert("ಠ_ಠ", convert=int) -@pytest.mark.usefixtures('ignore_local_config_file', 'reset_planetmint_config') +@pytest.mark.usefixtures("ignore_local_config_file", "reset_planetmint_config") def test_configure_planetmint_configures_planetmint(): from planetmint.commands.utils import configure_planetmint from planetmint.config_utils import is_configured + assert not is_configured() @configure_planetmint @@ -58,17 +59,11 @@ def test_configure_planetmint_configures_planetmint(): test_configure(args) 
-@pytest.mark.usefixtures('ignore_local_config_file', - 'reset_planetmint_config', - 'reset_logging_config') -@pytest.mark.parametrize('log_level', tuple(map( - logging.getLevelName, - (logging.DEBUG, - logging.INFO, - logging.WARNING, - logging.ERROR, - logging.CRITICAL) -))) +@pytest.mark.usefixtures("ignore_local_config_file", "reset_planetmint_config", "reset_logging_config") +@pytest.mark.parametrize( + "log_level", + tuple(map(logging.getLevelName, (logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL))), +) def test_configure_planetmint_logging(log_level): # TODO: See following comment: # This is a dirty test. If a test *preceding* this test makes use of the logger, and then another test *after* this @@ -84,8 +79,8 @@ def test_configure_planetmint_logging(log_level): args = Namespace(config=None, log_level=log_level) test_configure_logger(args) - assert Config().get()['log']['level_console'] == log_level - assert Config().get()['log']['level_logfile'] == log_level + assert Config().get()["log"]["level_console"] == log_level + assert Config().get()["log"]["level_logfile"] == log_level def test_start_raises_if_command_not_implemented(): @@ -97,7 +92,7 @@ def test_start_raises_if_command_not_implemented(): with pytest.raises(NotImplementedError): # Will raise because `scope`, the third parameter, # doesn't contain the function `run_start` - utils.start(parser, ['start'], {}) + utils.start(parser, ["start"], {}) def test_start_raises_if_no_arguments_given(): @@ -110,7 +105,7 @@ def test_start_raises_if_no_arguments_given(): utils.start(parser, [], {}) -@patch('multiprocessing.cpu_count', return_value=42) +@patch("multiprocessing.cpu_count", return_value=42) def test_start_sets_multiprocess_var_based_on_cli_args(mock_cpu_count): from planetmint.commands import utils @@ -118,14 +113,10 @@ def test_start_sets_multiprocess_var_based_on_cli_args(mock_cpu_count): return args parser = argparse.ArgumentParser() - subparser = 
parser.add_subparsers(title='Commands', - dest='command') - mp_arg_test_parser = subparser.add_parser('mp_arg_test') - mp_arg_test_parser.add_argument('-m', '--multiprocess', - nargs='?', - type=int, - default=False) + subparser = parser.add_subparsers(title="Commands", dest="command") + mp_arg_test_parser = subparser.add_parser("mp_arg_test") + mp_arg_test_parser.add_argument("-m", "--multiprocess", nargs="?", type=int, default=False) - scope = {'run_mp_arg_test': run_mp_arg_test} - assert utils.start(parser, ['mp_arg_test'], scope).multiprocess == 1 - assert utils.start(parser, ['mp_arg_test', '--multiprocess'], scope).multiprocess == 42 + scope = {"run_mp_arg_test": run_mp_arg_test} + assert utils.start(parser, ["mp_arg_test"], scope).multiprocess == 1 + assert utils.start(parser, ["mp_arg_test", "--multiprocess"], scope).multiprocess == 42 diff --git a/tests/common/conftest.py b/tests/common/conftest.py index eea23ee..22976cf 100644 --- a/tests/common/conftest.py +++ b/tests/common/conftest.py @@ -7,32 +7,24 @@ from base58 import b58decode import pytest -USER_PRIVATE_KEY = '8eJ8q9ZQpReWyQT5aFCiwtZ5wDZC4eDnCen88p3tQ6ie' -USER_PUBLIC_KEY = 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE' +USER_PRIVATE_KEY = "8eJ8q9ZQpReWyQT5aFCiwtZ5wDZC4eDnCen88p3tQ6ie" +USER_PUBLIC_KEY = "JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE" -USER2_PRIVATE_KEY = 'F86PQPiqMTwM2Qi2Sda3U4Vdh3AgadMdX3KNVsu5wNJr' -USER2_PUBLIC_KEY = 'GDxwMFbwdATkQELZbMfW8bd9hbNYMZLyVXA3nur2aNbE' +USER2_PRIVATE_KEY = "F86PQPiqMTwM2Qi2Sda3U4Vdh3AgadMdX3KNVsu5wNJr" +USER2_PUBLIC_KEY = "GDxwMFbwdATkQELZbMfW8bd9hbNYMZLyVXA3nur2aNbE" -USER3_PRIVATE_KEY = '4rNQFzWQbVwuTiDVxwuFMvLG5zd8AhrQKCtVovBvcYsB' -USER3_PUBLIC_KEY = 'Gbrg7JtxdjedQRmr81ZZbh1BozS7fBW88ZyxNDy7WLNC' +USER3_PRIVATE_KEY = "4rNQFzWQbVwuTiDVxwuFMvLG5zd8AhrQKCtVovBvcYsB" +USER3_PUBLIC_KEY = "Gbrg7JtxdjedQRmr81ZZbh1BozS7fBW88ZyxNDy7WLNC" CC_FULFILLMENT_URI = ( - 'pGSAINdamAGCsQq31Uv-08lkBzoO4XLz2qYjJa8CGmj3B1EagUDlVkMAw2CscpCG4syAboKKh' - 
'Id_Hrjl2XTYc-BlIkkBVV-4ghWQozusxh45cBz5tGvSW_XwWVu-JGVRQUOOehAL' + "pGSAINdamAGCsQq31Uv-08lkBzoO4XLz2qYjJa8CGmj3B1EagUDlVkMAw2CscpCG4syAboKKh" + "Id_Hrjl2XTYc-BlIkkBVV-4ghWQozusxh45cBz5tGvSW_XwWVu-JGVRQUOOehAL" ) -CC_CONDITION_URI = ('ni:///sha-256;' - 'eZI5q6j8T_fqv7xMROaei9_tmTMk4S7WR5Kr4onPHV8' - '?fpt=ed25519-sha-256&cost=131072') +CC_CONDITION_URI = "ni:///sha-256;" "eZI5q6j8T_fqv7xMROaei9_tmTMk4S7WR5Kr4onPHV8" "?fpt=ed25519-sha-256&cost=131072" -ASSET_DEFINITION = { - 'data': { - 'definition': 'Asset definition' - } -} +ASSET_DEFINITION = {"data": {"definition": "Asset definition"}} -DATA = { - 'msg': 'Hello Planetmint!' -} +DATA = {"msg": "Hello Planetmint!"} @pytest.fixture @@ -78,53 +70,60 @@ def cond_uri(): @pytest.fixture def user_Ed25519(user_pub): from cryptoconditions import Ed25519Sha256 + return Ed25519Sha256(public_key=b58decode(user_pub)) @pytest.fixture def user_user2_threshold(user_pub, user2_pub): from cryptoconditions import ThresholdSha256, Ed25519Sha256 + user_pub_keys = [user_pub, user2_pub] threshold = ThresholdSha256(threshold=len(user_pub_keys)) for user_pub in user_pub_keys: - threshold.add_subfulfillment( - Ed25519Sha256(public_key=b58decode(user_pub))) + threshold.add_subfulfillment(Ed25519Sha256(public_key=b58decode(user_pub))) return threshold @pytest.fixture def user2_Ed25519(user2_pub): from cryptoconditions import Ed25519Sha256 + return Ed25519Sha256(public_key=b58decode(user2_pub)) @pytest.fixture def user_input(user_Ed25519, user_pub): from planetmint.transactions.common.transaction import Input + return Input(user_Ed25519, [user_pub]) @pytest.fixture def user_user2_threshold_output(user_user2_threshold, user_pub, user2_pub): from planetmint.transactions.common.transaction import Output + return Output(user_user2_threshold, [user_pub, user2_pub]) @pytest.fixture def user_user2_threshold_input(user_user2_threshold, user_pub, user2_pub): from planetmint.transactions.common.transaction import Input + return 
Input(user_user2_threshold, [user_pub, user2_pub]) @pytest.fixture def user_output(user_Ed25519, user_pub): from planetmint.transactions.common.transaction import Output + return Output(user_Ed25519, [user_pub]) @pytest.fixture def user2_output(user2_Ed25519, user2_pub): from planetmint.transactions.common.transaction import Output + return Output(user2_Ed25519, [user2_pub]) @@ -141,8 +140,8 @@ def data(): @pytest.fixture def utx(user_input, user_output): from planetmint.transactions.common.transaction import Transaction - return Transaction(Transaction.CREATE, {'data': None}, [user_input], - [user_output]) + + return Transaction(Transaction.CREATE, {"data": None}, [user_input], [user_output]) @pytest.fixture @@ -152,13 +151,11 @@ def tx(utx, user_priv): @pytest.fixture def transfer_utx(user_output, user2_output, utx): - from planetmint.transactions.common.transaction import ( - Input, TransactionLink, Transaction) + from planetmint.transactions.common.transaction import Input, TransactionLink, Transaction + user_output = user_output.to_dict() - input = Input(utx.outputs[0].fulfillment, - user_output['public_keys'], - TransactionLink(utx.id, 0)) - return Transaction('TRANSFER', {'id': utx.id}, [input], [user2_output]) + input = Input(utx.outputs[0].fulfillment, user_output["public_keys"], TransactionLink(utx.id, 0)) + return Transaction("TRANSFER", {"id": utx.id}, [input], [user2_output]) @pytest.fixture @@ -169,139 +166,163 @@ def transfer_tx(transfer_utx, user_priv): @pytest.fixture(scope="session") def dummy_transaction(): return { - 'asset': {'data': None}, - 'id': 64 * 'a', - 'inputs': [{ - 'fulfillment': 'dummy', - 'fulfills': None, - 'owners_before': [58 * 'a'], - }], - 'metadata': None, - 'operation': 'CREATE', - 'outputs': [{ - 'amount': '1', - 'condition': { - 'details': { - 'public_key': 58 * 'b', - 'type': 'ed25519-sha-256' + "asset": {"data": None}, + "id": 64 * "a", + "inputs": [ + { + "fulfillment": "dummy", + "fulfills": None, + "owners_before": [58 
* "a"], + } + ], + "metadata": None, + "operation": "CREATE", + "outputs": [ + { + "amount": "1", + "condition": { + "details": {"public_key": 58 * "b", "type": "ed25519-sha-256"}, + "uri": "dummy", }, - 'uri': 'dummy', - }, - 'public_keys': [58 * 'b'] - }], - 'version': '2.0' + "public_keys": [58 * "b"], + } + ], + "version": "2.0", } @pytest.fixture def unfulfilled_transaction(): return { - 'asset': { - 'data': { - 'msg': 'Hello Planetmint!', + "asset": { + "data": { + "msg": "Hello Planetmint!", } }, - 'id': None, - 'inputs': [{ - # XXX This could be None, see #1925 - # https://github.com/planetmint/planetmint/issues/1925 - 'fulfillment': { - 'public_key': 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE', - 'type': 'ed25519-sha-256' - }, - 'fulfills': None, - 'owners_before': ['JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'] - }], - 'metadata': None, - 'operation': 'CREATE', - 'outputs': [{ - 'amount': '1', - 'condition': { - 'details': { - 'public_key': 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE', - 'type': 'ed25519-sha-256' + "id": None, + "inputs": [ + { + # XXX This could be None, see #1925 + # https://github.com/planetmint/planetmint/issues/1925 + "fulfillment": { + "public_key": "JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE", + "type": "ed25519-sha-256", }, - 'uri': 'ni:///sha-256;49C5UWNODwtcINxLgLc90bMCFqCymFYONGEmV4a0sG4?fpt=ed25519-sha-256&cost=131072'}, - 'public_keys': ['JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'] - }], - 'version': '1.0' + "fulfills": None, + "owners_before": ["JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE"], + } + ], + "metadata": None, + "operation": "CREATE", + "outputs": [ + { + "amount": "1", + "condition": { + "details": { + "public_key": "JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE", + "type": "ed25519-sha-256", + }, + "uri": "ni:///sha-256;49C5UWNODwtcINxLgLc90bMCFqCymFYONGEmV4a0sG4?fpt=ed25519-sha-256&cost=131072", + }, + "public_keys": ["JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE"], + } + ], + "version": "1.0", } 
@pytest.fixture def fulfilled_transaction(): return { - 'asset': { - 'data': { - 'msg': 'Hello Planetmint!', + "asset": { + "data": { + "msg": "Hello Planetmint!", } }, - 'id': None, - 'inputs': [{ - 'fulfillment': ('pGSAIP_2P1Juh-94sD3uno1lxMPd9EkIalRo7QB014pT6dD9g' - 'UANRNxasDy1Dfg9C2Fk4UgHdYFsJzItVYi5JJ_vWc6rKltn0k' - 'jagynI0xfyR6X9NhzccTt5oiNH9mThEb4QmagN'), - 'fulfills': None, - 'owners_before': ['JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'] - }], - 'metadata': None, - 'operation': 'CREATE', - 'outputs': [{ - 'amount': '1', - 'condition': { - 'details': { - 'public_key': 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE', - 'type': 'ed25519-sha-256' + "id": None, + "inputs": [ + { + "fulfillment": ( + "pGSAIP_2P1Juh-94sD3uno1lxMPd9EkIalRo7QB014pT6dD9g" + "UANRNxasDy1Dfg9C2Fk4UgHdYFsJzItVYi5JJ_vWc6rKltn0k" + "jagynI0xfyR6X9NhzccTt5oiNH9mThEb4QmagN" + ), + "fulfills": None, + "owners_before": ["JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE"], + } + ], + "metadata": None, + "operation": "CREATE", + "outputs": [ + { + "amount": "1", + "condition": { + "details": { + "public_key": "JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE", + "type": "ed25519-sha-256", + }, + "uri": "ni:///sha-256;49C5UWNODwtcINxLgLc90bMCFqCymFYONGEmV4a0sG4?fpt=ed25519-sha-256&cost=131072", }, - 'uri': 'ni:///sha-256;49C5UWNODwtcINxLgLc90bMCFqCymFYONGEmV4a0sG4?fpt=ed25519-sha-256&cost=131072'}, - 'public_keys': ['JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'] - }], - 'version': '1.0' + "public_keys": ["JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE"], + } + ], + "version": "1.0", } # TODO For reviewers: Pick which approach you like best: parametrized or not? 
-@pytest.fixture(params=( - {'id': None, - 'fulfillment': { - 'public_key': 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE', - 'type': 'ed25519-sha-256'}}, - {'id': None, - 'fulfillment': ('pGSAIP_2P1Juh-94sD3uno1lxMPd9EkIalRo7QB014pT6dD9g' - 'UANRNxasDy1Dfg9C2Fk4UgHdYFsJzItVYi5JJ_vWc6rKltn0k' - 'jagynI0xfyR6X9NhzccTt5oiNH9mThEb4QmagN')}, - {'id': '7a7c827cf4ef7985f08f4e9d16f5ffc58ca4e82271921dfbed32e70cb462485f', - 'fulfillment': ('pGSAIP_2P1Juh-94sD3uno1lxMPd9EkIalRo7QB014pT6dD9g' - 'UANRNxasDy1Dfg9C2Fk4UgHdYFsJzItVYi5JJ_vWc6rKltn0k' - 'jagynI0xfyR6X9NhzccTt5oiNH9mThEb4QmagN')}, -)) +@pytest.fixture( + params=( + { + "id": None, + "fulfillment": {"public_key": "JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE", "type": "ed25519-sha-256"}, + }, + { + "id": None, + "fulfillment": ( + "pGSAIP_2P1Juh-94sD3uno1lxMPd9EkIalRo7QB014pT6dD9g" + "UANRNxasDy1Dfg9C2Fk4UgHdYFsJzItVYi5JJ_vWc6rKltn0k" + "jagynI0xfyR6X9NhzccTt5oiNH9mThEb4QmagN" + ), + }, + { + "id": "7a7c827cf4ef7985f08f4e9d16f5ffc58ca4e82271921dfbed32e70cb462485f", + "fulfillment": ( + "pGSAIP_2P1Juh-94sD3uno1lxMPd9EkIalRo7QB014pT6dD9g" + "UANRNxasDy1Dfg9C2Fk4UgHdYFsJzItVYi5JJ_vWc6rKltn0k" + "jagynI0xfyR6X9NhzccTt5oiNH9mThEb4QmagN" + ), + }, + ) +) def tri_state_transaction(request): tx = { - 'asset': { - 'data': { - 'msg': 'Hello Planetmint!', + "asset": { + "data": { + "msg": "Hello Planetmint!", } }, - 'id': None, - 'inputs': [{ - 'fulfillment': None, - 'fulfills': None, - 'owners_before': ['JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'] - }], - 'metadata': None, - 'operation': 'CREATE', - 'outputs': [{ - 'amount': '1', - 'condition': { - 'details': { - 'public_key': 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE', - 'type': 'ed25519-sha-256' + "id": None, + "inputs": [ + {"fulfillment": None, "fulfills": None, "owners_before": ["JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE"]} + ], + "metadata": None, + "operation": "CREATE", + "outputs": [ + { + "amount": "1", + "condition": { + "details": { + "public_key": 
"JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE", + "type": "ed25519-sha-256", + }, + "uri": "ni:///sha-256;49C5UWNODwtcINxLgLc90bMCFqCymFYONGEmV4a0sG4?fpt=ed25519-sha-256&cost=131072", }, - 'uri': 'ni:///sha-256;49C5UWNODwtcINxLgLc90bMCFqCymFYONGEmV4a0sG4?fpt=ed25519-sha-256&cost=131072'}, - 'public_keys': ['JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE'] - }], - 'version': '2.0' + "public_keys": ["JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE"], + } + ], + "version": "2.0", } - tx['id'] = request.param['id'] - tx['inputs'][0]['fulfillment'] = request.param['fulfillment'] + tx["id"] = request.param["id"] + tx["inputs"][0]["fulfillment"] = request.param["fulfillment"] return tx diff --git a/tests/common/test_memoize.py b/tests/common/test_memoize.py index dd047cb..30cd414 100644 --- a/tests/common/test_memoize.py +++ b/tests/common/test_memoize.py @@ -18,16 +18,17 @@ pytestmark = pytest.mark.bdb def test_memoize_to_dict(b): alice = generate_key_pair() asset = { - 'data': {'id': 'test_id'}, + "data": {"id": "test_id"}, } assert to_dict.cache_info().hits == 0 assert to_dict.cache_info().misses == 0 - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=asset,)\ - .sign([alice.private_key]) + tx = Create.generate( + [alice.public_key], + [([alice.public_key], 1)], + asset=asset, + ).sign([alice.private_key]) tx.to_dict() @@ -44,16 +45,17 @@ def test_memoize_to_dict(b): def test_memoize_from_dict(b): alice = generate_key_pair() asset = { - 'data': {'id': 'test_id'}, + "data": {"id": "test_id"}, } assert from_dict.cache_info().hits == 0 assert from_dict.cache_info().misses == 0 - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=asset,)\ - .sign([alice.private_key]) + tx = Create.generate( + [alice.public_key], + [([alice.public_key], 1)], + asset=asset, + ).sign([alice.private_key]) tx_dict = deepcopy(tx.to_dict()) Transaction.from_dict(tx_dict) @@ -71,16 +73,17 @@ def test_memoize_from_dict(b): def 
test_memoize_input_valid(b): alice = generate_key_pair() asset = { - 'data': {'id': 'test_id'}, + "data": {"id": "test_id"}, } assert Transaction._input_valid.cache_info().hits == 0 assert Transaction._input_valid.cache_info().misses == 0 - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=asset,)\ - .sign([alice.private_key]) + tx = Create.generate( + [alice.public_key], + [([alice.public_key], 1)], + asset=asset, + ).sign([alice.private_key]) tx.inputs_valid() diff --git a/tests/common/test_schema.py b/tests/common/test_schema.py index 07cda88..478be28 100644 --- a/tests/common/test_schema.py +++ b/tests/common/test_schema.py @@ -19,28 +19,26 @@ from planetmint.transactions.common.schema import ( validate_transaction_schema, ) -SUPPORTED_CRYPTOCONDITION_TYPES = ('threshold-sha-256', 'ed25519-sha-256') -UNSUPPORTED_CRYPTOCONDITION_TYPES = ( - 'preimage-sha-256', 'prefix-sha-256', 'rsa-sha-256') +SUPPORTED_CRYPTOCONDITION_TYPES = ("threshold-sha-256", "ed25519-sha-256") +UNSUPPORTED_CRYPTOCONDITION_TYPES = ("preimage-sha-256", "prefix-sha-256", "rsa-sha-256") ################################################################################ # Test of schema utils -def _test_additionalproperties(node, path=''): +def _test_additionalproperties(node, path=""): """Validate that each object node has additionalProperties set, so that objects with junk keys do not pass as valid. 
""" if isinstance(node, list): for i, nnode in enumerate(node): - _test_additionalproperties(nnode, path + str(i) + '.') + _test_additionalproperties(nnode, path + str(i) + ".") if isinstance(node, dict): - if node.get('type') == 'object': - assert 'additionalProperties' in node, \ - ('additionalProperties not set at path:' + path) + if node.get("type") == "object": + assert "additionalProperties" in node, "additionalProperties not set at path:" + path for name, val in node.items(): - _test_additionalproperties(val, path + name + '.') + _test_additionalproperties(val, path + name + ".") def test_transaction_schema_additionalproperties(): @@ -69,63 +67,76 @@ def test_validate_transaction_fails(): def test_validate_failure_inconsistent(): - with patch('jsonschema.validate'): + with patch("jsonschema.validate"): with raises(SchemaValidationError): validate_transaction_schema({}) -@given(condition_uri=regex( - r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=({})' - r'&cost=[0-9]+(?![\n])$'.format('|'.join( - t for t in SUPPORTED_CRYPTOCONDITION_TYPES)))) +@given( + condition_uri=regex( + r"^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=({})" + r"&cost=[0-9]+(?![\n])$".format("|".join(t for t in SUPPORTED_CRYPTOCONDITION_TYPES)) + ) +) def test_condition_uri_with_supported_fpt(dummy_transaction, condition_uri): - dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri + dummy_transaction["outputs"][0]["condition"]["uri"] = condition_uri validate_transaction_schema(dummy_transaction) -@given(condition_uri=regex(r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=' - r'({})&cost=[0-9]+(?![\n])$'.format( - '|'.join(UNSUPPORTED_CRYPTOCONDITION_TYPES)))) +@given( + condition_uri=regex( + r"^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=" + r"({})&cost=[0-9]+(?![\n])$".format("|".join(UNSUPPORTED_CRYPTOCONDITION_TYPES)) + ) +) def test_condition_uri_with_unsupported_fpt(dummy_transaction, condition_uri): - dummy_transaction['outputs'][0]['condition']['uri'] = 
condition_uri + dummy_transaction["outputs"][0]["condition"]["uri"] = condition_uri with raises(SchemaValidationError): validate_transaction_schema(dummy_transaction) -@given(condition_uri=regex( - r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=(?!{})' - r'&cost=[0-9]+(?![\n])$'.format('$|'.join( - t for t in SUPPORTED_CRYPTOCONDITION_TYPES)))) +@given( + condition_uri=regex( + r"^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=(?!{})" + r"&cost=[0-9]+(?![\n])$".format("$|".join(t for t in SUPPORTED_CRYPTOCONDITION_TYPES)) + ) +) def test_condition_uri_with_unknown_fpt(dummy_transaction, condition_uri): - dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri + dummy_transaction["outputs"][0]["condition"]["uri"] = condition_uri with raises(SchemaValidationError): validate_transaction_schema(dummy_transaction) -@given(condition_uri=regex( - r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{0,86})\?fpt=threshold-sha-256' - r'&cost=[0-9]+&subtypes=ed25519-sha-256(?![\n])$')) -def test_condition_uri_with_supported_subtype(dummy_transaction, - condition_uri): - dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri +@given( + condition_uri=regex( + r"^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{0,86})\?fpt=threshold-sha-256" + r"&cost=[0-9]+&subtypes=ed25519-sha-256(?![\n])$" + ) +) +def test_condition_uri_with_supported_subtype(dummy_transaction, condition_uri): + dummy_transaction["outputs"][0]["condition"]["uri"] = condition_uri validate_transaction_schema(dummy_transaction) -@given(condition_uri=regex( - r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{0,86})\?fpt=threshold-sha-256&cost=' - r'[0-9]+&subtypes=(preimage-sha-256|prefix-sha-256|rsa-sha-256)(?![\n])$')) -def test_condition_uri_with_unsupported_subtype(dummy_transaction, - condition_uri): - dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri +@given( + condition_uri=regex( + r"^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{0,86})\?fpt=threshold-sha-256&cost=" + 
r"[0-9]+&subtypes=(preimage-sha-256|prefix-sha-256|rsa-sha-256)(?![\n])$" + ) +) +def test_condition_uri_with_unsupported_subtype(dummy_transaction, condition_uri): + dummy_transaction["outputs"][0]["condition"]["uri"] = condition_uri with raises(SchemaValidationError): validate_transaction_schema(dummy_transaction) -@given(condition_uri=regex( - r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=threshold-sha-256' - r'&cost=[0-9]+&subtypes=(?!{})(?![\n])$'.format('$|'.join( - t for t in SUPPORTED_CRYPTOCONDITION_TYPES)))) +@given( + condition_uri=regex( + r"^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=threshold-sha-256" + r"&cost=[0-9]+&subtypes=(?!{})(?![\n])$".format("$|".join(t for t in SUPPORTED_CRYPTOCONDITION_TYPES)) + ) +) def test_condition_uri_with_unknown_subtype(dummy_transaction, condition_uri): - dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri + dummy_transaction["outputs"][0]["condition"]["uri"] = condition_uri with raises(SchemaValidationError): validate_transaction_schema(dummy_transaction) diff --git a/tests/common/test_transaction.py b/tests/common/test_transaction.py index 691e475..f1c5fad 100644 --- a/tests/common/test_transaction.py +++ b/tests/common/test_transaction.py @@ -14,6 +14,7 @@ from cryptoconditions import Ed25519Sha256 from planetmint.transactions.types.assets.create import Create from planetmint.transactions.types.assets.transfer import Transfer from pytest import mark, raises + try: from hashlib import sha3_256 except ImportError: @@ -27,9 +28,9 @@ def test_input_serialization(ffill_uri, user_pub): from cryptoconditions import Fulfillment expected = { - 'owners_before': [user_pub], - 'fulfillment': ffill_uri, - 'fulfills': None, + "owners_before": [user_pub], + "fulfillment": ffill_uri, + "fulfills": None, } input = Input(Fulfillment.from_uri(ffill_uri), [user_pub]) assert input.to_dict() == expected @@ -41,23 +42,23 @@ def test_input_deserialization_with_uri(ffill_uri, user_pub): expected = 
Input(Fulfillment.from_uri(ffill_uri), [user_pub]) ffill = { - 'owners_before': [user_pub], - 'fulfillment': ffill_uri, - 'fulfills': None, + "owners_before": [user_pub], + "fulfillment": ffill_uri, + "fulfills": None, } input = Input.from_dict(ffill) assert input == expected -@mark.skip(reason='None is tolerated because it is None before fulfilling.') +@mark.skip(reason="None is tolerated because it is None before fulfilling.") def test_input_deserialization_with_invalid_input(user_pub): from planetmint.transactions.common.transaction import Input ffill = { - 'owners_before': [user_pub], - 'fulfillment': None, - 'fulfills': None, + "owners_before": [user_pub], + "fulfillment": None, + "fulfills": None, } with raises(TypeError): Input.from_dict(ffill) @@ -68,9 +69,9 @@ def test_input_deserialization_with_invalid_fulfillment_uri(user_pub): from planetmint.transactions.common.transaction import Input ffill = { - 'owners_before': [user_pub], - 'fulfillment': 'an invalid fulfillment', - 'fulfills': None, + "owners_before": [user_pub], + "fulfillment": "an invalid fulfillment", + "fulfills": None, } with raises(InvalidSignature): Input.from_dict(ffill) @@ -82,9 +83,9 @@ def test_input_deserialization_with_unsigned_fulfillment(ffill_uri, user_pub): expected = Input(Fulfillment.from_uri(ffill_uri), [user_pub]) ffill = { - 'owners_before': [user_pub], - 'fulfillment': Fulfillment.from_uri(ffill_uri), - 'fulfills': None, + "owners_before": [user_pub], + "fulfillment": Fulfillment.from_uri(ffill_uri), + "fulfills": None, } input = Input.from_dict(ffill) @@ -95,15 +96,15 @@ def test_output_serialization(user_Ed25519, user_pub): from planetmint.transactions.common.transaction import Output expected = { - 'condition': { - 'uri': user_Ed25519.condition_uri, - 'details': { - 'type': 'ed25519-sha-256', - 'public_key': b58encode(user_Ed25519.public_key).decode(), + "condition": { + "uri": user_Ed25519.condition_uri, + "details": { + "type": "ed25519-sha-256", + "public_key": 
b58encode(user_Ed25519.public_key).decode(), }, }, - 'public_keys': [user_pub], - 'amount': '1', + "public_keys": [user_pub], + "amount": "1", } cond = Output(user_Ed25519, [user_pub], 1) @@ -116,15 +117,15 @@ def test_output_deserialization(user_Ed25519, user_pub): expected = Output(user_Ed25519, [user_pub], 1) cond = { - 'condition': { - 'uri': user_Ed25519.condition_uri, - 'details': { - 'type': 'ed25519-sha-256', - 'public_key': b58encode(user_Ed25519.public_key).decode(), + "condition": { + "uri": user_Ed25519.condition_uri, + "details": { + "type": "ed25519-sha-256", + "public_key": b58encode(user_Ed25519.public_key).decode(), }, }, - 'public_keys': [user_pub], - 'amount': '1', + "public_keys": [user_pub], + "amount": "1", } cond = Output.from_dict(cond) @@ -135,15 +136,15 @@ def test_output_hashlock_serialization(): from planetmint.transactions.common.transaction import Output from cryptoconditions import PreimageSha256 - secret = b'wow much secret' + secret = b"wow much secret" hashlock = PreimageSha256(preimage=secret).condition_uri expected = { - 'condition': { - 'uri': hashlock, + "condition": { + "uri": hashlock, }, - 'public_keys': None, - 'amount': '1', + "public_keys": None, + "amount": "1", } cond = Output(hashlock, amount=1) @@ -154,16 +155,14 @@ def test_output_hashlock_deserialization(): from planetmint.transactions.common.transaction import Output from cryptoconditions import PreimageSha256 - secret = b'wow much secret' + secret = b"wow much secret" hashlock = PreimageSha256(preimage=secret).condition_uri expected = Output(hashlock, amount=1) cond = { - 'condition': { - 'uri': hashlock - }, - 'public_keys': None, - 'amount': '1', + "condition": {"uri": hashlock}, + "public_keys": None, + "amount": "1", } cond = Output.from_dict(cond) @@ -177,7 +176,7 @@ def test_invalid_output_initialization(cond_uri, user_pub): with raises(TypeError): Output(cond_uri, user_pub) with raises(TypeError): - Output(cond_uri, [user_pub], 'amount') + Output(cond_uri, 
[user_pub], "amount") with raises(AmountError): Output(cond_uri, [user_pub], 0) @@ -201,8 +200,7 @@ def test_generate_output_split_half_recursive(user_pub, user2_pub, user3_pub): assert cond.fulfillment.to_dict() == expected.to_dict() -def test_generate_outputs_split_half_single_owner(user_pub, - user2_pub, user3_pub): +def test_generate_outputs_split_half_single_owner(user_pub, user2_pub, user3_pub): from planetmint.transactions.common.transaction import Output from cryptoconditions import Ed25519Sha256, ThresholdSha256 @@ -265,7 +263,7 @@ def test_generate_output_invalid_parameters(user_pub, user2_pub, user3_pub): with raises(ValueError): Output.generate([], 1) with raises(TypeError): - Output.generate('not a list', 1) + Output.generate("not a list", 1) with raises(ValueError): Output.generate([[user_pub, [user2_pub, [user3_pub]]]], 1) with raises(ValueError): @@ -278,38 +276,23 @@ def test_invalid_transaction_initialization(asset_definition): from planetmint.transactions.common.transaction import Transaction with raises(ValueError): - Transaction(operation='invalid operation', asset=asset_definition) + Transaction(operation="invalid operation", asset=asset_definition) with raises(TypeError): - Transaction(operation='CREATE', asset='invalid asset') + Transaction(operation="CREATE", asset="invalid asset") with raises(TypeError): - Transaction(operation='TRANSFER', asset={}) + Transaction(operation="TRANSFER", asset={}) with raises(TypeError): - Transaction( - operation='CREATE', - asset=asset_definition, - outputs='invalid outputs' - ) + Transaction(operation="CREATE", asset=asset_definition, outputs="invalid outputs") with raises(TypeError): - Transaction( - operation='CREATE', - asset=asset_definition, - outputs=[], - inputs='invalid inputs' - ) + Transaction(operation="CREATE", asset=asset_definition, outputs=[], inputs="invalid inputs") with raises(TypeError): - Transaction( - operation='CREATE', - asset=asset_definition, - outputs=[], - inputs=[], - 
metadata='invalid metadata' - ) + Transaction(operation="CREATE", asset=asset_definition, outputs=[], inputs=[], metadata="invalid metadata") def test_create_default_asset_on_tx_initialization(asset_definition): from planetmint.transactions.common.transaction import Transaction - expected = {'data': None} + expected = {"data": None} tx = Transaction(Transaction.CREATE, asset=expected) asset = tx.asset @@ -320,21 +303,20 @@ def test_transaction_serialization(user_input, user_output, data): from planetmint.transactions.common.transaction import Transaction expected = { - 'id': None, - 'version': Transaction.VERSION, + "id": None, + "version": Transaction.VERSION, # NOTE: This test assumes that Inputs and Outputs can # successfully be serialized - 'inputs': [user_input.to_dict()], - 'outputs': [user_output.to_dict()], - 'operation': Transaction.CREATE, - 'metadata': None, - 'asset': { - 'data': data, - } + "inputs": [user_input.to_dict()], + "outputs": [user_output.to_dict()], + "operation": Transaction.CREATE, + "metadata": None, + "asset": { + "data": data, + }, } - tx = Transaction(Transaction.CREATE, {'data': data}, [user_input], - [user_output]) + tx = Transaction(Transaction.CREATE, {"data": data}, [user_input], [user_output]) tx_dict = tx.to_dict() assert tx_dict == expected @@ -343,6 +325,7 @@ def test_transaction_serialization(user_input, user_output, data): def test_transaction_deserialization(tri_state_transaction): from planetmint.transactions.common.transaction import Transaction from .utils import validate_transaction_model + tx = Transaction.from_dict(tri_state_transaction) validate_transaction_model(tx) @@ -353,16 +336,16 @@ def test_invalid_input_initialization(user_input, user_pub): with raises(TypeError): Input(user_input, user_pub) with raises(TypeError): - Input(user_input, tx_input='somethingthatiswrong') + Input(user_input, tx_input="somethingthatiswrong") def test_transaction_link_serialization(): from planetmint.transactions.common.transaction 
import TransactionLink - tx_id = 'a transaction id' + tx_id = "a transaction id" expected = { - 'transaction_id': tx_id, - 'output_index': 0, + "transaction_id": tx_id, + "output_index": 0, } tx_link = TransactionLink(tx_id, 0) @@ -381,11 +364,11 @@ def test_transaction_link_serialization_with_empty_payload(): def test_transaction_link_deserialization(): from planetmint.transactions.common.transaction import TransactionLink - tx_id = 'a transaction id' + tx_id = "a transaction id" expected = TransactionLink(tx_id, 0) tx_link = { - 'transaction_id': tx_id, - 'output_index': 0, + "transaction_id": tx_id, + "output_index": 0, } tx_link = TransactionLink.from_dict(tx_link) @@ -413,8 +396,8 @@ def test_transaction_link_empty_to_uri(): def test_transaction_link_to_uri(): from planetmint.transactions.common.transaction import TransactionLink - expected = 'path/transactions/abc/outputs/0' - tx_link = TransactionLink('abc', 0).to_uri('path') + expected = "path/transactions/abc/outputs/0" + tx_link = TransactionLink("abc", 0).to_uri("path") assert expected == tx_link @@ -423,9 +406,9 @@ def test_cast_transaction_link_to_boolean(): from planetmint.transactions.common.transaction import TransactionLink assert bool(TransactionLink()) is False - assert bool(TransactionLink('a', None)) is False - assert bool(TransactionLink(None, 'b')) is False - assert bool(TransactionLink('a', 'b')) is True + assert bool(TransactionLink("a", None)) is False + assert bool(TransactionLink(None, "b")) is False + assert bool(TransactionLink("a", "b")) is True assert bool(TransactionLink(False, False)) is True @@ -452,10 +435,11 @@ def test_add_input_to_tx(user_input, asset_definition): def test_add_input_to_tx_with_invalid_parameters(asset_definition): from planetmint.transactions.common.transaction import Transaction + tx = Transaction(Transaction.CREATE, asset_definition) with raises(TypeError): - tx.add_input('somewronginput') + tx.add_input("somewronginput") def 
test_add_output_to_tx(user_output, user_input, asset_definition): @@ -472,10 +456,11 @@ def test_add_output_to_tx(user_output, user_input, asset_definition): def test_add_output_to_tx_with_invalid_parameters(asset_definition): from planetmint.transactions.common.transaction import Transaction + tx = Transaction(Transaction.CREATE, asset_definition, [], []) with raises(TypeError): - tx.add_output('somewronginput') + tx.add_output("somewronginput") def test_sign_with_invalid_parameters(utx, user_priv): @@ -485,52 +470,41 @@ def test_sign_with_invalid_parameters(utx, user_priv): utx.sign(user_priv) -def test_validate_tx_simple_create_signature(user_input, user_output, user_priv, - asset_definition): +def test_validate_tx_simple_create_signature(user_input, user_output, user_priv, asset_definition): from planetmint.transactions.common.transaction import Transaction from .utils import validate_transaction_model tx = Transaction(Transaction.CREATE, asset_definition, [user_input], [user_output]) expected = deepcopy(user_output) tx_dict = tx.to_dict() - tx_dict['inputs'][0]['fulfillment'] = None - serialized_tx = json.dumps(tx_dict, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + tx_dict["inputs"][0]["fulfillment"] = None + serialized_tx = json.dumps(tx_dict, sort_keys=True, separators=(",", ":"), ensure_ascii=True) message = sha3_256(serialized_tx.encode()).digest() expected.fulfillment.sign(message, b58decode(user_priv)) tx.sign([user_priv]) - assert tx.inputs[0].to_dict()['fulfillment'] == \ - expected.fulfillment.serialize_uri() + assert tx.inputs[0].to_dict()["fulfillment"] == expected.fulfillment.serialize_uri() assert tx.inputs_valid() is True validate_transaction_model(tx) -def test_invoke_simple_signature_fulfillment_with_invalid_params(utx, - user_input): +def test_invoke_simple_signature_fulfillment_with_invalid_params(utx, user_input): from planetmint.transactions.common.exceptions import KeypairMismatchException with 
raises(KeypairMismatchException): - invalid_key_pair = {'wrong_pub_key': 'wrong_priv_key'} - utx._sign_simple_signature_fulfillment(user_input, - 'somemessage', - invalid_key_pair) + invalid_key_pair = {"wrong_pub_key": "wrong_priv_key"} + utx._sign_simple_signature_fulfillment(user_input, "somemessage", invalid_key_pair) -def test_sign_threshold_with_invalid_params(utx, user_user2_threshold_input, - user3_pub, user3_priv): +def test_sign_threshold_with_invalid_params(utx, user_user2_threshold_input, user3_pub, user3_priv): from planetmint.transactions.common.exceptions import KeypairMismatchException with raises(KeypairMismatchException): - utx._sign_threshold_signature_fulfillment(user_user2_threshold_input, - 'somemessage', - {user3_pub: user3_priv}) + utx._sign_threshold_signature_fulfillment(user_user2_threshold_input, "somemessage", {user3_pub: user3_priv}) with raises(KeypairMismatchException): - user_user2_threshold_input.owners_before = [58 * 'a'] - utx._sign_threshold_signature_fulfillment(user_user2_threshold_input, - 'somemessage', - None) + user_user2_threshold_input.owners_before = [58 * "a"] + utx._sign_threshold_signature_fulfillment(user_user2_threshold_input, "somemessage", None) def test_validate_input_with_invalid_parameters(utx): @@ -543,103 +517,86 @@ def test_validate_input_with_invalid_parameters(utx): assert not valid -def test_validate_tx_threshold_create_signature(user_user2_threshold_input, - user_user2_threshold_output, - user_pub, - user2_pub, - user_priv, - user2_priv, - asset_definition): +def test_validate_tx_threshold_create_signature( + user_user2_threshold_input, + user_user2_threshold_output, + user_pub, + user2_pub, + user_priv, + user2_priv, + asset_definition, +): from planetmint.transactions.common.transaction import Transaction from .utils import validate_transaction_model - tx = Transaction(Transaction.CREATE, asset_definition, - [user_user2_threshold_input], - [user_user2_threshold_output]) + tx = 
Transaction(Transaction.CREATE, asset_definition, [user_user2_threshold_input], [user_user2_threshold_output]) tx_dict = tx.to_dict() - tx_dict['inputs'][0]['fulfillment'] = None - serialized_tx = json.dumps(tx_dict, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + tx_dict["inputs"][0]["fulfillment"] = None + serialized_tx = json.dumps(tx_dict, sort_keys=True, separators=(",", ":"), ensure_ascii=True) message = sha3_256(serialized_tx.encode()).digest() expected = deepcopy(user_user2_threshold_output) - expected.fulfillment.subconditions[0]['body'].sign( - message, b58decode(user_priv)) - expected.fulfillment.subconditions[1]['body'].sign( - message, b58decode(user2_priv)) + expected.fulfillment.subconditions[0]["body"].sign(message, b58decode(user_priv)) + expected.fulfillment.subconditions[1]["body"].sign(message, b58decode(user2_priv)) tx.sign([user_priv, user2_priv]) - assert tx.inputs[0].to_dict()['fulfillment'] == \ - expected.fulfillment.serialize_uri() + assert tx.inputs[0].to_dict()["fulfillment"] == expected.fulfillment.serialize_uri() assert tx.inputs_valid() is True validate_transaction_model(tx) -def test_validate_tx_threshold_duplicated_pk(user_pub, user_priv, - asset_definition): +def test_validate_tx_threshold_duplicated_pk(user_pub, user_priv, asset_definition): from cryptoconditions import Ed25519Sha256, ThresholdSha256 from planetmint.transactions.common.transaction import Input, Output, Transaction threshold = ThresholdSha256(threshold=2) - threshold.add_subfulfillment( - Ed25519Sha256(public_key=b58decode(user_pub))) - threshold.add_subfulfillment( - Ed25519Sha256(public_key=b58decode(user_pub))) + threshold.add_subfulfillment(Ed25519Sha256(public_key=b58decode(user_pub))) + threshold.add_subfulfillment(Ed25519Sha256(public_key=b58decode(user_pub))) threshold_input = Input(threshold, [user_pub, user_pub]) threshold_output = Output(threshold, [user_pub, user_pub]) - tx = Transaction(Transaction.CREATE, asset_definition, - 
[threshold_input], [threshold_output]) + tx = Transaction(Transaction.CREATE, asset_definition, [threshold_input], [threshold_output]) tx_dict = tx.to_dict() - tx_dict['inputs'][0]['fulfillment'] = None - serialized_tx = json.dumps(tx_dict, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + tx_dict["inputs"][0]["fulfillment"] = None + serialized_tx = json.dumps(tx_dict, sort_keys=True, separators=(",", ":"), ensure_ascii=True) message = sha3_256(serialized_tx.encode()).digest() expected = deepcopy(threshold_input) - expected.fulfillment.subconditions[0]['body'].sign( - message, b58decode(user_priv)) - expected.fulfillment.subconditions[1]['body'].sign( - message, b58decode(user_priv)) + expected.fulfillment.subconditions[0]["body"].sign(message, b58decode(user_priv)) + expected.fulfillment.subconditions[1]["body"].sign(message, b58decode(user_priv)) tx.sign([user_priv, user_priv]) subconditions = tx.inputs[0].fulfillment.subconditions expected_subconditions = expected.fulfillment.subconditions - assert subconditions[0]['body'].to_dict()['signature'] == \ - expected_subconditions[0]['body'].to_dict()['signature'] - assert subconditions[1]['body'].to_dict()['signature'] == \ - expected_subconditions[1]['body'].to_dict()['signature'] + assert subconditions[0]["body"].to_dict()["signature"] == expected_subconditions[0]["body"].to_dict()["signature"] + assert subconditions[1]["body"].to_dict()["signature"] == expected_subconditions[1]["body"].to_dict()["signature"] - assert tx.inputs[0].to_dict()['fulfillment'] == \ - expected.fulfillment.serialize_uri() + assert tx.inputs[0].to_dict()["fulfillment"] == expected.fulfillment.serialize_uri() assert tx.inputs_valid() is True -def test_multiple_input_validation_of_transfer_tx(user_input, user_output, - user_priv, user2_pub, - user2_priv, user3_pub, - user3_priv, - asset_definition): - from planetmint.transactions.common.transaction import ( - Transaction, TransactionLink, Input, Output) +def 
test_multiple_input_validation_of_transfer_tx( + user_input, user_output, user_priv, user2_pub, user2_priv, user3_pub, user3_priv, asset_definition +): + from planetmint.transactions.common.transaction import Transaction, TransactionLink, Input, Output from cryptoconditions import Ed25519Sha256 from .utils import validate_transaction_model - tx = Transaction(Transaction.CREATE, asset_definition, [user_input], - [user_output, deepcopy(user_output)]) + tx = Transaction(Transaction.CREATE, asset_definition, [user_input], [user_output, deepcopy(user_output)]) tx.sign([user_priv]) - inputs = [Input(cond.fulfillment, cond.public_keys, - TransactionLink(tx.id, index)) - for index, cond in enumerate(tx.outputs)] - outputs = [Output(Ed25519Sha256(public_key=b58decode(user3_pub)), - [user3_pub]), - Output(Ed25519Sha256(public_key=b58decode(user3_pub)), - [user3_pub])] - transfer_tx = Transaction('TRANSFER', {'id': tx.id}, inputs, outputs) + inputs = [ + Input(cond.fulfillment, cond.public_keys, TransactionLink(tx.id, index)) + for index, cond in enumerate(tx.outputs) + ] + outputs = [ + Output(Ed25519Sha256(public_key=b58decode(user3_pub)), [user3_pub]), + Output(Ed25519Sha256(public_key=b58decode(user3_pub)), [user3_pub]), + ] + transfer_tx = Transaction("TRANSFER", {"id": tx.id}, inputs, outputs) transfer_tx = transfer_tx.sign([user_priv]) assert transfer_tx.inputs_valid(tx.outputs) is True @@ -648,20 +605,21 @@ def test_multiple_input_validation_of_transfer_tx(user_input, user_output, def test_validate_inputs_of_transfer_tx_with_invalid_params( - transfer_tx, cond_uri, utx, user2_pub, user_priv, ffill_uri): + transfer_tx, cond_uri, utx, user2_pub, user_priv, ffill_uri +): from planetmint.transactions.common.transaction import Output from cryptoconditions import Ed25519Sha256 - invalid_out = Output(Ed25519Sha256.from_uri(ffill_uri), ['invalid']) + invalid_out = Output(Ed25519Sha256.from_uri(ffill_uri), ["invalid"]) assert transfer_tx.inputs_valid([invalid_out]) is False 
invalid_out = utx.outputs[0] - invalid_out.public_key = 'invalid' + invalid_out.public_key = "invalid" assert transfer_tx.inputs_valid([invalid_out]) is True with raises(TypeError): assert transfer_tx.inputs_valid(None) is False with raises(AttributeError): - transfer_tx.inputs_valid('not a list') + transfer_tx.inputs_valid("not a list") with raises(ValueError): transfer_tx.inputs_valid([]) with raises(TypeError): @@ -674,45 +632,34 @@ def test_create_create_transaction_single_io(user_output, user_pub, data): from .utils import validate_transaction_model expected = { - 'outputs': [user_output.to_dict()], - 'metadata': data, - 'asset': { - 'data': data, + "outputs": [user_output.to_dict()], + "metadata": data, + "asset": { + "data": data, }, - 'inputs': [ - { - 'owners_before': [ - user_pub - ], - 'fulfillment': None, - 'fulfills': None - } - ], - 'operation': 'CREATE', - 'version': Transaction.VERSION, + "inputs": [{"owners_before": [user_pub], "fulfillment": None, "fulfills": None}], + "operation": "CREATE", + "version": Transaction.VERSION, } - tx = Create.generate([user_pub], [([user_pub], 1)], metadata=data, - asset=data) + tx = Create.generate([user_pub], [([user_pub], 1)], metadata=data, asset=data) tx_dict = tx.to_dict() - tx_dict['inputs'][0]['fulfillment'] = None - tx_dict.pop('id') + tx_dict["inputs"][0]["fulfillment"] = None + tx_dict.pop("id") assert tx_dict == expected validate_transaction_model(tx) -def test_validate_single_io_create_transaction(user_pub, user_priv, data, - asset_definition): +def test_validate_single_io_create_transaction(user_pub, user_priv, data, asset_definition): tx = Create.generate([user_pub], [([user_pub], 1)], metadata=data) tx = tx.sign([user_priv]) assert tx.inputs_valid() is True -def test_create_create_transaction_multiple_io(user_output, user2_output, user_pub, - user2_pub, asset_definition): +def test_create_create_transaction_multiple_io(user_output, user2_output, user_pub, user2_pub, asset_definition): from 
planetmint.transactions.common.transaction import Transaction, Input # a fulfillment for a create transaction with multiple `owners_before` @@ -720,75 +667,66 @@ def test_create_create_transaction_multiple_io(user_output, user2_output, user_p # weight = len(owners_before) input = Input.generate([user_pub, user2_pub]).to_dict() expected = { - 'outputs': [user_output.to_dict(), user2_output.to_dict()], - 'metadata': { - 'message': 'hello' - }, - 'inputs': [input], - 'operation': 'CREATE', - 'version': Transaction.VERSION + "outputs": [user_output.to_dict(), user2_output.to_dict()], + "metadata": {"message": "hello"}, + "inputs": [input], + "operation": "CREATE", + "version": Transaction.VERSION, } - tx = Create.generate([user_pub, user2_pub], - [([user_pub], 1), ([user2_pub], 1)], - metadata={'message': 'hello'}).to_dict() - tx.pop('id') - tx.pop('asset') + tx = Create.generate( + [user_pub, user2_pub], [([user_pub], 1), ([user2_pub], 1)], metadata={"message": "hello"} + ).to_dict() + tx.pop("id") + tx.pop("asset") assert tx == expected -def test_validate_multiple_io_create_transaction(user_pub, user_priv, - user2_pub, user2_priv, - asset_definition): +def test_validate_multiple_io_create_transaction(user_pub, user_priv, user2_pub, user2_priv, asset_definition): from .utils import validate_transaction_model - tx = Create.generate([user_pub, user2_pub], - [([user_pub], 1), ([user2_pub], 1)], - metadata={'message': 'hello'}) + tx = Create.generate([user_pub, user2_pub], [([user_pub], 1), ([user2_pub], 1)], metadata={"message": "hello"}) tx = tx.sign([user_priv, user2_priv]) assert tx.inputs_valid() is True validate_transaction_model(tx) -def test_create_create_transaction_threshold(user_pub, user2_pub, user3_pub, - user_user2_threshold_output, - user_user2_threshold_input, data): +def test_create_create_transaction_threshold( + user_pub, user2_pub, user3_pub, user_user2_threshold_output, user_user2_threshold_input, data +): from 
planetmint.transactions.common.transaction import Transaction expected = { - 'outputs': [user_user2_threshold_output.to_dict()], - 'metadata': data, - 'asset': { - 'data': data, + "outputs": [user_user2_threshold_output.to_dict()], + "metadata": data, + "asset": { + "data": data, }, - 'inputs': [ + "inputs": [ { - 'owners_before': [ + "owners_before": [ user_pub, ], - 'fulfillment': None, - 'fulfills': None, + "fulfillment": None, + "fulfills": None, }, ], - 'operation': 'CREATE', - 'version': Transaction.VERSION + "operation": "CREATE", + "version": Transaction.VERSION, } - tx = Create.generate([user_pub], [([user_pub, user2_pub], 1)], - metadata=data, asset=data) + tx = Create.generate([user_pub], [([user_pub, user2_pub], 1)], metadata=data, asset=data) tx_dict = tx.to_dict() - tx_dict.pop('id') - tx_dict['inputs'][0]['fulfillment'] = None + tx_dict.pop("id") + tx_dict["inputs"][0]["fulfillment"] = None assert tx_dict == expected -def test_validate_threshold_create_transaction(user_pub, user_priv, user2_pub, - data, asset_definition): +def test_validate_threshold_create_transaction(user_pub, user_priv, user2_pub, data, asset_definition): from .utils import validate_transaction_model - tx = Create.generate([user_pub], [([user_pub, user2_pub], 1)], - metadata=data) + tx = Create.generate([user_pub], [([user_pub, user2_pub], 1)], metadata=data) tx = tx.sign([user_priv]) assert tx.inputs_valid() is True @@ -797,9 +735,9 @@ def test_validate_threshold_create_transaction(user_pub, user_priv, user2_pub, def test_create_create_transaction_with_invalid_parameters(user_pub): with raises(TypeError): - Create.generate('not a list') + Create.generate("not a list") with raises(TypeError): - Create.generate([], 'not a list') + Create.generate([], "not a list") with raises(ValueError): Create.generate([], [user_pub]) with raises(ValueError): @@ -809,12 +747,9 @@ def test_create_create_transaction_with_invalid_parameters(user_pub): with raises(ValueError): 
Create.generate([user_pub], [([user_pub],)]) with raises(TypeError): - Create.generate([user_pub], [([user_pub], 1)], - metadata='not a dict or none') + Create.generate([user_pub], [([user_pub], 1)], metadata="not a dict or none") with raises(TypeError): - Create.generate([user_pub], - [([user_pub], 1)], - asset='not a dict or none') + Create.generate([user_pub], [([user_pub], 1)], asset="not a dict or none") def test_outputs_to_inputs(tx): @@ -827,50 +762,44 @@ def test_outputs_to_inputs(tx): assert input.fulfills.output == 0 -def test_create_transfer_transaction_single_io(tx, user_pub, user2_pub, - user2_output, user_priv): +def test_create_transfer_transaction_single_io(tx, user_pub, user2_pub, user2_output, user_priv): from planetmint.transactions.common.transaction import Transaction from .utils import validate_transaction_model expected = { - 'id': None, - 'outputs': [user2_output.to_dict()], - 'metadata': None, - 'asset': { - 'id': tx.id, + "id": None, + "outputs": [user2_output.to_dict()], + "metadata": None, + "asset": { + "id": tx.id, }, - 'inputs': [ + "inputs": [ { - 'owners_before': [ - user_pub - ], - 'fulfillment': None, - 'fulfills': { - 'transaction_id': tx.id, - 'output_index': 0 - } + "owners_before": [user_pub], + "fulfillment": None, + "fulfills": {"transaction_id": tx.id, "output_index": 0}, } ], - 'operation': 'TRANSFER', - 'version': Transaction.VERSION + "operation": "TRANSFER", + "version": Transaction.VERSION, } inputs = tx.to_inputs([0]) - transfer_tx = Transfer.generate(inputs, [([user2_pub], 1)], - asset_id=tx.id) + transfer_tx = Transfer.generate(inputs, [([user2_pub], 1)], asset_id=tx.id) transfer_tx = transfer_tx.sign([user_priv]) transfer_tx = transfer_tx.to_dict() expected_input = deepcopy(inputs[0]) - json_serialized_tx = json.dumps(expected, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + json_serialized_tx = json.dumps(expected, sort_keys=True, separators=(",", ":"), ensure_ascii=True) message = 
sha3_256(json_serialized_tx.encode()) - message.update('{}{}'.format( - expected['inputs'][0]['fulfills']['transaction_id'], - expected['inputs'][0]['fulfills']['output_index'], - ).encode()) + message.update( + "{}{}".format( + expected["inputs"][0]["fulfills"]["transaction_id"], + expected["inputs"][0]["fulfills"]["output_index"], + ).encode() + ) expected_input.fulfillment.sign(message.digest(), b58decode(user_priv)) expected_ffill = expected_input.fulfillment.serialize_uri() - transfer_ffill = transfer_tx['inputs'][0]['fulfillment'] + transfer_ffill = transfer_tx["inputs"][0]["fulfillment"] assert transfer_ffill == expected_ffill @@ -880,47 +809,34 @@ def test_create_transfer_transaction_single_io(tx, user_pub, user2_pub, validate_transaction_model(transfer_tx) -def test_create_transfer_transaction_multiple_io(user_pub, user_priv, - user2_pub, user2_priv, - user3_pub, user2_output, - asset_definition): +def test_create_transfer_transaction_multiple_io( + user_pub, user_priv, user2_pub, user2_priv, user3_pub, user2_output, asset_definition +): from planetmint.transactions.common.transaction import Transaction - tx = Create.generate([user_pub], [([user_pub], 1), ([user2_pub], 1)], - metadata={'message': 'hello'}) + tx = Create.generate([user_pub], [([user_pub], 1), ([user2_pub], 1)], metadata={"message": "hello"}) tx = tx.sign([user_priv]) expected = { - 'outputs': [user2_output.to_dict(), user2_output.to_dict()], - 'metadata': None, - 'inputs': [ + "outputs": [user2_output.to_dict(), user2_output.to_dict()], + "metadata": None, + "inputs": [ { - 'owners_before': [ - user_pub - ], - 'fulfillment': None, - 'fulfills': { - 'transaction_id': tx.id, - 'output_index': 0 - } - }, { - 'owners_before': [ - user2_pub - ], - 'fulfillment': None, - 'fulfills': { - 'transaction_id': tx.id, - 'output_index': 1 - } - } + "owners_before": [user_pub], + "fulfillment": None, + "fulfills": {"transaction_id": tx.id, "output_index": 0}, + }, + { + "owners_before": [user2_pub], + 
"fulfillment": None, + "fulfills": {"transaction_id": tx.id, "output_index": 1}, + }, ], - 'operation': 'TRANSFER', - 'version': Transaction.VERSION + "operation": "TRANSFER", + "version": Transaction.VERSION, } - transfer_tx = Transfer.generate(tx.to_inputs(), - [([user2_pub], 1), ([user2_pub], 1)], - asset_id=tx.id) + transfer_tx = Transfer.generate(tx.to_inputs(), [([user2_pub], 1), ([user2_pub], 1)], asset_id=tx.id) transfer_tx = transfer_tx.sign([user_priv, user2_priv]) assert len(transfer_tx.inputs) == 2 @@ -929,10 +845,10 @@ def test_create_transfer_transaction_multiple_io(user_pub, user_priv, assert transfer_tx.inputs_valid(tx.outputs) is True transfer_tx = transfer_tx.to_dict() - transfer_tx['inputs'][0]['fulfillment'] = None - transfer_tx['inputs'][1]['fulfillment'] = None - transfer_tx.pop('asset') - transfer_tx.pop('id') + transfer_tx["inputs"][0]["fulfillment"] = None + transfer_tx["inputs"][1]["fulfillment"] = None + transfer_tx.pop("asset") + transfer_tx.pop("id") assert expected == transfer_tx @@ -943,23 +859,22 @@ def test_create_transfer_with_invalid_parameters(tx, user_pub): with raises(ValueError): Transfer.generate([], [], tx.id) with raises(TypeError): - Transfer.generate(['fulfillment'], {}, tx.id) + Transfer.generate(["fulfillment"], {}, tx.id) with raises(ValueError): - Transfer.generate(['fulfillment'], [], tx.id) + Transfer.generate(["fulfillment"], [], tx.id) with raises(ValueError): - Transfer.generate(['fulfillment'], [user_pub], tx.id) + Transfer.generate(["fulfillment"], [user_pub], tx.id) with raises(ValueError): - Transfer.generate(['fulfillment'], [([user_pub],)], tx.id) + Transfer.generate(["fulfillment"], [([user_pub],)], tx.id) with raises(TypeError): - Transfer.generate(['fulfillment'], [([user_pub], 1)], - tx.id, metadata='not a dict or none') + Transfer.generate(["fulfillment"], [([user_pub], 1)], tx.id, metadata="not a dict or none") with raises(TypeError): - Transfer.generate(['fulfillment'], [([user_pub], 1)], - ['not a 
string']) + Transfer.generate(["fulfillment"], [([user_pub], 1)], ["not a string"]) def test_cant_add_empty_output(): from planetmint.transactions.common.transaction import Transaction + tx = Transaction(Transaction.CREATE, None) with raises(TypeError): @@ -968,6 +883,7 @@ def test_cant_add_empty_output(): def test_cant_add_empty_input(): from planetmint.transactions.common.transaction import Transaction + tx = Transaction(Transaction.CREATE, None) with raises(TypeError): @@ -976,27 +892,27 @@ def test_cant_add_empty_input(): def test_unfulfilled_transaction_serialized(unfulfilled_transaction): from planetmint.transactions.common.transaction import Transaction + tx_obj = Transaction.from_dict(unfulfilled_transaction) - expected = json.dumps(unfulfilled_transaction, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + expected = json.dumps(unfulfilled_transaction, sort_keys=True, separators=(",", ":"), ensure_ascii=True) assert tx_obj.serialized == expected def test_fulfilled_transaction_serialized(fulfilled_transaction): from planetmint.transactions.common.transaction import Transaction + tx_obj = Transaction.from_dict(fulfilled_transaction) - expected = json.dumps(fulfilled_transaction, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + expected = json.dumps(fulfilled_transaction, sort_keys=True, separators=(",", ":"), ensure_ascii=True) assert tx_obj.serialized == expected def test_transaction_hash(fulfilled_transaction): from planetmint.transactions.common.transaction import Transaction + tx_obj = Transaction.from_dict(fulfilled_transaction) assert tx_obj._id is None assert tx_obj.id is None - thing_to_hash = json.dumps(fulfilled_transaction, sort_keys=True, - separators=(',', ':'), ensure_ascii=True) + thing_to_hash = json.dumps(fulfilled_transaction, sort_keys=True, separators=(",", ":"), ensure_ascii=True) expected_hash_id = sha3_256(thing_to_hash.encode()).hexdigest() tx_obj._hash() assert tx_obj._id == expected_hash_id @@ -1008,7 +924,7 
@@ def test_output_from_dict_invalid_amount(user_output): from planetmint.transactions.common.exceptions import AmountError out = user_output.to_dict() - out['amount'] = 'a' + out["amount"] = "a" with raises(AmountError): Output.from_dict(out) @@ -1016,29 +932,23 @@ def test_output_from_dict_invalid_amount(user_output): def test_unspent_outputs_property(merlin, alice, bob, carol): tx = Create.generate( [merlin.public_key], - [([alice.public_key], 1), - ([bob.public_key], 2), - ([carol.public_key], 3)], - asset={'hash': '06e47bcf9084f7ecfd2a2a2ad275444a'}, + [([alice.public_key], 1), ([bob.public_key], 2), ([carol.public_key], 3)], + asset={"hash": "06e47bcf9084f7ecfd2a2a2ad275444a"}, ).sign([merlin.private_key]) unspent_outputs = list(tx.unspent_outputs) assert len(unspent_outputs) == 3 assert all(utxo.transaction_id == tx.id for utxo in unspent_outputs) assert all(utxo.asset_id == tx.id for utxo in unspent_outputs) - assert all( - utxo.output_index == i for i, utxo in enumerate(unspent_outputs)) + assert all(utxo.output_index == i for i, utxo in enumerate(unspent_outputs)) unspent_output_0 = unspent_outputs[0] assert unspent_output_0.amount == 1 - assert unspent_output_0.condition_uri == Ed25519Sha256( - public_key=b58decode(alice.public_key)).condition_uri + assert unspent_output_0.condition_uri == Ed25519Sha256(public_key=b58decode(alice.public_key)).condition_uri unspent_output_1 = unspent_outputs[1] assert unspent_output_1.amount == 2 - assert unspent_output_1.condition_uri == Ed25519Sha256( - public_key=b58decode(bob.public_key)).condition_uri + assert unspent_output_1.condition_uri == Ed25519Sha256(public_key=b58decode(bob.public_key)).condition_uri unspent_output_2 = unspent_outputs[2] assert unspent_output_2.amount == 3 - assert unspent_output_2.condition_uri == Ed25519Sha256( - public_key=b58decode(carol.public_key)).condition_uri + assert unspent_output_2.condition_uri == Ed25519Sha256(public_key=b58decode(carol.public_key)).condition_uri def 
test_spent_outputs_property(signed_transfer_tx): @@ -1046,6 +956,6 @@ def test_spent_outputs_property(signed_transfer_tx): tx = signed_transfer_tx.to_dict() assert len(spent_outputs) == 1 spent_output = spent_outputs[0] - assert spent_output['transaction_id'] == tx['inputs'][0]['fulfills']['transaction_id'] - assert spent_output['output_index'] == tx['inputs'][0]['fulfills']['output_index'] + assert spent_output["transaction_id"] == tx["inputs"][0]["fulfills"]["transaction_id"] + assert spent_output["output_index"] == tx["inputs"][0]["fulfills"]["output_index"] # assert spent_output._asdict() == tx['inputs'][0]['fulfills'] diff --git a/tests/conftest.py b/tests/conftest.py index 3fc445d..365daa6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -22,6 +22,7 @@ from planetmint.backend.connection import connect from planetmint.backend.tarantool.connection import TarantoolDBConnection import pytest + # from pymongo import MongoClient from planetmint import ValidatorElection @@ -29,8 +30,7 @@ from planetmint.transactions.common import crypto from planetmint.transactions.common.transaction_mode_types import BROADCAST_TX_COMMIT from planetmint.tendermint_utils import key_from_base64 from planetmint.backend import schema, query -from planetmint.transactions.common.crypto import ( - key_pair_from_ed25519_key, public_key_from_ed25519_key) +from planetmint.transactions.common.crypto import key_pair_from_ed25519_key, public_key_from_ed25519_key from planetmint.transactions.common.exceptions import DatabaseDoesNotExist from planetmint.lib import Block from tests.utils import gen_vote @@ -40,106 +40,102 @@ from planetmint.upsert_validator import ValidatorElection # noqa from tendermint.abci import types_pb2 as types from tendermint.crypto import keys_pb2 -TEST_DB_NAME = 'planetmint_test' +TEST_DB_NAME = "planetmint_test" USER2_SK, USER2_PK = crypto.generate_key_pair() # Test user. inputs will be created for this user. 
Cryptography Keys -USER_PRIVATE_KEY = '8eJ8q9ZQpReWyQT5aFCiwtZ5wDZC4eDnCen88p3tQ6ie' -USER_PUBLIC_KEY = 'JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE' +USER_PRIVATE_KEY = "8eJ8q9ZQpReWyQT5aFCiwtZ5wDZC4eDnCen88p3tQ6ie" +USER_PUBLIC_KEY = "JEAkEJqLbbgDRAtMm8YAjGp759Aq2qTn9eaEHUj2XePE" @pytest.fixture def init_chain_request(): - pk = codecs.decode(b'VAgFZtYw8bNR5TMZHFOBDWk9cAmEu3/c6JgRBmddbbI=', - 'base64') - val_a = types.ValidatorUpdate(power=10, - pub_key=keys_pb2.PublicKey(ed25519=pk)) + pk = codecs.decode(b"VAgFZtYw8bNR5TMZHFOBDWk9cAmEu3/c6JgRBmddbbI=", "base64") + val_a = types.ValidatorUpdate(power=10, pub_key=keys_pb2.PublicKey(ed25519=pk)) return types.RequestInitChain(validators=[val_a]) def pytest_addoption(parser): from planetmint.backend.connection import BACKENDS - backends = ', '.join(BACKENDS.keys()) + backends = ", ".join(BACKENDS.keys()) parser.addoption( - '--database-backend', - action='store', - default=os.environ.get('PLANETMINT_DATABASE_BACKEND', 'tarantool_db'), - help='Defines the backend to use (available: {})'.format(backends), + "--database-backend", + action="store", + default=os.environ.get("PLANETMINT_DATABASE_BACKEND", "tarantool_db"), + help="Defines the backend to use (available: {})".format(backends), ) def pytest_configure(config): config.addinivalue_line( - 'markers', - 'bdb(): Mark the test as needing Planetmint.' - 'Planetmint will be configured such that the database and tables are available for an ' - 'entire test session.' - 'You need to run a backend (e.g. MongoDB) ' - 'prior to running tests with this marker. You should not need to restart the backend ' - 'in between tests runs since the test infrastructure flushes the backend upon session end.' + "markers", + "bdb(): Mark the test as needing Planetmint." + "Planetmint will be configured such that the database and tables are available for an " + "entire test session." + "You need to run a backend (e.g. MongoDB) " + "prior to running tests with this marker. 
You should not need to restart the backend " + "in between tests runs since the test infrastructure flushes the backend upon session end.", ) config.addinivalue_line( - 'markers', - 'abci(): Mark the test as needing a running ABCI server in place. Use this marker' - 'for tests that require a running Tendermint instance. Note that the test infrastructure' - 'has no way to reset Tendermint data upon session end - you need to do it manually.' - 'Setup performed by this marker includes the steps performed by the bdb marker.' + "markers", + "abci(): Mark the test as needing a running ABCI server in place. Use this marker" + "for tests that require a running Tendermint instance. Note that the test infrastructure" + "has no way to reset Tendermint data upon session end - you need to do it manually." + "Setup performed by this marker includes the steps performed by the bdb marker.", ) @pytest.fixture(autouse=True) def _bdb_marker(request): - if request.keywords.get('bdb', None): - request.getfixturevalue('_bdb') + if request.keywords.get("bdb", None): + request.getfixturevalue("_bdb") @pytest.fixture(autouse=True) def _restore_config(_configure_planetmint): - config_before_test = Config().init_config('tarantool_db') # noqa + config_before_test = Config().init_config("tarantool_db") # noqa -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def _configure_planetmint(request): from planetmint import config_utils + test_db_name = TEST_DB_NAME # Put a suffix like _gw0, _gw1 etc on xdist processes - xdist_suffix = getattr(request.config, 'slaveinput', {}).get('slaveid') + xdist_suffix = getattr(request.config, "slaveinput", {}).get("slaveid") if xdist_suffix: - test_db_name = '{}_{}'.format(TEST_DB_NAME, xdist_suffix) + test_db_name = "{}_{}".format(TEST_DB_NAME, xdist_suffix) # backend = request.config.getoption('--database-backend') backend = "tarantool_db" - config = { - 'database': Config().get_db_map(backend), - 'tendermint': 
Config()._private_real_config["tendermint"] - } - config['database']['name'] = test_db_name + config = {"database": Config().get_db_map(backend), "tendermint": Config()._private_real_config["tendermint"]} + config["database"]["name"] = test_db_name config = config_utils.env_config(config) config_utils.set_config(config) -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def _setup_database(_configure_planetmint): # TODO Here is located setup database from planetmint.config import Config - print('Initializing test db') - dbname = Config().get()['database']['name'] + print("Initializing test db") + dbname = Config().get()["database"]["name"] conn = connect() _drop_db(conn, dbname) schema.init_database(conn, dbname) - print('Finishing init database') + print("Finishing init database") yield - print('Deleting `{}` database'.format(dbname)) + print("Deleting `{}` database".format(dbname)) conn = connect() _drop_db(conn, dbname) - print('Finished deleting `{}`'.format(dbname)) + print("Finished deleting `{}`".format(dbname)) @pytest.fixture @@ -148,9 +144,10 @@ def _bdb(_setup_database, _configure_planetmint): from planetmint.models import Transaction from .utils import flush_db from planetmint.config import Config + conn = connect() yield - dbname = Config().get()['database']['name'] + dbname = Config().get()["database"]["name"] flush_db(conn, dbname) to_dict.cache_clear() @@ -167,15 +164,14 @@ def ignore_local_config_file(monkeypatch): def mock_file_config(filename=None): return {} - monkeypatch.setattr('planetmint.config_utils.file_config', - mock_file_config) + monkeypatch.setattr("planetmint.config_utils.file_config", mock_file_config) @pytest.fixture def reset_logging_config(): # root_logger_level = getLogger().level - root_logger_level = 'DEBUG' - dictConfig({'version': 1, 'root': {'level': 'NOTSET'}}) + root_logger_level = "DEBUG" + dictConfig({"version": 1, "root": {"level": "NOTSET"}}) yield getLogger().setLevel(root_logger_level) @@ -203,12 
+199,14 @@ def user2_pk(): @pytest.fixture def alice(): from planetmint.transactions.common.crypto import generate_key_pair + return generate_key_pair() @pytest.fixture def bob(): from planetmint.transactions.common.crypto import generate_key_pair + return generate_key_pair() @@ -225,6 +223,7 @@ def bob_pubkey(carol): @pytest.fixture def carol(): from planetmint.transactions.common.crypto import generate_key_pair + return generate_key_pair() @@ -241,6 +240,7 @@ def carol_pubkey(carol): @pytest.fixture def merlin(): from planetmint.transactions.common.crypto import generate_key_pair + return generate_key_pair() @@ -248,17 +248,21 @@ def merlin(): # def a(): def abci_fixture(): from tendermint.abci import types_pb2 + return types_pb2 + @pytest.fixture def b(): from planetmint import Planetmint + return Planetmint() @pytest.fixture def eventqueue_fixture(): from multiprocessing import Queue + return Queue() @@ -272,10 +276,7 @@ def mock_get_validators(network_validators): def validator_set(height): validators = [] for public_key, power in network_validators.items(): - validators.append({ - 'public_key': {'type': 'ed25519-base64', 'value': public_key}, - 'voting_power': power - }) + validators.append({"public_key": {"type": "ed25519-base64", "value": public_key}, "voting_power": power}) return validators return validator_set @@ -284,8 +285,9 @@ def mock_get_validators(network_validators): @pytest.fixture def create_tx(alice, user_pk): from planetmint.transactions.types.assets.create import Create - name = f'I am created by the create_tx fixture. My random identifier is {random.random()}.' - return Create.generate([alice.public_key], [([user_pk], 1)], asset={'name': name}) + + name = f"I am created by the create_tx fixture. My random identifier is {random.random()}." 
+ return Create.generate([alice.public_key], [([user_pk], 1)], asset={"name": name}) @pytest.fixture @@ -303,6 +305,7 @@ def posted_create_tx(b, signed_create_tx): @pytest.fixture def signed_transfer_tx(signed_create_tx, user_pk, user_sk): from planetmint.transactions.types.assets.transfer import Transfer + inputs = signed_create_tx.to_inputs() tx = Transfer.generate(inputs, [([user_pk], 1)], asset_id=signed_create_tx.id) return tx.sign([user_sk]) @@ -311,32 +314,33 @@ def signed_transfer_tx(signed_create_tx, user_pk, user_sk): @pytest.fixture def double_spend_tx(signed_create_tx, carol_pubkey, user_sk): from planetmint.transactions.types.assets.transfer import Transfer + inputs = signed_create_tx.to_inputs() - tx = Transfer.generate( - inputs, [([carol_pubkey], 1)], asset_id=signed_create_tx.id) + tx = Transfer.generate(inputs, [([carol_pubkey], 1)], asset_id=signed_create_tx.id) return tx.sign([user_sk]) def _get_height(b): maybe_block = b.get_latest_block() - return 0 if maybe_block is None else maybe_block['height'] + return 0 if maybe_block is None else maybe_block["height"] @pytest.fixture def inputs(user_pk, b, alice): from planetmint.transactions.types.assets.create import Create + # create blocks with transactions for `USER` to spend for height in range(1, 4): transactions = [ Create.generate( [alice.public_key], [([user_pk], 1)], - metadata={'msg': random.random()}, + metadata={"msg": random.random()}, ).sign([alice.private_key]) for _ in range(10) ] tx_ids = [tx.id for tx in transactions] - block = Block(app_hash='hash' + str(height), height=height, transactions=tx_ids) + block = Block(app_hash="hash" + str(height), height=height, transactions=tx_ids) b.store_block(block._asdict()) b.store_bulk_transactions(transactions) @@ -369,22 +373,22 @@ def _drop_db(conn, dbname): @pytest.fixture def db_config(): - return Config().get()['database'] + return Config().get()["database"] @pytest.fixture def db_host(db_config): - return db_config['host'] + return 
db_config["host"] @pytest.fixture def db_port(db_config): - return db_config['port'] + return db_config["port"] @pytest.fixture def db_name(db_config): - return db_config['name'] + return db_config["name"] @pytest.fixture @@ -394,8 +398,7 @@ def db_conn(): @pytest.fixture def db_context(db_config, db_host, db_port, db_name, db_conn): - DBContext = namedtuple( - 'DBContext', ('config', 'host', 'port', 'name', 'conn')) + DBContext = namedtuple("DBContext", ("config", "host", "port", "name", "conn")) return DBContext( config=db_config, host=db_host, @@ -407,34 +410,33 @@ def db_context(db_config, db_host, db_port, db_name, db_conn): @pytest.fixture def tendermint_host(): - return os.getenv('PLANETMINT_TENDERMINT_HOST', 'localhost') + return os.getenv("PLANETMINT_TENDERMINT_HOST", "localhost") @pytest.fixture def tendermint_port(): - return int(os.getenv('PLANETMINT_TENDERMINT_PORT', 26657)) + return int(os.getenv("PLANETMINT_TENDERMINT_PORT", 26657)) @pytest.fixture def tendermint_ws_url(tendermint_host, tendermint_port): - return 'ws://{}:{}/websocket'.format(tendermint_host, tendermint_port) + return "ws://{}:{}/websocket".format(tendermint_host, tendermint_port) @pytest.fixture(autouse=True) def _abci_http(request): - if request.keywords.get('abci', None): - request.getfixturevalue('abci_http') + if request.keywords.get("abci", None): + request.getfixturevalue("abci_http") @pytest.fixture -def abci_http(_setup_database, _configure_planetmint, abci_server, - tendermint_host, tendermint_port): +def abci_http(_setup_database, _configure_planetmint, abci_server, tendermint_host, tendermint_port): import requests import time for i in range(300): try: - uri = 'http://{}:{}/abci_info'.format(tendermint_host, tendermint_port) + uri = "http://{}:{}/abci_info".format(tendermint_host, tendermint_port) requests.get(uri) return True @@ -445,7 +447,7 @@ def abci_http(_setup_database, _configure_planetmint, abci_server, return False -@pytest.fixture(scope='session') 
+@pytest.fixture(scope="session") def event_loop(): import asyncio @@ -454,80 +456,81 @@ def event_loop(): loop.close() -@pytest.fixture(scope='session') +@pytest.fixture(scope="session") def abci_server(): from abci.server import ABCIServer + # from tendermint.abci import types_pb2 as types_v0_34_11 from planetmint.core import App from planetmint.utils import Process app = ABCIServer(app=App()) - abci_proxy = Process(name='ABCI', target=app.run) + abci_proxy = Process(name="ABCI", target=app.run) yield abci_proxy.start() abci_proxy.terminate() @pytest.fixture def wsserver_config(): - return Config().get()['wsserver'] + return Config().get()["wsserver"] @pytest.fixture def wsserver_scheme(wsserver_config): - return wsserver_config['advertised_scheme'] + return wsserver_config["advertised_scheme"] @pytest.fixture def wsserver_host(wsserver_config): - return wsserver_config['advertised_host'] + return wsserver_config["advertised_host"] @pytest.fixture def wsserver_port(wsserver_config): - return wsserver_config['advertised_port'] + return wsserver_config["advertised_port"] @pytest.fixture def wsserver_base_url(wsserver_scheme, wsserver_host, wsserver_port): - return '{}://{}:{}'.format(wsserver_scheme, wsserver_host, wsserver_port) + return "{}://{}:{}".format(wsserver_scheme, wsserver_host, wsserver_port) @pytest.fixture def unspent_output_0(): return { - 'amount': 1, - 'asset_id': 'e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d', - 'condition_uri': 'ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072', - 'fulfillment_message': 
'{"asset":{"data":{"hash":"06e47bcf9084f7ecfd2a2a2ad275444a"}},"id":"e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d","inputs":[{"fulfillment":"pGSAIIQT0Jm6LDlcSs9coJK4Q4W-SNtsO2EtMtQJ04EUjBMJgUAXKIqeaippbF-IClhhZNNaP6EIZ_OgrVQYU4mH6b-Vc3Tg-k6p-rJOlLGUUo_w8C5QgPHNRYFOqUk2f1q0Cs4G","fulfills":null,"owners_before":["9taLkHkaBXeSF8vrhDGFTAmcZuCEPqjQrKadfYGs4gHv"]}],"metadata":null,"operation":"CREATE","outputs":[{"amount":"1","condition":{"details":{"public_key":"6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz","type":"ed25519-sha-256"},"uri":"ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072"},"public_keys":["6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz"]},{"amount":"2","condition":{"details":{"public_key":"AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT","type":"ed25519-sha-256"},"uri":"ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072"},"public_keys":["AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT"]},{"amount":"3","condition":{"details":{"public_key":"HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB","type":"ed25519-sha-256"},"uri":"ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072"},"public_keys":["HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB"]}],"version":"1.0"}', # noqa: E501 + "amount": 1, + "asset_id": "e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d", + "condition_uri": "ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072", + "fulfillment_message": 
'{"asset":{"data":{"hash":"06e47bcf9084f7ecfd2a2a2ad275444a"}},"id":"e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d","inputs":[{"fulfillment":"pGSAIIQT0Jm6LDlcSs9coJK4Q4W-SNtsO2EtMtQJ04EUjBMJgUAXKIqeaippbF-IClhhZNNaP6EIZ_OgrVQYU4mH6b-Vc3Tg-k6p-rJOlLGUUo_w8C5QgPHNRYFOqUk2f1q0Cs4G","fulfills":null,"owners_before":["9taLkHkaBXeSF8vrhDGFTAmcZuCEPqjQrKadfYGs4gHv"]}],"metadata":null,"operation":"CREATE","outputs":[{"amount":"1","condition":{"details":{"public_key":"6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz","type":"ed25519-sha-256"},"uri":"ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072"},"public_keys":["6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz"]},{"amount":"2","condition":{"details":{"public_key":"AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT","type":"ed25519-sha-256"},"uri":"ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072"},"public_keys":["AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT"]},{"amount":"3","condition":{"details":{"public_key":"HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB","type":"ed25519-sha-256"},"uri":"ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072"},"public_keys":["HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB"]}],"version":"1.0"}', # noqa: E501 # noqa - 'output_index': 0, - 'transaction_id': 'e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d' + "output_index": 0, + "transaction_id": "e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d", } @pytest.fixture def unspent_output_1(): return { - 'amount': 2, - 'asset_id': 'e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d', - 'condition_uri': 'ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072', - 'fulfillment_message': 
'{"asset":{"data":{"hash":"06e47bcf9084f7ecfd2a2a2ad275444a"}},"id":"e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d","inputs":[{"fulfillment":"pGSAIIQT0Jm6LDlcSs9coJK4Q4W-SNtsO2EtMtQJ04EUjBMJgUAXKIqeaippbF-IClhhZNNaP6EIZ_OgrVQYU4mH6b-Vc3Tg-k6p-rJOlLGUUo_w8C5QgPHNRYFOqUk2f1q0Cs4G","fulfills":null,"owners_before":["9taLkHkaBXeSF8vrhDGFTAmcZuCEPqjQrKadfYGs4gHv"]}],"metadata":null,"operation":"CREATE","outputs":[{"amount":"1","condition":{"details":{"public_key":"6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz","type":"ed25519-sha-256"},"uri":"ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072"},"public_keys":["6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz"]},{"amount":"2","condition":{"details":{"public_key":"AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT","type":"ed25519-sha-256"},"uri":"ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072"},"public_keys":["AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT"]},{"amount":"3","condition":{"details":{"public_key":"HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB","type":"ed25519-sha-256"},"uri":"ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072"},"public_keys":["HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB"]}],"version":"1.0"}', # noqa: E501 + "amount": 2, + "asset_id": "e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d", + "condition_uri": "ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072", + "fulfillment_message": 
'{"asset":{"data":{"hash":"06e47bcf9084f7ecfd2a2a2ad275444a"}},"id":"e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d","inputs":[{"fulfillment":"pGSAIIQT0Jm6LDlcSs9coJK4Q4W-SNtsO2EtMtQJ04EUjBMJgUAXKIqeaippbF-IClhhZNNaP6EIZ_OgrVQYU4mH6b-Vc3Tg-k6p-rJOlLGUUo_w8C5QgPHNRYFOqUk2f1q0Cs4G","fulfills":null,"owners_before":["9taLkHkaBXeSF8vrhDGFTAmcZuCEPqjQrKadfYGs4gHv"]}],"metadata":null,"operation":"CREATE","outputs":[{"amount":"1","condition":{"details":{"public_key":"6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz","type":"ed25519-sha-256"},"uri":"ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072"},"public_keys":["6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz"]},{"amount":"2","condition":{"details":{"public_key":"AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT","type":"ed25519-sha-256"},"uri":"ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072"},"public_keys":["AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT"]},{"amount":"3","condition":{"details":{"public_key":"HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB","type":"ed25519-sha-256"},"uri":"ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072"},"public_keys":["HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB"]}],"version":"1.0"}', # noqa: E501 # noqa - 'output_index': 1, - 'transaction_id': 'e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d', + "output_index": 1, + "transaction_id": "e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d", } @pytest.fixture def unspent_output_2(): return { - 'amount': 3, - 'asset_id': 'e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d', - 'condition_uri': 'ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072', - 'fulfillment_message': 
'{"asset":{"data":{"hash":"06e47bcf9084f7ecfd2a2a2ad275444a"}},"id":"e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d","inputs":[{"fulfillment":"pGSAIIQT0Jm6LDlcSs9coJK4Q4W-SNtsO2EtMtQJ04EUjBMJgUAXKIqeaippbF-IClhhZNNaP6EIZ_OgrVQYU4mH6b-Vc3Tg-k6p-rJOlLGUUo_w8C5QgPHNRYFOqUk2f1q0Cs4G","fulfills":null,"owners_before":["9taLkHkaBXeSF8vrhDGFTAmcZuCEPqjQrKadfYGs4gHv"]}],"metadata":null,"operation":"CREATE","outputs":[{"amount":"1","condition":{"details":{"public_key":"6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz","type":"ed25519-sha-256"},"uri":"ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072"},"public_keys":["6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz"]},{"amount":"2","condition":{"details":{"public_key":"AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT","type":"ed25519-sha-256"},"uri":"ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072"},"public_keys":["AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT"]},{"amount":"3","condition":{"details":{"public_key":"HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB","type":"ed25519-sha-256"},"uri":"ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072"},"public_keys":["HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB"]}],"version":"1.0"}', # noqa: E501 + "amount": 3, + "asset_id": "e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d", + "condition_uri": "ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072", + "fulfillment_message": 
'{"asset":{"data":{"hash":"06e47bcf9084f7ecfd2a2a2ad275444a"}},"id":"e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d","inputs":[{"fulfillment":"pGSAIIQT0Jm6LDlcSs9coJK4Q4W-SNtsO2EtMtQJ04EUjBMJgUAXKIqeaippbF-IClhhZNNaP6EIZ_OgrVQYU4mH6b-Vc3Tg-k6p-rJOlLGUUo_w8C5QgPHNRYFOqUk2f1q0Cs4G","fulfills":null,"owners_before":["9taLkHkaBXeSF8vrhDGFTAmcZuCEPqjQrKadfYGs4gHv"]}],"metadata":null,"operation":"CREATE","outputs":[{"amount":"1","condition":{"details":{"public_key":"6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz","type":"ed25519-sha-256"},"uri":"ni:///sha-256;RmovleG60-7K0CX60jjfUunV3lBpUOkiQOAnBzghm0w?fpt=ed25519-sha-256&cost=131072"},"public_keys":["6FDGsHrR9RZqNaEm7kBvqtxRkrvuWogBW2Uy7BkWc5Tz"]},{"amount":"2","condition":{"details":{"public_key":"AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT","type":"ed25519-sha-256"},"uri":"ni:///sha-256;-HlYmgwwl-vXwE52IaADhvYxaL1TbjqfJ-LGn5a1PFc?fpt=ed25519-sha-256&cost=131072"},"public_keys":["AH9D7xgmhyLmVE944zvHvuvYWuj5DfbMBJhnDM4A5FdT"]},{"amount":"3","condition":{"details":{"public_key":"HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB","type":"ed25519-sha-256"},"uri":"ni:///sha-256;xfn8pvQkTCPtvR0trpHy2pqkkNTmMBCjWMMOHtk3WO4?fpt=ed25519-sha-256&cost=131072"},"public_keys":["HpmSVrojHvfCXQbmoAs4v6Aq1oZiZsZDnjr68KiVtPbB"]}],"version":"1.0"}', # noqa: E501 # noqa - 'output_index': 2, - 'transaction_id': 'e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d', + "output_index": 2, + "transaction_id": "e897c7a0426461a02b4fca8ed73bc0debed7570cf3b40fb4f49c963434225a4d", } @@ -547,6 +550,7 @@ def tarantool_client(db_context): # TODO Here add TarantoolConnectionClass # # + @pytest.fixture def utxo_collection(tarantool_client, _setup_database): return tarantool_client.get_space("utxos") @@ -555,15 +559,16 @@ def utxo_collection(tarantool_client, _setup_database): @pytest.fixture def dummy_unspent_outputs(): return [ - {'transaction_id': 'a', 'output_index': 0}, - {'transaction_id': 'a', 'output_index': 1}, - 
{'transaction_id': 'b', 'output_index': 0}, + {"transaction_id": "a", "output_index": 0}, + {"transaction_id": "a", "output_index": 1}, + {"transaction_id": "b", "output_index": 0}, ] @pytest.fixture def utxoset(dummy_unspent_outputs, utxo_collection): from json import dumps + num_rows_before_operation = utxo_collection.select().rowcount for utxo in dummy_unspent_outputs: res = utxo_collection.insert((utxo["transaction_id"], utxo["output_index"], dumps(utxo))) @@ -612,35 +617,27 @@ def ed25519_node_keys(node_keys): @pytest.fixture def node_keys(): - return {'zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=': - 'cM5oW4J0zmUSZ/+QRoRlincvgCwR0pEjFoY//ZnnjD3Mv8Nqy8q6VdnOFI0XDHhwtFcqRIz0Y8rtjSdngUTKUw==', - 'GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=': - 'mdz7IjP6mGXs6+ebgGJkn7kTXByUeeGhV+9aVthLuEAYiKNTsYFxXKJVRwHQbBZlvELF2TbFXqnF28vbiz8Cow==', - 'JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=': - '83VINXdj2ynOHuhvSZz5tGuOE5oYzIi0mEximkX1KYMlt/Csu8JUjA4+by2Pz3fqSLshhuYYeM+IpvqcBl6BEA==', - 'PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=': - 'uz8bYgoL4rHErWT1gjjrnA+W7bgD/uDQWSRKDmC8otc95wnnxJo1GxYlmh0OaqOkJaobpu13BcUcvITjRFiVgw=='} + return { + "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=": "cM5oW4J0zmUSZ/+QRoRlincvgCwR0pEjFoY//ZnnjD3Mv8Nqy8q6VdnOFI0XDHhwtFcqRIz0Y8rtjSdngUTKUw==", + "GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=": "mdz7IjP6mGXs6+ebgGJkn7kTXByUeeGhV+9aVthLuEAYiKNTsYFxXKJVRwHQbBZlvELF2TbFXqnF28vbiz8Cow==", + "JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=": "83VINXdj2ynOHuhvSZz5tGuOE5oYzIi0mEximkX1KYMlt/Csu8JUjA4+by2Pz3fqSLshhuYYeM+IpvqcBl6BEA==", + "PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=": "uz8bYgoL4rHErWT1gjjrnA+W7bgD/uDQWSRKDmC8otc95wnnxJo1GxYlmh0OaqOkJaobpu13BcUcvITjRFiVgw==", + } @pytest.fixture def priv_validator_path(node_keys): (public_key, private_key) = list(node_keys.items())[0] priv_validator = { - 'address': '84F787D95E196DC5DE5F972666CFECCA36801426', - 'pub_key': { - 'type': 'AC26791624DE60', - 'value': public_key - }, - 'last_height': 0, - 
'last_round': 0, - 'last_step': 0, - 'priv_key': { - 'type': '954568A3288910', - 'value': private_key - } + "address": "84F787D95E196DC5DE5F972666CFECCA36801426", + "pub_key": {"type": "AC26791624DE60", "value": public_key}, + "last_height": 0, + "last_round": 0, + "last_step": 0, + "priv_key": {"type": "954568A3288910", "value": private_key}, } fd, path = tempfile.mkstemp() - socket = os.fdopen(fd, 'w') + socket = os.fdopen(fd, "w") json.dump(priv_validator, socket) socket.close() return path @@ -650,21 +647,15 @@ def priv_validator_path(node_keys): def bad_validator_path(node_keys): (public_key, private_key) = list(node_keys.items())[1] priv_validator = { - 'address': '84F787D95E196DC5DE5F972666CFECCA36801426', - 'pub_key': { - 'type': 'AC26791624DE60', - 'value': public_key - }, - 'last_height': 0, - 'last_round': 0, - 'last_step': 0, - 'priv_key': { - 'type': '954568A3288910', - 'value': private_key - } + "address": "84F787D95E196DC5DE5F972666CFECCA36801426", + "pub_key": {"type": "AC26791624DE60", "value": public_key}, + "last_height": 0, + "last_round": 0, + "last_step": 0, + "priv_key": {"type": "954568A3288910", "value": private_key}, } fd, path = tempfile.mkstemp() - socket = os.fdopen(fd, 'w') + socket = os.fdopen(fd, "w") json.dump(priv_validator, socket) socket.close() return path @@ -684,14 +675,15 @@ def validators(b, node_keys): (public_key, private_key) = list(node_keys.items())[0] - validator_set = [{'address': 'F5426F0980E36E03044F74DD414248D29ABCBDB2', - 'public_key': {'value': public_key, - 'type': 'ed25519-base64'}, - 'voting_power': 10}] + validator_set = [ + { + "address": "F5426F0980E36E03044F74DD414248D29ABCBDB2", + "public_key": {"value": public_key, "type": "ed25519-base64"}, + "voting_power": 10, + } + ] - validator_update = {'validators': validator_set, - 'height': height + 1, - 'election_id': f'setup_at_{timestamp()}'} + validator_update = {"validators": validator_set, "height": height + 1, "election_id": f"setup_at_{timestamp()}"} 
query.store_validator_set(b.connection, validator_update) @@ -699,16 +691,18 @@ def validators(b, node_keys): height = get_block_height(b) - validator_update = {'validators': original_validators, - 'height': height, - 'election_id': f'teardown_at_{timestamp()}'} + validator_update = { + "validators": original_validators, + "height": height, + "election_id": f"teardown_at_{timestamp()}", + } query.store_validator_set(b.connection, validator_update) def get_block_height(b): if b.get_latest_block(): - height = b.get_latest_block()['height'] + height = b.get_latest_block()["height"] else: height = 0 @@ -717,43 +711,33 @@ def get_block_height(b): @pytest.fixture def new_validator(): - public_key = '1718D2DBFF00158A0852A17A01C78F4DCF3BA8E4FB7B8586807FAC182A535034' + public_key = "1718D2DBFF00158A0852A17A01C78F4DCF3BA8E4FB7B8586807FAC182A535034" power = 1 - node_id = 'fake_node_id' + node_id = "fake_node_id" - return {'public_key': {'value': public_key, - 'type': 'ed25519-base16'}, - 'power': power, - 'node_id': node_id} + return {"public_key": {"value": public_key, "type": "ed25519-base16"}, "power": power, "node_id": node_id} @pytest.fixture def valid_upsert_validator_election(b_mock, node_key, new_validator): voters = ValidatorElection.recipients(b_mock) - return ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + return ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key]) @pytest.fixture def valid_upsert_validator_election_2(b_mock, node_key, new_validator): voters = ValidatorElection.recipients(b_mock) - return ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + return ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key]) @pytest.fixture def ongoing_validator_election(b, valid_upsert_validator_election, ed25519_node_keys): validators = 
b.get_validators(height=1) - genesis_validators = {'validators': validators, - 'height': 0} + genesis_validators = {"validators": validators, "height": 0} query.store_validator_set(b.connection, genesis_validators) b.store_bulk_transactions([valid_upsert_validator_election]) - query.store_election(b.connection, valid_upsert_validator_election.id, 1, - is_concluded=False) - block_1 = Block(app_hash='hash_1', height=1, - transactions=[valid_upsert_validator_election.id]) + query.store_election(b.connection, valid_upsert_validator_election.id, 1, is_concluded=False) + block_1 = Block(app_hash="hash_1", height=1, transactions=[valid_upsert_validator_election.id]) b.store_block(block_1._asdict()) return valid_upsert_validator_election @@ -761,13 +745,11 @@ def ongoing_validator_election(b, valid_upsert_validator_election, ed25519_node_ @pytest.fixture def ongoing_validator_election_2(b, valid_upsert_validator_election_2, ed25519_node_keys): validators = b.get_validators(height=1) - genesis_validators = {'validators': validators, - 'height': 0, - 'election_id': None} + genesis_validators = {"validators": validators, "height": 0, "election_id": None} query.store_validator_set(b.connection, genesis_validators) b.store_bulk_transactions([valid_upsert_validator_election_2]) - block_1 = Block(app_hash='hash_2', height=1, transactions=[valid_upsert_validator_election_2.id]) + block_1 = Block(app_hash="hash_2", height=1, transactions=[valid_upsert_validator_election_2.id]) b.store_block(block_1._asdict()) return valid_upsert_validator_election_2 diff --git a/tests/db/test_planetmint_api.py b/tests/db/test_planetmint_api.py index 0fac416..ca415d3 100644 --- a/tests/db/test_planetmint_api.py +++ b/tests/db/test_planetmint_api.py @@ -14,7 +14,6 @@ pytestmark = pytest.mark.bdb class TestBigchainApi(object): - def test_get_spent_with_double_spend_detected(self, b, alice): from planetmint.transactions.common.exceptions import DoubleSpend from planetmint.exceptions import 
CriticalDoubleSpend @@ -24,11 +23,9 @@ class TestBigchainApi(object): b.store_bulk_transactions([tx]) - transfer_tx = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], - asset_id=tx.id) + transfer_tx = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], asset_id=tx.id) transfer_tx = transfer_tx.sign([alice.private_key]) - transfer_tx2 = Transfer.generate(tx.to_inputs(), [([alice.public_key], 2)], - asset_id=tx.id) + transfer_tx2 = Transfer.generate(tx.to_inputs(), [([alice.public_key], 2)], asset_id=tx.id) transfer_tx2 = transfer_tx2.sign([alice.private_key]) with pytest.raises(DoubleSpend): @@ -68,46 +65,41 @@ class TestBigchainApi(object): return # define the assets - asset1 = {'msg': 'Planetmint 1'} - asset2 = {'msg': 'Planetmint 2'} - asset3 = {'msg': 'Planetmint 3'} + asset1 = {"msg": "Planetmint 1"} + asset2 = {"msg": "Planetmint 2"} + asset3 = {"msg": "Planetmint 3"} # create the transactions - tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset1).sign([alice.private_key]) - tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset2).sign([alice.private_key]) - tx3 = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset3).sign([alice.private_key]) + tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset1).sign([alice.private_key]) + tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset2).sign([alice.private_key]) + tx3 = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset3).sign([alice.private_key]) # write the transactions to the DB b.store_bulk_transactions([tx1, tx2, tx3]) # get the assets through text search - assets = list(b.text_search('planetmint')) + assets = list(b.text_search("planetmint")) assert len(assets) == 3 - @pytest.mark.usefixtures('inputs') + @pytest.mark.usefixtures("inputs") def test_non_create_input_not_found(self, b, user_pk): from cryptoconditions import 
Ed25519Sha256 from planetmint.transactions.common.exceptions import InputDoesNotExist from planetmint.transactions.common.transaction import Input, TransactionLink # Create an input for a non existing transaction - input = Input(Ed25519Sha256(public_key=b58decode(user_pk)), - [user_pk], - TransactionLink('somethingsomething', 0)) - tx = Transfer.generate([input], [([user_pk], 1)], - asset_id='mock_asset_link') + input = Input( + Ed25519Sha256(public_key=b58decode(user_pk)), [user_pk], TransactionLink("somethingsomething", 0) + ) + tx = Transfer.generate([input], [([user_pk], 1)], asset_id="mock_asset_link") with pytest.raises(InputDoesNotExist): tx.validate(b) def test_write_transaction(self, b, user_sk, user_pk, alice, create_tx): - asset1 = {'msg': 'Planetmint 1'} + asset1 = {"msg": "Planetmint 1"} - tx = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset1).sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset1).sign([alice.private_key]) b.store_bulk_transactions([tx]) tx_from_db = b.get_transaction(tx.id) @@ -115,23 +107,22 @@ class TestBigchainApi(object): before = tx.to_dict() after = tx_from_db.to_dict() - assert before['asset']['data'] == after['asset']['data'] - before.pop('asset', None) - after.pop('asset', None) + assert before["asset"]["data"] == after["asset"]["data"] + before.pop("asset", None) + after.pop("asset", None) assert before == after class TestTransactionValidation(object): - def test_non_create_input_not_found(self, b, signed_transfer_tx): from planetmint.transactions.common.exceptions import InputDoesNotExist from planetmint.transactions.common.transaction import TransactionLink - signed_transfer_tx.inputs[0].fulfills = TransactionLink('c', 0) + signed_transfer_tx.inputs[0].fulfills = TransactionLink("c", 0) with pytest.raises(InputDoesNotExist): b.validate_transaction(signed_transfer_tx) - @pytest.mark.usefixtures('inputs') + @pytest.mark.usefixtures("inputs") 
def test_non_create_valid_input_wrong_owner(self, b, user_pk): from planetmint.transactions.common.crypto import generate_key_pair from planetmint.transactions.common.exceptions import InvalidSignature @@ -140,16 +131,15 @@ class TestTransactionValidation(object): input_transaction = b.get_transaction(input_tx.txid) sk, pk = generate_key_pair() tx = Create.generate([pk], [([user_pk], 1)]) - tx.operation = 'TRANSFER' - tx.asset = {'id': input_transaction.id} + tx.operation = "TRANSFER" + tx.asset = {"id": input_transaction.id} tx.inputs[0].fulfills = input_tx with pytest.raises(InvalidSignature): b.validate_transaction(tx) - @pytest.mark.usefixtures('inputs') - def test_non_create_double_spend(self, b, signed_create_tx, - signed_transfer_tx, double_spend_tx): + @pytest.mark.usefixtures("inputs") + def test_non_create_double_spend(self, b, signed_create_tx, signed_transfer_tx, double_spend_tx): from planetmint.transactions.common.exceptions import DoubleSpend b.store_bulk_transactions([signed_create_tx, signed_transfer_tx]) @@ -159,9 +149,7 @@ class TestTransactionValidation(object): class TestMultipleInputs(object): - - def test_transfer_single_owner_single_input(self, b, inputs, user_pk, - user_sk): + def test_transfer_single_owner_single_input(self, b, inputs, user_pk, user_sk): from planetmint.transactions.common import crypto user2_sk, user2_pk = crypto.generate_key_pair() @@ -169,8 +157,7 @@ class TestMultipleInputs(object): tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop() input_tx = b.get_transaction(tx_link.txid) inputs = input_tx.to_inputs() - tx = Transfer.generate(inputs, [([user2_pk], 1)], - asset_id=input_tx.id) + tx = Transfer.generate(inputs, [([user2_pk], 1)], asset_id=input_tx.id) tx = tx.sign([user_sk]) # validate transaction @@ -178,10 +165,7 @@ class TestMultipleInputs(object): assert len(tx.inputs) == 1 assert len(tx.outputs) == 1 - def test_single_owner_before_multiple_owners_after_single_input(self, b, - user_sk, - user_pk, - 
inputs): + def test_single_owner_before_multiple_owners_after_single_input(self, b, user_sk, user_pk, inputs): from planetmint.transactions.common import crypto user2_sk, user2_pk = crypto.generate_key_pair() @@ -189,20 +173,15 @@ class TestMultipleInputs(object): tx_link = b.fastquery.get_outputs_by_public_key(user_pk).pop() input_tx = b.get_transaction(tx_link.txid) - tx = Transfer.generate(input_tx.to_inputs(), - [([user2_pk, user3_pk], 1)], - asset_id=input_tx.id) + tx = Transfer.generate(input_tx.to_inputs(), [([user2_pk, user3_pk], 1)], asset_id=input_tx.id) tx = tx.sign([user_sk]) tx.validate(b) assert len(tx.inputs) == 1 assert len(tx.outputs) == 1 - @pytest.mark.usefixtures('inputs') - def test_multiple_owners_before_single_owner_after_single_input(self, b, - user_sk, - user_pk, - alice): + @pytest.mark.usefixtures("inputs") + def test_multiple_owners_before_single_owner_after_single_input(self, b, user_sk, user_pk, alice): from planetmint.transactions.common import crypto user2_sk, user2_pk = crypto.generate_key_pair() @@ -216,8 +195,7 @@ class TestMultipleInputs(object): input_tx = b.get_transaction(owned_input.txid) inputs = input_tx.to_inputs() - transfer_tx = Transfer.generate(inputs, [([user3_pk], 1)], - asset_id=input_tx.id) + transfer_tx = Transfer.generate(inputs, [([user3_pk], 1)], asset_id=input_tx.id) transfer_tx = transfer_tx.sign([user_sk, user2_sk]) # validate transaction @@ -225,11 +203,8 @@ class TestMultipleInputs(object): assert len(transfer_tx.inputs) == 1 assert len(transfer_tx.outputs) == 1 - @pytest.mark.usefixtures('inputs') - def test_multiple_owners_before_multiple_owners_after_single_input(self, b, - user_sk, - user_pk, - alice): + @pytest.mark.usefixtures("inputs") + def test_multiple_owners_before_multiple_owners_after_single_input(self, b, user_sk, user_pk, alice): from planetmint.transactions.common import crypto user2_sk, user2_pk = crypto.generate_key_pair() @@ -244,9 +219,7 @@ class TestMultipleInputs(object): tx_link = 
b.fastquery.get_outputs_by_public_key(user_pk).pop() tx_input = b.get_transaction(tx_link.txid) - tx = Transfer.generate(tx_input.to_inputs(), - [([user3_pk, user4_pk], 1)], - asset_id=tx_input.id) + tx = Transfer.generate(tx_input.to_inputs(), [([user3_pk, user4_pk], 1)], asset_id=tx_input.id) tx = tx.sign([user_sk, user2_sk]) tx.validate(b) @@ -268,8 +241,7 @@ class TestMultipleInputs(object): assert owned_inputs_user1 == [TransactionLink(tx.id, 0)] assert owned_inputs_user2 == [] - tx_transfer = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], - asset_id=tx.id) + tx_transfer = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_id=tx.id) tx_transfer = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_transfer]) @@ -279,8 +251,7 @@ class TestMultipleInputs(object): assert owned_inputs_user1 == [TransactionLink(tx.id, 0)] assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0)] - def test_get_owned_ids_single_tx_multiple_outputs(self, b, user_sk, - user_pk, alice): + def test_get_owned_ids_single_tx_multiple_outputs(self, b, user_sk, user_pk, alice): from planetmint.transactions.common import crypto from planetmint.transactions.common.transaction import TransactionLink @@ -295,23 +266,21 @@ class TestMultipleInputs(object): owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk) owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk) - expected_owned_inputs_user1 = [TransactionLink(tx_create.id, 0), - TransactionLink(tx_create.id, 1)] + expected_owned_inputs_user1 = [TransactionLink(tx_create.id, 0), TransactionLink(tx_create.id, 1)] assert owned_inputs_user1 == expected_owned_inputs_user1 assert owned_inputs_user2 == [] # transfer divisible asset divided in two outputs - tx_transfer = Transfer.generate(tx_create.to_inputs(), - [([user2_pk], 1), ([user2_pk], 1)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs(), [([user2_pk], 1), ([user2_pk], 1)], asset_id=tx_create.id + ) 
tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_transfer_signed]) owned_inputs_user1 = b.fastquery.get_outputs_by_public_key(user_pk) owned_inputs_user2 = b.fastquery.get_outputs_by_public_key(user2_pk) assert owned_inputs_user1 == expected_owned_inputs_user1 - assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0), - TransactionLink(tx_transfer.id, 1)] + assert owned_inputs_user2 == [TransactionLink(tx_transfer.id, 0), TransactionLink(tx_transfer.id, 1)] def test_get_owned_ids_multiple_owners(self, b, user_sk, user_pk, alice): from planetmint.transactions.common import crypto @@ -332,8 +301,7 @@ class TestMultipleInputs(object): assert owned_inputs_user1 == owned_inputs_user2 assert owned_inputs_user1 == expected_owned_inputs_user1 - tx = Transfer.generate(tx.to_inputs(), [([user3_pk], 1)], - asset_id=tx.id) + tx = Transfer.generate(tx.to_inputs(), [([user3_pk], 1)], asset_id=tx.id) tx = tx.sign([user_sk, user2_sk]) b.store_bulk_transactions([tx]) @@ -361,8 +329,7 @@ class TestMultipleInputs(object): assert spent_inputs_user1 is None # create a transaction and send it - tx = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], - asset_id=tx.id) + tx = Transfer.generate(tx.to_inputs(), [([user2_pk], 1)], asset_id=tx.id) tx = tx.sign([user_sk]) b.store_bulk_transactions([tx]) @@ -376,10 +343,7 @@ class TestMultipleInputs(object): user2_sk, user2_pk = crypto.generate_key_pair() # create a divisible asset with 3 outputs - tx_create = Create.generate([alice.public_key], - [([user_pk], 1), - ([user_pk], 1), - ([user_pk], 1)]) + tx_create = Create.generate([alice.public_key], [([user_pk], 1), ([user_pk], 1), ([user_pk], 1)]) tx_create_signed = tx_create.sign([alice.private_key]) b.store_bulk_transactions([tx_create_signed]) @@ -390,9 +354,9 @@ class TestMultipleInputs(object): assert b.get_spent(input_tx.txid, input_tx.output) is None # transfer the first 2 inputs - tx_transfer = Transfer.generate(tx_create.to_inputs()[:2], - 
[([user2_pk], 1), ([user2_pk], 1)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate( + tx_create.to_inputs()[:2], [([user2_pk], 1), ([user2_pk], 1)], asset_id=tx_create.id + ) tx_transfer_signed = tx_transfer.sign([user_sk]) b.store_bulk_transactions([tx_transfer_signed]) @@ -413,9 +377,8 @@ class TestMultipleInputs(object): transactions = [] for i in range(3): - payload = {'somedata': i} - tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)], - payload) + payload = {"somedata": i} + tx = Create.generate([alice.public_key], [([user_pk, user2_pk], 1)], payload) tx = tx.sign([alice.private_key]) transactions.append(tx) @@ -427,9 +390,7 @@ class TestMultipleInputs(object): assert b.get_spent(input_tx.txid, input_tx.output) is None # create a transaction - tx = Transfer.generate(transactions[0].to_inputs(), - [([user3_pk], 1)], - asset_id=transactions[0].id) + tx = Transfer.generate(transactions[0].to_inputs(), [([user3_pk], 1)], asset_id=transactions[0].id) tx = tx.sign([user_sk, user2_sk]) b.store_bulk_transactions([tx]) @@ -444,45 +405,43 @@ def test_get_outputs_filtered_only_unspent(): from planetmint.transactions.common.transaction import TransactionLink from planetmint.lib import Planetmint - go = 'planetmint.fastquery.FastQuery.get_outputs_by_public_key' + go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key" with patch(go) as get_outputs: - get_outputs.return_value = [TransactionLink('a', 1), - TransactionLink('b', 2)] - fs = 'planetmint.fastquery.FastQuery.filter_spent_outputs' + get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)] + fs = "planetmint.fastquery.FastQuery.filter_spent_outputs" with patch(fs) as filter_spent: - filter_spent.return_value = [TransactionLink('b', 2)] - out = Planetmint().get_outputs_filtered('abc', spent=False) - get_outputs.assert_called_once_with('abc') - assert out == [TransactionLink('b', 2)] + filter_spent.return_value = [TransactionLink("b", 2)] + out = 
Planetmint().get_outputs_filtered("abc", spent=False) + get_outputs.assert_called_once_with("abc") + assert out == [TransactionLink("b", 2)] def test_get_outputs_filtered_only_spent(): from planetmint.transactions.common.transaction import TransactionLink from planetmint.lib import Planetmint - go = 'planetmint.fastquery.FastQuery.get_outputs_by_public_key' + + go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key" with patch(go) as get_outputs: - get_outputs.return_value = [TransactionLink('a', 1), - TransactionLink('b', 2)] - fs = 'planetmint.fastquery.FastQuery.filter_unspent_outputs' + get_outputs.return_value = [TransactionLink("a", 1), TransactionLink("b", 2)] + fs = "planetmint.fastquery.FastQuery.filter_unspent_outputs" with patch(fs) as filter_spent: - filter_spent.return_value = [TransactionLink('b', 2)] - out = Planetmint().get_outputs_filtered('abc', spent=True) - get_outputs.assert_called_once_with('abc') - assert out == [TransactionLink('b', 2)] + filter_spent.return_value = [TransactionLink("b", 2)] + out = Planetmint().get_outputs_filtered("abc", spent=True) + get_outputs.assert_called_once_with("abc") + assert out == [TransactionLink("b", 2)] -@patch('planetmint.fastquery.FastQuery.filter_unspent_outputs') -@patch('planetmint.fastquery.FastQuery.filter_spent_outputs') +@patch("planetmint.fastquery.FastQuery.filter_unspent_outputs") +@patch("planetmint.fastquery.FastQuery.filter_spent_outputs") def test_get_outputs_filtered(filter_spent, filter_unspent): from planetmint.transactions.common.transaction import TransactionLink from planetmint.lib import Planetmint - go = 'planetmint.fastquery.FastQuery.get_outputs_by_public_key' + go = "planetmint.fastquery.FastQuery.get_outputs_by_public_key" with patch(go) as get_outputs: - get_outputs.return_value = [TransactionLink('a', 1), - TransactionLink('b', 2)] - out = Planetmint().get_outputs_filtered('abc') - get_outputs.assert_called_once_with('abc') + get_outputs.return_value = 
[TransactionLink("a", 1), TransactionLink("b", 2)] + out = Planetmint().get_outputs_filtered("abc") + get_outputs.assert_called_once_with("abc") filter_spent.assert_not_called() filter_unspent.assert_not_called() assert out == get_outputs.return_value @@ -502,8 +461,7 @@ def test_cant_spend_same_input_twice_in_tx(b, alice): # Create a transfer transaction with duplicated fulfillments dup_inputs = tx_create.to_inputs() + tx_create.to_inputs() - tx_transfer = Transfer.generate(dup_inputs, [([alice.public_key], 200)], - asset_id=tx_create.id) + tx_transfer = Transfer.generate(dup_inputs, [([alice.public_key], 200)], asset_id=tx_create.id) tx_transfer_signed = tx_transfer.sign([alice.private_key]) with pytest.raises(DoubleSpend): tx_transfer_signed.validate(b) @@ -514,11 +472,10 @@ def test_transaction_unicode(b, alice): from planetmint.transactions.common.utils import serialize # http://www.fileformat.info/info/unicode/char/1f37a/index.htm - beer_python = {'beer': '\N{BEER MUG}'} + beer_python = {"beer": "\N{BEER MUG}"} beer_json = '{"beer":"\N{BEER MUG}"}' - tx = (Create.generate([alice.public_key], [([alice.public_key], 100)], beer_python) - ).sign([alice.private_key]) + tx = (Create.generate([alice.public_key], [([alice.public_key], 100)], beer_python)).sign([alice.private_key]) tx_1 = copy.deepcopy(tx) b.store_bulk_transactions([tx]) diff --git a/tests/elections/test_election.py b/tests/elections/test_election.py index e8197be..b76a20e 100644 --- a/tests/elections/test_election.py +++ b/tests/elections/test_election.py @@ -7,256 +7,211 @@ from planetmint.transactions.types.elections.election import Election from planetmint.transactions.types.elections.chain_migration_election import ChainMigrationElection from planetmint.upsert_validator.validator_election import ValidatorElection + @pytest.mark.bdb def test_process_block_concludes_all_elections(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + 
b.store_validator_set(1, [v["storage"] for v in validators]) new_validator = generate_validators([1])[0] - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) txs = [election] total_votes = votes - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - new_validator['election'], - voter_keys) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, new_validator["election"], voter_keys + ) txs += [election] total_votes += votes - b.store_abci_chain(1, 'chain-X') + b.store_abci_chain(1, "chain-X") Election.process_block(b, 1, txs) - b.store_block(Block(height=1, - transactions=[tx.id for tx in txs], - app_hash='')._asdict()) + b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict()) b.store_bulk_transactions(txs) Election.process_block(b, 2, total_votes) validators = b.get_validators() assert len(validators) == 5 - assert new_validator['storage'] in validators + assert new_validator["storage"] in validators chain = b.get_latest_abci_chain() assert chain assert chain == { - 'height': 2, - 'is_synced': False, - 'chain_id': 'chain-X-migrated-at-height-1', + "height": 2, + "is_synced": False, + "chain_id": "chain-X-migrated-at-height-1", } for tx in txs: - assert b.get_election(tx.id)['is_concluded'] + assert b.get_election(tx.id)["is_concluded"] @pytest.mark.bdb def test_process_block_approves_only_one_validator_update(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + 
b.store_validator_set(1, [v["storage"] for v in validators]) new_validator = generate_validators([1])[0] - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - new_validator['election'], - voter_keys) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, new_validator["election"], voter_keys + ) txs = [election] total_votes = votes another_validator = generate_validators([1])[0] - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - another_validator['election'], - voter_keys) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, another_validator["election"], voter_keys + ) txs += [election] total_votes += votes Election.process_block(b, 1, txs) - b.store_block(Block(height=1, - transactions=[tx.id for tx in txs], - app_hash='')._asdict()) + b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict()) b.store_bulk_transactions(txs) Election.process_block(b, 2, total_votes) validators = b.get_validators() assert len(validators) == 5 - assert new_validator['storage'] in validators - assert another_validator['storage'] not in validators + assert new_validator["storage"] in validators + assert another_validator["storage"] not in validators - assert b.get_election(txs[0].id)['is_concluded'] - assert not b.get_election(txs[1].id)['is_concluded'] + assert b.get_election(txs[0].id)["is_concluded"] + assert not b.get_election(txs[1].id)["is_concluded"] @pytest.mark.bdb def test_process_block_approves_after_pending_validator_update(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in 
validators]) + b.store_validator_set(1, [v["storage"] for v in validators]) new_validator = generate_validators([1])[0] - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - new_validator['election'], - voter_keys) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, new_validator["election"], voter_keys + ) txs = [election] total_votes = votes another_validator = generate_validators([1])[0] - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - another_validator['election'], - voter_keys) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, another_validator["election"], voter_keys + ) txs += [election] total_votes += votes - election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) txs += [election] total_votes += votes - b.store_abci_chain(1, 'chain-X') + b.store_abci_chain(1, "chain-X") Election.process_block(b, 1, txs) - b.store_block(Block(height=1, - transactions=[tx.id for tx in txs], - app_hash='')._asdict()) + b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict()) b.store_bulk_transactions(txs) Election.process_block(b, 2, total_votes) validators = b.get_validators() assert len(validators) == 5 - assert new_validator['storage'] in validators - assert another_validator['storage'] not in validators + assert new_validator["storage"] in validators + assert another_validator["storage"] not in validators - assert 
b.get_election(txs[0].id)['is_concluded'] - assert not b.get_election(txs[1].id)['is_concluded'] - assert b.get_election(txs[2].id)['is_concluded'] + assert b.get_election(txs[0].id)["is_concluded"] + assert not b.get_election(txs[1].id)["is_concluded"] + assert b.get_election(txs[2].id)["is_concluded"] - assert b.get_latest_abci_chain() == {'height': 2, - 'chain_id': 'chain-X-migrated-at-height-1', - 'is_synced': False} + assert b.get_latest_abci_chain() == {"height": 2, "chain_id": "chain-X-migrated-at-height-1", "is_synced": False} @pytest.mark.bdb def test_process_block_does_not_approve_after_validator_update(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + b.store_validator_set(1, [v["storage"] for v in validators]) new_validator = generate_validators([1])[0] - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - new_validator['election'], - voter_keys) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, new_validator["election"], voter_keys + ) txs = [election] total_votes = votes - b.store_block(Block(height=1, - transactions=[tx.id for tx in txs], - app_hash='')._asdict()) + b.store_block(Block(height=1, transactions=[tx.id for tx in txs], app_hash="")._asdict()) Election.process_block(b, 1, txs) b.store_bulk_transactions(txs) - second_election, second_votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + second_election, second_votes = generate_election( + b, ChainMigrationElection, public_key, private_key, {}, voter_keys + ) Election.process_block(b, 2, total_votes + [second_election]) - 
b.store_block(Block(height=2, - transactions=[v.id for v in total_votes + [second_election]], - app_hash='')._asdict()) + b.store_block(Block(height=2, transactions=[v.id for v in total_votes + [second_election]], app_hash="")._asdict()) - b.store_abci_chain(1, 'chain-X') + b.store_abci_chain(1, "chain-X") Election.process_block(b, 3, second_votes) - assert not b.get_election(second_election.id)['is_concluded'] - assert b.get_latest_abci_chain() == {'height': 1, - 'chain_id': 'chain-X', - 'is_synced': True} + assert not b.get_election(second_election.id)["is_concluded"] + assert b.get_latest_abci_chain() == {"height": 1, "chain_id": "chain-X", "is_synced": True} @pytest.mark.bdb def test_process_block_applies_only_one_migration(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + b.store_validator_set(1, [v["storage"] for v in validators]) - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) txs = [election] total_votes = votes - election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) txs += [election] total_votes += votes - b.store_abci_chain(1, 'chain-X') + b.store_abci_chain(1, "chain-X") Election.process_block(b, 1, txs) - b.store_block(Block(height=1, - transactions=[tx.id for tx in txs], - app_hash='')._asdict()) + b.store_block(Block(height=1, transactions=[tx.id for tx in txs], 
app_hash="")._asdict()) b.store_bulk_transactions(txs) Election.process_block(b, 1, total_votes) chain = b.get_latest_abci_chain() assert chain assert chain == { - 'height': 2, - 'is_synced': False, - 'chain_id': 'chain-X-migrated-at-height-1', + "height": 2, + "is_synced": False, + "chain_id": "chain-X-migrated-at-height-1", } - assert b.get_election(txs[0].id)['is_concluded'] - assert not b.get_election(txs[1].id)['is_concluded'] + assert b.get_election(txs[0].id)["is_concluded"] + assert not b.get_election(txs[1].id)["is_concluded"] def test_process_block_gracefully_handles_empty_block(b): diff --git a/tests/migrations/test_migration_election.py b/tests/migrations/test_migration_election.py index 8b7cbea..7faf534 100644 --- a/tests/migrations/test_migration_election.py +++ b/tests/migrations/test_migration_election.py @@ -3,7 +3,5 @@ from planetmint.transactions.types.elections.chain_migration_election import Cha def test_valid_migration_election(b_mock, node_key): voters = ChainMigrationElection.recipients(b_mock) - election = ChainMigrationElection.generate([node_key.public_key], - voters, - {}, None).sign([node_key.private_key]) + election = ChainMigrationElection.generate([node_key.public_key], voters, {}, None).sign([node_key.private_key]) assert election.validate(b_mock) diff --git a/tests/tendermint/conftest.py b/tests/tendermint/conftest.py index e3f3ffd..65539f6 100644 --- a/tests/tendermint/conftest.py +++ b/tests/tendermint/conftest.py @@ -9,15 +9,14 @@ import codecs from tendermint.abci import types_pb2 as types from tendermint.crypto import keys_pb2 + @pytest.fixture def validator_pub_key(): - return 'B0E42D2589A455EAD339A035D6CE1C8C3E25863F268120AA0162AD7D003A4014' + return "B0E42D2589A455EAD339A035D6CE1C8C3E25863F268120AA0162AD7D003A4014" @pytest.fixture def init_chain_request(): - pk = codecs.decode(b'VAgFZtYw8bNR5TMZHFOBDWk9cAmEu3/c6JgRBmddbbI=', - 'base64') - val_a = types.ValidatorUpdate(power=10, - pub_key=keys_pb2.PublicKey(ed25519=pk)) + pk 
= codecs.decode(b"VAgFZtYw8bNR5TMZHFOBDWk9cAmEu3/c6JgRBmddbbI=", "base64") + val_a = types.ValidatorUpdate(power=10, pub_key=keys_pb2.PublicKey(ed25519=pk)) return types.RequestInitChain(validators=[val_a]) diff --git a/tests/tendermint/test_core.py b/tests/tendermint/test_core.py index eede330..38d0bb3 100644 --- a/tests/tendermint/test_core.py +++ b/tests/tendermint/test_core.py @@ -15,9 +15,7 @@ from tendermint.crypto import keys_pb2 from planetmint import App from planetmint.backend import query from planetmint.transactions.common.crypto import generate_key_pair -from planetmint.core import (OkCode, - CodeTypeError, - rollback) +from planetmint.core import OkCode, CodeTypeError, rollback from planetmint.transactions.types.elections.election import Election from planetmint.lib import Block from planetmint.transactions.types.elections.chain_migration_election import ChainMigrationElection @@ -33,12 +31,11 @@ pytestmark = pytest.mark.bdb def encode_tx_to_bytes(transaction): - return json.dumps(transaction.to_dict()).encode('utf8') + return json.dumps(transaction.to_dict()).encode("utf8") def generate_address(): - return ''.join(random.choices('1,2,3,4,5,6,7,8,9,A,B,C,D,E,F'.split(','), - k=40)).encode() + return "".join(random.choices("1,2,3,4,5,6,7,8,9,A,B,C,D,E,F".split(","), k=40)).encode() def generate_validator(): @@ -54,21 +51,21 @@ def generate_init_chain_request(chain_id, vals=None): def test_init_chain_successfully_registers_chain(b): - request = generate_init_chain_request('chain-XYZ') + request = generate_init_chain_request("chain-XYZ") res = App(b).init_chain(request) assert res == types.ResponseInitChain() chain = query.get_latest_abci_chain(b.connection) - assert chain == {'height': 0, 'chain_id': 'chain-XYZ', 'is_synced': True} + assert chain == {"height": 0, "chain_id": "chain-XYZ", "is_synced": True} assert query.get_latest_block(b.connection) == { - 'height': 0, - 'app_hash': '', - 'transactions': [], + "height": 0, + "app_hash": "", + 
"transactions": [], } def test_init_chain_ignores_invalid_init_chain_requests(b): validators = [generate_validator()] - request = generate_init_chain_request('chain-XYZ', validators) + request = generate_init_chain_request("chain-XYZ", validators) res = App(b).init_chain(request) assert res == types.ResponseInitChain() @@ -77,9 +74,9 @@ def test_init_chain_ignores_invalid_init_chain_requests(b): invalid_requests = [ request, # the same request again # different validator set - generate_init_chain_request('chain-XYZ'), + generate_init_chain_request("chain-XYZ"), # different chain ID - generate_init_chain_request('chain-ABC', validators), + generate_init_chain_request("chain-ABC", validators), ] for r in invalid_requests: with pytest.raises(SystemExit): @@ -87,83 +84,81 @@ def test_init_chain_ignores_invalid_init_chain_requests(b): # assert nothing changed - neither validator set, nor chain ID new_validator_set = query.get_validator_set(b.connection) assert new_validator_set == validator_set - new_chain_id = query.get_latest_abci_chain(b.connection)['chain_id'] - assert new_chain_id == 'chain-XYZ' + new_chain_id = query.get_latest_abci_chain(b.connection)["chain_id"] + assert new_chain_id == "chain-XYZ" assert query.get_latest_block(b.connection) == { - 'height': 0, - 'app_hash': '', - 'transactions': [], + "height": 0, + "app_hash": "", + "transactions": [], } def test_init_chain_recognizes_new_chain_after_migration(b): validators = [generate_validator()] - request = generate_init_chain_request('chain-XYZ', validators) + request = generate_init_chain_request("chain-XYZ", validators) res = App(b).init_chain(request) assert res == types.ResponseInitChain() - validator_set = query.get_validator_set(b.connection)['validators'] + validator_set = query.get_validator_set(b.connection)["validators"] # simulate a migration - query.store_block(b.connection, Block(app_hash='', height=1, - transactions=[])._asdict()) + query.store_block(b.connection, Block(app_hash="", 
height=1, transactions=[])._asdict()) b.migrate_abci_chain() # the same or other mismatching requests are ignored invalid_requests = [ request, - generate_init_chain_request('unknown', validators), - generate_init_chain_request('chain-XYZ'), - generate_init_chain_request('chain-XYZ-migrated-at-height-1'), + generate_init_chain_request("unknown", validators), + generate_init_chain_request("chain-XYZ"), + generate_init_chain_request("chain-XYZ-migrated-at-height-1"), ] for r in invalid_requests: with pytest.raises(SystemExit): App(b).init_chain(r) assert query.get_latest_abci_chain(b.connection) == { - 'chain_id': 'chain-XYZ-migrated-at-height-1', - 'is_synced': False, - 'height': 2, + "chain_id": "chain-XYZ-migrated-at-height-1", + "is_synced": False, + "height": 2, } - new_validator_set = query.get_validator_set(b.connection)['validators'] + new_validator_set = query.get_validator_set(b.connection)["validators"] assert new_validator_set == validator_set # a request with the matching chain ID and matching validator set # completes the migration - request = generate_init_chain_request('chain-XYZ-migrated-at-height-1', - validators) + request = generate_init_chain_request("chain-XYZ-migrated-at-height-1", validators) res = App(b).init_chain(request) assert res == types.ResponseInitChain() assert query.get_latest_abci_chain(b.connection) == { - 'chain_id': 'chain-XYZ-migrated-at-height-1', - 'is_synced': True, - 'height': 2, + "chain_id": "chain-XYZ-migrated-at-height-1", + "is_synced": True, + "height": 2, } assert query.get_latest_block(b.connection) == { - 'height': 2, - 'app_hash': '', - 'transactions': [], + "height": 2, + "app_hash": "", + "transactions": [], } # requests with old chain ID and other requests are ignored invalid_requests = [ request, - generate_init_chain_request('chain-XYZ', validators), - generate_init_chain_request('chain-XYZ-migrated-at-height-1'), + generate_init_chain_request("chain-XYZ", validators), + 
generate_init_chain_request("chain-XYZ-migrated-at-height-1"), ] for r in invalid_requests: with pytest.raises(SystemExit): App(b).init_chain(r) assert query.get_latest_abci_chain(b.connection) == { - 'chain_id': 'chain-XYZ-migrated-at-height-1', - 'is_synced': True, - 'height': 2, + "chain_id": "chain-XYZ-migrated-at-height-1", + "is_synced": True, + "height": 2, } - new_validator_set = query.get_validator_set(b.connection)['validators'] + new_validator_set = query.get_validator_set(b.connection)["validators"] assert new_validator_set == validator_set assert query.get_latest_block(b.connection) == { - 'height': 2, - 'app_hash': '', - 'transactions': [], + "height": 2, + "app_hash": "", + "transactions": [], } @@ -173,33 +168,33 @@ def test_info(b): res = app.info(r) assert res.last_block_height == 0 - assert res.last_block_app_hash == b'' + assert res.last_block_app_hash == b"" - b.store_block(Block(app_hash='1', height=1, transactions=[])._asdict()) + b.store_block(Block(app_hash="1", height=1, transactions=[])._asdict()) res = app.info(r) assert res.last_block_height == 1 - assert res.last_block_app_hash == b'1' + assert res.last_block_app_hash == b"1" # simulate a migration and assert the height is shifted - b.store_abci_chain(2, 'chain-XYZ') + b.store_abci_chain(2, "chain-XYZ") app = App(b) - b.store_block(Block(app_hash='2', height=2, transactions=[])._asdict()) + b.store_block(Block(app_hash="2", height=2, transactions=[])._asdict()) res = app.info(r) assert res.last_block_height == 0 - assert res.last_block_app_hash == b'2' + assert res.last_block_app_hash == b"2" - b.store_block(Block(app_hash='3', height=3, transactions=[])._asdict()) + b.store_block(Block(app_hash="3", height=3, transactions=[])._asdict()) res = app.info(r) assert res.last_block_height == 1 - assert res.last_block_app_hash == b'3' + assert res.last_block_app_hash == b"3" # it's always the latest migration that is taken into account - b.store_abci_chain(4, 'chain-XYZ-new') + 
b.store_abci_chain(4, "chain-XYZ-new") app = App(b) - b.store_block(Block(app_hash='4', height=4, transactions=[])._asdict()) + b.store_block(Block(app_hash="4", height=4, transactions=[])._asdict()) res = app.info(r) assert res.last_block_height == 0 - assert res.last_block_app_hash == b'4' + assert res.last_block_app_hash == b"4" def test_check_tx__signed_create_is_ok(b): @@ -209,9 +204,7 @@ def test_check_tx__signed_create_is_ok(b): alice = generate_key_pair() bob = generate_key_pair() - tx = Create.generate([alice.public_key], - [([bob.public_key], 1)])\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([bob.public_key], 1)]).sign([alice.private_key]) app = App(b) result = app.check_tx(encode_tx_to_bytes(tx)) @@ -225,8 +218,7 @@ def test_check_tx__unsigned_create_is_error(b): alice = generate_key_pair() bob = generate_key_pair() - tx = Create.generate([alice.public_key], - [([bob.public_key], 1)]) + tx = Create.generate([alice.public_key], [([bob.public_key], 1)]) app = App(b) result = app.check_tx(encode_tx_to_bytes(tx)) @@ -242,9 +234,7 @@ def test_deliver_tx__valid_create_updates_db_and_emits_event(b, init_chain_reque bob = generate_key_pair() events = mp.Queue() - tx = Create.generate([alice.public_key], - [([bob.public_key], 1)])\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([bob.public_key], 1)]).sign([alice.private_key]) app = App(b, events) @@ -260,7 +250,7 @@ def test_deliver_tx__valid_create_updates_db_and_emits_event(b, init_chain_reque app.commit() assert b.get_transaction(tx.id).id == tx.id block_event = events.get() - assert block_event.data['transactions'] == [tx] + assert block_event.data["transactions"] == [tx] # unspent_outputs = b.get_unspent_outputs() # unspent_output = next(unspent_outputs) @@ -277,9 +267,7 @@ def test_deliver_tx__double_spend_fails(b, init_chain_request): alice = generate_key_pair() bob = generate_key_pair() - tx = Create.generate([alice.public_key], - 
[([bob.public_key], 1)])\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([bob.public_key], 1)]).sign([alice.private_key]) app = App(b) app.init_chain(init_chain_request) @@ -311,30 +299,21 @@ def test_deliver_transfer_tx__double_spend_fails(b, init_chain_request): bob = generate_key_pair() carly = generate_key_pair() - asset = { - 'msg': 'live long and prosper' - } + asset = {"msg": "live long and prosper"} - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=asset)\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset).sign([alice.private_key]) result = app.deliver_tx(encode_tx_to_bytes(tx)) assert result.code == OkCode - tx_transfer = Transfer.generate(tx.to_inputs(), - [([bob.public_key], 1)], - asset_id=tx.id)\ - .sign([alice.private_key]) + tx_transfer = Transfer.generate(tx.to_inputs(), [([bob.public_key], 1)], asset_id=tx.id).sign([alice.private_key]) result = app.deliver_tx(encode_tx_to_bytes(tx_transfer)) assert result.code == OkCode - double_spend = Transfer.generate(tx.to_inputs(), - [([carly.public_key], 1)], - asset_id=tx.id)\ - .sign([alice.private_key]) + double_spend = Transfer.generate(tx.to_inputs(), [([carly.public_key], 1)], asset_id=tx.id).sign( + [alice.private_key] + ) result = app.deliver_tx(encode_tx_to_bytes(double_spend)) assert result.code == CodeTypeError @@ -349,29 +328,26 @@ def test_end_block_return_validator_updates(b, init_chain_request): # generate a block containing a concluded validator election validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) + b.store_validator_set(1, [v["storage"] for v in validators]) new_validator = generate_validators([1])[0] - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = 
validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - new_validator['election'], - voter_keys) - b.store_block(Block(height=1, transactions=[election.id], - app_hash='')._asdict()) + election, votes = generate_election( + b, ValidatorElection, public_key, private_key, new_validator["election"], voter_keys + ) + b.store_block(Block(height=1, transactions=[election.id], app_hash="")._asdict()) b.store_bulk_transactions([election]) Election.process_block(b, 1, [election]) app.block_transactions = votes resp = app.end_block(types.RequestEndBlock(height=2)) - assert resp.validator_updates[0].power == new_validator['election']['power'] - expected = bytes.fromhex(new_validator['election']['public_key']['value']) + assert resp.validator_updates[0].power == new_validator["election"]["power"] + expected = bytes.fromhex(new_validator["election"]["public_key"]["value"]) assert expected == resp.validator_updates[0].pub_key.ed25519 @@ -379,10 +355,9 @@ def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request): from planetmint import App from planetmint.backend import query - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset={'msg': 'live long and prosper'})\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset={"msg": "live long and prosper"}).sign( + [alice.private_key] + ) app = App(b) app.init_chain(init_chain_request) @@ -393,60 +368,54 @@ def test_store_pre_commit_state_in_end_block(b, alice, init_chain_request): app.end_block(types.RequestEndBlock(height=99)) resp = query.get_pre_commit_state(b.connection) - assert resp['height'] == 99 - assert resp['transactions'] == [tx.id] + assert resp["height"] == 99 + assert resp["transactions"] == [tx.id] app.begin_block(begin_block) app.deliver_tx(encode_tx_to_bytes(tx)) app.end_block(types.RequestEndBlock(height=100)) 
resp = query.get_pre_commit_state(b.connection) - assert resp['height'] == 100 - assert resp['transactions'] == [tx.id] + assert resp["height"] == 100 + assert resp["transactions"] == [tx.id] # simulate a chain migration and assert the height is shifted - b.store_abci_chain(100, 'new-chain') + b.store_abci_chain(100, "new-chain") app = App(b) app.begin_block(begin_block) app.deliver_tx(encode_tx_to_bytes(tx)) app.end_block(types.RequestEndBlock(height=1)) resp = query.get_pre_commit_state(b.connection) - assert resp['height'] == 101 - assert resp['transactions'] == [tx.id] + assert resp["height"] == 101 + assert resp["transactions"] == [tx.id] def test_rollback_pre_commit_state_after_crash(b): validators = generate_validators([1] * 4) - b.store_validator_set(1, [v['storage'] for v in validators]) - b.store_block(Block(height=1, transactions=[], app_hash='')._asdict()) + b.store_validator_set(1, [v["storage"] for v in validators]) + b.store_block(Block(height=1, transactions=[], app_hash="")._asdict()) - public_key = validators[0]['public_key'] - private_key = validators[0]['private_key'] - voter_keys = [v['private_key'] for v in validators] + public_key = validators[0]["public_key"] + private_key = validators[0]["private_key"] + voter_keys = [v["private_key"] for v in validators] - migration_election, votes = generate_election(b, - ChainMigrationElection, - public_key, private_key, - {}, - voter_keys) + migration_election, votes = generate_election(b, ChainMigrationElection, public_key, private_key, {}, voter_keys) total_votes = votes txs = [migration_election, *votes] new_validator = generate_validators([1])[0] - validator_election, votes = generate_election(b, - ValidatorElection, - public_key, private_key, - new_validator['election'], - voter_keys) + validator_election, votes = generate_election( + b, ValidatorElection, public_key, private_key, new_validator["election"], voter_keys + ) total_votes += votes txs += [validator_election, *votes] 
b.store_bulk_transactions(txs) - b.store_abci_chain(2, 'new_chain') - b.store_validator_set(2, [v['storage'] for v in validators]) + b.store_abci_chain(2, "new_chain") + b.store_validator_set(2, [v["storage"] for v in validators]) # TODO change to `4` when upgrading to Tendermint 0.22.4. - b.store_validator_set(3, [new_validator['storage']]) + b.store_validator_set(3, [new_validator["storage"]]) b.store_election(migration_election.id, 2, is_concluded=False) b.store_election(validator_election.id, 2, is_concluded=True) @@ -456,33 +425,42 @@ def test_rollback_pre_commit_state_after_crash(b): for tx in txs: assert b.get_transaction(tx.id) assert b.get_latest_abci_chain() - assert len(b.get_validator_change()['validators']) == 1 + assert len(b.get_validator_change()["validators"]) == 1 assert b.get_election(migration_election.id) assert b.get_election(validator_election.id) - b.store_pre_commit_state({'height': 2, 'transactions': [tx.id for tx in txs]}) + b.store_pre_commit_state({"height": 2, "transactions": [tx.id for tx in txs]}) rollback(b) for tx in txs: assert not b.get_transaction(tx.id) assert not b.get_latest_abci_chain() - assert len(b.get_validator_change()['validators']) == 4 - assert len(b.get_validator_change(2)['validators']) == 4 + assert len(b.get_validator_change()["validators"]) == 4 + assert len(b.get_validator_change(2)["validators"]) == 4 assert not b.get_election(migration_election.id) assert not b.get_election(validator_election.id) def test_new_validator_set(b): - node1 = {'public_key': {'type': 'ed25519-base64', - 'value': 'FxjS2/8AFYoIUqF6AcePTc87qOT7e4WGgH+sGCpTUDQ='}, - 'voting_power': 10} - node1_new_power = {'public_key': {'value': '1718D2DBFF00158A0852A17A01C78F4DCF3BA8E4FB7B8586807FAC182A535034', - 'type': 'ed25519-base16'}, - 'power': 20} - node2 = {'public_key': {'value': '1888A353B181715CA2554701D06C1665BC42C5D936C55EA9C5DBCBDB8B3F02A3', - 'type': 'ed25519-base16'}, - 'power': 10} + node1 = { + "public_key": {"type": 
"ed25519-base64", "value": "FxjS2/8AFYoIUqF6AcePTc87qOT7e4WGgH+sGCpTUDQ="}, + "voting_power": 10, + } + node1_new_power = { + "public_key": { + "value": "1718D2DBFF00158A0852A17A01C78F4DCF3BA8E4FB7B8586807FAC182A535034", + "type": "ed25519-base16", + }, + "power": 20, + } + node2 = { + "public_key": { + "value": "1888A353B181715CA2554701D06C1665BC42C5D936C55EA9C5DBCBDB8B3F02A3", + "type": "ed25519-base16", + }, + "power": 10, + } validators = [node1] updates = [node1_new_power, node2] @@ -491,50 +469,53 @@ def test_new_validator_set(b): updated_validators = [] for u in updates: - updated_validators.append({'public_key': {'type': 'ed25519-base64', - 'value': public_key_to_base64(u['public_key']['value'])}, - 'voting_power': u['power']}) + updated_validators.append( + { + "public_key": {"type": "ed25519-base64", "value": public_key_to_base64(u["public_key"]["value"])}, + "voting_power": u["power"], + } + ) assert updated_validator_set == updated_validators def test_info_aborts_if_chain_is_not_synced(b): - b.store_abci_chain(0, 'chain-XYZ', False) + b.store_abci_chain(0, "chain-XYZ", False) with pytest.raises(SystemExit): App(b).info(types.RequestInfo()) def test_check_tx_aborts_if_chain_is_not_synced(b): - b.store_abci_chain(0, 'chain-XYZ', False) + b.store_abci_chain(0, "chain-XYZ", False) with pytest.raises(SystemExit): - App(b).check_tx('some bytes') + App(b).check_tx("some bytes") def test_begin_aborts_if_chain_is_not_synced(b): - b.store_abci_chain(0, 'chain-XYZ', False) + b.store_abci_chain(0, "chain-XYZ", False) with pytest.raises(SystemExit): App(b).info(types.RequestBeginBlock()) def test_deliver_tx_aborts_if_chain_is_not_synced(b): - b.store_abci_chain(0, 'chain-XYZ', False) + b.store_abci_chain(0, "chain-XYZ", False) with pytest.raises(SystemExit): - App(b).deliver_tx('some bytes') + App(b).deliver_tx("some bytes") def test_end_block_aborts_if_chain_is_not_synced(b): - b.store_abci_chain(0, 'chain-XYZ', False) + b.store_abci_chain(0, "chain-XYZ", False) 
with pytest.raises(SystemExit): App(b).info(types.RequestEndBlock()) def test_commit_aborts_if_chain_is_not_synced(b): - b.store_abci_chain(0, 'chain-XYZ', False) + b.store_abci_chain(0, "chain-XYZ", False) with pytest.raises(SystemExit): App(b).commit() diff --git a/tests/tendermint/test_fastquery.py b/tests/tendermint/test_fastquery.py index 77579bc..d37d0a5 100644 --- a/tests/tendermint/test_fastquery.py +++ b/tests/tendermint/test_fastquery.py @@ -14,19 +14,17 @@ pytestmark = pytest.mark.bdb @pytest.fixture def txns(b, user_pk, user_sk, user2_pk, user2_sk): - txs = [Create.generate([user_pk], [([user2_pk], 1)]).sign([user_sk]), - Create.generate([user2_pk], [([user_pk], 1)]).sign([user2_sk]), - Create.generate([user_pk], [([user_pk], 1), ([user2_pk], 1)]) - .sign([user_sk])] + txs = [ + Create.generate([user_pk], [([user2_pk], 1)]).sign([user_sk]), + Create.generate([user2_pk], [([user_pk], 1)]).sign([user2_sk]), + Create.generate([user_pk], [([user_pk], 1), ([user2_pk], 1)]).sign([user_sk]), + ] b.store_bulk_transactions(txs) return txs def test_get_outputs_by_public_key(b, user_pk, user2_pk, txns): - expected = [ - TransactionLink(txns[1].id, 0), - TransactionLink(txns[2].id, 0) - ] + expected = [TransactionLink(txns[1].id, 0), TransactionLink(txns[2].id, 0)] actual = b.fastquery.get_outputs_by_public_key(user_pk) _all_txs = set([tx.txid for tx in expected + actual]) @@ -37,8 +35,8 @@ def test_get_outputs_by_public_key(b, user_pk, user2_pk, txns): # ] actual_1 = b.fastquery.get_outputs_by_public_key(user2_pk) expected_1 = [ - TransactionLink(txns[0].id, 0), - TransactionLink(txns[2].id, 1), + TransactionLink(txns[0].id, 0), + TransactionLink(txns[2].id, 1), ] _all_tx_1 = set([tx.txid for tx in actual_1 + expected_1]) assert len(_all_tx_1) == 2 @@ -96,9 +94,7 @@ def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk): from planetmint.backend.connection import connect from planetmint.backend import query - tx1 = Create.generate([user_pk], - 
[([user_pk], 3), ([user_pk], 2), ([user_pk], 1)])\ - .sign([user_sk]) + tx1 = Create.generate([user_pk], [([user_pk], 3), ([user_pk], 2), ([user_pk], 1)]).sign([user_sk]) b.store_bulk_transactions([tx1]) inputs = tx1.to_inputs() @@ -106,9 +102,11 @@ def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk): assert tx2.validate(b) tx2_dict = tx2.to_dict() - fulfills = tx2_dict['inputs'][0]['fulfills'] - tx2_dict['inputs'][0]['fulfills'] = {'transaction_id': fulfills['transaction_id'], - 'output_index': fulfills['output_index']} + fulfills = tx2_dict["inputs"][0]["fulfills"] + tx2_dict["inputs"][0]["fulfills"] = { + "transaction_id": fulfills["transaction_id"], + "output_index": fulfills["output_index"], + } backend.query.store_transactions(b.connection, [tx2_dict]) outputs = b.get_outputs_filtered(user_pk, spent=False) @@ -123,8 +121,10 @@ def test_outputs_query_key_order(b, user_pk, user_sk, user2_pk, user2_sk): b.store_bulk_transactions([tx1]) tx2_dict = tx2.to_dict() - tx2_dict['inputs'][0]['fulfills'] = {'output_index': fulfills['output_index'], - 'transaction_id': fulfills['transaction_id']} + tx2_dict["inputs"][0]["fulfills"] = { + "output_index": fulfills["output_index"], + "transaction_id": fulfills["transaction_id"], + } backend.query.store_transactions(b.connection, [tx2_dict]) outputs = b.get_outputs_filtered(user_pk, spent=False) diff --git a/tests/tendermint/test_integration.py b/tests/tendermint/test_integration.py index 69c818d..cc19d5f 100644 --- a/tests/tendermint/test_integration.py +++ b/tests/tendermint/test_integration.py @@ -29,44 +29,41 @@ def test_app(b, eventqueue_fixture, init_chain_request): app = App(b, eventqueue_fixture) p = ProtocolHandler(app) - data = p.process('info', - types.Request(info=types.RequestInfo(version=__tm_supported_versions__[0]))) + data = p.process("info", types.Request(info=types.RequestInfo(version=__tm_supported_versions__[0]))) res = next(read_messages(BytesIO(data), types.Response)) assert res - 
assert res.info.last_block_app_hash == b'' + assert res.info.last_block_app_hash == b"" assert res.info.last_block_height == 0 assert not b.get_latest_block() - p.process('init_chain', types.Request(init_chain=init_chain_request)) + p.process("init_chain", types.Request(init_chain=init_chain_request)) block0 = b.get_latest_block() assert block0 - assert block0['height'] == 0 - assert block0['app_hash'] == '' + assert block0["height"] == 0 + assert block0["app_hash"] == "" - pk = codecs.encode(init_chain_request.validators[0].pub_key.ed25519, 'base64').decode().strip('\n') + pk = codecs.encode(init_chain_request.validators[0].pub_key.ed25519, "base64").decode().strip("\n") [validator] = b.get_validators(height=1) - assert validator['public_key']['value'] == pk - assert validator['voting_power'] == 10 + assert validator["public_key"]["value"] == pk + assert validator["voting_power"] == 10 alice = generate_key_pair() bob = generate_key_pair() - tx = Create.generate([alice.public_key], - [([bob.public_key], 1)])\ - .sign([alice.private_key]) - etxn = json.dumps(tx.to_dict()).encode('utf8') + tx = Create.generate([alice.public_key], [([bob.public_key], 1)]).sign([alice.private_key]) + etxn = json.dumps(tx.to_dict()).encode("utf8") r = types.Request(check_tx=types.RequestCheckTx(tx=etxn)) - data = p.process('check_tx', r) + data = p.process("check_tx", r) res = next(read_messages(BytesIO(data), types.Response)) assert res assert res.check_tx.code == 0 r = types.Request() - r.begin_block.hash = b'' - p.process('begin_block', r) + r.begin_block.hash = b"" + p.process("begin_block", r) r = types.Request(deliver_tx=types.RequestDeliverTx(tx=etxn)) - data = p.process('deliver_tx', r) + data = p.process("deliver_tx", r) res = next(read_messages(BytesIO(data), types.Response)) assert res assert res.deliver_tx.code == 0 @@ -74,42 +71,42 @@ def test_app(b, eventqueue_fixture, init_chain_request): new_block_txn_hash = calculate_hash([tx.id]) r = 
types.Request(end_block=types.RequestEndBlock(height=1)) - data = p.process('end_block', r) + data = p.process("end_block", r) res = next(read_messages(BytesIO(data), types.Response)) assert res - assert 'end_block' == res.WhichOneof('value') + assert "end_block" == res.WhichOneof("value") - new_block_hash = calculate_hash([block0['app_hash'], new_block_txn_hash]) + new_block_hash = calculate_hash([block0["app_hash"], new_block_txn_hash]) - data = p.process('commit', None) + data = p.process("commit", None) res = next(read_messages(BytesIO(data), types.Response)) - assert res.commit.data == new_block_hash.encode('utf-8') + assert res.commit.data == new_block_hash.encode("utf-8") assert b.get_transaction(tx.id).id == tx.id block0 = b.get_latest_block() assert block0 - assert block0['height'] == 1 - assert block0['app_hash'] == new_block_hash + assert block0["height"] == 1 + assert block0["app_hash"] == new_block_hash # empty block should not update height r = types.Request() - r.begin_block.hash = new_block_hash.encode('utf-8') - p.process('begin_block', r) + r.begin_block.hash = new_block_hash.encode("utf-8") + p.process("begin_block", r) r = types.Request() r.end_block.height = 2 - p.process('end_block', r) + p.process("end_block", r) - data = p.process('commit', None) + data = p.process("commit", None) res = next(read_messages(BytesIO(data), types.Response)) - assert res.commit.data == new_block_hash.encode('utf-8') + assert res.commit.data == new_block_hash.encode("utf-8") block0 = b.get_latest_block() assert block0 - assert block0['height'] == 2 + assert block0["height"] == 2 # when empty block is generated hash of previous block should be returned - assert block0['app_hash'] == new_block_hash + assert block0["app_hash"] == new_block_hash @pytest.mark.abci @@ -118,18 +115,12 @@ def test_post_transaction_responses(tendermint_ws_url, b): alice = generate_key_pair() bob = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - 
asset=None)\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]) code, message = b.write_transaction(tx, BROADCAST_TX_COMMIT) assert code == 202 - tx_transfer = Transfer.generate(tx.to_inputs(), - [([bob.public_key], 1)], - asset_id=tx.id)\ - .sign([alice.private_key]) + tx_transfer = Transfer.generate(tx.to_inputs(), [([bob.public_key], 1)], asset_id=tx.id).sign([alice.private_key]) code, message = b.write_transaction(tx_transfer, BROADCAST_TX_COMMIT) assert code == 202 @@ -143,4 +134,4 @@ def test_post_transaction_responses(tendermint_ws_url, b): for mode in (BROADCAST_TX_SYNC, BROADCAST_TX_COMMIT): code, message = b.write_transaction(double_spend, mode) assert code == 500 - assert message == 'Transaction validation failed' + assert message == "Transaction validation failed" diff --git a/tests/tendermint/test_lib.py b/tests/tendermint/test_lib.py index d07a21c..0932c02 100644 --- a/tests/tendermint/test_lib.py +++ b/tests/tendermint/test_lib.py @@ -20,7 +20,10 @@ from pymongo import MongoClient from planetmint import backend from planetmint.transactions.common.transaction_mode_types import ( - BROADCAST_TX_COMMIT, BROADCAST_TX_ASYNC, BROADCAST_TX_SYNC) + BROADCAST_TX_COMMIT, + BROADCAST_TX_ASYNC, + BROADCAST_TX_SYNC, +) from planetmint.lib import Block @@ -36,20 +39,21 @@ def test_asset_is_separated_from_transaciton(b): alice = generate_key_pair() bob = generate_key_pair() - asset = {'Never gonna': ['give you up', - 'let you down', - 'run around' - 'desert you', - 'make you cry', - 'say goodbye', - 'tell a lie', - 'hurt you']} + asset = { + "Never gonna": [ + "give you up", + "let you down", + "run around" "desert you", + "make you cry", + "say goodbye", + "tell a lie", + "hurt you", + ] + } - tx = Create.generate([alice.public_key], - [([bob.public_key], 1)], - metadata=None, - asset=asset) \ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], 
[([bob.public_key], 1)], metadata=None, asset=asset).sign( + [alice.private_key] + ) # with store_bulk_transactions we use `insert_many` where PyMongo # automatically adds an `_id` field to the tx, therefore we need the @@ -58,8 +62,8 @@ def test_asset_is_separated_from_transaciton(b): tx_dict = copy.deepcopy(tx.to_dict()) b.store_bulk_transactions([tx]) - assert 'asset' not in backend.query.get_transaction(b.connection, tx.id) - assert backend.query.get_asset(b.connection, tx.id)['data'] == asset + assert "asset" not in backend.query.get_transaction(b.connection, tx.id) + assert backend.query.get_asset(b.connection, tx.id)["data"] == asset assert b.get_transaction(tx.id).to_dict() == tx_dict @@ -70,92 +74,77 @@ def test_get_latest_block(b): for i in range(10): app_hash = os.urandom(16).hex() txn_id = os.urandom(16).hex() - block = Block(app_hash=app_hash, height=i, - transactions=[txn_id])._asdict() + block = Block(app_hash=app_hash, height=i, transactions=[txn_id])._asdict() b.store_block(block) block = b.get_latest_block() - assert block['height'] == 9 + assert block["height"] == 9 @pytest.mark.bdb -@patch('planetmint.backend.query.get_block', return_value=None) -@patch('planetmint.Planetmint.get_latest_block', return_value={'height': 10}) +@patch("planetmint.backend.query.get_block", return_value=None) +@patch("planetmint.Planetmint.get_latest_block", return_value={"height": 10}) def test_get_empty_block(_0, _1, b): - assert b.get_block(5) == {'height': 5, 'transactions': []} + assert b.get_block(5) == {"height": 5, "transactions": []} def test_validation_error(b): from planetmint.transactions.common.crypto import generate_key_pair alice = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=None) \ - .sign([alice.private_key]).to_dict() + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]).to_dict() - tx['metadata'] = '' + tx["metadata"] = "" assert not 
b.validate_transaction(tx) -@patch('requests.post') +@patch("requests.post") def test_write_and_post_transaction(mock_post, b): from planetmint.transactions.common.crypto import generate_key_pair from planetmint.tendermint_utils import encode_transaction alice = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=None) \ - .sign([alice.private_key]).to_dict() + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]).to_dict() tx = b.validate_transaction(tx) b.write_transaction(tx, BROADCAST_TX_ASYNC) assert mock_post.called args, kwargs = mock_post.call_args - assert BROADCAST_TX_ASYNC == kwargs['json']['method'] + assert BROADCAST_TX_ASYNC == kwargs["json"]["method"] encoded_tx = [encode_transaction(tx.to_dict())] - assert encoded_tx == kwargs['json']['params'] + assert encoded_tx == kwargs["json"]["params"] -@patch('requests.post') -@pytest.mark.parametrize('mode', [ - BROADCAST_TX_SYNC, - BROADCAST_TX_ASYNC, - BROADCAST_TX_COMMIT -]) +@patch("requests.post") +@pytest.mark.parametrize("mode", [BROADCAST_TX_SYNC, BROADCAST_TX_ASYNC, BROADCAST_TX_COMMIT]) def test_post_transaction_valid_modes(mock_post, b, mode): from planetmint.transactions.common.crypto import generate_key_pair + alice = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=None) \ - .sign([alice.private_key]).to_dict() + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]).to_dict() tx = b.validate_transaction(tx) b.write_transaction(tx, mode) args, kwargs = mock_post.call_args - assert mode == kwargs['json']['method'] + assert mode == kwargs["json"]["method"] def test_post_transaction_invalid_mode(b): from planetmint.transactions.common.crypto import generate_key_pair from planetmint.transactions.common.exceptions import ValidationError + alice = generate_key_pair() - tx = 
Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=None) \ - .sign([alice.private_key]).to_dict() + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]).to_dict() tx = b.validate_transaction(tx) with pytest.raises(ValidationError): - b.write_transaction(tx, 'nope') + b.write_transaction(tx, "nope") @pytest.mark.bdb def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn): b.update_utxoset(signed_create_tx) - utxoset = db_conn.get_space('utxos') + utxoset = db_conn.get_space("utxos") assert utxoset.select().rowcount == 1 utxo = utxoset.select().data assert utxo[0][0] == signed_create_tx.id @@ -168,40 +157,36 @@ def test_update_utxoset(b, signed_create_tx, signed_transfer_tx, db_conn): @pytest.mark.bdb -def test_store_transaction(mocker, b, signed_create_tx, - signed_transfer_tx, db_context): +def test_store_transaction(mocker, b, signed_create_tx, signed_transfer_tx, db_context): from planetmint.backend.tarantool.connection import TarantoolDBConnection - mocked_store_asset = mocker.patch('planetmint.backend.query.store_assets') - mocked_store_metadata = mocker.patch( - 'planetmint.backend.query.store_metadatas') - mocked_store_transaction = mocker.patch( - 'planetmint.backend.query.store_transactions') + + mocked_store_asset = mocker.patch("planetmint.backend.query.store_assets") + mocked_store_metadata = mocker.patch("planetmint.backend.query.store_metadatas") + mocked_store_transaction = mocker.patch("planetmint.backend.query.store_transactions") b.store_bulk_transactions([signed_create_tx]) if not isinstance(b.connection, TarantoolDBConnection): mongo_client = MongoClient(host=db_context.host, port=db_context.port) - utxoset = mongo_client[db_context.name]['utxos'] + utxoset = mongo_client[db_context.name]["utxos"] assert utxoset.count_documents({}) == 1 utxo = utxoset.find_one() - assert utxo['transaction_id'] == signed_create_tx.id - assert utxo['output_index'] == 0 + 
assert utxo["transaction_id"] == signed_create_tx.id + assert utxo["output_index"] == 0 mocked_store_asset.assert_called_once_with( b.connection, - [{'data': signed_create_tx.asset['data'], 'tx_id': signed_create_tx.id, 'asset_id': signed_create_tx.id}] + [{"data": signed_create_tx.asset["data"], "tx_id": signed_create_tx.id, "asset_id": signed_create_tx.id}], ) else: mocked_store_asset.assert_called_once_with( - b.connection, - [(signed_create_tx.asset, signed_create_tx.id, signed_create_tx.id)] + b.connection, [(signed_create_tx.asset, signed_create_tx.id, signed_create_tx.id)] ) mocked_store_metadata.assert_called_once_with( b.connection, - [{'id': signed_create_tx.id, 'metadata': signed_create_tx.metadata}], + [{"id": signed_create_tx.id, "metadata": signed_create_tx.metadata}], ) mocked_store_transaction.assert_called_once_with( b.connection, - [{k: v for k, v in signed_create_tx.to_dict().items() - if k not in ('asset', 'metadata')}], + [{k: v for k, v in signed_create_tx.to_dict().items() if k not in ("asset", "metadata")}], ) mocked_store_asset.reset_mock() mocked_store_metadata.reset_mock() @@ -210,39 +195,35 @@ def test_store_transaction(mocker, b, signed_create_tx, if not isinstance(b.connection, TarantoolDBConnection): assert utxoset.count_documents({}) == 1 utxo = utxoset.find_one() - assert utxo['transaction_id'] == signed_transfer_tx.id - assert utxo['output_index'] == 0 + assert utxo["transaction_id"] == signed_transfer_tx.id + assert utxo["output_index"] == 0 assert not mocked_store_asset.called mocked_store_metadata.asser_called_once_with( b.connection, - [{'id': signed_transfer_tx.id, 'metadata': signed_transfer_tx.metadata}], + [{"id": signed_transfer_tx.id, "metadata": signed_transfer_tx.metadata}], ) if not isinstance(b.connection, TarantoolDBConnection): mocked_store_transaction.assert_called_once_with( b.connection, - [{k: v for k, v in signed_transfer_tx.to_dict().items() - if k != 'metadata'}], + [{k: v for k, v in 
signed_transfer_tx.to_dict().items() if k != "metadata"}], ) @pytest.mark.bdb -def test_store_bulk_transaction(mocker, b, signed_create_tx, - signed_transfer_tx, db_context): +def test_store_bulk_transaction(mocker, b, signed_create_tx, signed_transfer_tx, db_context): from planetmint.backend.tarantool.connection import TarantoolDBConnection - mocked_store_assets = mocker.patch( - 'planetmint.backend.query.store_assets') - mocked_store_metadata = mocker.patch( - 'planetmint.backend.query.store_metadatas') - mocked_store_transactions = mocker.patch( - 'planetmint.backend.query.store_transactions') + + mocked_store_assets = mocker.patch("planetmint.backend.query.store_assets") + mocked_store_metadata = mocker.patch("planetmint.backend.query.store_metadatas") + mocked_store_transactions = mocker.patch("planetmint.backend.query.store_transactions") b.store_bulk_transactions((signed_create_tx,)) if not isinstance(b.connection, TarantoolDBConnection): mongo_client = MongoClient(host=db_context.host, port=db_context.port) - utxoset = mongo_client[db_context.name]['utxos'] + utxoset = mongo_client[db_context.name]["utxos"] assert utxoset.count_documents({}) == 1 utxo = utxoset.find_one() - assert utxo['transaction_id'] == signed_create_tx.id - assert utxo['output_index'] == 0 + assert utxo["transaction_id"] == signed_create_tx.id + assert utxo["output_index"] == 0 if isinstance(b.connection, TarantoolDBConnection): mocked_store_assets.assert_called_once_with( b.connection, # signed_create_tx.asset['data'] this was before @@ -255,12 +236,11 @@ def test_store_bulk_transaction(mocker, b, signed_create_tx, ) mocked_store_metadata.assert_called_once_with( b.connection, - [{'id': signed_create_tx.id, 'metadata': signed_create_tx.metadata}], + [{"id": signed_create_tx.id, "metadata": signed_create_tx.metadata}], ) mocked_store_transactions.assert_called_once_with( b.connection, - [{k: v for k, v in signed_create_tx.to_dict().items() - if k not in ('asset', 'metadata')}], + [{k: v 
for k, v in signed_create_tx.to_dict().items() if k not in ("asset", "metadata")}], ) mocked_store_assets.reset_mock() mocked_store_metadata.reset_mock() @@ -269,19 +249,17 @@ def test_store_bulk_transaction(mocker, b, signed_create_tx, if not isinstance(b.connection, TarantoolDBConnection): assert utxoset.count_documents({}) == 1 utxo = utxoset.find_one() - assert utxo['transaction_id'] == signed_transfer_tx.id - assert utxo['output_index'] == 0 + assert utxo["transaction_id"] == signed_transfer_tx.id + assert utxo["output_index"] == 0 assert not mocked_store_assets.called mocked_store_metadata.asser_called_once_with( b.connection, - [{'id': signed_transfer_tx.id, - 'metadata': signed_transfer_tx.metadata}], + [{"id": signed_transfer_tx.id, "metadata": signed_transfer_tx.metadata}], ) if not isinstance(b.connection, TarantoolDBConnection): mocked_store_transactions.assert_called_once_with( b.connection, - [{k: v for k, v in signed_transfer_tx.to_dict().items() - if k != 'metadata'}], + [{k: v for k, v in signed_transfer_tx.to_dict().items() if k != "metadata"}], ) @@ -305,46 +283,56 @@ def test_delete_zero_unspent_outputs(b, utxoset): @pytest.mark.bdb def test_delete_one_unspent_outputs(b, utxoset): from planetmint.backend.tarantool.connection import TarantoolDBConnection + unspent_outputs, utxo_collection = utxoset delete_res = b.delete_unspent_outputs(unspent_outputs[0]) if not isinstance(b.connection, TarantoolDBConnection): assert len(list(delete_res)) == 1 - assert utxo_collection.count_documents( - {'$or': [ - {'transaction_id': 'a', 'output_index': 1}, - {'transaction_id': 'b', 'output_index': 0}, - ]} - ) == 2 - assert utxo_collection.count_documents( - {'transaction_id': 'a', 'output_index': 0}) == 0 + assert ( + utxo_collection.count_documents( + { + "$or": [ + {"transaction_id": "a", "output_index": 1}, + {"transaction_id": "b", "output_index": 0}, + ] + } + ) + == 2 + ) + assert utxo_collection.count_documents({"transaction_id": "a", "output_index": 
0}) == 0 else: utx_space = b.connection.get_space("utxos") - res1 = utx_space.select(['a', 1], index="id_search").data - res2 = utx_space.select(['b', 0], index="id_search").data + res1 = utx_space.select(["a", 1], index="id_search").data + res2 = utx_space.select(["b", 0], index="id_search").data assert len(res1) + len(res2) == 2 - res3 = utx_space.select(['a', 0], index="id_search").data + res3 = utx_space.select(["a", 0], index="id_search").data assert len(res3) == 0 @pytest.mark.bdb def test_delete_many_unspent_outputs(b, utxoset): from planetmint.backend.tarantool.connection import TarantoolDBConnection + unspent_outputs, utxo_collection = utxoset delete_res = b.delete_unspent_outputs(*unspent_outputs[::2]) if not isinstance(b.connection, TarantoolDBConnection): assert len(list(delete_res)) == 2 - assert utxo_collection.count_documents( - {'$or': [ - {'transaction_id': 'a', 'output_index': 0}, - {'transaction_id': 'b', 'output_index': 0}, - ]} - ) == 0 - assert utxo_collection.count_documents( - {'transaction_id': 'a', 'output_index': 1}) == 1 + assert ( + utxo_collection.count_documents( + { + "$or": [ + {"transaction_id": "a", "output_index": 0}, + {"transaction_id": "b", "output_index": 0}, + ] + } + ) + == 0 + ) + assert utxo_collection.count_documents({"transaction_id": "a", "output_index": 1}) == 1 else: # TODO It looks ugly because query.get_unspent_outputs function, has not yet implemented query parameter. 
utx_space = b.connection.get_space("utxos") - res1 = utx_space.select(['a', 0], index="id_search").data - res2 = utx_space.select(['b', 0], index="id_search").data + res1 = utx_space.select(["a", 0], index="id_search").data + res2 = utx_space.select(["b", 0], index="id_search").data assert len(res1) + len(res2) == 0 res3 = utx_space.select([], index="id_search").data assert len(res3) == 1 @@ -362,31 +350,37 @@ def test_store_zero_unspent_output(b, utxo_collection): @pytest.mark.bdb def test_store_one_unspent_output(b, unspent_output_1, utxo_collection): from planetmint.backend.tarantool.connection import TarantoolDBConnection + res = b.store_unspent_outputs(unspent_output_1) if not isinstance(b.connection, TarantoolDBConnection): assert res.acknowledged assert len(list(res)) == 1 - assert utxo_collection.count_documents( - {'transaction_id': unspent_output_1['transaction_id'], - 'output_index': unspent_output_1['output_index']} - ) == 1 + assert ( + utxo_collection.count_documents( + { + "transaction_id": unspent_output_1["transaction_id"], + "output_index": unspent_output_1["output_index"], + } + ) + == 1 + ) else: utx_space = b.connection.get_space("utxos") - res = utx_space.select([unspent_output_1["transaction_id"], unspent_output_1["output_index"]], - index="id_search") + res = utx_space.select( + [unspent_output_1["transaction_id"], unspent_output_1["output_index"]], index="id_search" + ) assert len(res.data) == 1 @pytest.mark.bdb def test_store_many_unspent_outputs(b, unspent_outputs, utxo_collection): from planetmint.backend.tarantool.connection import TarantoolDBConnection + res = b.store_unspent_outputs(*unspent_outputs) if not isinstance(b.connection, TarantoolDBConnection): assert res.acknowledged assert len(list(res)) == 3 - assert utxo_collection.count_documents( - {'transaction_id': unspent_outputs[0]['transaction_id']} - ) == 3 + assert utxo_collection.count_documents({"transaction_id": unspent_outputs[0]["transaction_id"]}) == 3 else: utxo_space = 
b.connection.get_space("utxos") # .select([], index="transaction_search").data res = utxo_space.select([unspent_outputs[0]["transaction_id"]], index="transaction_search") @@ -394,14 +388,13 @@ def test_store_many_unspent_outputs(b, unspent_outputs, utxo_collection): def test_get_utxoset_merkle_root_when_no_utxo(b): - assert b.get_utxoset_merkle_root() == sha3_256(b'').hexdigest() + assert b.get_utxoset_merkle_root() == sha3_256(b"").hexdigest() @pytest.mark.bdb -@pytest.mark.usefixture('utxoset') +@pytest.mark.usefixture("utxoset") def test_get_utxoset_merkle_root(b, utxoset): - expected_merkle_root = ( - '86d311c03115bf4d287f8449ca5828505432d69b82762d47077b1c00fe426eac') + expected_merkle_root = "86d311c03115bf4d287f8449ca5828505432d69b82762d47077b1c00fe426eac" merkle_root = b.get_utxoset_merkle_root() assert merkle_root == expected_merkle_root @@ -411,27 +404,19 @@ def test_get_spent_transaction_critical_double_spend(b, alice, bob, carol): from planetmint.exceptions import CriticalDoubleSpend from planetmint.transactions.common.exceptions import DoubleSpend - asset = {'test': 'asset'} + asset = {"test": "asset"} - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=asset) \ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset).sign([alice.private_key]) - tx_transfer = Transfer.generate(tx.to_inputs(), - [([bob.public_key], 1)], - asset_id=tx.id) \ - .sign([alice.private_key]) + tx_transfer = Transfer.generate(tx.to_inputs(), [([bob.public_key], 1)], asset_id=tx.id).sign([alice.private_key]) - double_spend = Transfer.generate(tx.to_inputs(), - [([carol.public_key], 1)], - asset_id=tx.id) \ - .sign([alice.private_key]) + double_spend = Transfer.generate(tx.to_inputs(), [([carol.public_key], 1)], asset_id=tx.id).sign( + [alice.private_key] + ) - same_input_double_spend = Transfer.generate(tx.to_inputs() + tx.to_inputs(), - [([bob.public_key], 1)], - asset_id=tx.id) \ - 
.sign([alice.private_key]) + same_input_double_spend = Transfer.generate( + tx.to_inputs() + tx.to_inputs(), [([bob.public_key], 1)], asset_id=tx.id + ).sign([alice.private_key]) b.store_bulk_transactions([tx]) @@ -441,8 +426,7 @@ def test_get_spent_transaction_critical_double_spend(b, alice, bob, carol): assert b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer]) with pytest.raises(DoubleSpend): - b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, - [tx_transfer, double_spend]) + b.get_spent(tx.id, tx_transfer.inputs[0].fulfills.output, [tx_transfer, double_spend]) b.store_bulk_transactions([tx_transfer]) @@ -461,12 +445,8 @@ def test_validation_with_transaction_buffer(b): priv_key, pub_key = generate_key_pair() create_tx = Create.generate([pub_key], [([pub_key], 10)]).sign([priv_key]) - transfer_tx = Transfer.generate(create_tx.to_inputs(), - [([pub_key], 10)], - asset_id=create_tx.id).sign([priv_key]) - double_spend = Transfer.generate(create_tx.to_inputs(), - [([pub_key], 10)], - asset_id=create_tx.id).sign([priv_key]) + transfer_tx = Transfer.generate(create_tx.to_inputs(), [([pub_key], 10)], asset_id=create_tx.id).sign([priv_key]) + double_spend = Transfer.generate(create_tx.to_inputs(), [([pub_key], 10)], asset_id=create_tx.id).sign([priv_key]) assert b.is_valid_transaction(create_tx) assert b.is_valid_transaction(transfer_tx, [create_tx]) @@ -484,25 +464,24 @@ def test_migrate_abci_chain_yields_on_genesis(b): @pytest.mark.bdb -@pytest.mark.parametrize('chain,block_height,expected', [ - ( - (1, 'chain-XYZ', True), +@pytest.mark.parametrize( + "chain,block_height,expected", + [ + ( + (1, "chain-XYZ", True), 4, - {'height': 5, 'chain_id': 'chain-XYZ-migrated-at-height-4', - 'is_synced': False}, - ), - ( - (5, 'chain-XYZ-migrated-at-height-4', True), + {"height": 5, "chain_id": "chain-XYZ-migrated-at-height-4", "is_synced": False}, + ), + ( + (5, "chain-XYZ-migrated-at-height-4", True), 13, - {'height': 14, 'chain_id': 
'chain-XYZ-migrated-at-height-13', - 'is_synced': False}, - ), -]) -def test_migrate_abci_chain_generates_new_chains(b, chain, block_height, - expected): + {"height": 14, "chain_id": "chain-XYZ-migrated-at-height-13", "is_synced": False}, + ), + ], +) +def test_migrate_abci_chain_generates_new_chains(b, chain, block_height, expected): b.store_abci_chain(*chain) - b.store_block(Block(app_hash='', height=block_height, - transactions=[])._asdict()) + b.store_block(Block(app_hash="", height=block_height, transactions=[])._asdict()) b.migrate_abci_chain() latest_chain = b.get_latest_abci_chain() assert latest_chain == expected @@ -517,10 +496,7 @@ def test_get_spent_key_order(b, user_pk, user_sk, user2_pk, user2_sk): alice = generate_key_pair() bob = generate_key_pair() - tx1 = Create.generate([user_pk], - [([alice.public_key], 3), ([user_pk], 2)], - asset=None) \ - .sign([user_sk]) + tx1 = Create.generate([user_pk], [([alice.public_key], 3), ([user_pk], 2)], asset=None).sign([user_sk]) b.store_bulk_transactions([tx1]) inputs = tx1.to_inputs() @@ -528,9 +504,11 @@ def test_get_spent_key_order(b, user_pk, user_sk, user2_pk, user2_sk): assert tx2.validate(b) tx2_dict = tx2.to_dict() - fulfills = tx2_dict['inputs'][0]['fulfills'] - tx2_dict['inputs'][0]['fulfills'] = {'output_index': fulfills['output_index'], - 'transaction_id': fulfills['transaction_id']} + fulfills = tx2_dict["inputs"][0]["fulfills"] + tx2_dict["inputs"][0]["fulfills"] = { + "output_index": fulfills["output_index"], + "transaction_id": fulfills["transaction_id"], + } backend.query.store_transactions(b.connection, [tx2_dict]) diff --git a/tests/tendermint/test_utils.py b/tests/tendermint/test_utils.py index 6b998fd..785685c 100644 --- a/tests/tendermint/test_utils.py +++ b/tests/tendermint/test_utils.py @@ -13,16 +13,12 @@ except ImportError: def test_encode_decode_transaction(b): - from planetmint.tendermint_utils import (encode_transaction, - decode_transaction) + from planetmint.tendermint_utils import 
encode_transaction, decode_transaction - asset = { - 'value': 'key' - } + asset = {"value": "key"} encode_tx = encode_transaction(asset) - new_encode_tx = base64.b64encode(json.dumps(asset). - encode('utf8')).decode('utf8') + new_encode_tx = base64.b64encode(json.dumps(asset).encode("utf8")).decode("utf8") assert encode_tx == new_encode_tx @@ -34,38 +30,34 @@ def test_calculate_hash_no_key(b): from planetmint.tendermint_utils import calculate_hash # pass an empty list - assert calculate_hash([]) == '' + assert calculate_hash([]) == "" # TODO test for the case of an empty list of hashes, and possibly other cases. def test_merkleroot(): from planetmint.tendermint_utils import merkleroot - hashes = [sha3_256(i.encode()).digest() for i in 'abc'] - assert merkleroot(hashes) == ( - '78c7c394d3158c218916b7ae0ebdea502e0f4e85c08e3b371e3dfd824d389fa3') + + hashes = [sha3_256(i.encode()).digest() for i in "abc"] + assert merkleroot(hashes) == ("78c7c394d3158c218916b7ae0ebdea502e0f4e85c08e3b371e3dfd824d389fa3") SAMPLE_PUBLIC_KEY = { "address": "53DC09497A6ED73B342C78AB1E916076A03A8B95", - "pub_key": { - "type": "AC26791624DE60", - "value": "7S+T/do70jvneAq0M1so2X3M1iWTSuwtuSAr3nVpfEw=" - } + "pub_key": {"type": "AC26791624DE60", "value": "7S+T/do70jvneAq0M1so2X3M1iWTSuwtuSAr3nVpfEw="}, } def test_convert_base64_public_key_to_address(): from planetmint.tendermint_utils import public_key64_to_address - address = public_key64_to_address(SAMPLE_PUBLIC_KEY['pub_key']['value']) - assert address == SAMPLE_PUBLIC_KEY['address'] + address = public_key64_to_address(SAMPLE_PUBLIC_KEY["pub_key"]["value"]) + assert address == SAMPLE_PUBLIC_KEY["address"] def test_public_key_encoding_decoding(): - from planetmint.tendermint_utils import (public_key_from_base64, - public_key_to_base64) + from planetmint.tendermint_utils import public_key_from_base64, public_key_to_base64 - public_key = public_key_from_base64(SAMPLE_PUBLIC_KEY['pub_key']['value']) + public_key = 
public_key_from_base64(SAMPLE_PUBLIC_KEY["pub_key"]["value"]) base64_public_key = public_key_to_base64(public_key) - assert base64_public_key == SAMPLE_PUBLIC_KEY['pub_key']['value'] + assert base64_public_key == SAMPLE_PUBLIC_KEY["pub_key"]["value"] diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index 8edc8a7..dae9546 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -12,21 +12,22 @@ import planetmint from planetmint.config import Config -@pytest.fixture(scope='function', autouse=True) +@pytest.fixture(scope="function", autouse=True) def clean_config(monkeypatch, request): - original_config = Config().init_config('tarantool_db') - backend = request.config.getoption('--database-backend') - original_config['database'] = Config().get_db_map(backend) - monkeypatch.setattr('planetmint.config', original_config) + original_config = Config().init_config("tarantool_db") + backend = request.config.getoption("--database-backend") + original_config["database"] = Config().get_db_map(backend) + monkeypatch.setattr("planetmint.config", original_config) def test_bigchain_instance_is_initialized_when_conf_provided(): from planetmint import config_utils - assert 'CONFIGURED' not in Config().get() - config_utils.set_config({'database': {'backend': 'a'}}) + assert "CONFIGURED" not in Config().get() - assert Config().get()['CONFIGURED'] is True + config_utils.set_config({"database": {"backend": "a"}}) + + assert Config().get()["CONFIGURED"] is True def test_load_validation_plugin_loads_default_rules_without_name(): @@ -41,7 +42,7 @@ def test_load_validation_plugin_raises_with_unknown_name(): from planetmint import config_utils with pytest.raises(ResolutionError): - config_utils.load_validation_plugin('bogus') + config_utils.load_validation_plugin("bogus") def test_load_validation_plugin_raises_with_invalid_subclass(monkeypatch): @@ -49,9 +50,10 @@ def test_load_validation_plugin_raises_with_invalid_subclass(monkeypatch): # 
ValidationRules instance from planetmint import config_utils import time - monkeypatch.setattr(config_utils, - 'iter_entry_points', - lambda *args: [type('entry_point', (object,), {'load': lambda: object})]) + + monkeypatch.setattr( + config_utils, "iter_entry_points", lambda *args: [type("entry_point", (object,), {"load": lambda: object})] + ) with pytest.raises(TypeError): # Since the function is decorated with `lru_cache`, we need to @@ -61,38 +63,28 @@ def test_load_validation_plugin_raises_with_invalid_subclass(monkeypatch): def test_load_events_plugins(monkeypatch): from planetmint import config_utils - monkeypatch.setattr(config_utils, - 'iter_entry_points', - lambda *args: [type('entry_point', (object,), {'load': lambda: object})]) - plugins = config_utils.load_events_plugins(['one', 'two']) + monkeypatch.setattr( + config_utils, "iter_entry_points", lambda *args: [type("entry_point", (object,), {"load": lambda: object})] + ) + + plugins = config_utils.load_events_plugins(["one", "two"]) assert len(plugins) == 2 def test_map_leafs_iterator(): from planetmint import config_utils - mapping = { - 'a': {'b': {'c': 1}, - 'd': {'z': 44}}, - 'b': {'d': 2}, - 'c': 3 - } + mapping = {"a": {"b": {"c": 1}, "d": {"z": 44}}, "b": {"d": 2}, "c": 3} result = config_utils.map_leafs(lambda x, path: x * 2, mapping) - assert result == { - 'a': {'b': {'c': 2}, - 'd': {'z': 88}}, - 'b': {'d': 4}, - 'c': 6 - } + assert result == {"a": {"b": {"c": 2}, "d": {"z": 88}}, "b": {"d": 4}, "c": 6} result = config_utils.map_leafs(lambda x, path: path, mapping) assert result == { - 'a': {'b': {'c': ['a', 'b', 'c']}, - 'd': {'z': ['a', 'd', 'z']}}, - 'b': {'d': ['b', 'd']}, - 'c': ['c'] + "a": {"b": {"c": ["a", "b", "c"]}, "d": {"z": ["a", "d", "z"]}}, + "b": {"d": ["b", "d"]}, + "c": ["c"], } @@ -100,17 +92,17 @@ def test_update_types(): from planetmint import config_utils raw = { - 'a_string': 'test', - 'an_int': '42', - 'a_float': '3.14', - 'a_list': 'a:b:c', + "a_string": "test", + 
"an_int": "42", + "a_float": "3.14", + "a_list": "a:b:c", } reference = { - 'a_string': 'test', - 'an_int': 42, - 'a_float': 3.14, - 'a_list': ['a', 'b', 'c'], + "a_string": "test", + "an_int": 42, + "a_float": 3.14, + "a_list": ["a", "b", "c"], } result = config_utils.update_types(raw, reference) @@ -118,142 +110,145 @@ def test_update_types(): def test_env_config(monkeypatch): - monkeypatch.setattr('os.environ', {'PLANETMINT_DATABASE_HOST': 'test-host', - 'PLANETMINT_DATABASE_PORT': 'test-port'}) + monkeypatch.setattr( + "os.environ", {"PLANETMINT_DATABASE_HOST": "test-host", "PLANETMINT_DATABASE_PORT": "test-port"} + ) from planetmint import config_utils - result = config_utils.env_config({'database': {'host': None, 'port': None}}) - expected = {'database': {'host': 'test-host', 'port': 'test-port'}} + result = config_utils.env_config({"database": {"host": None, "port": None}}) + expected = {"database": {"host": "test-host", "port": "test-port"}} assert result == expected @pytest.mark.skip -def test_autoconfigure_read_both_from_file_and_env(monkeypatch, - request): # TODO Disabled until we create a better config format +def test_autoconfigure_read_both_from_file_and_env( + monkeypatch, request +): # TODO Disabled until we create a better config format return # constants - DATABASE_HOST = 'test-host' - DATABASE_NAME = 'test-dbname' + DATABASE_HOST = "test-host" + DATABASE_NAME = "test-dbname" DATABASE_PORT = 4242 - DATABASE_BACKEND = request.config.getoption('--database-backend') - SERVER_BIND = '1.2.3.4:56' - WSSERVER_SCHEME = 'ws' - WSSERVER_HOST = '1.2.3.4' + DATABASE_BACKEND = request.config.getoption("--database-backend") + SERVER_BIND = "1.2.3.4:56" + WSSERVER_SCHEME = "ws" + WSSERVER_HOST = "1.2.3.4" WSSERVER_PORT = 57 - WSSERVER_ADVERTISED_SCHEME = 'wss' - WSSERVER_ADVERTISED_HOST = 'a.b.c.d' + WSSERVER_ADVERTISED_SCHEME = "wss" + WSSERVER_ADVERTISED_HOST = "a.b.c.d" WSSERVER_ADVERTISED_PORT = 89 - LOG_FILE = '/somewhere/something.log' + LOG_FILE = 
"/somewhere/something.log" file_config = { - 'database': { - 'host': DATABASE_HOST - }, - 'log': { - 'level_console': 'debug', + "database": {"host": DATABASE_HOST}, + "log": { + "level_console": "debug", }, } - monkeypatch.setattr('planetmint.config_utils.file_config', - lambda *args, **kwargs: file_config) + monkeypatch.setattr("planetmint.config_utils.file_config", lambda *args, **kwargs: file_config) - monkeypatch.setattr('os.environ', { - 'PLANETMINT_DATABASE_NAME': DATABASE_NAME, - 'PLANETMINT_DATABASE_PORT': str(DATABASE_PORT), - 'PLANETMINT_DATABASE_BACKEND': DATABASE_BACKEND, - 'PLANETMINT_SERVER_BIND': SERVER_BIND, - 'PLANETMINT_WSSERVER_SCHEME': WSSERVER_SCHEME, - 'PLANETMINT_WSSERVER_HOST': WSSERVER_HOST, - 'PLANETMINT_WSSERVER_PORT': WSSERVER_PORT, - 'PLANETMINT_WSSERVER_ADVERTISED_SCHEME': WSSERVER_ADVERTISED_SCHEME, - 'PLANETMINT_WSSERVER_ADVERTISED_HOST': WSSERVER_ADVERTISED_HOST, - 'PLANETMINT_WSSERVER_ADVERTISED_PORT': WSSERVER_ADVERTISED_PORT, - 'PLANETMINT_LOG_FILE': LOG_FILE, - 'PLANETMINT_LOG_FILE': LOG_FILE, - 'PLANETMINT_DATABASE_CA_CERT': 'ca_cert', - 'PLANETMINT_DATABASE_CRLFILE': 'crlfile', - 'PLANETMINT_DATABASE_CERTFILE': 'certfile', - 'PLANETMINT_DATABASE_KEYFILE': 'keyfile', - 'PLANETMINT_DATABASE_KEYFILE_PASSPHRASE': 'passphrase', - }) + monkeypatch.setattr( + "os.environ", + { + "PLANETMINT_DATABASE_NAME": DATABASE_NAME, + "PLANETMINT_DATABASE_PORT": str(DATABASE_PORT), + "PLANETMINT_DATABASE_BACKEND": DATABASE_BACKEND, + "PLANETMINT_SERVER_BIND": SERVER_BIND, + "PLANETMINT_WSSERVER_SCHEME": WSSERVER_SCHEME, + "PLANETMINT_WSSERVER_HOST": WSSERVER_HOST, + "PLANETMINT_WSSERVER_PORT": WSSERVER_PORT, + "PLANETMINT_WSSERVER_ADVERTISED_SCHEME": WSSERVER_ADVERTISED_SCHEME, + "PLANETMINT_WSSERVER_ADVERTISED_HOST": WSSERVER_ADVERTISED_HOST, + "PLANETMINT_WSSERVER_ADVERTISED_PORT": WSSERVER_ADVERTISED_PORT, + "PLANETMINT_LOG_FILE": LOG_FILE, + "PLANETMINT_LOG_FILE": LOG_FILE, + "PLANETMINT_DATABASE_CA_CERT": "ca_cert", + 
"PLANETMINT_DATABASE_CRLFILE": "crlfile", + "PLANETMINT_DATABASE_CERTFILE": "certfile", + "PLANETMINT_DATABASE_KEYFILE": "keyfile", + "PLANETMINT_DATABASE_KEYFILE_PASSPHRASE": "passphrase", + }, + ) from planetmint import config_utils from planetmint.log import DEFAULT_LOGGING_CONFIG as log_config + config_utils.autoconfigure() database_mongodb = { - 'backend': 'localmongodb', - 'host': DATABASE_HOST, - 'port': DATABASE_PORT, - 'name': DATABASE_NAME, - 'connection_timeout': 5000, - 'max_tries': 3, - 'replicaset': None, - 'ssl': False, - 'login': None, - 'password': None, - 'ca_cert': 'ca_cert', - 'certfile': 'certfile', - 'keyfile': 'keyfile', - 'keyfile_passphrase': 'passphrase', - 'crlfile': 'crlfile', + "backend": "localmongodb", + "host": DATABASE_HOST, + "port": DATABASE_PORT, + "name": DATABASE_NAME, + "connection_timeout": 5000, + "max_tries": 3, + "replicaset": None, + "ssl": False, + "login": None, + "password": None, + "ca_cert": "ca_cert", + "certfile": "certfile", + "keyfile": "keyfile", + "keyfile_passphrase": "passphrase", + "crlfile": "crlfile", } assert planetmint.config == { - 'CONFIGURED': True, - 'server': { - 'bind': SERVER_BIND, - 'loglevel': 'info', - 'workers': None, + "CONFIGURED": True, + "server": { + "bind": SERVER_BIND, + "loglevel": "info", + "workers": None, }, - 'wsserver': { - 'scheme': WSSERVER_SCHEME, - 'host': WSSERVER_HOST, - 'port': WSSERVER_PORT, - 'advertised_scheme': WSSERVER_ADVERTISED_SCHEME, - 'advertised_host': WSSERVER_ADVERTISED_HOST, - 'advertised_port': WSSERVER_ADVERTISED_PORT, + "wsserver": { + "scheme": WSSERVER_SCHEME, + "host": WSSERVER_HOST, + "port": WSSERVER_PORT, + "advertised_scheme": WSSERVER_ADVERTISED_SCHEME, + "advertised_host": WSSERVER_ADVERTISED_HOST, + "advertised_port": WSSERVER_ADVERTISED_PORT, }, - 'database': database_mongodb, - 'tendermint': { - 'host': 'localhost', - 'port': 26657, - 'version': 'v0.34.15' - }, - 'log': { - 'file': LOG_FILE, - 'level_console': 'debug', - 'error_file': 
log_config['handlers']['errors']['filename'], - 'level_console': 'debug', - 'level_logfile': 'info', - 'datefmt_console': log_config['formatters']['console']['datefmt'], - 'datefmt_logfile': log_config['formatters']['file']['datefmt'], - 'fmt_console': log_config['formatters']['console']['format'], - 'fmt_logfile': log_config['formatters']['file']['format'], - 'granular_levels': {}, + "database": database_mongodb, + "tendermint": {"host": "localhost", "port": 26657, "version": "v0.34.15"}, + "log": { + "file": LOG_FILE, + "level_console": "debug", + "error_file": log_config["handlers"]["errors"]["filename"], + "level_console": "debug", + "level_logfile": "info", + "datefmt_console": log_config["formatters"]["console"]["datefmt"], + "datefmt_logfile": log_config["formatters"]["file"]["datefmt"], + "fmt_console": log_config["formatters"]["console"]["format"], + "fmt_logfile": log_config["formatters"]["file"]["format"], + "granular_levels": {}, }, } def test_autoconfigure_env_precedence(monkeypatch): - file_config = { - 'database': {'host': 'test-host', 'name': 'planetmint', 'port': 28015} - } - monkeypatch.setattr('planetmint.config_utils.file_config', lambda *args, **kwargs: file_config) - monkeypatch.setattr('os.environ', {'PLANETMINT_DATABASE_NAME': 'test-dbname', - 'PLANETMINT_DATABASE_PORT': 4242, - 'PLANETMINT_SERVER_BIND': 'localhost:9985'}) + file_config = {"database": {"host": "test-host", "name": "planetmint", "port": 28015}} + monkeypatch.setattr("planetmint.config_utils.file_config", lambda *args, **kwargs: file_config) + monkeypatch.setattr( + "os.environ", + { + "PLANETMINT_DATABASE_NAME": "test-dbname", + "PLANETMINT_DATABASE_PORT": 4242, + "PLANETMINT_SERVER_BIND": "localhost:9985", + }, + ) from planetmint import config_utils from planetmint.config import Config + config_utils.autoconfigure() - assert Config().get()['CONFIGURED'] - assert Config().get()['database']['host'] == 'test-host' - assert Config().get()['database']['name'] == 'test-dbname' - 
assert Config().get()['database']['port'] == 4242 - assert Config().get()['server']['bind'] == 'localhost:9985' + assert Config().get()["CONFIGURED"] + assert Config().get()["database"]["host"] == "test-host" + assert Config().get()["database"]["name"] == "test-dbname" + assert Config().get()["database"]["port"] == 4242 + assert Config().get()["server"]["bind"] == "localhost:9985" def test_autoconfigure_explicit_file(monkeypatch): @@ -262,32 +257,31 @@ def test_autoconfigure_explicit_file(monkeypatch): def file_config(*args, **kwargs): raise FileNotFoundError() - monkeypatch.setattr('planetmint.config_utils.file_config', file_config) + monkeypatch.setattr("planetmint.config_utils.file_config", file_config) with pytest.raises(FileNotFoundError): - config_utils.autoconfigure(filename='autoexec.bat') + config_utils.autoconfigure(filename="autoexec.bat") def test_update_config(monkeypatch): from planetmint import config_utils - file_config = { - 'database': {'host': 'test-host', 'name': 'planetmint', 'port': 28015} - } - monkeypatch.setattr('planetmint.config_utils.file_config', lambda *args, **kwargs: file_config) + file_config = {"database": {"host": "test-host", "name": "planetmint", "port": 28015}} + monkeypatch.setattr("planetmint.config_utils.file_config", lambda *args, **kwargs: file_config) config_utils.autoconfigure(config=file_config) # update configuration, retaining previous changes - config_utils.update_config({'database': {'port': 28016, 'name': 'planetmint_other'}}) + config_utils.update_config({"database": {"port": 28016, "name": "planetmint_other"}}) - assert Config().get()['database']['host'] == 'test-host' - assert Config().get()['database']['name'] == 'planetmint_other' - assert Config().get()['database']['port'] == 28016 + assert Config().get()["database"]["host"] == "test-host" + assert Config().get()["database"]["name"] == "planetmint_other" + assert Config().get()["database"]["port"] == 28016 def test_file_config(): from planetmint.config_utils 
import file_config, CONFIG_DEFAULT_PATH - with patch('builtins.open', mock_open(read_data='{}')) as m: + + with patch("builtins.open", mock_open(read_data="{}")) as m: config = file_config() m.assert_called_once_with(CONFIG_DEFAULT_PATH) assert config == {} @@ -296,33 +290,38 @@ def test_file_config(): def test_invalid_file_config(): from planetmint.config_utils import file_config from planetmint.transactions.common import exceptions - with patch('builtins.open', mock_open(read_data='{_INVALID_JSON_}')): + + with patch("builtins.open", mock_open(read_data="{_INVALID_JSON_}")): with pytest.raises(exceptions.ConfigurationError): file_config() def test_write_config(): from planetmint.config_utils import write_config, CONFIG_DEFAULT_PATH + m = mock_open() - with patch('builtins.open', m): + with patch("builtins.open", m): write_config({}) - m.assert_called_once_with(CONFIG_DEFAULT_PATH, 'w') + m.assert_called_once_with(CONFIG_DEFAULT_PATH, "w") handle = m() - handle.write.assert_called_once_with('{}') + handle.write.assert_called_once_with("{}") -@pytest.mark.parametrize('env_name,env_value,config_key', ( - ('PLANETMINT_DATABASE_BACKEND', 'test-backend', 'backend'), - ('PLANETMINT_DATABASE_HOST', 'test-host', 'host'), - ('PLANETMINT_DATABASE_PORT', 4242, 'port'), - ('PLANETMINT_DATABASE_NAME', 'test-db', 'name'), -)) +@pytest.mark.parametrize( + "env_name,env_value,config_key", + ( + ("PLANETMINT_DATABASE_BACKEND", "test-backend", "backend"), + ("PLANETMINT_DATABASE_HOST", "test-host", "host"), + ("PLANETMINT_DATABASE_PORT", 4242, "port"), + ("PLANETMINT_DATABASE_NAME", "test-db", "name"), + ), +) def test_database_envs(env_name, env_value, config_key, monkeypatch): - monkeypatch.setattr('os.environ', {env_name: env_value}) + monkeypatch.setattr("os.environ", {env_name: env_value}) planetmint.config_utils.autoconfigure() expected_config = Config().get() - expected_config['database'][config_key] = env_value + expected_config["database"][config_key] = env_value assert 
planetmint.config == expected_config diff --git a/tests/test_core.py b/tests/test_core.py index 621b90e..f2b7cbc 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -13,9 +13,7 @@ from tendermint.crypto import keys_pb2 from planetmint import App from planetmint.backend import query from planetmint.transactions.common.crypto import generate_key_pair -from planetmint.core import (OkCode, - CodeTypeError, - rollback) +from planetmint.core import OkCode, CodeTypeError, rollback from planetmint.transactions.types.elections.election import Election from planetmint.lib import Block from planetmint.transactions.types.elections.chain_migration_election import ChainMigrationElection @@ -31,38 +29,39 @@ from tests.utils import generate_election, generate_validators @pytest.fixture def config(request, monkeypatch): - backend = request.config.getoption('--database-backend') - if backend == 'mongodb-ssl': - backend = 'mongodb' + backend = request.config.getoption("--database-backend") + if backend == "mongodb-ssl": + backend = "mongodb" config = { - 'database': { - 'backend': backend, - 'host': 'tarantool', - 'port': 3303, - 'name': 'bigchain', - 'replicaset': 'bigchain-rs', - 'connection_timeout': 5000, - 'max_tries': 3, - 'name': 'bigchain' + "database": { + "backend": backend, + "host": "tarantool", + "port": 3303, + "name": "bigchain", + "replicaset": "bigchain-rs", + "connection_timeout": 5000, + "max_tries": 3, + "name": "bigchain", }, - 'tendermint': { - 'host': 'localhost', - 'port': 26657, + "tendermint": { + "host": "localhost", + "port": 26657, }, - 'CONFIGURED': True, + "CONFIGURED": True, } - monkeypatch.setattr('planetmint.config', config) + monkeypatch.setattr("planetmint.config", config) return config def test_bigchain_class_default_initialization(config): from planetmint import Planetmint from planetmint.validation import BaseValidationRules + planet = Planetmint() - assert planet.connection.host == config['database']['host'] - assert 
planet.connection.port == config['database']['port'] + assert planet.connection.host == config["database"]["host"] + assert planet.connection.port == config["database"]["port"] assert planet.validation == BaseValidationRules @@ -70,17 +69,18 @@ def test_bigchain_class_initialization_with_parameters(): from planetmint import Planetmint from planetmint.backend import connect from planetmint.validation import BaseValidationRules + init_db_kwargs = { - 'backend': 'localmongodb', - 'host': 'this_is_the_db_host', - 'port': 12345, - 'name': 'this_is_the_db_name', + "backend": "localmongodb", + "host": "this_is_the_db_host", + "port": 12345, + "name": "this_is_the_db_name", } connection = connect(**init_db_kwargs) planet = Planetmint(connection=connection) assert planet.connection == connection - assert planet.connection.host == init_db_kwargs['host'] - assert planet.connection.port == init_db_kwargs['port'] + assert planet.connection.host == init_db_kwargs["host"] + assert planet.connection.port == init_db_kwargs["port"] # assert planet.connection.name == init_db_kwargs['name'] assert planet.validation == BaseValidationRules @@ -96,9 +96,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol): tx_2 = Transfer.generate( tx_1.to_inputs(), - [([bob.public_key], 2), - ([alice.public_key], 2), - ([carol.public_key], 4)], + [([bob.public_key], 2), ([alice.public_key], 2), ([carol.public_key], 4)], asset_id=tx_1.id, ).sign([carol.private_key]) assert tx_2.validate(b) @@ -106,8 +104,7 @@ def test_get_spent_issue_1271(b, alice, bob, carol): tx_3 = Transfer.generate( tx_2.to_inputs()[2:3], - [([alice.public_key], 1), - ([carol.public_key], 3)], + [([alice.public_key], 1), ([carol.public_key], 3)], asset_id=tx_1.id, ).sign([carol.private_key]) assert tx_3.validate(b) diff --git a/tests/test_docs.py b/tests/test_docs.py index 52da2b1..40dd90f 100644 --- a/tests/test_docs.py +++ b/tests/test_docs.py @@ -9,7 +9,7 @@ import os def test_build_root_docs(): - proc = 
subprocess.Popen(['bash'], stdin=subprocess.PIPE) - proc.stdin.write('cd docs/root; make html'.encode()) + proc = subprocess.Popen(["bash"], stdin=subprocess.PIPE) + proc.stdin.write("cd docs/root; make html".encode()) proc.stdin.close() assert proc.wait() == 0 diff --git a/tests/test_events.py b/tests/test_events.py index f34eee6..fc83753 100644 --- a/tests/test_events.py +++ b/tests/test_events.py @@ -10,15 +10,14 @@ def test_event_handler(): from planetmint.events import EventTypes, Event, Exchange # create and event - event_data = {'msg': 'some data'} + event_data = {"msg": "some data"} event = Event(EventTypes.BLOCK_VALID, event_data) # create the events pub sub exchange = Exchange() sub0 = exchange.get_subscriber_queue(EventTypes.BLOCK_VALID) - sub1 = exchange.get_subscriber_queue(EventTypes.BLOCK_VALID | - EventTypes.BLOCK_INVALID) + sub1 = exchange.get_subscriber_queue(EventTypes.BLOCK_VALID | EventTypes.BLOCK_INVALID) # Subscribe to all events sub2 = exchange.get_subscriber_queue() sub3 = exchange.get_subscriber_queue(EventTypes.BLOCK_INVALID) @@ -59,7 +58,7 @@ def test_exchange_stops_with_poison_pill(): from planetmint.events import EventTypes, Event, Exchange, POISON_PILL # create and event - event_data = {'msg': 'some data'} + event_data = {"msg": "some data"} event = Event(EventTypes.BLOCK_VALID, event_data) # create the events pub sub diff --git a/tests/test_parallel_validation.py b/tests/test_parallel_validation.py index 29e8864..d6d9241 100644 --- a/tests/test_parallel_validation.py +++ b/tests/test_parallel_validation.py @@ -17,10 +17,7 @@ def generate_create_and_transfer(keypair=None): keypair = generate_key_pair() priv_key, pub_key = keypair create_tx = Create.generate([pub_key], [([pub_key], 10)]).sign([priv_key]) - transfer_tx = Transfer.generate( - create_tx.to_inputs(), - [([pub_key], 10)], - asset_id=create_tx.id).sign([priv_key]) + transfer_tx = Transfer.generate(create_tx.to_inputs(), [([pub_key], 10)], 
asset_id=create_tx.id).sign([priv_key]) return create_tx, transfer_tx @@ -30,10 +27,9 @@ def test_validation_worker_process_multiple_transactions(b): keypair = generate_key_pair() create_tx, transfer_tx = generate_create_and_transfer(keypair) - double_spend = Transfer.generate( - create_tx.to_inputs(), - [([keypair.public_key], 10)], - asset_id=create_tx.id).sign([keypair.private_key]) + double_spend = Transfer.generate(create_tx.to_inputs(), [([keypair.public_key], 10)], asset_id=create_tx.id).sign( + [keypair.private_key] + ) in_queue, results_queue = mp.Queue(), mp.Queue() vw = ValidationWorker(in_queue, results_queue) @@ -86,17 +82,15 @@ def test_parallel_validator_routes_transactions_correctly(b, monkeypatch): # Validate is now a passthrough, and every time it is called it will emit # the PID of its worker to the designated queue. def validate(self, dict_transaction): - validation_called_by.put((os.getpid(), dict_transaction['id'])) + validation_called_by.put((os.getpid(), dict_transaction["id"])) return dict_transaction - monkeypatch.setattr( - 'planetmint.parallel_validation.ValidationWorker.validate', - validate) + monkeypatch.setattr("planetmint.parallel_validation.ValidationWorker.validate", validate) # Transaction routing uses the `id` of the transaction. This test strips # down a transaction to just its `id`. We have two workers, so even ids # will be processed by one worker, odd ids by the other. 
- transactions = [{'id': '0'}, {'id': '1'}, {'id': '2'}, {'id': '3'}] + transactions = [{"id": "0"}, {"id": "1"}, {"id": "2"}, {"id": "3"}] pv = ParallelValidator(number_of_workers=2) pv.start() @@ -109,7 +103,7 @@ def test_parallel_validator_routes_transactions_correctly(b, monkeypatch): for _ in range(2): # First, we push the transactions to the parallel validator instance for transaction in transactions: - pv.validate(dumps(transaction).encode('utf8')) + pv.validate(dumps(transaction).encode("utf8")) assert pv.result(timeout=1) == transactions @@ -128,7 +122,8 @@ def test_parallel_validator_routes_transactions_correctly(b, monkeypatch): # route for odd transactions. Since we don't know which worker # processed what, we test that the transactions processed by a # worker are all even or all odd. - assert (all(filter(lambda x: int(x) % 2 == 0, transaction_ids)) or - all(filter(lambda x: int(x) % 2 == 1, transaction_ids))) + assert all(filter(lambda x: int(x) % 2 == 0, transaction_ids)) or all( + filter(lambda x: int(x) % 2 == 1, transaction_ids) + ) pv.stop() diff --git a/tests/test_txlist.py b/tests/test_txlist.py index 8475be2..d9e1474 100644 --- a/tests/test_txlist.py +++ b/tests/test_txlist.py @@ -16,34 +16,32 @@ def txlist(b, user_pk, user2_pk, user_sk, user2_sk): from planetmint.transactions.types.assets.transfer import Transfer # Create two CREATE transactions - create1 = Create.generate([user_pk], [([user2_pk], 6)]) \ - .sign([user_sk]) + create1 = Create.generate([user_pk], [([user2_pk], 6)]).sign([user_sk]) - create2 = Create.generate([user2_pk], - [([user2_pk], 5), ([user_pk], 5)]) \ - .sign([user2_sk]) + create2 = Create.generate([user2_pk], [([user2_pk], 5), ([user_pk], 5)]).sign([user2_sk]) # Create a TRANSFER transactions - transfer1 = Transfer.generate(create1.to_inputs(), - [([user_pk], 8)], - create1.id).sign([user2_sk]) + transfer1 = Transfer.generate(create1.to_inputs(), [([user_pk], 8)], create1.id).sign([user2_sk]) 
b.store_bulk_transactions([create1, create2, transfer1]) - return type('', (), { - 'create1': create1, - 'transfer1': transfer1, - }) + return type( + "", + (), + { + "create1": create1, + "transfer1": transfer1, + }, + ) @pytest.mark.bdb def test_get_txlist_by_asset(b, txlist): res = b.get_transactions_filtered(txlist.create1.id) - assert sorted(set(tx.id for tx in res)) == sorted( - set([txlist.transfer1.id, txlist.create1.id])) + assert sorted(set(tx.id for tx in res)) == sorted(set([txlist.transfer1.id, txlist.create1.id])) @pytest.mark.bdb def test_get_txlist_by_operation(b, txlist): - res = b.get_transactions_filtered(txlist.create1.id, operation='CREATE') + res = b.get_transactions_filtered(txlist.create1.id, operation="CREATE") assert set(tx.id for tx in res) == {txlist.create1.id} diff --git a/tests/test_utils.py b/tests/test_utils.py index 875f4cc..68cbacb 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,7 +11,6 @@ import pytest @pytest.fixture def mock_queue(monkeypatch): - class MockQueue: items = [] @@ -28,96 +27,96 @@ def mock_queue(monkeypatch): mockqueue = MockQueue() - monkeypatch.setattr('queue.Queue', lambda: mockqueue) + monkeypatch.setattr("queue.Queue", lambda: mockqueue) return mockqueue def test_empty_pool_is_populated_with_instances(mock_queue): from planetmint import utils - pool = utils.pool(lambda: 'hello', 4) + pool = utils.pool(lambda: "hello", 4) assert len(mock_queue.items) == 0 with pool() as instance: - assert instance == 'hello' + assert instance == "hello" assert len(mock_queue.items) == 1 with pool() as instance: - assert instance == 'hello' + assert instance == "hello" assert len(mock_queue.items) == 2 with pool() as instance: - assert instance == 'hello' + assert instance == "hello" assert len(mock_queue.items) == 3 with pool() as instance: - assert instance == 'hello' + assert instance == "hello" assert len(mock_queue.items) == 4 with pool() as instance: - assert instance == 'hello' + assert instance == 
"hello" assert len(mock_queue.items) == 4 def test_pool_blocks_if_no_instances_available(mock_queue): from planetmint import utils - pool = utils.pool(lambda: 'hello', 4) + pool = utils.pool(lambda: "hello", 4) assert len(mock_queue.items) == 0 # We need to manually trigger the `__enter__` method so the context # manager will "hang" and not return the resource to the pool - assert pool().__enter__() == 'hello' + assert pool().__enter__() == "hello" assert len(mock_queue.items) == 0 - assert pool().__enter__() == 'hello' + assert pool().__enter__() == "hello" assert len(mock_queue.items) == 0 - assert pool().__enter__() == 'hello' + assert pool().__enter__() == "hello" assert len(mock_queue.items) == 0 # We need to keep a reference of the last context manager so we can # manually release the resource last = pool() - assert last.__enter__() == 'hello' + assert last.__enter__() == "hello" assert len(mock_queue.items) == 0 # This would block using `queue.Queue` but since we mocked it it will # just raise a IndexError because it's trying to pop from an empty list. 
with pytest.raises(IndexError): - assert pool().__enter__() == 'hello' + assert pool().__enter__() == "hello" assert len(mock_queue.items) == 0 # Release the last resource last.__exit__(None, None, None) assert len(mock_queue.items) == 1 - assert pool().__enter__() == 'hello' + assert pool().__enter__() == "hello" assert len(mock_queue.items) == 0 def test_pool_raises_empty_exception_when_timeout(mock_queue): from planetmint import utils - pool = utils.pool(lambda: 'hello', 1, timeout=1) + pool = utils.pool(lambda: "hello", 1, timeout=1) assert len(mock_queue.items) == 0 with pool() as instance: - assert instance == 'hello' + assert instance == "hello" assert len(mock_queue.items) == 1 # take the only resource available - assert pool().__enter__() == 'hello' + assert pool().__enter__() == "hello" with pytest.raises(queue.Empty): with pool() as instance: - assert instance == 'hello' + assert instance == "hello" -@patch('multiprocessing.Process') +@patch("multiprocessing.Process") def test_process_group_instantiates_and_start_processes(mock_process): from planetmint.utils import ProcessGroup @@ -126,13 +125,16 @@ def test_process_group_instantiates_and_start_processes(mock_process): concurrency = 10 - pg = ProcessGroup(concurrency=concurrency, group='test_group', target=noop) + pg = ProcessGroup(concurrency=concurrency, group="test_group", target=noop) pg.start() - mock_process.assert_has_calls([call(group='test_group', target=noop, - name=None, args=(), kwargs={}, - daemon=None) - for i in range(concurrency)], any_order=True) + mock_process.assert_has_calls( + [ + call(group="test_group", target=noop, name=None, args=(), kwargs={}, daemon=None) + for i in range(concurrency) + ], + any_order=True, + ) for process in pg.processes: process.start.assert_called_with() @@ -142,20 +144,20 @@ def test_lazy_execution(): from planetmint.utils import Lazy lz = Lazy() - lz.split(',')[1].split(' ').pop(1).strip() - result = lz.run('Like humans, cats tend to favor one paw over 
another') - assert result == 'cats' + lz.split(",")[1].split(" ").pop(1).strip() + result = lz.run("Like humans, cats tend to favor one paw over another") + assert result == "cats" class Cat: def __init__(self, name): self.name = name - cat = Cat('Shmui') + cat = Cat("Shmui") lz = Lazy() lz.name.upper() result = lz.run(cat) - assert result == 'SHMUI' + assert result == "SHMUI" def test_process_set_title(): @@ -167,7 +169,6 @@ def test_process_set_title(): queue = Queue() uuid = str(uuid4()) - process = Process(target=lambda: queue.put(getproctitle()), - name=uuid) + process = Process(target=lambda: queue.put(getproctitle()), name=uuid) process.start() assert queue.get() == uuid diff --git a/tests/upsert_validator/conftest.py b/tests/upsert_validator/conftest.py index 190e200..1749c5b 100644 --- a/tests/upsert_validator/conftest.py +++ b/tests/upsert_validator/conftest.py @@ -13,34 +13,27 @@ from planetmint.upsert_validator import ValidatorElection @pytest.fixture def valid_upsert_validator_election_b(b, node_key, new_validator): voters = ValidatorElection.recipients(b) - return ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + return ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key]) @pytest.fixture -@patch('planetmint.transactions.types.elections.election.uuid4', lambda: 'mock_uuid4') +@patch("planetmint.transactions.types.elections.election.uuid4", lambda: "mock_uuid4") def fixed_seed_election(b_mock, node_key, new_validator): voters = ValidatorElection.recipients(b_mock) - return ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + return ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key]) @pytest.fixture def concluded_election(b, ongoing_validator_election, ed25519_node_keys): - query.store_election(b.connection, 
ongoing_validator_election.id, - 2, is_concluded=True) + query.store_election(b.connection, ongoing_validator_election.id, 2, is_concluded=True) return ongoing_validator_election @pytest.fixture def inconclusive_election(b, ongoing_validator_election, new_validator): validators = b.get_validators(height=1) - validators[0]['voting_power'] = 15 - validator_update = {'validators': validators, - 'height': 2, - 'election_id': 'some_other_election'} + validators[0]["voting_power"] = 15 + validator_update = {"validators": validators, "height": 2, "election_id": "some_other_election"} query.store_validator_set(b.connection, validator_update) return ongoing_validator_election diff --git a/tests/upsert_validator/test_upsert_validator_vote.py b/tests/upsert_validator/test_upsert_validator_vote.py index 95ec43c..9ec7239 100644 --- a/tests/upsert_validator/test_upsert_validator_vote.py +++ b/tests/upsert_validator/test_upsert_validator_vote.py @@ -30,10 +30,9 @@ def test_upsert_validator_valid_election_vote(b_mock, valid_upsert_validator_ele election_pub_key = ValidatorElection.to_public_key(valid_upsert_validator_election.id) - vote = Vote.generate([input0], - [([election_pub_key], votes)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + vote = Vote.generate([input0], [([election_pub_key], votes)], election_id=valid_upsert_validator_election.id).sign( + [key0.private_key] + ) assert vote.validate(b_mock) @@ -50,10 +49,9 @@ def test_upsert_validator_valid_non_election_vote(b_mock, valid_upsert_validator # Ensure that threshold conditions are now allowed with pytest.raises(ValidationError): - Vote.generate([input0], - [([election_pub_key, key0.public_key], votes)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + Vote.generate( + [input0], [([election_pub_key, key0.public_key], votes)], election_id=valid_upsert_validator_election.id + ).sign([key0.private_key]) @pytest.mark.bdb @@ -67,10 +65,11 @@ def 
test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_ public_key0 = input0.owners_before[0] key0 = ed25519_node_keys[public_key0] - delegate_vote = Vote.generate([input0], - [([alice.public_key], 3), ([key0.public_key], votes - 3)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + delegate_vote = Vote.generate( + [input0], + [([alice.public_key], 3), ([key0.public_key], votes - 3)], + election_id=valid_upsert_validator_election.id, + ).sign([key0.private_key]) assert delegate_vote.validate(b_mock) @@ -78,17 +77,15 @@ def test_upsert_validator_delegate_election_vote(b_mock, valid_upsert_validator_ election_pub_key = ValidatorElection.to_public_key(valid_upsert_validator_election.id) alice_votes = delegate_vote.to_inputs()[0] - alice_casted_vote = Vote.generate([alice_votes], - [([election_pub_key], 3)], - election_id=valid_upsert_validator_election.id)\ - .sign([alice.private_key]) + alice_casted_vote = Vote.generate( + [alice_votes], [([election_pub_key], 3)], election_id=valid_upsert_validator_election.id + ).sign([alice.private_key]) assert alice_casted_vote.validate(b_mock) key0_votes = delegate_vote.to_inputs()[1] - key0_casted_vote = Vote.generate([key0_votes], - [([election_pub_key], votes - 3)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + key0_casted_vote = Vote.generate( + [key0_votes], [([election_pub_key], votes - 3)], election_id=valid_upsert_validator_election.id + ).sign([key0.private_key]) assert key0_casted_vote.validate(b_mock) @@ -103,10 +100,9 @@ def test_upsert_validator_invalid_election_vote(b_mock, valid_upsert_validator_e election_pub_key = ValidatorElection.to_public_key(valid_upsert_validator_election.id) - vote = Vote.generate([input0], - [([election_pub_key], votes + 1)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + vote = Vote.generate( + [input0], [([election_pub_key], votes + 1)], 
election_id=valid_upsert_validator_election.id + ).sign([key0.private_key]) with pytest.raises(AmountError): assert vote.validate(b_mock) @@ -124,10 +120,11 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, key0 = ed25519_node_keys[public_key0] # delegate some votes to alice - delegate_vote = Vote.generate([input0], - [([alice.public_key], 4), ([key0.public_key], votes - 4)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + delegate_vote = Vote.generate( + [input0], + [([alice.public_key], 4), ([key0.public_key], votes - 4)], + election_id=valid_upsert_validator_election.id, + ).sign([key0.private_key]) b_mock.store_bulk_transactions([delegate_vote]) assert valid_upsert_validator_election.get_commited_votes(b_mock) == 0 @@ -135,10 +132,11 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, alice_votes = delegate_vote.to_inputs()[0] key0_votes = delegate_vote.to_inputs()[1] - alice_casted_vote = Vote.generate([alice_votes], - [([election_public_key], 2), ([alice.public_key], 2)], - election_id=valid_upsert_validator_election.id)\ - .sign([alice.private_key]) + alice_casted_vote = Vote.generate( + [alice_votes], + [([election_public_key], 2), ([alice.public_key], 2)], + election_id=valid_upsert_validator_election.id, + ).sign([alice.private_key]) assert alice_casted_vote.validate(b_mock) b_mock.store_bulk_transactions([alice_casted_vote]) @@ -146,10 +144,9 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election, # Check if the delegated vote is count as valid vote assert valid_upsert_validator_election.get_commited_votes(b_mock) == 2 - key0_casted_vote = Vote.generate([key0_votes], - [([election_public_key], votes - 4)], - election_id=valid_upsert_validator_election.id)\ - .sign([key0.private_key]) + key0_casted_vote = Vote.generate( + [key0_votes], [([election_public_key], votes - 4)], election_id=valid_upsert_validator_election.id + 
).sign([key0.private_key]) assert key0_casted_vote.validate(b_mock) b_mock.store_bulk_transactions([key0_casted_vote]) @@ -219,30 +216,31 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551 @pytest.mark.abci def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys): - if b.get_latest_block()['height'] == 0: + if b.get_latest_block()["height"] == 0: generate_block(b) (node_pub, _) = list(node_keys.items())[0] - validators = [{'public_key': {'type': 'ed25519-base64', 'value': node_pub}, - 'voting_power': 10}] + validators = [{"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10}] latest_block = b.get_latest_block() # reset the validator set - b.store_validator_set(latest_block['height'], validators) + b.store_validator_set(latest_block["height"], validators) generate_block(b) power = 1 - public_key = '9B3119650DF82B9A5D8A12E38953EA47475C09F0C48A4E6A0ECE182944B24403' + public_key = "9B3119650DF82B9A5D8A12E38953EA47475C09F0C48A4E6A0ECE182944B24403" public_key64 = public_key_to_base64(public_key) - new_validator = {'public_key': {'value': public_key, 'type': 'ed25519-base16'}, - 'node_id': 'some_node_id', - 'power': power} + new_validator = { + "public_key": {"value": public_key, "type": "ed25519-base16"}, + "node_id": "some_node_id", + "power": power, + } voters = ValidatorElection.recipients(b) - election = ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign( + [node_key.private_key] + ) code, message = b.write_transaction(election, BROADCAST_TX_COMMIT) assert code == 202 assert b.get_transaction(election.id) @@ -255,15 +253,15 @@ def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys): resp = b.get_validators() validator_pub_keys = [] for v in resp: - validator_pub_keys.append(v['public_key']['value']) + 
validator_pub_keys.append(v["public_key"]["value"]) - assert (public_key64 in validator_pub_keys) + assert public_key64 in validator_pub_keys new_validator_set = b.get_validators() validator_pub_keys = [] for v in new_validator_set: - validator_pub_keys.append(v['public_key']['value']) + validator_pub_keys.append(v["public_key"]["value"]) - assert (public_key64 in validator_pub_keys) + assert public_key64 in validator_pub_keys @pytest.mark.bdb @@ -271,15 +269,15 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys): reset_validator_set(b, node_keys, 1) power = 1 - public_key = '9B3119650DF82B9A5D8A12E38953EA47475C09F0C48A4E6A0ECE182944B24403' + public_key = "9B3119650DF82B9A5D8A12E38953EA47475C09F0C48A4E6A0ECE182944B24403" public_key64 = public_key_to_base64(public_key) - new_validator = {'public_key': {'value': public_key, 'type': 'ed25519-base16'}, - 'node_id': 'some_node_id', - 'power': power} + new_validator = { + "public_key": {"value": public_key, "type": "ed25519-base16"}, + "node_id": "some_node_id", + "power": power, + } voters = ValidatorElection.recipients(b) - election = ValidatorElection.generate([node_key.public_key], - voters, - new_validator).sign([node_key.private_key]) + election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key]) # store election b.store_bulk_transactions([election]) @@ -296,18 +294,18 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys): update = Election.process_block(b, 4, [tx_vote0, tx_vote1, tx_vote2]) assert len(update) == 1 - update_public_key = codecs.encode(update[0].pub_key.ed25519, 'base64').decode().rstrip('\n') + update_public_key = codecs.encode(update[0].pub_key.ed25519, "base64").decode().rstrip("\n") assert update_public_key == public_key64 # remove validator power = 0 - new_validator = {'public_key': {'value': public_key, 'type': 'ed25519-base16'}, - 'node_id': 'some_node_id', - 'power': power} + new_validator = { + 
"public_key": {"value": public_key, "type": "ed25519-base16"}, + "node_id": "some_node_id", + "power": power, + } voters = ValidatorElection.recipients(b) - election = ValidatorElection.generate([node_key.public_key], - voters, - new_validator).sign([node_key.private_key]) + election = ValidatorElection.generate([node_key.public_key], voters, new_validator).sign([node_key.private_key]) # store election b.store_bulk_transactions([election]) @@ -319,22 +317,21 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys): update = Election.process_block(b, 9, [tx_vote2]) assert len(update) == 1 - update_public_key = codecs.encode(update[0].pub_key.ed25519, 'base64').decode().rstrip('\n') + update_public_key = codecs.encode(update[0].pub_key.ed25519, "base64").decode().rstrip("\n") assert update_public_key == public_key64 # assert that the public key is not a part of the current validator set for v in b.get_validators(10): - assert not v['public_key']['value'] == public_key64 + assert not v["public_key"]["value"] == public_key64 # ============================================================================ # Helper functions # ============================================================================ + def reset_validator_set(b, node_keys, height): validators = [] for (node_pub, _) in node_keys.items(): - validators.append({'public_key': {'type': 'ed25519-base64', - 'value': node_pub}, - 'voting_power': 10}) + validators.append({"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10}) b.store_validator_set(height, validators) diff --git a/tests/upsert_validator/test_validator_election.py b/tests/upsert_validator/test_validator_election.py index 77aaf1c..fa0e73c 100644 --- a/tests/upsert_validator/test_validator_election.py +++ b/tests/upsert_validator/test_validator_election.py @@ -10,40 +10,42 @@ import pytest from planetmint.tendermint_utils import public_key_to_base64 from planetmint.upsert_validator import ValidatorElection 
from planetmint.transactions.common.exceptions import ( - DuplicateTransaction, UnequalValidatorSet, InvalidProposer, - MultipleInputsError, InvalidPowerChange) + DuplicateTransaction, + UnequalValidatorSet, + InvalidProposer, + MultipleInputsError, + InvalidPowerChange, +) pytestmark = pytest.mark.bdb def test_upsert_validator_valid_election(b_mock, new_validator, node_key): voters = ValidatorElection.recipients(b_mock) - election = ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign( + [node_key.private_key] + ) assert election.validate(b_mock) def test_upsert_validator_invalid_election_public_key(b_mock, new_validator, node_key): from planetmint.transactions.common.exceptions import InvalidPublicKey - for iv in ['ed25519-base32', 'ed25519-base64']: - new_validator['public_key']['type'] = iv + for iv in ["ed25519-base32", "ed25519-base64"]: + new_validator["public_key"]["type"] = iv voters = ValidatorElection.recipients(b_mock) with pytest.raises(InvalidPublicKey): - ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign([node_key.private_key]) def test_upsert_validator_invalid_power_election(b_mock, new_validator, node_key): voters = ValidatorElection.recipients(b_mock) - new_validator['power'] = 30 + new_validator["power"] = 30 - election = ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign( + [node_key.private_key] + ) with pytest.raises(InvalidPowerChange): election.validate(b_mock) @@ -53,9 +55,7 @@ def test_upsert_validator_invalid_proposed_election(b_mock, new_validator, node_ alice = generate_key_pair() 
voters = ValidatorElection.recipients(b_mock) - election = ValidatorElection.generate([alice.public_key], - voters, - new_validator, None).sign([alice.private_key]) + election = ValidatorElection.generate([alice.public_key], voters, new_validator, None).sign([alice.private_key]) with pytest.raises(InvalidProposer): election.validate(b_mock) @@ -65,19 +65,19 @@ def test_upsert_validator_invalid_inputs_election(b_mock, new_validator, node_ke alice = generate_key_pair() voters = ValidatorElection.recipients(b_mock) - election = ValidatorElection.generate([node_key.public_key, alice.public_key], - voters, - new_validator, None).sign([node_key.private_key, alice.private_key]) + election = ValidatorElection.generate([node_key.public_key, alice.public_key], voters, new_validator, None).sign( + [node_key.private_key, alice.private_key] + ) with pytest.raises(MultipleInputsError): election.validate(b_mock) -@patch('planetmint.transactions.types.elections.election.uuid4', lambda: 'mock_uuid4') +@patch("planetmint.transactions.types.elections.election.uuid4", lambda: "mock_uuid4") def test_upsert_validator_invalid_election(b_mock, new_validator, node_key, fixed_seed_election): voters = ValidatorElection.recipients(b_mock) - duplicate_election = ValidatorElection.generate([node_key.public_key], - voters, - new_validator, None).sign([node_key.private_key]) + duplicate_election = ValidatorElection.generate([node_key.public_key], voters, new_validator, None).sign( + [node_key.private_key] + ) with pytest.raises(DuplicateTransaction): fixed_seed_election.validate(b_mock, [duplicate_election]) @@ -88,9 +88,9 @@ def test_upsert_validator_invalid_election(b_mock, new_validator, node_key, fixe duplicate_election.validate(b_mock) # Try creating an election with incomplete voter set - invalid_election = ValidatorElection.generate([node_key.public_key], - voters[1:], - new_validator, None).sign([node_key.private_key]) + invalid_election = ValidatorElection.generate([node_key.public_key], 
voters[1:], new_validator, None).sign( + [node_key.private_key] + ) with pytest.raises(UnequalValidatorSet): invalid_election.validate(b_mock) @@ -102,9 +102,9 @@ def test_upsert_validator_invalid_election(b_mock, new_validator, node_key, fixe altered_recipients.append(([r_public_key], voting_power - 1)) # Create a transaction which doesn't enfore the network power - tx_election = ValidatorElection.generate([node_key.public_key], - altered_recipients, - new_validator, None).sign([node_key.private_key]) + tx_election = ValidatorElection.generate([node_key.public_key], altered_recipients, new_validator, None).sign( + [node_key.private_key] + ) with pytest.raises(UnequalValidatorSet): tx_election.validate(b_mock) @@ -124,35 +124,47 @@ def test_get_status_concluded(b, concluded_election, new_validator): def test_get_status_inconclusive(b, inconclusive_election, new_validator): def set_block_height_to_3(): - return {'height': 3} + return {"height": 3} def custom_mock_get_validators(height): if height >= 3: - return [{'pub_key': {'data': 'zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=', - 'type': 'AC26791624DE60'}, - 'voting_power': 15}, - {'pub_key': {'data': 'GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=', - 'type': 'AC26791624DE60'}, - 'voting_power': 7}, - {'pub_key': {'data': 'JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=', - 'type': 'AC26791624DE60'}, - 'voting_power': 10}, - {'pub_key': {'data': 'PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=', - 'type': 'AC26791624DE60'}, - 'voting_power': 8}] + return [ + { + "pub_key": {"data": "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=", "type": "AC26791624DE60"}, + "voting_power": 15, + }, + { + "pub_key": {"data": "GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=", "type": "AC26791624DE60"}, + "voting_power": 7, + }, + { + "pub_key": {"data": "JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=", "type": "AC26791624DE60"}, + "voting_power": 10, + }, + { + "pub_key": {"data": "PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=", "type": 
"AC26791624DE60"}, + "voting_power": 8, + }, + ] else: - return [{'pub_key': {'data': 'zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=', - 'type': 'AC26791624DE60'}, - 'voting_power': 9}, - {'pub_key': {'data': 'GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=', - 'type': 'AC26791624DE60'}, - 'voting_power': 7}, - {'pub_key': {'data': 'JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=', - 'type': 'AC26791624DE60'}, - 'voting_power': 10}, - {'pub_key': {'data': 'PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=', - 'type': 'AC26791624DE60'}, - 'voting_power': 8}] + return [ + { + "pub_key": {"data": "zL/DasvKulXZzhSNFwx4cLRXKkSM9GPK7Y0nZ4FEylM=", "type": "AC26791624DE60"}, + "voting_power": 9, + }, + { + "pub_key": {"data": "GIijU7GBcVyiVUcB0GwWZbxCxdk2xV6pxdvL24s/AqM=", "type": "AC26791624DE60"}, + "voting_power": 7, + }, + { + "pub_key": {"data": "JbfwrLvCVIwOPm8tj8936ki7IYbmGHjPiKb6nAZegRA=", "type": "AC26791624DE60"}, + "voting_power": 10, + }, + { + "pub_key": {"data": "PecJ58SaNRsWJZodDmqjpCWqG6btdwXFHLyE40RYlYM=", "type": "AC26791624DE60"}, + "voting_power": 8, + }, + ] b.get_validators = custom_mock_get_validators b.get_latest_block = set_block_height_to_3 @@ -165,14 +177,13 @@ def test_upsert_validator_show(caplog, ongoing_validator_election, b): from planetmint.commands.planetmint import run_election_show election_id = ongoing_validator_election.id - public_key = public_key_to_base64(ongoing_validator_election.asset['data']['public_key']['value']) - power = ongoing_validator_election.asset['data']['power'] - node_id = ongoing_validator_election.asset['data']['node_id'] + public_key = public_key_to_base64(ongoing_validator_election.asset["data"]["public_key"]["value"]) + power = ongoing_validator_election.asset["data"]["power"] + node_id = ongoing_validator_election.asset["data"]["node_id"] status = ValidatorElection.ONGOING - show_args = Namespace(action='show', - election_id=election_id) + show_args = Namespace(action="show", election_id=election_id) msg = 
run_election_show(show_args, b) - assert msg == f'public_key={public_key}\npower={power}\nnode_id={node_id}\nstatus={status}' + assert msg == f"public_key={public_key}\npower={power}\nnode_id={node_id}\nstatus={status}" diff --git a/tests/utils.py b/tests/utils.py index 1355da6..b418db8 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -57,10 +57,7 @@ def generate_block(planet): from planetmint.transactions.common.crypto import generate_key_pair alice = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)], - asset=None) \ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]) code, message = planet.write_transaction(tx, BROADCAST_TX_COMMIT) assert code == 202 @@ -77,61 +74,58 @@ def to_inputs(election, i, ed25519_node_keys): def gen_vote(election, i, ed25519_node_keys): (input_i, votes_i, key_i) = to_inputs(election, i, ed25519_node_keys) election_pub_key = Election.to_public_key(election.id) - return Vote.generate([input_i], - [([election_pub_key], votes_i)], - election_id=election.id) \ - .sign([key_i.private_key]) + return Vote.generate([input_i], [([election_pub_key], votes_i)], election_id=election.id).sign([key_i.private_key]) def generate_validators(powers): """Generates an arbitrary number of validators with random public keys. - The object under the `storage` key is in the format expected by DB. + The object under the `storage` key is in the format expected by DB. - The object under the `eleciton` key is in the format expected by - the upsert validator election. + The object under the `eleciton` key is in the format expected by + the upsert validator election. - `public_key`, `private_key` are in the format used for signing transactions. + `public_key`, `private_key` are in the format used for signing transactions. - Args: - powers: A list of intergers representing the voting power to - assign to the corresponding validators. 
+ Args: + powers: A list of intergers representing the voting power to + assign to the corresponding validators. """ validators = [] for power in powers: kp = crypto.generate_key_pair() - validators.append({ - 'storage': { - 'public_key': { - 'value': key_to_base64(base58.b58decode(kp.public_key).hex()), - 'type': 'ed25519-base64', + validators.append( + { + "storage": { + "public_key": { + "value": key_to_base64(base58.b58decode(kp.public_key).hex()), + "type": "ed25519-base64", + }, + "voting_power": power, }, - 'voting_power': power, - }, - 'election': { - 'node_id': f'node-{random.choice(range(100))}', - 'power': power, - 'public_key': { - 'value': base64.b16encode(base58.b58decode(kp.public_key)).decode('utf-8'), - 'type': 'ed25519-base16', + "election": { + "node_id": f"node-{random.choice(range(100))}", + "power": power, + "public_key": { + "value": base64.b16encode(base58.b58decode(kp.public_key)).decode("utf-8"), + "type": "ed25519-base16", + }, }, - }, - 'public_key': kp.public_key, - 'private_key': kp.private_key, - }) + "public_key": kp.public_key, + "private_key": kp.private_key, + } + ) return validators def generate_election(b, cls, public_key, private_key, asset_data, voter_keys): voters = cls.recipients(b) - election = cls.generate([public_key], - voters, - asset_data, - None).sign([private_key]) + election = cls.generate([public_key], voters, asset_data, None).sign([private_key]) - votes = [Vote.generate([election.to_inputs()[i]], - [([Election.to_public_key(election.id)], power)], - election.id) for i, (_, power) in enumerate(voters)] + votes = [ + Vote.generate([election.to_inputs()[i]], [([Election.to_public_key(election.id)], power)], election.id) + for i, (_, power) in enumerate(voters) + ] for key, v in zip(voter_keys, votes): v.sign([key]) diff --git a/tests/validation/test_transaction_structure.py b/tests/validation/test_transaction_structure.py index 5fcb425..41ed2fd 100644 --- a/tests/validation/test_transaction_structure.py +++ 
b/tests/validation/test_transaction_structure.py @@ -10,14 +10,14 @@ structural / schematic issues are caught when reading a transaction import json import pytest + try: import hashlib as sha3 except ImportError: import sha3 from unittest.mock import MagicMock -from planetmint.transactions.common.exceptions import ( - AmountError, SchemaValidationError, ThresholdTooDeep) +from planetmint.transactions.common.exceptions import AmountError, SchemaValidationError, ThresholdTooDeep from planetmint.models import Transaction from planetmint.transactions.common.utils import _fulfillment_to_details, _fulfillment_from_details @@ -47,25 +47,26 @@ def test_validation_passes(signed_create_tx): def test_tx_serialization_hash_function(signed_create_tx): tx = signed_create_tx.to_dict() - tx['id'] = None - payload = json.dumps(tx, skipkeys=False, sort_keys=True, - separators=(',', ':')) + tx["id"] = None + payload = json.dumps(tx, skipkeys=False, sort_keys=True, separators=(",", ":")) assert sha3.sha3_256(payload.encode()).hexdigest() == signed_create_tx.id def test_tx_serialization_with_incorrect_hash(signed_create_tx): from planetmint.transactions.common.transaction import Transaction from planetmint.transactions.common.exceptions import InvalidHash + tx = signed_create_tx.to_dict() - tx['id'] = 'a' * 64 + tx["id"] = "a" * 64 with pytest.raises(InvalidHash): Transaction.validate_id(tx) def test_tx_serialization_with_no_hash(signed_create_tx): from planetmint.transactions.common.exceptions import InvalidHash + tx = signed_create_tx.to_dict() - del tx['id'] + del tx["id"] with pytest.raises(InvalidHash): Transaction.from_dict(tx) @@ -73,8 +74,9 @@ def test_tx_serialization_with_no_hash(signed_create_tx): ################################################################################ # Operation + def test_validate_invalid_operation(b, create_tx, alice): - create_tx.operation = 'something invalid' + create_tx.operation = "something invalid" signed_tx = 
create_tx.sign([alice.private_key]) validate_raises(signed_tx) @@ -82,8 +84,9 @@ def test_validate_invalid_operation(b, create_tx, alice): ################################################################################ # Metadata + def test_validate_fails_metadata_empty_dict(b, create_tx, alice): - create_tx.metadata = {'a': 1} + create_tx.metadata = {"a": 1} signed_tx = create_tx.sign([alice.private_key]) validate(signed_tx) @@ -103,45 +106,47 @@ def test_validate_fails_metadata_empty_dict(b, create_tx, alice): ################################################################################ # Asset + def test_transfer_asset_schema(user_sk, signed_transfer_tx): from planetmint.transactions.common.transaction import Transaction + tx = signed_transfer_tx.to_dict() validate(tx) - tx['id'] = None - tx['asset']['data'] = {} + tx["id"] = None + tx["asset"]["data"] = {} tx = Transaction.from_dict(tx).sign([user_sk]).to_dict() validate_raises(tx) - tx['id'] = None - del tx['asset']['data'] - tx['asset']['id'] = 'b' * 63 + tx["id"] = None + del tx["asset"]["data"] + tx["asset"]["id"] = "b" * 63 tx = Transaction.from_dict(tx).sign([user_sk]).to_dict() validate_raises(tx) def test_create_tx_no_asset_id(b, create_tx, alice): - create_tx.asset['id'] = 'b' * 64 + create_tx.asset["id"] = "b" * 64 signed_tx = create_tx.sign([alice.private_key]) validate_raises(signed_tx) def test_create_tx_asset_type(b, create_tx, alice): - create_tx.asset['data'] = 'a' + create_tx.asset["data"] = "a" signed_tx = create_tx.sign([alice.private_key]) validate_raises(signed_tx) def test_create_tx_no_asset_data(b, create_tx, alice): tx_body = create_tx.to_dict() - del tx_body['asset']['data'] - tx_serialized = json.dumps( - tx_body, skipkeys=False, sort_keys=True, separators=(',', ':')) - tx_body['id'] = sha3.sha3_256(tx_serialized.encode()).hexdigest() + del tx_body["asset"]["data"] + tx_serialized = json.dumps(tx_body, skipkeys=False, sort_keys=True, separators=(",", ":")) + tx_body["id"] = 
sha3.sha3_256(tx_serialized.encode()).hexdigest() validate_raises(tx_body) ################################################################################ # Inputs + def test_no_inputs(b, create_tx, alice): create_tx.inputs = [] signed_tx = create_tx.sign([alice.private_key]) @@ -150,21 +155,22 @@ def test_no_inputs(b, create_tx, alice): def test_create_single_input(b, create_tx, alice): from planetmint.transactions.common.transaction import Transaction + tx = create_tx.to_dict() - tx['inputs'] += tx['inputs'] + tx["inputs"] += tx["inputs"] tx = Transaction.from_dict(tx).sign([alice.private_key]).to_dict() validate_raises(tx) - tx['id'] = None - tx['inputs'] = [] + tx["id"] = None + tx["inputs"] = [] tx = Transaction.from_dict(tx).sign([alice.private_key]).to_dict() validate_raises(tx) def test_create_tx_no_fulfills(b, create_tx, alice): from planetmint.transactions.common.transaction import Transaction + tx = create_tx.to_dict() - tx['inputs'][0]['fulfills'] = {'transaction_id': 'a' * 64, - 'output_index': 0} + tx["inputs"][0]["fulfills"] = {"transaction_id": "a" * 64, "output_index": 0} tx = Transaction.from_dict(tx).sign([alice.private_key]).to_dict() validate_raises(tx) @@ -179,6 +185,7 @@ def test_transfer_has_inputs(user_sk, signed_transfer_tx, alice): ################################################################################ # Outputs + def test_low_amounts(b, user_sk, create_tx, signed_transfer_tx, alice): for sk, tx in [(alice.private_key, create_tx), (user_sk, signed_transfer_tx)]: tx.outputs[0].amount = 0 @@ -194,11 +201,11 @@ def test_low_amounts(b, user_sk, create_tx, signed_transfer_tx, alice): def test_high_amounts(b, create_tx, alice): # Should raise a SchemaValidationError - don't want to allow ridiculously # large numbers to get converted to int - create_tx.outputs[0].amount = 10 ** 21 + create_tx.outputs[0].amount = 10**21 create_tx.sign([alice.private_key]) validate_raises(create_tx) # Should raise AmountError - 
create_tx.outputs[0].amount = 9 * 10 ** 18 + 1 + create_tx.outputs[0].amount = 9 * 10**18 + 1 create_tx._id = None create_tx.sign([alice.private_key]) validate_raises(create_tx, AmountError) @@ -212,16 +219,17 @@ def test_high_amounts(b, create_tx, alice): ################################################################################ # Conditions + def test_handle_threshold_overflow(): cond = { - 'type': 'ed25519-sha-256', - 'public_key': 'a' * 43, + "type": "ed25519-sha-256", + "public_key": "a" * 43, } for i in range(1000): cond = { - 'type': 'threshold-sha-256', - 'threshold': 1, - 'subconditions': [cond], + "type": "threshold-sha-256", + "threshold": 1, + "subconditions": [cond], } with pytest.raises(ThresholdTooDeep): _fulfillment_from_details(cond) @@ -231,26 +239,27 @@ def test_unsupported_condition_type(): from cryptoconditions.exceptions import UnsupportedTypeError with pytest.raises(UnsupportedTypeError): - _fulfillment_from_details({'type': 'a'}) + _fulfillment_from_details({"type": "a"}) with pytest.raises(UnsupportedTypeError): - _fulfillment_to_details(MagicMock(type_name='a')) + _fulfillment_to_details(MagicMock(type_name="a")) ################################################################################ # Version + def test_validate_version(b, create_tx, alice): - create_tx.version = '2.0' + create_tx.version = "2.0" create_tx.sign([alice.private_key]) validate(create_tx) - create_tx.version = '0.10' + create_tx.version = "0.10" create_tx._id = None create_tx.sign([alice.private_key]) validate_raises(create_tx) - create_tx.version = '110' + create_tx.version = "110" create_tx._id = None create_tx.sign([alice.private_key]) validate_raises(create_tx) diff --git a/tests/web/conftest.py b/tests/web/conftest.py index 746615f..040517e 100644 --- a/tests/web/conftest.py +++ b/tests/web/conftest.py @@ -11,7 +11,7 @@ def app(request): from planetmint.web import server from planetmint.lib import Planetmint - if 
request.config.getoption('--database-backend') == 'localmongodb': + if request.config.getoption("--database-backend") == "localmongodb": app = server.create_app(debug=True, planetmint_factory=Planetmint) else: app = server.create_app(debug=True) diff --git a/tests/web/test_assets.py b/tests/web/test_assets.py index b88c2ef..865a9a9 100644 --- a/tests/web/test_assets.py +++ b/tests/web/test_assets.py @@ -6,13 +6,12 @@ import pytest from planetmint.transactions.types.assets.create import Create -ASSETS_ENDPOINT = '/api/v1/assets/' +ASSETS_ENDPOINT = "/api/v1/assets/" def test_get_assets_with_empty_text_search(client): - res = client.get(ASSETS_ENDPOINT + '?search=') - assert res.json == {'status': 400, - 'message': 'text_search cannot be empty'} + res = client.get(ASSETS_ENDPOINT + "?search=") + assert res.json == {"status": 400, "message": "text_search cannot be empty"} assert res.status_code == 400 @@ -25,47 +24,41 @@ def test_get_assets_with_missing_text_search(client): def test_get_assets_tendermint(client, b, alice): # test returns empty list when no assets are found - res = client.get(ASSETS_ENDPOINT + '?search=abc') + res = client.get(ASSETS_ENDPOINT + "?search=abc") assert res.json == [] assert res.status_code == 200 # create asset - asset = {'msg': 'abc'} - tx = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset).sign([alice.private_key]) + asset = {"msg": "abc"} + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset).sign([alice.private_key]) b.store_bulk_transactions([tx]) # test that asset is returned - res = client.get(ASSETS_ENDPOINT + '?search=abc') + res = client.get(ASSETS_ENDPOINT + "?search=abc") assert res.status_code == 200 assert len(res.json) == 1 - assert res.json[0] == { - 'data': {'msg': 'abc'}, - 'id': tx.id - } + assert res.json[0] == {"data": {"msg": "abc"}, "id": tx.id} @pytest.mark.bdb def test_get_assets_limit_tendermint(client, b, alice): # create two assets - asset1 = {'msg': 'abc 
1'} - asset2 = {'msg': 'abc 2'} - tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset1).sign([alice.private_key]) - tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], - asset=asset2).sign([alice.private_key]) + asset1 = {"msg": "abc 1"} + asset2 = {"msg": "abc 2"} + tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset1).sign([alice.private_key]) + tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=asset2).sign([alice.private_key]) b.store_bulk_transactions([tx1]) b.store_bulk_transactions([tx2]) # test that both assets are returned without limit - res = client.get(ASSETS_ENDPOINT + '?search=abc') + res = client.get(ASSETS_ENDPOINT + "?search=abc") assert res.status_code == 200 assert len(res.json) == 2 # test that only one asset is returned when using limit=1 - res = client.get(ASSETS_ENDPOINT + '?search=abc&limit=1') + res = client.get(ASSETS_ENDPOINT + "?search=abc&limit=1") assert res.status_code == 200 assert len(res.json) == 1 diff --git a/tests/web/test_block_tendermint.py b/tests/web/test_block_tendermint.py index e52bac6..10a8189 100644 --- a/tests/web/test_block_tendermint.py +++ b/tests/web/test_block_tendermint.py @@ -8,14 +8,15 @@ import pytest from planetmint.transactions.types.assets.create import Create from planetmint.lib import Block -BLOCKS_ENDPOINT = '/api/v1/blocks/' +BLOCKS_ENDPOINT = "/api/v1/blocks/" @pytest.mark.bdb -@pytest.mark.usefixtures('inputs') +@pytest.mark.usefixtures("inputs") def test_get_block_endpoint(b, client, alice): import copy - tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset={'cycle': 'hero'}) + + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset={"cycle": "hero"}) tx = tx.sign([alice.private_key]) # with store_bulk_transactions we use `insert_many` where PyMongo @@ -25,38 +26,34 @@ def test_get_block_endpoint(b, client, alice): tx_dict = copy.deepcopy(tx.to_dict()) 
b.store_bulk_transactions([tx]) - block = Block(app_hash='random_utxo', - height=31, - transactions=[tx.id]) + block = Block(app_hash="random_utxo", height=31, transactions=[tx.id]) b.store_block(block._asdict()) res = client.get(BLOCKS_ENDPOINT + str(block.height)) - expected_response = {'height': block.height, 'transactions': [tx_dict]} + expected_response = {"height": block.height, "transactions": [tx_dict]} assert res.json == expected_response assert res.status_code == 200 @pytest.mark.bdb -@pytest.mark.usefixtures('inputs') +@pytest.mark.usefixtures("inputs") def test_get_block_returns_404_if_not_found(client): - res = client.get(BLOCKS_ENDPOINT + '123') + res = client.get(BLOCKS_ENDPOINT + "123") assert res.status_code == 404 - res = client.get(BLOCKS_ENDPOINT + '123/') + res = client.get(BLOCKS_ENDPOINT + "123/") assert res.status_code == 404 @pytest.mark.bdb def test_get_block_containing_transaction(b, client, alice): - tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset={'cycle': 'hero'}) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset={"cycle": "hero"}) tx = tx.sign([alice.private_key]) b.store_bulk_transactions([tx]) - block = Block(app_hash='random_utxo', - height=13, - transactions=[tx.id]) + block = Block(app_hash="random_utxo", height=13, transactions=[tx.id]) b.store_block(block._asdict()) - res = client.get('{}?transaction_id={}'.format(BLOCKS_ENDPOINT, tx.id)) + res = client.get("{}?transaction_id={}".format(BLOCKS_ENDPOINT, tx.id)) expected_response = [block.height] assert res.json == expected_response assert res.status_code == 200 @@ -64,10 +61,10 @@ def test_get_block_containing_transaction(b, client, alice): @pytest.mark.bdb def test_get_blocks_by_txid_endpoint_returns_empty_list_not_found(client): - res = client.get(BLOCKS_ENDPOINT + '?transaction_id=') + res = client.get(BLOCKS_ENDPOINT + "?transaction_id=") assert res.status_code == 200 assert len(res.json) == 0 - res = 
client.get(BLOCKS_ENDPOINT + '?transaction_id=123') + res = client.get(BLOCKS_ENDPOINT + "?transaction_id=123") assert res.status_code == 200 assert len(res.json) == 0 diff --git a/tests/web/test_blocks.py b/tests/web/test_blocks.py index 7dfc00e..2bcb8fe 100644 --- a/tests/web/test_blocks.py +++ b/tests/web/test_blocks.py @@ -5,32 +5,32 @@ import pytest -BLOCKS_ENDPOINT = '/api/v1/blocks/' +BLOCKS_ENDPOINT = "/api/v1/blocks/" @pytest.mark.bdb -@pytest.mark.usefixtures('inputs') +@pytest.mark.usefixtures("inputs") def test_get_block_returns_404_if_not_found(client): - res = client.get(BLOCKS_ENDPOINT + '123') + res = client.get(BLOCKS_ENDPOINT + "123") assert res.status_code == 404 - res = client.get(BLOCKS_ENDPOINT + '123/') + res = client.get(BLOCKS_ENDPOINT + "123/") assert res.status_code == 404 - res = client.get(BLOCKS_ENDPOINT + 'latest') + res = client.get(BLOCKS_ENDPOINT + "latest") assert res.status_code == 200 - res = client.get(BLOCKS_ENDPOINT + 'latest/') + res = client.get(BLOCKS_ENDPOINT + "latest/") assert res.status_code == 200 @pytest.mark.bdb def test_get_blocks_by_txid_endpoint_returns_empty_list_not_found(client): - res = client.get(BLOCKS_ENDPOINT + '?transaction_id=') + res = client.get(BLOCKS_ENDPOINT + "?transaction_id=") assert res.status_code == 200 assert len(res.json) == 0 - res = client.get(BLOCKS_ENDPOINT + '?transaction_id=123') + res = client.get(BLOCKS_ENDPOINT + "?transaction_id=123") assert res.status_code == 200 assert len(res.json) == 0 @@ -40,22 +40,18 @@ def test_get_blocks_by_txid_endpoint_returns_400_bad_query_params(client): res = client.get(BLOCKS_ENDPOINT) assert res.status_code == 400 - res = client.get(BLOCKS_ENDPOINT + '?ts_id=123') + res = client.get(BLOCKS_ENDPOINT + "?ts_id=123") assert res.status_code == 400 assert res.json == { - 'message': { - 'transaction_id': 'Missing required parameter in the JSON body or the post body or the query string' + "message": { + "transaction_id": "Missing required parameter in the 
JSON body or the post body or the query string" } } - res = client.get(BLOCKS_ENDPOINT + '?transaction_id=123&foo=123') + res = client.get(BLOCKS_ENDPOINT + "?transaction_id=123&foo=123") assert res.status_code == 400 - assert res.json == { - 'message': 'Unknown arguments: foo' - } + assert res.json == {"message": "Unknown arguments: foo"} - res = client.get(BLOCKS_ENDPOINT + '?transaction_id=123&status=123') + res = client.get(BLOCKS_ENDPOINT + "?transaction_id=123&status=123") assert res.status_code == 400 - assert res.json == { - 'message': 'Unknown arguments: status' - } + assert res.json == {"message": "Unknown arguments: status"} diff --git a/tests/web/test_content_type_middleware.py b/tests/web/test_content_type_middleware.py index fefe74e..866bef7 100644 --- a/tests/web/test_content_type_middleware.py +++ b/tests/web/test_content_type_middleware.py @@ -5,41 +5,41 @@ from unittest.mock import Mock -OUTPUTS_ENDPOINT = '/api/v1/outputs/' +OUTPUTS_ENDPOINT = "/api/v1/outputs/" def test_middleware_does_nothing_when_no_content_type_is_provided(): from planetmint.web.strip_content_type_middleware import StripContentTypeMiddleware + mock = Mock() middleware = StripContentTypeMiddleware(mock) - middleware({'REQUEST_METHOD': 'GET'}, None) + middleware({"REQUEST_METHOD": "GET"}, None) - assert 'CONTENT_TYPE' not in mock.call_args[0][0] + assert "CONTENT_TYPE" not in mock.call_args[0][0] def test_middleware_strips_content_type_from_gets(): from planetmint.web.strip_content_type_middleware import StripContentTypeMiddleware + mock = Mock() middleware = StripContentTypeMiddleware(mock) - middleware({'REQUEST_METHOD': 'GET', - 'CONTENT_TYPE': 'application/json'}, - None) + middleware({"REQUEST_METHOD": "GET", "CONTENT_TYPE": "application/json"}, None) - assert 'CONTENT_TYPE' not in mock.call_args[0][0] + assert "CONTENT_TYPE" not in mock.call_args[0][0] def test_middleware_does_notstrip_content_type_from_other_methods(): from planetmint.web.strip_content_type_middleware 
import StripContentTypeMiddleware + mock = Mock() middleware = StripContentTypeMiddleware(mock) - middleware({'REQUEST_METHOD': 'POST', - 'CONTENT_TYPE': 'application/json'}, - None) + middleware({"REQUEST_METHOD": "POST", "CONTENT_TYPE": "application/json"}, None) - assert 'CONTENT_TYPE' in mock.call_args[0][0] + assert "CONTENT_TYPE" in mock.call_args[0][0] def test_get_outputs_endpoint_with_content_type(client, user_pk): - res = client.get(OUTPUTS_ENDPOINT + '?public_key={}'.format(user_pk), - headers=[('Content-Type', 'application/json')]) + res = client.get( + OUTPUTS_ENDPOINT + "?public_key={}".format(user_pk), headers=[("Content-Type", "application/json")] + ) assert res.status_code == 200 diff --git a/tests/web/test_info.py b/tests/web/test_info.py index 47cf401..ed768a2 100644 --- a/tests/web/test_info.py +++ b/tests/web/test_info.py @@ -6,51 +6,45 @@ from unittest import mock -@mock.patch('planetmint.version.__short_version__', 'tst') -@mock.patch('planetmint.version.__version__', 'tsttst') +@mock.patch("planetmint.version.__short_version__", "tst") +@mock.patch("planetmint.version.__version__", "tsttst") def test_api_root_endpoint(client, wsserver_base_url): - res = client.get('/') - docs_url = ['https://docs.planetmint.io/projects/server/en/vtsttst', - '/http-client-server-api.html'] + res = client.get("/") + docs_url = ["https://docs.planetmint.io/projects/server/en/vtsttst", "/http-client-server-api.html"] assert res.json == { - 'api': { - 'v1': { - 'docs': ''.join(docs_url), - 'transactions': '/api/v1/transactions/', - 'blocks': '/api/v1/blocks/', - 'assets': '/api/v1/assets/', - 'outputs': '/api/v1/outputs/', - 'streams': '{}/api/v1/streams/valid_transactions'.format( - wsserver_base_url), - 'streamedblocks': '{}/api/v1/streams/valid_blocks'.format( - wsserver_base_url), - 'metadata': '/api/v1/metadata/', - 'validators': '/api/v1/validators', + "api": { + "v1": { + "docs": "".join(docs_url), + "transactions": "/api/v1/transactions/", + "blocks": 
"/api/v1/blocks/", + "assets": "/api/v1/assets/", + "outputs": "/api/v1/outputs/", + "streams": "{}/api/v1/streams/valid_transactions".format(wsserver_base_url), + "streamedblocks": "{}/api/v1/streams/valid_blocks".format(wsserver_base_url), + "metadata": "/api/v1/metadata/", + "validators": "/api/v1/validators", } }, - 'docs': 'https://docs.planetmint.io/projects/server/en/vtsttst/', - 'version': 'tsttst', - 'software': 'Planetmint', + "docs": "https://docs.planetmint.io/projects/server/en/vtsttst/", + "version": "tsttst", + "software": "Planetmint", } -@mock.patch('planetmint.version.__short_version__', 'tst') -@mock.patch('planetmint.version.__version__', 'tsttst') +@mock.patch("planetmint.version.__short_version__", "tst") +@mock.patch("planetmint.version.__version__", "tsttst") def test_api_v1_endpoint(client, wsserver_base_url): - docs_url = ['https://docs.planetmint.io/projects/server/en/vtsttst', - '/http-client-server-api.html'] + docs_url = ["https://docs.planetmint.io/projects/server/en/vtsttst", "/http-client-server-api.html"] api_v1_info = { - 'docs': ''.join(docs_url), - 'transactions': '/transactions/', - 'blocks': '/blocks/', - 'assets': '/assets/', - 'outputs': '/outputs/', - 'streams': '{}/api/v1/streams/valid_transactions'.format( - wsserver_base_url), - 'streamedblocks': '{}/api/v1/streams/valid_blocks'.format( - wsserver_base_url), - 'metadata': '/metadata/', - 'validators': '/validators' + "docs": "".join(docs_url), + "transactions": "/transactions/", + "blocks": "/blocks/", + "assets": "/assets/", + "outputs": "/outputs/", + "streams": "{}/api/v1/streams/valid_transactions".format(wsserver_base_url), + "streamedblocks": "{}/api/v1/streams/valid_blocks".format(wsserver_base_url), + "metadata": "/metadata/", + "validators": "/validators", } - res = client.get('/api/v1') + res = client.get("/api/v1") assert res.json == api_v1_info diff --git a/tests/web/test_metadata.py b/tests/web/test_metadata.py index 9e2acf2..7512c06 100644 --- 
a/tests/web/test_metadata.py +++ b/tests/web/test_metadata.py @@ -6,13 +6,12 @@ import pytest from planetmint.transactions.types.assets.create import Create -METADATA_ENDPOINT = '/api/v1/metadata/' +METADATA_ENDPOINT = "/api/v1/metadata/" def test_get_metadata_with_empty_text_search(client): - res = client.get(METADATA_ENDPOINT + '?search=') - assert res.json == {'status': 400, - 'message': 'text_search cannot be empty'} + res = client.get(METADATA_ENDPOINT + "?search=") + assert res.json == {"status": 400, "message": "text_search cannot be empty"} assert res.status_code == 400 @@ -25,50 +24,50 @@ def test_get_metadata_with_missing_text_search(client): def test_get_metadata_tendermint(client, b, alice): # test returns empty list when no assets are found - res = client.get(METADATA_ENDPOINT + '?search=abc') + res = client.get(METADATA_ENDPOINT + "?search=abc") assert res.json == [] assert res.status_code == 200 # create asset - asset = {'msg': 'abc'} - metadata = {'key': 'my_meta'} - tx = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=metadata, - asset=asset).sign([alice.private_key]) + asset = {"msg": "abc"} + metadata = {"key": "my_meta"} + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=metadata, asset=asset).sign( + [alice.private_key] + ) b.store_bulk_transactions([tx]) # test that metadata is returned - res = client.get(METADATA_ENDPOINT + '?search=my_meta') + res = client.get(METADATA_ENDPOINT + "?search=my_meta") assert res.status_code == 200 assert len(res.json) == 1 - assert res.json[0] == { - 'metadata': {'key': 'my_meta'}, - 'id': tx.id - } + assert res.json[0] == {"metadata": {"key": "my_meta"}, "id": tx.id} @pytest.mark.bdb def test_get_metadata_limit_tendermint(client, b, alice): # create two assets - asset1 = {'msg': 'abc 1'} - meta1 = {'key': 'meta 1'} - tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta1, - asset=asset1).sign([alice.private_key]) + asset1 = 
{"msg": "abc 1"} + meta1 = {"key": "meta 1"} + tx1 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta1, asset=asset1).sign( + [alice.private_key] + ) b.store_bulk_transactions([tx1]) - asset2 = {'msg': 'abc 2'} - meta2 = {'key': 'meta 2'} - tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta2, - asset=asset2).sign([alice.private_key]) + asset2 = {"msg": "abc 2"} + meta2 = {"key": "meta 2"} + tx2 = Create.generate([alice.public_key], [([alice.public_key], 1)], metadata=meta2, asset=asset2).sign( + [alice.private_key] + ) b.store_bulk_transactions([tx2]) # test that both assets are returned without limit - res = client.get(METADATA_ENDPOINT + '?search=meta') + res = client.get(METADATA_ENDPOINT + "?search=meta") assert res.status_code == 200 assert len(res.json) == 2 # test that only one asset is returned when using limit=1 - res = client.get(METADATA_ENDPOINT + '?search=meta&limit=1') + res = client.get(METADATA_ENDPOINT + "?search=meta&limit=1") assert res.status_code == 200 assert len(res.json) == 1 diff --git a/tests/web/test_outputs.py b/tests/web/test_outputs.py index 16783b8..a045851 100644 --- a/tests/web/test_outputs.py +++ b/tests/web/test_outputs.py @@ -10,75 +10,72 @@ from planetmint.transactions.types.assets.transfer import Transfer from unittest.mock import MagicMock, patch -OUTPUTS_ENDPOINT = '/api/v1/outputs/' +OUTPUTS_ENDPOINT = "/api/v1/outputs/" @pytest.mark.bdb -@pytest.mark.userfixtures('inputs') +@pytest.mark.userfixtures("inputs") def test_get_outputs_endpoint(client, user_pk): m = MagicMock() - m.txid = 'a' + m.txid = "a" m.output = 0 - with patch('planetmint.Planetmint.get_outputs_filtered') as gof: + with patch("planetmint.Planetmint.get_outputs_filtered") as gof: gof.return_value = [m, m] - res = client.get(OUTPUTS_ENDPOINT + '?public_key={}'.format(user_pk)) - assert res.json == [ - {'transaction_id': 'a', 'output_index': 0}, - {'transaction_id': 'a', 'output_index': 0} - ] + res = 
client.get(OUTPUTS_ENDPOINT + "?public_key={}".format(user_pk)) + assert res.json == [{"transaction_id": "a", "output_index": 0}, {"transaction_id": "a", "output_index": 0}] assert res.status_code == 200 gof.assert_called_once_with(user_pk, None) def test_get_outputs_endpoint_unspent(client, user_pk): m = MagicMock() - m.txid = 'a' + m.txid = "a" m.output = 0 - with patch('planetmint.Planetmint.get_outputs_filtered') as gof: + with patch("planetmint.Planetmint.get_outputs_filtered") as gof: gof.return_value = [m] - params = '?spent=False&public_key={}'.format(user_pk) + params = "?spent=False&public_key={}".format(user_pk) res = client.get(OUTPUTS_ENDPOINT + params) - assert res.json == [{'transaction_id': 'a', 'output_index': 0}] + assert res.json == [{"transaction_id": "a", "output_index": 0}] assert res.status_code == 200 gof.assert_called_once_with(user_pk, False) @pytest.mark.bdb -@pytest.mark.userfixtures('inputs') +@pytest.mark.userfixtures("inputs") def test_get_outputs_endpoint_spent(client, user_pk): m = MagicMock() - m.txid = 'a' + m.txid = "a" m.output = 0 - with patch('planetmint.Planetmint.get_outputs_filtered') as gof: + with patch("planetmint.Planetmint.get_outputs_filtered") as gof: gof.return_value = [m] - params = '?spent=true&public_key={}'.format(user_pk) + params = "?spent=true&public_key={}".format(user_pk) res = client.get(OUTPUTS_ENDPOINT + params) - assert res.json == [{'transaction_id': 'a', 'output_index': 0}] + assert res.json == [{"transaction_id": "a", "output_index": 0}] assert res.status_code == 200 gof.assert_called_once_with(user_pk, True) @pytest.mark.bdb -@pytest.mark.userfixtures('inputs') +@pytest.mark.userfixtures("inputs") def test_get_outputs_endpoint_without_public_key(client): res = client.get(OUTPUTS_ENDPOINT) assert res.status_code == 400 @pytest.mark.bdb -@pytest.mark.userfixtures('inputs') +@pytest.mark.userfixtures("inputs") def test_get_outputs_endpoint_with_invalid_public_key(client): - expected = {'message': 
{'public_key': 'Invalid base58 ed25519 key'}} - res = client.get(OUTPUTS_ENDPOINT + '?public_key=abc') + expected = {"message": {"public_key": "Invalid base58 ed25519 key"}} + res = client.get(OUTPUTS_ENDPOINT + "?public_key=abc") assert expected == res.json assert res.status_code == 400 @pytest.mark.bdb -@pytest.mark.userfixtures('inputs') +@pytest.mark.userfixtures("inputs") def test_get_outputs_endpoint_with_invalid_spent(client, user_pk): - expected = {'message': {'spent': 'Boolean value must be "true" or "false" (lowercase)'}} - params = '?spent=tru&public_key={}'.format(user_pk) + expected = {"message": {"spent": 'Boolean value must be "true" or "false" (lowercase)'}} + params = "?spent=tru&public_key={}".format(user_pk) res = client.get(OUTPUTS_ENDPOINT + params) assert expected == res.json assert res.status_code == 400 @@ -89,7 +86,7 @@ def test_get_divisble_transactions_returns_500(b, client): from planetmint.transactions.common import crypto import json - TX_ENDPOINT = '/api/v1/transactions' + TX_ENDPOINT = "/api/v1/transactions" def mine(tx_list): b.store_bulk_transactions(tx_list) @@ -106,9 +103,7 @@ def test_get_divisble_transactions_returns_500(b, client): mine([create_tx]) - transfer_tx = Transfer.generate(create_tx.to_inputs(), - [([alice_pub], 3), ([bob_pub], 1)], - asset_id=create_tx.id) + transfer_tx = Transfer.generate(create_tx.to_inputs(), [([alice_pub], 3), ([bob_pub], 1)], asset_id=create_tx.id) transfer_tx.sign([alice_priv]) res = client.post(TX_ENDPOINT, data=json.dumps(transfer_tx.to_dict())) @@ -116,9 +111,7 @@ def test_get_divisble_transactions_returns_500(b, client): mine([transfer_tx]) - transfer_tx_carly = Transfer.generate([transfer_tx.to_inputs()[1]], - [([carly_pub], 1)], - asset_id=create_tx.id) + transfer_tx_carly = Transfer.generate([transfer_tx.to_inputs()[1]], [([carly_pub], 1)], asset_id=create_tx.id) transfer_tx_carly.sign([bob_priv]) res = client.post(TX_ENDPOINT, data=json.dumps(transfer_tx_carly.to_dict())) @@ -128,15 
+121,15 @@ def test_get_divisble_transactions_returns_500(b, client): asset_id = create_tx.id - url = TX_ENDPOINT + '?asset_id=' + asset_id + url = TX_ENDPOINT + "?asset_id=" + asset_id assert client.get(url).status_code == 200 assert len(client.get(url).json) == 3 - url = OUTPUTS_ENDPOINT + '?public_key=' + alice_pub + url = OUTPUTS_ENDPOINT + "?public_key=" + alice_pub assert client.get(url).status_code == 200 - url = OUTPUTS_ENDPOINT + '?public_key=' + bob_pub + url = OUTPUTS_ENDPOINT + "?public_key=" + bob_pub assert client.get(url).status_code == 200 - url = OUTPUTS_ENDPOINT + '?public_key=' + carly_pub + url = OUTPUTS_ENDPOINT + "?public_key=" + carly_pub assert client.get(url).status_code == 200 diff --git a/tests/web/test_parameters.py b/tests/web/test_parameters.py index 6d8b900..9b18303 100644 --- a/tests/web/test_parameters.py +++ b/tests/web/test_parameters.py @@ -9,16 +9,20 @@ import pytest def test_valid_txid(): from planetmint.web.views.parameters import valid_txid - valid = ['18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e4', - '18AC3E7343F016890C510E93F935261169D9E3F565436429830FAF0934F4F8E4'] + valid = [ + "18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e4", + "18AC3E7343F016890C510E93F935261169D9E3F565436429830FAF0934F4F8E4", + ] for h in valid: assert valid_txid(h) == h.lower() - non = ['18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e', - '18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e45', - '18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8eg', - '18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e ', - ''] + non = [ + "18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e", + "18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e45", + "18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8eg", + "18ac3e7343f016890c510e93f935261169d9e3f565436429830faf0934f4f8e ", + "", + ] for h in non: with pytest.raises(ValueError): 
valid_txid(h) @@ -27,54 +31,53 @@ def test_valid_txid(): def test_valid_bool(): from planetmint.web.views.parameters import valid_bool - assert valid_bool('true') is True - assert valid_bool('false') is False - assert valid_bool('tRUE') is True - assert valid_bool('fALSE') is False + assert valid_bool("true") is True + assert valid_bool("false") is False + assert valid_bool("tRUE") is True + assert valid_bool("fALSE") is False with pytest.raises(ValueError): - valid_bool('0') + valid_bool("0") with pytest.raises(ValueError): - valid_bool('1') + valid_bool("1") with pytest.raises(ValueError): - valid_bool('yes') + valid_bool("yes") with pytest.raises(ValueError): - valid_bool('no') + valid_bool("no") def test_valid_ed25519(): from planetmint.web.views.parameters import valid_ed25519 - valid = ['123456789abcdefghijkmnopqrstuvwxyz1111111111', - '123456789ABCDEFGHJKLMNPQRSTUVWXYZ1111111111'] + valid = ["123456789abcdefghijkmnopqrstuvwxyz1111111111", "123456789ABCDEFGHJKLMNPQRSTUVWXYZ1111111111"] for h in valid: assert valid_ed25519(h) == h with pytest.raises(ValueError): - valid_ed25519('1234556789abcdefghijkmnopqrstuvwxyz1111111') + valid_ed25519("1234556789abcdefghijkmnopqrstuvwxyz1111111") with pytest.raises(ValueError): - valid_ed25519('1234556789abcdefghijkmnopqrstuvwxyz1111111111') + valid_ed25519("1234556789abcdefghijkmnopqrstuvwxyz1111111111") with pytest.raises(ValueError): - valid_ed25519('123456789abcdefghijkmnopqrstuvwxyz111111111l') + valid_ed25519("123456789abcdefghijkmnopqrstuvwxyz111111111l") with pytest.raises(ValueError): - valid_ed25519('123456789abcdefghijkmnopqrstuvwxyz111111111I') + valid_ed25519("123456789abcdefghijkmnopqrstuvwxyz111111111I") with pytest.raises(ValueError): - valid_ed25519('1234556789abcdefghijkmnopqrstuvwxyz11111111O') + valid_ed25519("1234556789abcdefghijkmnopqrstuvwxyz11111111O") with pytest.raises(ValueError): - valid_ed25519('1234556789abcdefghijkmnopqrstuvwxyz111111110') + 
valid_ed25519("1234556789abcdefghijkmnopqrstuvwxyz111111110") def test_valid_operation(): from planetmint.web.views.parameters import valid_operation - assert valid_operation('create') == 'CREATE' - assert valid_operation('transfer') == 'TRANSFER' - assert valid_operation('CREATe') == 'CREATE' - assert valid_operation('TRANSFEr') == 'TRANSFER' + assert valid_operation("create") == "CREATE" + assert valid_operation("transfer") == "TRANSFER" + assert valid_operation("CREATe") == "CREATE" + assert valid_operation("TRANSFEr") == "TRANSFER" with pytest.raises(ValueError): - valid_operation('GENESIS') + valid_operation("GENESIS") with pytest.raises(ValueError): - valid_operation('blah') + valid_operation("blah") with pytest.raises(ValueError): - valid_operation('') + valid_operation("") diff --git a/tests/web/test_server.py b/tests/web/test_server.py index d7e7608..1ce6dd6 100644 --- a/tests/web/test_server.py +++ b/tests/web/test_server.py @@ -8,8 +8,8 @@ def test_settings(): from planetmint.config import Config from planetmint.web import server - s = server.create_server(Config().get()['server']) + s = server.create_server(Config().get()["server"]) # for whatever reason the value is wrapped in a list # needs further investigation - assert s.cfg.bind[0] == Config().get()['server']['bind'] + assert s.cfg.bind[0] == Config().get()["server"]["bind"] diff --git a/tests/web/test_transactions.py b/tests/web/test_transactions.py index dc479a2..b23a5c5 100644 --- a/tests/web/test_transactions.py +++ b/tests/web/test_transactions.py @@ -97,9 +97,7 @@ def test_post_create_transaction_endpoint(b, client): ], ) @pytest.mark.language -def test_post_create_transaction_with_language( - b, client, nested, language, expected_status_code -): +def test_post_create_transaction_with_language(b, client, nested, language, expected_status_code): from planetmint.backend.localmongodb.connection import LocalMongoDBConnection if isinstance(b.connection, LocalMongoDBConnection): @@ -138,9 +136,7 @@ 
def test_post_create_transaction_with_language( ({"good_key": "v"}, "good_key", 202), ], ) -def test_post_create_transaction_with_invalid_key( - b, client, field, value, err_key, expected_status_code -): +def test_post_create_transaction_with_invalid_key(b, client, field, value, err_key, expected_status_code): from planetmint.backend.localmongodb.connection import LocalMongoDBConnection user_priv, user_pub = crypto.generate_key_pair() @@ -184,10 +180,7 @@ def test_post_create_transaction_with_invalid_id(mock_logger, b, client): assert res.status_code == expected_status_code assert res.json["message"] == expected_error_message assert mock_logger.error.called - assert ( - "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" - in mock_logger.error.call_args[0] - ) + assert "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" in mock_logger.error.call_args[0] assert { "message": expected_error_message, "status": expected_status_code, @@ -219,16 +212,13 @@ def test_post_create_transaction_with_invalid_signature(mock_logger, b, client): res = client.post(TX_ENDPOINT, data=json.dumps(tx)) expected_status_code = 400 - expected_error_message = ( - "Invalid transaction ({}): Fulfillment URI " "couldn't been parsed" - ).format(InvalidSignature.__name__) + expected_error_message = ("Invalid transaction ({}): Fulfillment URI " "couldn't been parsed").format( + InvalidSignature.__name__ + ) assert res.status_code == expected_status_code assert res.json["message"] == expected_error_message assert mock_logger.error.called - assert ( - "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" - in mock_logger.error.call_args[0] - ) + assert "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" in mock_logger.error.call_args[0] assert { "message": expected_error_message, "status": expected_status_code, @@ -278,10 +268,7 @@ def test_post_create_transaction_with_invalid_schema(mock_logger, client): assert res.status_code == 
expected_status_code assert res.json["message"] == expected_error_message assert mock_logger.error.called - assert ( - "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" - in mock_logger.error.call_args[0] - ) + assert "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" in mock_logger.error.call_args[0] assert { "message": expected_error_message, "status": expected_status_code, @@ -324,19 +311,14 @@ def test_post_invalid_transaction( TransactionMock = Mock(validate=mock_validation) - monkeypatch.setattr( - "planetmint.models.Transaction.from_dict", lambda tx: TransactionMock - ) + monkeypatch.setattr("planetmint.models.Transaction.from_dict", lambda tx: TransactionMock) res = client.post(TX_ENDPOINT, data=json.dumps({})) expected_status_code = 400 expected_error_message = "Invalid transaction ({}): {}".format(exc, msg) assert res.status_code == expected_status_code assert res.json["message"] == "Invalid transaction ({}): {}".format(exc, msg) assert mock_logger.error.called - assert ( - "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" - in mock_logger.error.call_args[0] - ) + assert "HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s" in mock_logger.error.call_args[0] assert { "message": expected_error_message, "status": expected_status_code, @@ -351,9 +333,7 @@ def test_post_invalid_transaction( @pytest.mark.abci def test_post_transfer_transaction_endpoint(client, user_pk, user_sk, posted_create_tx): - transfer_tx = Transfer.generate( - posted_create_tx.to_inputs(), [([user_pk], 1)], asset_id=posted_create_tx.id - ) + transfer_tx = Transfer.generate(posted_create_tx.to_inputs(), [([user_pk], 1)], asset_id=posted_create_tx.id) transfer_tx = transfer_tx.sign([user_sk]) res = client.post(TX_ENDPOINT, data=json.dumps(transfer_tx.to_dict())) @@ -365,14 +345,10 @@ def test_post_transfer_transaction_endpoint(client, user_pk, user_sk, posted_cre @pytest.mark.abci -def test_post_invalid_transfer_transaction_returns_400( 
- client, user_pk, posted_create_tx -): +def test_post_invalid_transfer_transaction_returns_400(client, user_pk, posted_create_tx): from planetmint.transactions.common.exceptions import InvalidSignature - transfer_tx = Transfer.generate( - posted_create_tx.to_inputs(), [([user_pk], 1)], asset_id=posted_create_tx.id - ) + transfer_tx = Transfer.generate(posted_create_tx.to_inputs(), [([user_pk], 1)], asset_id=posted_create_tx.id) transfer_tx._hash() res = client.post(TX_ENDPOINT, data=json.dumps(transfer_tx.to_dict())) @@ -390,20 +366,14 @@ def test_post_wrong_asset_division_transfer_returns_400(b, client, user_pk): priv_key, pub_key = crypto.generate_key_pair() - create_tx = Create.generate( - [pub_key], [([pub_key], 10)], asset={"test": "asset"} - ).sign([priv_key]) - res = client.post( - TX_ENDPOINT + "?mode=commit", data=json.dumps(create_tx.to_dict()) - ) + create_tx = Create.generate([pub_key], [([pub_key], 10)], asset={"test": "asset"}).sign([priv_key]) + res = client.post(TX_ENDPOINT + "?mode=commit", data=json.dumps(create_tx.to_dict())) assert res.status_code == 202 - transfer_tx = Transfer.generate( - create_tx.to_inputs(), [([pub_key], 20)], asset_id=create_tx.id # 20 > 10 - ).sign([priv_key]) - res = client.post( - TX_ENDPOINT + "?mode=commit", data=json.dumps(transfer_tx.to_dict()) + transfer_tx = Transfer.generate(create_tx.to_inputs(), [([pub_key], 20)], asset_id=create_tx.id).sign( # 20 > 10 + [priv_key] ) + res = client.post(TX_ENDPOINT + "?mode=commit", data=json.dumps(transfer_tx.to_dict())) expected_error_message = ( f"Invalid transaction ({AmountError.__name__}): " + "The amount used in the inputs `10` needs to be same as the amount used in the outputs `20`" @@ -421,10 +391,7 @@ def test_transactions_get_list_good(client): of transactions it returns an array of shims with a to_dict() method that reports one of the arguments passed to `get_transactions_filtered`. 
""" - return [ - type("", (), {"to_dict": partial(lambda a: a, arg)}) - for arg in sorted(args.items()) - ] + return [type("", (), {"to_dict": partial(lambda a: a, arg)}) for arg in sorted(args.items())] asset_id = "1" * 64 @@ -487,9 +454,7 @@ def test_post_transaction_valid_modes(mock_post, client, mode): mock_post.side_effect = _mock_post alice = generate_key_pair() - tx = Create.generate( - [alice.public_key], [([alice.public_key], 1)], asset=None - ).sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]) mode_endpoint = TX_ENDPOINT + mode[0] client.post(mode_endpoint, data=json.dumps(tx.to_dict())) args, kwargs = mock_post.call_args @@ -501,13 +466,8 @@ def test_post_transaction_invalid_mode(client): from planetmint.transactions.common.crypto import generate_key_pair alice = generate_key_pair() - tx = Create.generate( - [alice.public_key], [([alice.public_key], 1)], asset=None - ).sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)], asset=None).sign([alice.private_key]) mode_endpoint = TX_ENDPOINT + "?mode=nope" response = client.post(mode_endpoint, data=json.dumps(tx.to_dict())) assert "400 BAD REQUEST" in response.status - assert ( - 'Mode must be "async", "sync" or "commit"' - == json.loads(response.data.decode("utf8"))["message"]["mode"] - ) + assert 'Mode must be "async", "sync" or "commit"' == json.loads(response.data.decode("utf8"))["message"]["mode"] diff --git a/tests/web/test_validators.py b/tests/web/test_validators.py index 304273d..c8b8034 100644 --- a/tests/web/test_validators.py +++ b/tests/web/test_validators.py @@ -3,14 +3,17 @@ # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0) # Code is Apache-2.0 and docs are CC-BY-4.0 -VALIDATORS_ENDPOINT = '/api/v1/validators/' +VALIDATORS_ENDPOINT = "/api/v1/validators/" def test_get_validators_endpoint(b, client): - validator_set = [{'address': 
'F5426F0980E36E03044F74DD414248D29ABCBDB2', - 'pub_key': {'data': '4E2685D9016126864733225BE00F005515200727FBAB1312FC78C8B76831255A', - 'type': 'ed25519'}, - 'voting_power': 10}] + validator_set = [ + { + "address": "F5426F0980E36E03044F74DD414248D29ABCBDB2", + "pub_key": {"data": "4E2685D9016126864733225BE00F005515200727FBAB1312FC78C8B76831255A", "type": "ed25519"}, + "voting_power": 10, + } + ] b.store_validator_set(23, validator_set) res = client.get(VALIDATORS_ENDPOINT) @@ -20,4 +23,4 @@ def test_get_validators_endpoint(b, client): # Helper def is_validator(v): - return ('pub_key' in v) and ('voting_power' in v) + return ("pub_key" in v) and ("voting_power" in v) diff --git a/tests/web/test_websocket_server.py b/tests/web/test_websocket_server.py index e5f7b78..7b7f7ec 100644 --- a/tests/web/test_websocket_server.py +++ b/tests/web/test_websocket_server.py @@ -7,12 +7,14 @@ import asyncio import json import queue import threading + # from unittest.mock import patch from planetmint.transactions.types.assets.create import Create from planetmint.transactions.types.assets.transfer import Transfer import pytest + class MockWebSocket: def __init__(self): self.received = [] @@ -27,55 +29,49 @@ def test_eventify_block_works_with_any_transaction(): alice = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)])\ - .sign([alice.private_key]) - tx_transfer = Transfer.generate(tx.to_inputs(), - [([alice.public_key], 1)], - asset_id=tx.id)\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)]).sign([alice.private_key]) + tx_transfer = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], asset_id=tx.id).sign( + [alice.private_key] + ) - block = {'height': 1, - 'transactions': [tx, tx_transfer]} + block = {"height": 1, "transactions": [tx, tx_transfer]} expected_events = [ - { - 'height': 1, - 'asset_id': tx.id, - 'transaction_id': tx.id - }, - { - 'height': 1, - 'asset_id': 
tx_transfer.asset['id'], - 'transaction_id': tx_transfer.id - }] + {"height": 1, "asset_id": tx.id, "transaction_id": tx.id}, + {"height": 1, "asset_id": tx_transfer.asset["id"], "transaction_id": tx_transfer.id}, + ] for event, expected in zip(Dispatcher.eventify_block(block), expected_events): assert event == expected + def test_simplified_block_works(): from planetmint.web.websocket_dispatcher import Dispatcher from planetmint.transactions.common.crypto import generate_key_pair alice = generate_key_pair() - tx = Create.generate([alice.public_key], - [([alice.public_key], 1)])\ - .sign([alice.private_key]) - tx_transfer = Transfer.generate(tx.to_inputs(), - [([alice.public_key], 1)], - asset_id=tx.id)\ - .sign([alice.private_key]) + tx = Create.generate([alice.public_key], [([alice.public_key], 1)]).sign([alice.private_key]) + tx_transfer = Transfer.generate(tx.to_inputs(), [([alice.public_key], 1)], asset_id=tx.id).sign( + [alice.private_key] + ) - block = {'height': 1, 'hash': '27E2D48AFA5E4B7FF26AA9C84B5CFCA2A670DBD297740053C0D177EB18962B09', - 'transactions': [tx, tx_transfer]} + block = { + "height": 1, + "hash": "27E2D48AFA5E4B7FF26AA9C84B5CFCA2A670DBD297740053C0D177EB18962B09", + "transactions": [tx, tx_transfer], + } - expected_event = {'height': 1, 'hash': '27E2D48AFA5E4B7FF26AA9C84B5CFCA2A670DBD297740053C0D177EB18962B09', - 'transaction_ids': [tx.id, tx_transfer.id]} + expected_event = { + "height": 1, + "hash": "27E2D48AFA5E4B7FF26AA9C84B5CFCA2A670DBD297740053C0D177EB18962B09", + "transaction_ids": [tx.id, tx_transfer.id], + } blk_event = Dispatcher.simplified_block(block) assert blk_event == expected_event + @pytest.mark.asyncio async def test_bridge_sync_async_queue(event_loop): from planetmint.web.websocket_server import _multiprocessing_to_asyncio @@ -84,31 +80,32 @@ async def test_bridge_sync_async_queue(event_loop): async_queue = asyncio.Queue(loop=event_loop) async_queue2 = asyncio.Queue(loop=event_loop) - bridge = 
threading.Thread(target=_multiprocessing_to_asyncio, - args=(sync_queue, async_queue, async_queue2, event_loop), - daemon=True) + bridge = threading.Thread( + target=_multiprocessing_to_asyncio, args=(sync_queue, async_queue, async_queue2, event_loop), daemon=True + ) bridge.start() - sync_queue.put('fahren') - sync_queue.put('auf') - sync_queue.put('der') - sync_queue.put('Autobahn') + sync_queue.put("fahren") + sync_queue.put("auf") + sync_queue.put("der") + sync_queue.put("Autobahn") result = await async_queue.get() - assert result == 'fahren' + assert result == "fahren" result = await async_queue.get() - assert result == 'auf' + assert result == "auf" result = await async_queue.get() - assert result == 'der' + assert result == "der" result = await async_queue.get() - assert result == 'Autobahn' + assert result == "Autobahn" print(f" queue ({async_queue.qsize()}): {async_queue} ") assert async_queue.qsize() == 0 + # TODO: fix the test and uncomment it # @patch('threading.Thread') # @patch('aiohttp.web.run_app') @@ -135,6 +132,7 @@ async def test_bridge_sync_async_queue(event_loop): # port=config['wsserver']['port'], # ) + @pytest.mark.asyncio async def test_websocket_block_event(aiohttp_client, event_loop): from planetmint import events @@ -150,18 +148,21 @@ async def test_websocket_block_event(aiohttp_client, event_loop): app = init_app(tx_source, blk_source, loop=event_loop) client = await aiohttp_client(app) ws = await client.ws_connect(EVENTS_ENDPOINT_BLOCKS) - block = {'height': 1, 'hash': '27E2D48AFA5E4B7FF26AA9C84B5CFCA2A670DBD297740053C0D177EB18962B09', - 'transactions': [tx]} + block = { + "height": 1, + "hash": "27E2D48AFA5E4B7FF26AA9C84B5CFCA2A670DBD297740053C0D177EB18962B09", + "transactions": [tx], + } block_event = events.Event(events.EventTypes.BLOCK_VALID, block) await blk_source.put(block_event) result = await ws.receive() json_result = json.loads(result.data) - assert json_result['height'] == block['height'] - assert json_result['hash'] == 
block['hash'] - assert len(json_result['transaction_ids']) == 1 - assert json_result['transaction_ids'][0] == tx.id + assert json_result["height"] == block["height"] + assert json_result["hash"] == block["hash"] + assert len(json_result["transaction_ids"]) == 1 + assert json_result["transaction_ids"][0] == tx.id await blk_source.put(events.POISON_PILL) @@ -181,21 +182,22 @@ async def test_websocket_transaction_event(aiohttp_client, event_loop): app = init_app(tx_source, blk_source, loop=event_loop) client = await aiohttp_client(app) ws = await client.ws_connect(EVENTS_ENDPOINT) - block = {'height': 1, 'transactions': [tx]} + block = {"height": 1, "transactions": [tx]} block_event = events.Event(events.EventTypes.BLOCK_VALID, block) await tx_source.put(block_event) - for tx in block['transactions']: + for tx in block["transactions"]: result = await ws.receive() json_result = json.loads(result.data) - assert json_result['transaction_id'] == tx.id + assert json_result["transaction_id"] == tx.id # Since the transactions are all CREATEs, asset id == transaction id - assert json_result['asset_id'] == tx.id - assert json_result['height'] == block['height'] + assert json_result["asset_id"] == tx.id + assert json_result["height"] == block["height"] await tx_source.put(events.POISON_PILL) + @pytest.mark.asyncio async def test_websocket_string_event(aiohttp_client, event_loop): from planetmint.events import POISON_PILL @@ -207,23 +209,23 @@ async def test_websocket_string_event(aiohttp_client, event_loop): client = await aiohttp_client(app) ws = await client.ws_connect(EVENTS_ENDPOINT) - await tx_source.put('hack') - await tx_source.put('the') - await tx_source.put('planet!') + await tx_source.put("hack") + await tx_source.put("the") + await tx_source.put("planet!") result = await ws.receive() - assert result.data == 'hack' + assert result.data == "hack" result = await ws.receive() - assert result.data == 'the' + assert result.data == "the" result = await ws.receive() - 
assert result.data == 'planet!' + assert result.data == "planet!" await tx_source.put(POISON_PILL) -@pytest.mark.skip('Processes are not stopping properly, and the whole test suite would hang') +@pytest.mark.skip("Processes are not stopping properly, and the whole test suite would hang") def test_integration_from_webapi_to_websocket(monkeypatch, client, loop): # XXX: I think that the `pytest-aiohttp` plugin is sparkling too much # magic in the `asyncio` module: running this test without monkey-patching @@ -232,13 +234,14 @@ def test_integration_from_webapi_to_websocket(monkeypatch, client, loop): # # That's pretty weird because this test doesn't use the pytest-aiohttp # plugin explicitely. - monkeypatch.setattr('asyncio.get_event_loop', lambda: loop) + monkeypatch.setattr("asyncio.get_event_loop", lambda: loop) import json import random import aiohttp from planetmint.transactions.common import crypto + # TODO processes does not exist anymore, when reactivating this test it # will fail because of this from planetmint import processes @@ -249,9 +252,10 @@ def test_integration_from_webapi_to_websocket(monkeypatch, client, loop): loop = asyncio.get_event_loop() import time + time.sleep(1) - ws_url = client.get('http://localhost:9984/api/v1/').json['_links']['streams_v1'] + ws_url = client.get("http://localhost:9984/api/v1/").json["_links"]["streams_v1"] # Connect to the WebSocket endpoint session = aiohttp.ClientSession() @@ -259,12 +263,12 @@ def test_integration_from_webapi_to_websocket(monkeypatch, client, loop): # Create a keypair and generate a new asset user_priv, user_pub = crypto.generate_key_pair() - asset = {'random': random.random()} + asset = {"random": random.random()} tx = Create.generate([user_pub], [([user_pub], 1)], asset=asset) tx = tx.sign([user_priv]) # Post the transaction to the Planetmint Web API - client.post('/api/v1/transactions/', data=json.dumps(tx.to_dict())) + client.post("/api/v1/transactions/", data=json.dumps(tx.to_dict())) result = 
loop.run_until_complete(ws.receive()) json_result = json.loads(result.data) - assert json_result['transaction_id'] == tx.id + assert json_result["transaction_id"] == tx.id diff --git a/tox.ini b/tox.ini index 86badc5..853028d 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,10 @@ [tox] skipsdist = true -envlist = py{39}, flake8, docsroot +envlist = py{39}, docsroot + +[gh-actions] +python = + 3.9 = docsroot [base] basepython = python3.9 @@ -15,18 +19,6 @@ install_command = pip install {opts} {packages} extras = test commands = pytest -v -n auto --cov=planetmint --basetemp={envtmpdir} -[testenv:flake8] -basepython = {[base]basepython} -deps = - {[base]deps} - flake8 -skip_install = True -extras = None -commands = flake8 planetmint tests - -[flake8] -ignore = E126 E127 W504 E302 E126 E305 W503 E712 F401 - [testenv:docsroot] basepython = {[base]basepython} changedir = docs/root/source