catching Tarantool exceptions in case of concurrency (implicitly issu… (#312)

* catching Tarantool exceptions in case of concurrency (implicitly issued by the planetmint-driver-ts tests)
* fixed black version
* blackified (new version)

---------

Signed-off-by: Jürgen Eckel <juergen@riddleandcode.com>
Jürgen Eckel 2023-02-01 13:43:39 +01:00 committed by GitHub
parent 87506ff4a1
commit 2bb0539b78
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
18 changed files with 30 additions and 34 deletions

View File

@@ -25,6 +25,9 @@ For reference, the possible headings are:
 * **Known Issues**
 * **Notes**
+## [2.2.2] - 2023-31-01
+* **Fixed** catching Tarantool exceptions in case the Tarantool drivers throw exceptions due to concurrency issues. This issue was identified during the testing of planetmint-driver-ts.
 ## [2.2.0] - 2023-31-01
 * **Changed** standardized blocks API
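The concurrency failures behind this **Fixed** entry are raised by the Tarantool driver and, in this commit, are handled at the web layer (the generic `except Exception` branches further down, which map them to HTTP 500). As a rough illustration of the failure mode, here is a minimal retry sketch; it assumes the `tarantool` package's `DatabaseError`/`NetworkError` exceptions, and the `with_tarantool_retry` helper is hypothetical, not part of this change:

```python
import time

import tarantool


def with_tarantool_retry(call, retries=3, delay=0.1):
    """Retry `call()` on transient Tarantool driver errors (hypothetical helper)."""
    for attempt in range(retries):
        try:
            return call()
        except (tarantool.error.DatabaseError, tarantool.error.NetworkError):
            if attempt == retries - 1:
                raise  # give up; the web layer turns this into an HTTP 500
            time.sleep(delay)


# Usage sketch (connection parameters are placeholders):
# conn = tarantool.Connection("localhost", 3301)
# rows = with_tarantool_retry(lambda: conn.select("transactions", tx_id))
```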

View File

@@ -115,7 +115,6 @@ def send_naughty_tx(assets, metadata):
 @pytest.mark.parametrize("naughty_string", naughty_strings, ids=naughty_strings)
 def test_naughty_keys(naughty_string):
     assets = [{"data": {naughty_string: "nice_value"}}]
     metadata = {naughty_string: "nice_value"}
@@ -124,7 +123,6 @@ def test_naughty_keys(naughty_string):
 @pytest.mark.parametrize("naughty_string", naughty_strings, ids=naughty_strings)
 def test_naughty_values(naughty_string):
     assets = [{"data": {"nice_key": naughty_string}}]
     metadata = {"nice_key": naughty_string}

View File

@@ -17,7 +17,6 @@ def test_zenroom_signing(
     zenroom_script_input,
     condition_script_zencode,
 ):
     biolabs = generate_keypair()
     version = "2.0"

View File

@@ -102,7 +102,6 @@ def store_block(conn, block):
 @register_query(LocalMongoDBConnection)
 def get_txids_filtered(conn, asset_ids, operation=None, last_tx=None):
     match = {
         Transaction.CREATE: {"operation": "CREATE", "id": {"$in": asset_ids}},
         Transaction.TRANSFER: {"operation": "TRANSFER", "asset.id": {"$in": asset_ids}},

View File

@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: (Apache-2.0 AND CC-BY-4.0)
 # Code is Apache-2.0 and docs are CC-BY-4.0
-__version__ = "2.2.1"
+__version__ = "2.2.2"
 __short_version__ = "2.2"
 # Supported Tendermint versions

View File

@@ -18,9 +18,9 @@ from planetmint.web.views import (
 def add_routes(app):
     """Add the routes to an app"""
-    for (prefix, routes) in API_SECTIONS:
+    for prefix, routes in API_SECTIONS:
         api = Api(app, prefix=prefix)
-        for ((pattern, resource, *args), kwargs) in routes:
+        for (pattern, resource, *args), kwargs in routes:
             kwargs.setdefault("strict_slashes", False)
             api.add_resource(resource, pattern, *args, **kwargs)
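The loop change above is only a black reformatting (redundant parentheses dropped), but it also documents the shape `add_routes` expects. A self-contained sketch of that shape, with an assumed example entry that is not taken from this commit:

```python
from flask_restful import Resource


class DummyApi(Resource):  # stand-in for a real view class such as OutputListApi
    def get(self):
        return {"ok": True}


# Each API_SECTIONS entry is (prefix, routes); each route is
# ((pattern, resource, *args), kwargs), which is what the two-level unpacking consumes.
API_SECTIONS_EXAMPLE = [
    ("/api/v1/", [(("dummy/", DummyApi), {"strict_slashes": False})]),
]
```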

View File

@@ -14,7 +14,7 @@ from planetmint.config import Config
 logger = logging.getLogger(__name__)
-def make_error(status_code, message=None):
+def make_error(status_code, message=None, level: str = "debug"):
     if status_code == 404 and message is None:
         message = "Not found"
@@ -22,6 +22,9 @@ def make_error(status_code, message=None):
     request_info = {"method": request.method, "path": request.path}
     request_info.update(response_content)
-    logger.debug("HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s", request_info)
+    if level == "error":
+        logger.error("HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s", request_info)
+    else:
+        logger.debug("HTTP API error: %(status)s - %(method)s:%(path)s - %(message)s", request_info)
     response = jsonify(response_content)
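A quick way to see the effect of the new `level` argument is a log-capture test. This is a hypothetical sketch, not part of the commit, assuming a Flask `app` fixture:

```python
import logging

from planetmint.web.views.base import make_error


def test_make_error_level(app, caplog):  # the `app` fixture is assumed
    with app.test_request_context("/api/v1/outputs/"):
        with caplog.at_level(logging.DEBUG):
            make_error(500, "boom", level="error")  # logged via logger.error
            make_error(404)                         # defaults to level="debug"
    assert logging.ERROR in [r.levelno for r in caplog.records]
```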

View File

@@ -6,6 +6,7 @@
 from flask import current_app
 from flask_restful import reqparse, Resource
 from planetmint.web.views import parameters
+from planetmint.web.views.base import make_error
 class OutputListApi(Resource):
@@ -23,5 +24,12 @@ class OutputListApi(Resource):
         pool = current_app.config["bigchain_pool"]
         with pool() as planet:
-            outputs = planet.get_outputs_filtered(args["public_key"], args["spent"])
+            try:
+                outputs = planet.get_outputs_filtered(args["public_key"], args["spent"])
+            except Exception as e:
+                return make_error(
+                    500,
+                    "Invalid output ({}): {} : {} - {}".format(type(e).__name__, e, args["public_key"], args["spent"]),
+                    level="error",
+                )
         return [{"transaction_id": output.txid, "output_index": output.output} for output in outputs]
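With this change a failing `get_outputs_filtered` call (for example a Tarantool error under concurrent load) surfaces as an HTTP 500 instead of an unhandled exception. A hypothetical regression-test sketch, assuming the `client`, `b`, and `monkeypatch` fixtures used elsewhere in this test suite and the `/api/v1/outputs/` route prefix:

```python
def test_outputs_endpoint_returns_500_on_driver_error(client, b, monkeypatch):
    def explode(*args, **kwargs):
        raise RuntimeError("simulated concurrent Tarantool failure")

    # Patch the class so instances served from `bigchain_pool` are affected too.
    monkeypatch.setattr(type(b), "get_outputs_filtered", explode)
    res = client.get("/api/v1/outputs/?public_key=abc")
    assert res.status_code == 500
```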

View File

@@ -73,7 +73,6 @@ class TransactionListApi(Resource):
         # `force` will try to format the body of the POST request even if the
         # `content-type` header is not set to `application/json`
         tx = request.get_json(force=True)
         try:
             tx_obj = Transaction.from_dict(tx, False)
         except SchemaValidationError as e:
@@ -85,12 +84,18 @@
             return make_error(400, "Invalid transaction ({}): {}".format(type(e).__name__, e))
         except ValidationError as e:
             return make_error(400, "Invalid transaction ({}): {}".format(type(e).__name__, e))
+        except Exception as e:
+            return make_error(500, "Invalid transaction ({}): {} - {}".format(type(e).__name__, e, tx), level="error")
         with pool() as planet:
             try:
                 planet.validate_transaction(tx_obj)
             except ValidationError as e:
                 return make_error(400, "Invalid transaction ({}): {}".format(type(e).__name__, e))
+            except Exception as e:
+                return make_error(
+                    500, "Invalid transaction ({}): {} : {}".format(type(e).__name__, e, tx), level="error"
+                )
             else:
                 if tx_obj.version != Transaction.VERSION:
                     return make_error(

View File

@@ -60,7 +60,7 @@ docs_require = [
     "mdit-py-plugins==0.3.0",
     "mdurl==0.1.1",
     "myst-parser==0.17.2",
-    "packaging==21.3",
+    "packaging>=22.0",
     "pockets==0.9.1",
     "Pygments==2.12.0",
     "pyparsing==3.0.8",
@@ -94,7 +94,7 @@ dev_require = ["ipdb", "ipython", "watchdog", "logging_tree", "pre-commit", "twi
 tests_require = [
     "coverage",
     "pep8",
-    "black",
+    "black>=23.1.0",
     "hypothesis>=5.3.0",
     "pytest>=3.0.0",
     "pytest-cov==2.8.1",
@@ -116,7 +116,7 @@ install_requires = [
     "gunicorn==20.1.0",
     "jsonschema==4.16.0",
     "logstats==0.3.0",
-    "packaging>=20.9",
+    "packaging>=22.0",
     "pymongo==3.11.4",
     "tarantool==0.7.1",
     "python-rapidjson>=1.0",

View File

@@ -34,7 +34,6 @@ def test_single_in_single_own_single_out_single_own_create(alice, user_pk, b):
 # Multiple outputs
 # Single owners_after per output
 def test_single_in_single_own_multiple_out_single_own_create(alice, user_pk, b):
     tx = Create.generate(
         [alice.public_key],
         [([user_pk], 50), ([user_pk], 50)],
@@ -55,7 +54,6 @@ def test_single_in_single_own_multiple_out_single_own_create(alice, user_pk, b):
 # Single output
 # Multiple owners_after
 def test_single_in_single_own_single_out_multiple_own_create(alice, user_pk, b):
     tx = Create.generate(
         [alice.public_key],
         [([user_pk, user_pk], 100)],
@@ -81,7 +79,6 @@ def test_single_in_single_own_single_out_multiple_own_create(alice, user_pk, b):
 # Mix: one output with a single owners_after, one output with multiple
 # owners_after
 def test_single_in_single_own_multiple_out_mix_own_create(alice, user_pk, b):
     tx = Create.generate(
         [alice.public_key],
         [([user_pk], 50), ([user_pk, user_pk], 50)],
@@ -130,7 +127,6 @@ def test_single_in_multiple_own_single_out_single_own_create(alice, b, user_pk,
 # Single output
 # Single owners_after
 def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk, user_sk):
     # CREATE divisible asset
     tx_create = Create.generate(
         [alice.public_key], [([user_pk], 100)], assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}]
@@ -155,7 +151,6 @@ def test_single_in_single_own_single_out_single_own_transfer(alice, b, user_pk,
 # Multiple output
 # Single owners_after
 def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk, user_sk):
     # CREATE divisible asset
     tx_create = Create.generate(
         [alice.public_key], [([user_pk], 100)], assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}]
@@ -183,7 +178,6 @@ def test_single_in_single_own_multiple_out_single_own_transfer(alice, b, user_pk
 # Single output
 # Multiple owners_after
 def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk, user_sk):
     # CREATE divisible asset
     tx_create = Create.generate(
         [alice.public_key], [([user_pk], 100)], assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}]
@@ -219,7 +213,6 @@ def test_single_in_single_own_single_out_multiple_own_transfer(alice, b, user_pk
 # Mix: one output with a single owners_after, one output with multiple
 # owners_after
 def test_single_in_single_own_multiple_out_mix_own_transfer(alice, b, user_pk, user_sk):
     # CREATE divisible asset
     tx_create = Create.generate(
         [alice.public_key], [([user_pk], 100)], assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}]
@@ -448,7 +441,6 @@ def test_muiltiple_in_mix_own_multiple_out_mix_own_transfer(alice, b, user_pk, u
 # Single output
 # Single owners_after
 def test_multiple_in_different_transactions(alice, b, user_pk, user_sk):
     # CREATE divisible asset
     # `b` creates a divisible asset and assigns 50 shares to `b` and
     # 50 shares to `user_pk`
@@ -549,7 +541,6 @@ def test_threshold_same_public_key(alice, b, user_pk, user_sk):
 def test_sum_amount(alice, b, user_pk, user_sk):
     # CREATE divisible asset with 3 outputs with amount 1
     tx_create = Create.generate(
         [alice.public_key],
@@ -575,7 +566,6 @@ def test_sum_amount(alice, b, user_pk, user_sk):
 def test_divide(alice, b, user_pk, user_sk):
     # CREATE divisible asset with 1 output with amount 3
     tx_create = Create.generate(
         [alice.public_key], [([user_pk], 3)], assets=[{"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}]

View File

@@ -52,7 +52,6 @@ metadata = {"units": 300, "type": "KG"}
 def test_zenroom_signing():
     biolabs = generate_key_pair()
     version = "3.0"

View File

@@ -106,7 +106,6 @@ class TestBigchainApi(object):
         b.validate_transaction(tx)
     def test_write_transaction(self, b, user_sk, user_pk, alice, create_tx):
         asset1 = {"data": "QmaozNR7DZHQK1ZcU9p7QdrshMvXqWK6gpu5rmrkPdT3L4"}
         tx = Create.generate([alice.public_key], [([alice.public_key], 1)], assets=[asset1]).sign([alice.private_key])

View File

@@ -315,7 +315,6 @@ def test_write_config():
     ),
 )
 def test_database_envs(env_name, env_value, config_key, monkeypatch):
     monkeypatch.setattr("os.environ", {env_name: env_value})
     planetmint.config_utils.autoconfigure()

View File

@@ -156,7 +156,6 @@ def test_valid_election_votes_received(b_mock, valid_upsert_validator_election,
 @pytest.mark.bdb
 def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed25519_node_keys):
     # Node 0: cast vote
     tx_vote0 = gen_vote(valid_upsert_validator_election, 0, ed25519_node_keys)
@@ -215,7 +214,6 @@ def test_valid_election_conclude(b_mock, valid_upsert_validator_election, ed2551
 @pytest.mark.abci
 def test_upsert_validator(b, node_key, node_keys, ed25519_node_keys):
     if b.get_latest_block()["height"] == 0:
         generate_block(b)
@@ -344,6 +342,6 @@ def test_get_validator_update(b, node_keys, node_key, ed25519_node_keys):
 def reset_validator_set(b, node_keys, height):
     validators = []
-    for (node_pub, _) in node_keys.items():
+    for node_pub, _ in node_keys.items():
         validators.append({"public_key": {"type": "ed25519-base64", "value": node_pub}, "voting_power": 10})
     b.store_validator_set(height, validators)

View File

@@ -37,7 +37,6 @@ def test_get_metadata_tendermint(client, b, alice):
 @pytest.mark.bdb
 def test_get_metadata_limit_tendermint(client, b, alice):
     # create two assets
     assets1 = [{"data": multihash(marshal({"msg": "abc 1"}))}]
     meta = multihash(marshal({"key": "meta 1"}))

View File

@@ -340,7 +340,6 @@ def test_post_invalid_transaction(
 @pytest.mark.abci
 def test_post_transfer_transaction_endpoint(client, user_pk, user_sk, posted_create_tx):
     transfer_tx = Transfer.generate(posted_create_tx.to_inputs(), [([user_pk], 1)], asset_ids=[posted_create_tx.id])
     transfer_tx = transfer_tx.sign([user_sk])

View File

@@ -138,7 +138,6 @@ async def test_bridge_sync_async_queue(event_loop):
 @pytest.mark.asyncio
 async def test_websocket_block_event(aiohttp_client, event_loop):
     user_priv, user_pub = crypto.generate_key_pair()
     tx = Create.generate([user_pub], [([user_pub], 1)])
     tx = tx.sign([user_priv])
@@ -169,7 +168,6 @@ async def test_websocket_block_event(aiohttp_client, event_loop):
 @pytest.mark.asyncio
 async def test_websocket_transaction_event(aiohttp_client, event_loop):
     user_priv, user_pub = crypto.generate_key_pair()
     tx = Create.generate([user_pub], [([user_pub], 1)])
     tx = tx.sign([user_priv])