updated to newest driver tarantool 0.12.1
Signed-off-by: Jürgen Eckel <juergen@riddleandcode.com>
parent 3ffff234af · commit 2d83d449cf
@@ -23,7 +23,7 @@ services:
      - "8081:8081"
    volumes:
      - ./planetmint/backend/tarantool/init.lua:/opt/tarantool/init.lua
-   command: tarantool /opt/tarantool/init.lua
+   entrypoint: tarantool /opt/tarantool/init.lua
    restart: always
  planetmint:
    depends_on:
@@ -49,7 +49,7 @@ class ApplicationLogic(BaseApplication):
logger.debug("Checking values of types")
logger.debug(dir(types_pb2))
self.events_queue = events_queue
- self.validator = validator if validator else Validator()#(async_io=True)
+ self.validator = validator if validator else Validator() # (async_io=True)
self.models = models or Models()
self.block_txn_ids = []
self.block_txn_hash = ""
@@ -81,10 +81,7 @@ class ApplicationLogic(BaseApplication):
chain_id = known_chain["chain_id"]

if known_chain["is_synced"]:
- msg = (
- f"Got invalid InitChain ABCI request ({genesis}) - "
- f"the chain {chain_id} is already synced."
- )
+ msg = f"Got invalid InitChain ABCI request ({genesis}) - " f"the chain {chain_id} is already synced."
logger.error(msg)
sys.exit(1)
if chain_id != genesis.chain_id:
@@ -165,9 +162,7 @@ class ApplicationLogic(BaseApplication):

chain_shift = 0 if self.chain is None else self.chain["height"]
# req_begin_block.header.num_txs not found, so removing it.
- logger.debug(
- "BEGIN BLOCK, height:%s", req_begin_block.header.height + chain_shift
- )
+ logger.debug("BEGIN BLOCK, height:%s", req_begin_block.header.height + chain_shift)

self.block_txn_ids = []
self.block_transactions = []
@@ -183,9 +178,7 @@ class ApplicationLogic(BaseApplication):
self.abort_if_abci_chain_is_not_synced()

logger.debug("deliver_tx: %s", raw_transaction)
- transaction = self.validator.is_valid_transaction(
- decode_transaction(raw_transaction), self.block_transactions
- )
+ transaction = self.validator.is_valid_transaction(decode_transaction(raw_transaction), self.block_transactions)

if not transaction:
logger.debug("deliver_tx: INVALID")
@@ -226,9 +219,7 @@ class ApplicationLogic(BaseApplication):
else:
self.block_txn_hash = block["app_hash"]

- validator_update = self.validator.process_block(
- self.new_height, self.block_transactions
- )
+ validator_update = self.validator.process_block(self.new_height, self.block_transactions)

return ResponseEndBlock(validator_updates=validator_update)

@@ -40,7 +40,7 @@ class DBSingleton(type):
raise ConfigurationError
modulepath, _, class_name = BACKENDS[backend].rpartition(".")
if "async_io" in kwargs and kwargs["async_io"] == True:
- class_name = class_name+"AsyncIO"
+ class_name = class_name + "AsyncIO"
Class = getattr(import_module(modulepath), class_name)
cls._instances[cls] = super(DBSingleton, Class).__call__(*args, **kwargs)
return cls._instances[cls]
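The async_io switch above selects the backend class purely by naming convention: the configured class name gets an "AsyncIO" suffix before it is imported. A minimal sketch of that lookup, assuming a hypothetical BACKENDS mapping (the real one lives in the planetmint configuration):

from importlib import import_module

# Hypothetical backend registry; the real mapping is defined by planetmint, not by this commit.
BACKENDS = {"tarantool_db": "planetmint.backend.tarantool.sync_io.connection.TarantoolDBConnection"}

def resolve_backend_class(backend: str, async_io: bool = False):
    # Split "package.module.ClassName" into a module path and a class name.
    modulepath, _, class_name = BACKENDS[backend].rpartition(".")
    if async_io:
        # The asyncio variant is expected to live next to the sync one,
        # e.g. TarantoolDBConnectionAsyncIO.
        class_name = class_name + "AsyncIO"
    return getattr(import_module(modulepath), class_name)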
@@ -59,7 +59,7 @@ class TarantoolDBConnection(DBConnection):

async def connect(self):
if not self.__conn:
- self.__conn = asynctnt.Connection(host='127.0.0.1', port=3301)
+ self.__conn = asynctnt.Connection(host="127.0.0.1", port=3301)
await self.__conn.connect()
return self.__conn

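For context, this is roughly how the asynctnt connection created above is driven from an event loop; the host, port and the ping call are illustrative and not taken from the commit:

import asyncio
import asynctnt

async def main():
    # Lazily create and open a connection, mirroring the connect() method above.
    conn = asynctnt.Connection(host="127.0.0.1", port=3301)
    await conn.connect()
    try:
        await conn.ping()  # simple round-trip to verify the connection is usable
    finally:
        await conn.disconnect()

asyncio.run(main())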
@@ -363,7 +363,6 @@ def store_pre_commit_state(connection, state: dict):
connection.space(TARANT_TABLE_PRE_COMMITS).upsert(
_precommitTuple,
op_list=[("=", 1, state["height"]), ("=", 2, state[TARANT_TABLE_TRANSACTION])],
- limit=1,
),
only_data=False,
)
@@ -392,7 +391,6 @@ def store_validator_set(conn, validators_update: dict):
conn.space(TARANT_TABLE_VALIDATOR_SETS).upsert(
(unique_id, validators_update["height"], validators_update["validators"]),
op_list=[("=", 1, validators_update["height"]), ("=", 2, validators_update["validators"])],
- limit=1,
),
only_data=False,
)
@@ -413,7 +411,7 @@ def store_election(connection, election_id: str, height: int, is_concluded: bool
try:
connection.run(
connection.space(TARANT_TABLE_ELECTIONS).upsert(
- (election_id, height, is_concluded), op_list=[("=", 1, height), ("=", 2, is_concluded)], limit=1
+ (election_id, height, is_concluded), op_list=[("=", 1, height), ("=", 2, is_concluded)]
),
only_data=False,
)
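The recurring change in these query hunks is that upsert no longer receives limit=1, which matches the 0.12.x driver where upsert only takes the tuple and the list of update operations. A hedged sketch of the bare driver call shape (space name and values are made up; the operation tuples mirror the code above, and a running Tarantool instance is assumed):

import tarantool

conn = tarantool.Connection(host="127.0.0.1", port=3301)
election_id, height, is_concluded = "a1b2c3", 7, False

# Insert the tuple if it does not exist yet, otherwise apply the update operations
# (operator, field, value) exactly as in the query code above; no limit argument.
conn.upsert(
    "elections",
    (election_id, height, is_concluded),
    [("=", 1, height), ("=", 2, is_concluded)],
)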
@@ -1,5 +1,10 @@
box.cfg{listen = 3303}

+ box.once("bootstrap", function()
+ box.schema.user.grant('guest','read,write,execute,create,drop','universe')
+ end)
+
+
function init()
-- ABCI chains
abci_chains = box.schema.create_space('abci_chains', { if_not_exists = true })
@@ -317,4 +322,4 @@ end

function delete_output( id )
box.space.outputs:delete(id)
end
end
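The bootstrap block added at the top of init.lua grants the guest user broad rights, which is what allows the Python driver to call the stored Lua procedures (init, drop, delete_output and friends) without authentication. A minimal sketch of such a call, assuming a Tarantool instance running this init.lua and listening on 3303:

import tarantool

# Connect as guest; the box.once("bootstrap", ...) grant above makes this possible.
conn = tarantool.Connection(host="127.0.0.1", port=3303)
conn.call("init")   # create the spaces and indexes defined in init.lua
# ... run queries against the freshly created spaces ...
conn.call("drop")   # tear everything down again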
@@ -56,11 +56,15 @@ class TarantoolDBConnection(DBConnection):
with open(path, "r") as f:
execute = f.readlines()
f.close()
- return "".join(execute).encode()
+ return "".join(execute).encode(encoding="utf-8")

def connect(self):
if not self.__conn:
- self.__conn = tarantool.connect(host=self.host, port=self.port)
+ self.__conn = tarantool.Connection(
+ host=self.host, port=self.port, encoding="utf-8", connect_now=True, reconnect_delay=0.1
+ )
+ elif self.__conn.connected == False:
+ self.__conn.connect()
return self.__conn

def close(self):
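The sync connection now builds an explicit tarantool.Connection with an encoding, an eager connect_now and a reconnect_delay, and the new elif branch reopens a dropped socket. A condensed sketch of that lazy, reconnecting accessor as a standalone helper (the module-level state is an assumption made for brevity):

import tarantool

_conn = None

def get_connection(host="127.0.0.1", port=3301):
    """Return a live connection, creating or re-opening it as needed."""
    global _conn
    if _conn is None:
        _conn = tarantool.Connection(
            host=host,
            port=port,
            encoding="utf-8",
            connect_now=True,      # open the socket immediately
            reconnect_delay=0.1,   # retry interval the driver uses when reconnecting
        )
    elif not _conn.connected:
        _conn.connect()            # re-open a previously closed connection
    return _conn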
@@ -81,14 +85,5 @@ class TarantoolDBConnection(DBConnection):
def drop_database(self):
self.connect().call("drop")

- # def run(self, query, only_data=True):
- # try:
- # conn = self.connect()
- # return query.run(conn).data if only_data else query.run(conn)
- # except tarantool.error.OperationalError as op_error:
- # raise op_error
- # except tarantool.error.NetworkError as net_error:
- # raise net_error

def init_database(self):
self.connect().call("init")
@@ -38,9 +38,10 @@ from transactions.common.transaction import Transaction
logger = logging.getLogger(__name__)
register_query = module_dispatch_registrar(query)

- from tarantool.error import OperationalError, NetworkError
+ from tarantool.error import OperationalError, NetworkError, SchemaError
from functools import wraps


def catch_db_exception(function_to_decorate):
@wraps(function_to_decorate)
def wrapper(*args, **kw):
@@ -48,9 +49,12 @@ def catch_db_exception(function_to_decorate):
output = function_to_decorate(*args, **kw)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
return output

return wrapper

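Reassembled from the two hunks above for readability, the decorator now lets the driver's SchemaError propagate alongside OperationalError and NetworkError. This sketch only mirrors what the diff shows; the full function in the repository may carry additional handling:

from functools import wraps

from tarantool.error import NetworkError, OperationalError, SchemaError

def catch_db_exception(function_to_decorate):
    """Run the wrapped query and let driver-level errors propagate unchanged."""

    @wraps(function_to_decorate)
    def wrapper(*args, **kw):
        try:
            output = function_to_decorate(*args, **kw)
        except OperationalError as op_error:
            raise op_error
        except SchemaError as schema_error:
            raise schema_error
        except NetworkError as net_error:
            raise net_error
        return output

    return wrapper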
@@ -97,10 +101,15 @@ def get_transactions_by_asset(connection, asset: str, limit: int = 1000) -> list
@register_query(TarantoolDBConnection)
@catch_db_exception
def get_transactions_by_metadata(connection, metadata: str, limit: int = 1000) -> list[DbTransaction]:
- txs = connection.space(TARANT_TABLE_TRANSACTION).select(metadata, limit=limit, index="transactions_by_metadata_cid").data
+ txs = (
+ connection.space(TARANT_TABLE_TRANSACTION)
+ .select(metadata, limit=limit, index="transactions_by_metadata_cid")
+ .data
+ )
tx_ids = [tx[0] for tx in txs]
return get_complete_transactions_by_ids(connection, tx_ids)


@catch_db_exception
def store_transaction_outputs(connection, output: Output, index: int) -> str:
output_id = uuid4().hex
@@ -118,6 +127,8 @@ def store_transaction_outputs(connection, output: Output, index: int) -> str:
return output_id
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
@@ -159,6 +170,8 @@ def store_transaction(connection, transaction, table=TARANT_TABLE_TRANSACTION):
connection.space(table).insert(tx)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
@@ -213,9 +226,11 @@ def get_assets(connection, assets_ids: list) -> list[Asset]:
@register_query(TarantoolDBConnection)
@catch_db_exception
def get_spent(connection, fullfil_transaction_id: str, fullfil_output_index: str) -> list[DbTransaction]:
- _inputs = connection.space(TARANT_TABLE_TRANSACTION).select(
- [fullfil_transaction_id, fullfil_output_index], index=TARANT_INDEX_SPENDING_BY_ID_AND_OUTPUT_INDEX
- ).data
+ _inputs = (
+ connection.space(TARANT_TABLE_TRANSACTION)
+ .select([fullfil_transaction_id, fullfil_output_index], index=TARANT_INDEX_SPENDING_BY_ID_AND_OUTPUT_INDEX)
+ .data
+ )
return get_complete_transactions_by_ids(txids=[inp[0] for inp in _inputs], connection=connection)

@@ -253,14 +268,20 @@ def store_block(connection, block: dict):
def get_txids_filtered(connection, asset_ids: list[str], operation: str = "", last_tx: bool = False) -> list[str]:
transactions = []
if operation == "CREATE":
- transactions = connection.space(TARANT_TABLE_TRANSACTION).select(
- [asset_ids[0], operation], index="transactions_by_id_and_operation"
- ).data
+ transactions = (
+ connection.space(TARANT_TABLE_TRANSACTION)
+ .select([asset_ids[0], operation], index="transactions_by_id_and_operation")
+ .data
+ )
elif operation == "TRANSFER":
- transactions = connection.space(TARANT_TABLE_TRANSACTION).select(asset_ids, index=TARANT_INDEX_TX_BY_ASSET_ID).data
+ transactions = (
+ connection.space(TARANT_TABLE_TRANSACTION).select(asset_ids, index=TARANT_INDEX_TX_BY_ASSET_ID).data
+ )
else:
txs = connection.space(TARANT_TABLE_TRANSACTION).select(asset_ids, index=TARANT_ID_SEARCH).data
- asset_txs = connection.space(TARANT_TABLE_TRANSACTION).select(asset_ids, index=TARANT_INDEX_TX_BY_ASSET_ID).data
+ asset_txs = (
+ connection.space(TARANT_TABLE_TRANSACTION).select(asset_ids, index=TARANT_INDEX_TX_BY_ASSET_ID).data
+ )
transactions = txs + asset_txs

ids = tuple([tx[0] for tx in transactions])
@@ -345,9 +366,11 @@ def store_unspent_outputs(connection, *unspent_outputs: list):
if unspent_outputs:
for utxo in unspent_outputs:
try:
- output = connection.space(TARANT_TABLE_UTXOS).insert(
- (uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo)
- ).data
+ output = (
+ connection.space(TARANT_TABLE_UTXOS)
+ .insert((uuid4().hex, utxo["transaction_id"], utxo["output_index"], utxo))
+ .data
+ )
result.append(output)
except Exception as e:
logger.info(f"Could not insert unspent output: {e}")
@@ -361,10 +384,13 @@ def delete_unspent_outputs(connection, *unspent_outputs: list):
result = []
if unspent_outputs:
for utxo in unspent_outputs:
- output = connection.space(TARANT_TABLE_UTXOS).delete(
- (utxo["transaction_id"], utxo["output_index"]),
- index="utxo_by_transaction_id_and_output_index"
- ).data
+ output = (
+ connection.space(TARANT_TABLE_UTXOS)
+ .delete(
+ (utxo["transaction_id"], utxo["output_index"]), index="utxo_by_transaction_id_and_output_index"
+ )
+ .data
+ )
result.append(output)
return result

@@ -389,10 +415,11 @@ def store_pre_commit_state(connection, state: dict):
connection.space(TARANT_TABLE_PRE_COMMITS).upsert(
_precommitTuple,
op_list=[("=", 1, state["height"]), ("=", 2, state[TARANT_TABLE_TRANSACTION])],
- limit=1,
)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
@@ -413,14 +440,21 @@ def get_pre_commit_state(connection) -> dict:
@register_query(TarantoolDBConnection)
@catch_db_exception
def store_validator_set(conn, validators_update: dict):
- _validator = conn.space(TARANT_TABLE_VALIDATOR_SETS).select(validators_update["height"], index="height", limit=1).data
+ _validator = (
+ conn.space(TARANT_TABLE_VALIDATOR_SETS).select(validators_update["height"], index="height", limit=1).data
+ )
unique_id = uuid4().hex if _validator is None or len(_validator) == 0 else _validator[0][0]
try:
conn.space(TARANT_TABLE_VALIDATOR_SETS).upsert(
(unique_id, validators_update["height"], validators_update["validators"]),
op_list=[("=", 1, validators_update["height"]), ("=", 2, validators_update["validators"])],
- limit=1,
)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
logger.info(f"Could not insert validator set: {e}")
raise OperationDataInsertionError()
@@ -439,10 +473,12 @@ def delete_validator_set(connection, height: int):
def store_election(connection, election_id: str, height: int, is_concluded: bool):
try:
connection.space(TARANT_TABLE_ELECTIONS).upsert(
- (election_id, height, is_concluded), op_list=[("=", 1, height), ("=", 2, is_concluded)], limit=1
+ (election_id, height, is_concluded), op_list=[("=", 1, height), ("=", 2, is_concluded)]
)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
@@ -460,6 +496,8 @@ def store_elections(connection, elections: list):
)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
@@ -504,7 +542,9 @@ def get_election(connection, election_id: str) -> dict:
@catch_db_exception
def get_asset_tokens_for_public_key(connection, asset_id: str, public_key: str) -> list[DbTransaction]:
id_transactions = connection.space(TARANT_TABLE_GOVERNANCE).select([asset_id]).data
- asset_id_transactions = connection.space(TARANT_TABLE_GOVERNANCE).select([asset_id], index="governance_by_asset_id").data
+ asset_id_transactions = (
+ connection.space(TARANT_TABLE_GOVERNANCE).select([asset_id], index="governance_by_asset_id").data
+ )

transactions = id_transactions + asset_id_transactions
return get_complete_transactions_by_ids(connection, [_tx[0] for _tx in transactions])
@@ -520,6 +560,8 @@ def store_abci_chain(connection, height: int, chain_id: str, is_synced: bool = T
)
except OperationalError as op_error:
raise op_error
+ except SchemaError as schema_error:
+ raise schema_error
except NetworkError as net_error:
raise net_error
except Exception as e:
poetry.lock (generated, 118 changed lines)
@@ -1639,6 +1639,44 @@ files = [
[package.dependencies]
setuptools = "*"

[[package]]
name = "numpy"
version = "1.24.2"
description = "Fundamental package for array computing in Python"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"},
{file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"},
{file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"},
{file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"},
{file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"},
{file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"},
{file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"},
{file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"},
{file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"},
{file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"},
{file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"},
{file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"},
{file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"},
{file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"},
{file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"},
{file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"},
{file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = "sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"},
{file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"},
{file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"},
{file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"},
{file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"},
{file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"},
{file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"},
{file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"},
{file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"},
{file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"},
{file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"},
{file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"},
]

[[package]]
name = "packaging"
version = "23.0"
@@ -1651,6 +1689,55 @@ files = [
{file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"},
]

[[package]]
name = "pandas"
version = "1.5.3"
description = "Powerful data structures for data analysis, time series, and statistics"
category = "main"
optional = false
python-versions = ">=3.8"
files = [
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"},
{file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"},
{file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"},
{file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"},
{file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"},
{file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"},
{file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"},
{file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"},
{file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"},
{file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"},
{file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"},
{file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"},
{file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"},
{file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"},
{file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"},
{file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"},
{file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"},
{file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"},
{file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"},
{file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"},
]

[package.dependencies]
numpy = [
{version = ">=1.20.3", markers = "python_version < \"3.10\""},
{version = ">=1.21.0", markers = "python_version >= \"3.10\""},
{version = ">=1.23.2", markers = "python_version >= \"3.11\""},
]
python-dateutil = ">=2.8.1"
pytz = ">=2020.1"

[package.extras]
test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]

[[package]]
name = "parso"
version = "0.8.3"
@@ -2431,6 +2518,21 @@ files = [
{file = "python-baseconv-1.2.2.tar.gz", hash = "sha256:0539f8bd0464013b05ad62e0a1673f0ac9086c76b43ebf9f833053527cd9931b"},
]

[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]

[package.dependencies]
six = ">=1.5"

[[package]]
name = "python-decouple"
version = "3.7"
@@ -3001,18 +3103,20 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]

[[package]]
name = "tarantool"
- version = "0.7.1"
- description = "Python client library for Tarantool 1.6 Database"
+ version = "0.12.1"
+ description = "Python client library for Tarantool"
category = "main"
optional = false
- python-versions = "*"
+ python-versions = ">=3.6"
files = [
- {file = "tarantool-0.7.1-py3-none-any.whl", hash = "sha256:e557e5faf5337e6040eb324b43a21701986e0c37fae87d4c80011632faa20ff6"},
- {file = "tarantool-0.7.1.tar.gz", hash = "sha256:a4bf212e86c5f43dcb6baf89487f0db478a45e2c5a1b16926fbbc0e9aa6eae22"},
+ {file = "tarantool-0.12.1-py3-none-any.whl", hash = "sha256:711b47671aba6e6faedc71f57bc07a10f6d7ac728b696183e99a31f20082f187"},
+ {file = "tarantool-0.12.1.tar.gz", hash = "sha256:80370cb5de0e35572f9515f09d8fc616367162d858ec8aacd3b537820b695c0e"},
]

[package.dependencies]
- msgpack = ">=0.4.0"
+ msgpack = "*"
+ pandas = "*"
+ pytz = "*"

[[package]]
name = "tomli"
@@ -3357,4 +3461,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=
[metadata]
lock-version = "2.0"
python-versions = "^3.9"
- content-hash = "8e7e3423a56069acb929a5666dacf8cbd68434435e7638080f6dc030d371665a"
+ content-hash = "897f5a79e5084fc44f88d55ad8900da39fec9fa7dc14abb33a50c569e8e7c841"
@@ -35,7 +35,7 @@ jsonschema = "4.16.0"
logstats = "0.3.0"
packaging = ">=22.0"
pymongo = "3.11.4"
- tarantool = "0.7.1"
+ tarantool = ">=0.12.1"
python-rapidjson = ">=1.0"
pyyaml = "6.0.0"
requests = "2.25.1"
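With the constraint relaxed to tarantool >= 0.12.1, it can be worth confirming at runtime which driver version actually got installed when debugging mixed environments; a small optional check, not part of the commit:

from importlib.metadata import version

print("tarantool driver:", version("tarantool"))  # expected to report 0.12.1 or newer here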
@@ -37,6 +37,7 @@ def test_get_txids_filtered(signed_create_tx, signed_transfer_tx, db_conn):

def test_get_owned_ids(signed_create_tx, user_pk, db_conn):
from planetmint.backend.tarantool.sync_io import query
+ from planetmint.backend.connection import Connection

# insert a transaction
query.store_transactions(connection=db_conn, signed_transactions=[signed_create_tx.to_dict()])
@@ -101,7 +102,11 @@ def test_validator_update(db_conn):
from planetmint.backend.tarantool.sync_io import query

def gen_validator_update(height):
- return {"validators": [], "height": height, "election_id": f"election_id_at_height_{height}"}
+ return {
+ "validators": [],
+ "height": height,
+ "election_id": f"election_id_at_height_{height}",
+ }
# return {'data': 'somedata', 'height': height, 'election_id': f'election_id_at_height_{height}'}

for i in range(1, 100, 10):
@@ -141,6 +141,8 @@ def _bdb(_setup_database):
from planetmint.config import Config

conn = Connection()
+ conn.close()
+ conn.connect()
yield
dbname = Config().get()["database"]["name"]
flush_db(conn, dbname)
@@ -240,7 +242,6 @@ def merlin():


@pytest.fixture
- # def a():
def abci_fixture():
from tendermint.abci import types_pb2

@@ -272,7 +273,10 @@ def test_abci_rpc():
def b():
from planetmint.application import Validator

- return Validator()
+ validator = Validator()
+ validator.models.connection.close()
+ validator.models.connection.connect()
+ return validator


@pytest.fixture
@@ -384,7 +388,10 @@ def db_name(db_config):

@pytest.fixture
def db_conn():
- return Connection()
+ conn = Connection()
+ conn.close()
+ conn.connect()
+ return conn


@pytest.fixture
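The fixtures now cycle the connection (close, then connect) before a test runs, presumably so every test starts from a fresh socket. Reassembled from the hunk above into a self-contained pytest sketch; the Connection import path is the one used elsewhere in these tests:

import pytest

from planetmint.backend.connection import Connection

@pytest.fixture
def db_conn():
    conn = Connection()
    conn.close()    # drop any connection left over from a previous test
    conn.connect()  # start from a freshly opened socket
    return conn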
@@ -46,7 +46,6 @@ class TestBigchainApi(object):
b.models.store_bulk_transactions([transfer_tx2])

def test_double_inclusion(self, b, alice):

from planetmint.backend.exceptions import OperationError
from planetmint.backend.tarantool.sync_io.connection import TarantoolDBConnection

@@ -25,7 +25,7 @@ def test_app(b, eventqueue_fixture, init_chain_request):
from planetmint.abci.utils import calculate_hash
from transactions.common.crypto import generate_key_pair

- app = ApplicationLogic(validator=b, events_queue= eventqueue_fixture)
+ app = ApplicationLogic(validator=b, events_queue=eventqueue_fixture)
p = ProtocolHandler(app)

data = p.process("info", types.Request(info=types.RequestInfo(version=__tm_supported_versions__[0])))
@@ -38,7 +38,7 @@ def test_middleware_does_notstrip_content_type_from_other_methods():
assert "CONTENT_TYPE" in mock.call_args[0][0]


- def test_get_outputs_endpoint_with_content_type(client, user_pk):
+ def test_get_outputs_endpoint_with_content_type(client, user_pk,_bdb):
res = client.get(
OUTPUTS_ENDPOINT + "?public_key={}".format(user_pk), headers=[("Content-Type", "application/json")]
)
@@ -40,7 +40,7 @@ def test_get_transaction_endpoint(client, posted_create_tx):
assert res.status_code == 200


- def test_get_transaction_returns_404_if_not_found(client):
+ def test_get_transaction_returns_404_if_not_found(client, b):
res = client.get(TX_ENDPOINT + "123")
assert res.status_code == 404
