From 4d1815c4c42920f9d42b18862b31ede4c520efbd Mon Sep 17 00:00:00 2001
From: andrei
Date: Tue, 31 May 2022 16:40:39 +0300
Subject: [PATCH 1/2] queries in tarantool/query.py were adjusted to avoid NoneType errors

---
 planetmint/backend/tarantool/query.py | 125 ++++++++++++++------------
 1 file changed, 70 insertions(+), 55 deletions(-)

diff --git a/planetmint/backend/tarantool/query.py b/planetmint/backend/tarantool/query.py
index dc9acfa..88a4fb7 100644
--- a/planetmint/backend/tarantool/query.py
+++ b/planetmint/backend/tarantool/query.py
@@ -114,8 +114,8 @@ def get_metadata(connection, transaction_ids: list):
     for _id in transaction_ids:
         metadata = connection.run(
             connection.space("meta_data").select(_id, index="id_search")
-        ).data
-        if len(metadata) > 0:
+        )
+        if metadata is not None or len(metadata) > 0:
             _returned_data.append(metadata)
     return _returned_data if len(_returned_data) > 0 else None
@@ -125,7 +125,8 @@ def store_asset(connection, asset):
     convert = lambda obj: obj if isinstance(obj, tuple) else (obj, obj["id"], obj["id"])
     try:
         return connection.run(
-            connection.space("assets").insert(convert(asset))
+            connection.space("assets").insert(convert(asset)),
+            only_data=False
         )
     except tarantool.error.DatabaseError:
         pass
@@ -137,7 +138,8 @@ def store_assets(connection, assets: list):
     for asset in assets:
         try:
             connection.run(
-                connection.space("assets").insert(convert(asset))
+                connection.space("assets").insert(convert(asset)),
+                only_data=False
             )
         except tarantool.error.DatabaseError:
             pass
@@ -147,7 +149,7 @@ def get_asset(connection, asset_id: str):
     _data = connection.run(
         connection.space("assets").select(asset_id, index="txid_search")
-    ).data
+    )

     return _data[0][0] if len(_data) > 0 else []
@@ -164,7 +166,7 @@ def get_spent(connection, fullfil_transaction_id: str, fullfil_output_index: str):
     _inputs = connection.run(
         connection.space("inputs").select([fullfil_transaction_id, str(fullfil_output_index)], index="spent_search")
-    ).data
+    )
     _transactions = _group_transaction_by_ids(txids=[inp[0] for inp in _inputs], connection=connection)
     return _transactions
@@ -173,14 +175,14 @@ def get_latest_block(connection): # TODO Here is used DESCENDING OPERATOR
     _all_blocks = connection.run(
         connection.space("blocks").select()
-    ).data
+    )

     block = {"app_hash": '', "height": 0, "transactions": []}
     if len(_all_blocks) > 0:
         _block = sorted(_all_blocks, key=itemgetter(1), reverse=True)[0]
         _txids = connection.run(
             connection.space("blocks_tx").select(_block[2], index="block_search")
-        ).data
+        )
         block["app_hash"] = _block[0]
         block["height"] = _block[1]
         block["transactions"] = [tx[0] for tx in _txids]
@@ -195,11 +197,13 @@ def store_block(connection, block: dict):
     connection.run(
         connection.space("blocks").insert((block["app_hash"],
                                            block["height"],
-                                           block_unique_id))
+                                           block_unique_id)),
+        only_data=False
     )
     for txid in block["transactions"]:
         connection.run(
-            connection.space("blocks_tx").insert((txid, block_unique_id))
+            connection.space("blocks_tx").insert((txid, block_unique_id)),
+            only_data=False
         )
@@ -217,22 +221,22 @@ def get_txids_filtered(connection, asset_id: str, operation: str = None,
     if actions["sets"][0] == "CREATE":  # +
         _transactions = connection.run(
             connection.space("transactions").select([operation, asset_id], index=actions["index"])
-        ).data
+        )
     elif actions["sets"][0] == "TRANSFER":  # +
         _assets = connection.run(
             connection.space("assets").select([asset_id], index="only_asset_search")
-        ).data
+        )
         for asset in _assets:
             _txid = asset[1]
             _transactions = connection.run(
                 connection.space("transactions").select([operation, _txid], index=actions["index"])
-            ).data
+            )
             if len(_transactions) != 0:
                 break
     else:
         _tx_ids = connection.run(
             connection.space("transactions").select([asset_id], index="id_search")
-        ).data
+        )
         _assets_ids = connection.run(
             connection.space("assets").select([asset_id], index="only_asset_search")
         )
@@ -274,8 +278,8 @@ def _remove_text_score(asset):
 def get_owned_ids(connection, owner: str):
     _keys = connection.run(
         connection.space("keys").select(owner, index="keys_search")
-    ).data
-    if len(_keys) == 0:
+    )
+    if _keys is None or len(_keys) == 0:
         return []
     _transactionids = list(set([key[1] for key in _keys]))
     _transactions = _group_transaction_by_ids(txids=_transactionids, connection=connection)
@@ -299,13 +303,13 @@ def get_spending_transactions(connection, inputs):
 def get_block(connection, block_id=[]):
     _block = connection.run(
         connection.space("blocks").select(block_id, index="block_search", limit=1)
-    ).data
-    if len(_block) == 0:
+    )
+    if _block is None or len(_block) == 0:
         return []
     _block = _block[0]
     _txblock = connection.run(
         connection.space("blocks_tx").select(_block[2], index="block_search")
-    ).data
+    )
     return {"app_hash": _block[0], "height": _block[1], "transactions": [_tx[0] for _tx in _txblock]}
@@ -313,36 +317,35 @@ def get_block_with_transaction(connection, txid: str):
     _all_blocks_tx = connection.run(
         connection.space("blocks_tx").select(txid, index="id_search")
-    ).data
-    if len(_all_blocks_tx) == 0:
+    )
+    if _all_blocks_tx is None or len(_all_blocks_tx) == 0:
         return []
     _block = connection.run(
         connection.space("blocks").select(_all_blocks_tx[0][1], index="block_id_search")
-    ).data
+    )
     return [{"height": _height[1]} for _height in _block]


 @register_query(TarantoolDBConnection)
 def delete_transactions(connection, txn_ids: list):
-    tx_space = connection.get_space("transactions")
     for _id in txn_ids:
-        tx_space.delete(_id)
+        connection.run(connection.space("transactions").delete(_id), only_data=False)
     for _id in txn_ids:
-        _inputs = connection.run(connection.space("inputs").select(_id, index="id_search"))
-        _outputs = connection.run(connection.space("outputs").select(_id, index="id_search"))
-        _keys = connection.run(connection.space("keys").select(_id, index="txid_search"))
+        _inputs = connection.run(connection.space("inputs").select(_id, index="id_search"), only_data=False)
+        _outputs = connection.run(connection.space("outputs").select(_id, index="id_search"), only_data=False)
+        _keys = connection.run(connection.space("keys").select(_id, index="txid_search"), only_data=False)
         for _kID in _keys:
-            connection.run(connection.space("keys").delete(_kID[0], index="id_search"))
+            connection.run(connection.space("keys").delete(_kID[0], index="id_search"), only_data=False)
         for _inpID in _inputs:
-            connection.run(connection.space("inputs").delete(_inpID[5], index="delete_search"))
+            connection.run(connection.space("inputs").delete(_inpID[5], index="delete_search"), only_data=False)
         for _outpID in _outputs:
-            connection.run(connection.space("outputs").delete(_outpID[5], index="unique_search"))
+            connection.run(connection.space("outputs").delete(_outpID[5], index="unique_search"), only_data=False)
     for _id in txn_ids:
-        connection.run(connection.space("meta_data").delete(_id, index="id_search"))
connection.run(connection.space("meta_data").delete(_id, index="id_search"), only_data=False) for _id in txn_ids: - connection.run(connection.space("assets").delete(_id, index="txid_search")) + connection.run(connection.space("assets").delete(_id, index="txid_search"), only_data=False) @register_query(TarantoolDBConnection) @@ -352,7 +355,7 @@ def store_unspent_outputs(connection, *unspent_outputs: list): for utxo in unspent_outputs: output = connection.run( connection.space("utxos").insert((utxo['transaction_id'], utxo['output_index'], dumps(utxo))) - ).data + ) result.append(output) return result @@ -364,7 +367,7 @@ def delete_unspent_outputs(connection, *unspent_outputs: list): for utxo in unspent_outputs: output = connection.run( connection.space("utxos").delete((utxo['transaction_id'], utxo['output_index'])) - ).data + ) result.append(output) return result @@ -373,7 +376,7 @@ def delete_unspent_outputs(connection, *unspent_outputs: list): def get_unspent_outputs(connection, query=None): # for now we don't have implementation for 'query'. _utxos = connection.run( connection.space("utxos").select([]) - ).data + ) return [loads(utx[2]) for utx in _utxos] @@ -382,13 +385,14 @@ def store_pre_commit_state(connection, state: dict): _precommit = connection.run( connection.space("pre_commits").select(state["height"], index="height_search", limit=1) ) - unique_id = token_hex(8) if (len(_precommit.data) == 0) else _precommit.data[0][0] + unique_id = token_hex(8) if _precommit is None or len(_precommit.data) == 0 else _precommit.data[0][0] connection.run( connection.space("pre_commits").upsert((unique_id, state["height"], state["transactions"]), op_list=[('=', 0, unique_id), ('=', 1, state["height"]), ('=', 2, state["transactions"])], - limit=1) + limit=1), + only_data=False ) @@ -408,13 +412,14 @@ def store_validator_set(conn, validators_update: dict): _validator = conn.run( conn.space("validators").select(validators_update["height"], index="height_search", limit=1) ) - unique_id = token_hex(8) if (len(_validator.data) == 0) else _validator.data[0][0] + unique_id = token_hex(8) if _validator is None or len(_validator.data) == 0 else _validator.data[0][0] conn.run( conn.space("validators").upsert((unique_id, validators_update["height"], validators_update["validators"]), op_list=[('=', 0, unique_id), ('=', 1, validators_update["height"]), ('=', 2, validators_update["validators"])], - limit=1) + limit=1), + only_data=False ) @@ -423,9 +428,10 @@ def delete_validator_set(connection, height: int): _validators = connection.run( connection.space("validators").select(height, index="height_search") ) - for _valid in _validators.data: + for _valid in _validators: connection.run( - connection.space("validators").delete(_valid[0]) + connection.space("validators").delete(_valid[0]), + only_data=False ) @@ -436,7 +442,8 @@ def store_election(connection, election_id: str, height: int, is_concluded: bool op_list=[('=', 0, election_id), ('=', 1, height), ('=', 2, is_concluded)], - limit=1) + limit=1), + only_data=False ) @@ -446,7 +453,8 @@ def store_elections(connection, elections: list): _election = connection.run( connection.space("elections").insert((election["election_id"], election["height"], - election["is_concluded"])) + election["is_concluded"])), + only_data=False ) @@ -454,10 +462,11 @@ def store_elections(connection, elections: list): def delete_elections(connection, height: int): _elections = connection.run( connection.space("elections").select(height, index="height_search") - ).data + ) for _elec 
         connection.run(
-            connection.space("elections").delete(_elec[0])
+            connection.space("elections").delete(_elec[0]),
+            only_data=False
         )
@@ -465,7 +474,7 @@ def delete_elections(connection, height: int):
 def get_validator_set(connection, height: int = None):
     _validators = connection.run(
         connection.space("validators").select()
-    ).data
+    )
     if height is not None:
         _validators = [{"height": validator[1], "validators": validator[2]} for validator in _validators
                        if validator[1] <= height]
@@ -479,18 +488,22 @@ def get_election(connection, election_id: str):
     _elections = connection.run(
         connection.space("elections").select(election_id, index="id_search")
-    ).data
-    if len(_elections) == 0:
+    )
+    if _elections is None or len(_elections) == 0:
         return None
     _election = sorted(_elections, key=itemgetter(0), reverse=True)[0]
     return {"election_id": _election[0], "height": _election[1], "is_concluded": _election[2]}


 @register_query(TarantoolDBConnection)
-def get_asset_tokens_for_public_key(connection, asset_id: str, public_key: str):
+def get_asset_tokens_for_public_key(connection, asset_id: str, public_key: str):  # FIXME Something can be wrong with this function ! (public_key) is not used
+    # space = connection.space("keys")
+    # _keys = space.select([public_key], index="keys_search")
     _transactions = connection.run(
         connection.space("assets").select([asset_id], index="assetid_search")
-    ).data
+    )
+    # _transactions = _transactions
+    # _keys = _keys.data
     _grouped_transactions = _group_transaction_by_ids(connection=connection, txids=[_tx[1] for _tx in _transactions])
     return _grouped_transactions
@@ -502,7 +515,8 @@ def store_abci_chain(connection, height: int, chain_id: str, is_synced: bool = T
                                                   op_list=[('=', 0, height),
                                                            ('=', 1, is_synced),
                                                            ('=', 2, chain_id)],
-                                                  limit=1)
+                                                  limit=1),
+        only_data=False
     )
@@ -510,10 +524,11 @@ def delete_abci_chain(connection, height: int):
     _chains = connection.run(
         connection.space("abci_chains").select(height, index="height_search")
-    ).data
+    )
     for _chain in _chains:
         connection.run(
-            connection.space("abci_chains").delete(_chain[2])
+            connection.space("abci_chains").delete(_chain[2]),
+            only_data=False
         )
@@ -521,8 +536,8 @@ def get_latest_abci_chain(connection):
     _all_chains = connection.run(
         connection.space("abci_chains").select()
-    ).data
-    if len(_all_chains) == 0:
+    )
+    if _all_chains is None or len(_all_chains) == 0:
         return None
     _chain = sorted(_all_chains, key=itemgetter(0), reverse=True)[0]
     return {"height": _chain[0], "is_synced": _chain[1], "chain_id": _chain[2]}

From 4819fccb59e13e767cd07f54018fde06b4587cc1 Mon Sep 17 00:00:00 2001
From: andrei
Date: Tue, 31 May 2022 17:33:33 +0300
Subject: [PATCH 2/2] fixes for NoneType errors

---
 planetmint/backend/tarantool/query.py | 31 +++++++++++++++------------
 1 file changed, 17 insertions(+), 14 deletions(-)

diff --git a/planetmint/backend/tarantool/query.py b/planetmint/backend/tarantool/query.py
index 88a4fb7..8b8d141 100644
--- a/planetmint/backend/tarantool/query.py
+++ b/planetmint/backend/tarantool/query.py
@@ -115,8 +115,9 @@ def get_metadata(connection, transaction_ids: list):
         metadata = connection.run(
             connection.space("meta_data").select(_id, index="id_search")
         )
-        if metadata is not None or len(metadata) > 0:
-            _returned_data.append(metadata)
+        if metadata is not None:
+            if len(metadata) > 0:
+                _returned_data.append(metadata)
     return _returned_data if len(_returned_data) > 0 else None
@@ -178,16 +179,17 @@ def get_latest_block(connection): # TODO Here is used DESCENDING OPERATOR
     )

     block = {"app_hash": '', "height": 0, "transactions": []}
-    if len(_all_blocks) > 0:
-        _block = sorted(_all_blocks, key=itemgetter(1), reverse=True)[0]
-        _txids = connection.run(
-            connection.space("blocks_tx").select(_block[2], index="block_search")
-        )
-        block["app_hash"] = _block[0]
-        block["height"] = _block[1]
-        block["transactions"] = [tx[0] for tx in _txids]
-    else:
-        block = None
+    if _all_blocks is not None:
+        if len(_all_blocks) > 0:
+            _block = sorted(_all_blocks, key=itemgetter(1), reverse=True)[0]
+            _txids = connection.run(
+                connection.space("blocks_tx").select(_block[2], index="block_search")
+            )
+            block["app_hash"] = _block[0]
+            block["height"] = _block[1]
+            block["transactions"] = [tx[0] for tx in _txids]
+        else:
+            block = None
     return block
@@ -475,13 +477,14 @@ def get_validator_set(connection, height: int = None):
     _validators = connection.run(
         connection.space("validators").select()
     )
-    if height is not None:
+    if height is not None and _validators is not None:
         _validators = [{"height": validator[1], "validators": validator[2]} for validator in _validators
                        if validator[1] <= height]
         return next(iter(sorted(_validators, key=lambda k: k["height"], reverse=True)), None)
-    else:
+    elif _validators is not None:
        _validators = [{"height": validator[1], "validators": validator[2]} for validator in _validators]
         return next(iter(sorted(_validators, key=lambda k: k["height"], reverse=True)), None)
+    return None


 @register_query(TarantoolDBConnection)
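Note on the pattern applied above: both patches repeat the same two adjustments at each call site — results of connection.run(...) are no longer dereferenced via .data and are guarded so that a None return is treated like an empty result, and write operations pass only_data=False. The sketch below is not part of either patch; it only illustrates how that guard could be expressed once, assuming connection.run(...) returns either None or a list of tuples (the helper name run_or_empty is hypothetical).

# A minimal sketch (not part of either patch): the None guard repeated
# throughout the diffs, factored into one helper. Assumption: connection.run()
# returns either None or a list of tuples; run_or_empty is a hypothetical name.
def run_or_empty(connection, query):
    """Run a Tarantool query and treat a None result as an empty result."""
    result = connection.run(query)
    return result if result is not None else []


# Usage mirroring get_metadata() from the patches:
def get_metadata_sketch(connection, transaction_ids):
    _returned_data = []
    for _id in transaction_ids:
        metadata = run_or_empty(
            connection, connection.space("meta_data").select(_id, index="id_search")
        )
        if len(metadata) > 0:
            _returned_data.append(metadata)
    return _returned_data if len(_returned_data) > 0 else None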