Mirror of https://github.com/planetmint/planetmint.git (synced 2025-11-24 22:45:44 +00:00)
using init.lua for db setup

Signed-off-by: Lorenz Herzberger <lorenzherzberger@gmail.com>

commit b5d0edee61 (parent a91556f434)
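The commit moves database setup into two Lua stored procedures, init() and drop(), defined in init.lua and mounted into the Tarantool container by docker-compose; the Python side now just invokes them through the connector (connection.connect().call('init') / .call('drop')) instead of running per-space command strings. A minimal sketch of that call path, using the python-tarantool client already imported by TarantoolDBConnection; the host and port below are assumptions taken from the compose port mapping "3303:3303", not values stated elsewhere in the commit:

    import tarantool

    # Assumed endpoint: docker-compose maps the Tarantool service to host port 3303.
    conn = tarantool.Connection("localhost", 3303)

    conn.call("drop")   # drop() removes the Planetmint spaces (init.lua guards the calls with pcall)
    conn.call("init")   # init() (re)creates spaces and indexes; the added if_not_exists flags make re-runs safe

This mirrors what TarantoolDBConnection.init_database()/drop_database() and the updated test fixtures now do.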
@@ -22,8 +22,8 @@ services:
      - "3303:3303"
      - "8081:8081"
    volumes:
-      - ./planetmint/backend/tarantool/basic.lua:/opt/tarantool/basic.lua
-    command: tarantool /opt/tarantool/basic.lua
+      - ./planetmint/backend/tarantool/init.lua:/opt/tarantool/init.lua
+    command: tarantool /opt/tarantool/init.lua
    restart: always
  planetmint:
    depends_on:
@@ -12,6 +12,10 @@ class Transaction:
    id: str = ""
    operation: str = ""
    version: str = ""
+    metadata: str = ""
+    assets: list = field(default_factory=list)
+    inputs: list = field(default_factory=list)
+    scripts: Optional[map] = None

    @staticmethod
    def from_dict(transaction: dict) -> Transaction:
@@ -19,6 +23,10 @@ class Transaction:
            id=transaction["id"],
            operation=transaction["operation"],
            version=transaction["version"],
+            metadata=transaction["metadata"],
+            assets=transaction["assets"],
+            inputs=transaction["inputs"],
+            scripts=transaction["scripts"] if "scripts" in transaction.keys() else None
        )

@@ -118,8 +118,8 @@ def drop_database(connection, dbname):

    raise NotImplementedError


+@singledispatch
-def init_database(connection=None, dbname=None):
+def init_database(connection, dbname):
    """Initialize the configured backend for use with Planetmint.

    Creates a database with :attr:`dbname` with any required tables
@@ -134,11 +134,7 @@ def init_database(connection=None, dbname=None):
    configuration.
    """

-    connection = connection or Connection()
-    dbname = dbname or Config().get()["database"]["name"]
-
-    create_database(connection, dbname)
-    create_tables(connection, dbname)
+    raise NotImplementedError


def validate_language_key(obj, key):
@@ -33,18 +33,12 @@ class TarantoolDBConnection(DBConnection):
            self.connect()
            self.SPACE_NAMES = [
                "abci_chains",
-                "assets",
                "blocks",
-                "blocks_tx",
                "elections",
-                "meta_data",
                "pre_commits",
-                "validators",
+                "validator_sets",
                "transactions",
-                "inputs",
-                "outputs",
-                "keys",
-                "scripts",
+                "outputs"
            ]
        except tarantool.error.NetworkError as network_err:
            logger.info("Host cant be reached")
@@ -101,12 +95,14 @@ class TarantoolDBConnection(DBConnection):
            raise net_error

    def drop_database(self):
-        db_config = Config().get()["database"]
-        cmd_resp = self.run_command(command=self.drop_path, config=db_config)  # noqa: F841
+        self.connect().call('drop')
+        # db_config = Config().get()["database"]
+        # cmd_resp = self.run_command(command=self.drop_path, config=db_config)  # noqa: F841

    def init_database(self):
-        db_config = Config().get()["database"]
-        cmd_resp = self.run_command(command=self.init_path, config=db_config)  # noqa: F841
+        self.connect().call('init')
+        # db_config = Config().get()["database"]
+        # cmd_resp = self.run_command(command=self.init_path, config=db_config)  # noqa: F841

    def run_command(self, command: str, config: dict):
        from subprocess import run
@@ -10,5 +10,5 @@ TARANT_TABLE_TRANSACTION = "transactions"
TARANT_TABLE_INPUT = "inputs"
TARANT_TABLE_OUTPUT = "outputs"
TARANT_TABLE_SCRIPT = "scripts"
-TARANT_TX_ID_SEARCH = "txid_search"
-TARANT_ID_SEARCH = "id_search"
+TARANT_TX_ID_SEARCH = "transaction_id"
+TARANT_ID_SEARCH = "id"
@@ -8,8 +8,14 @@ function init()
        { name = 'height', type = 'unsigned' },
        { name = 'is_synced', type = 'boolean' }
    })
-    abci_chains:create_index('id', { parts = {'id'}})
-    abci_chains:create_index('height', { parts = {'height'}})
+    abci_chains:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    abci_chains:create_index('height', {
+        if_not_exists = true,
+        parts = {{ field = 'height', type = 'unsigned' }}
+    })


    -- Transactions
@@ -18,19 +24,26 @@ function init()
        { name = 'id', type = 'string' },
        { name = 'operation', type = 'string' },
        { name = 'version', type = 'string' },
-        { name = 'metadata', type = 'string' },
+        { name = 'metadata', type = 'string', is_nullable = true },
        { name = 'assets', type = 'array' },
        { name = 'inputs', type = 'array' },
        { name = 'scripts', type = 'map', is_nullable = true }
    })
-    transactions:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
-    transactions:create_index('transactions_by_asset', { parts = {
+    transactions:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    transactions:create_index('transactions_by_asset', {
+        if_not_exists = true,
+        parts = {
        { field = 'assets[*].id', type = 'string', is_nullable = true },
        { field = 'assets[*].data', type = 'string', is_nullable = true }
    }})
-    transactions:create_index('spending_transaction_by_id_and_output_index', { parts = {
-        { field = 'inputs[*].fulfills["transaction_id"]', type = 'string' },
-        { field = 'inputs[*].fulfills["output_index"]', type = 'unsigned' }
+    transactions:create_index('spending_transaction_by_id_and_output_index', {
+        if_not_exists = true,
+        parts = {
+        { field = 'inputs[*].fulfills["transaction_id"]', type = 'string', is_nullable = true },
+        { field = 'inputs[*].fulfills["output_index"]', type = 'unsigned', is_nullable = true }
    }})


@@ -44,9 +57,20 @@ function init()
        { name = 'output_index', type = 'number' },
        { name = 'transaction_id' , type = 'string' }
    })
-    outputs:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
-    outputs:create_index('transaction_id', { unique = false, parts = {{ field = 'id', type = 'string' }}})
-    outputs:create_index('public_keys', { unique = false, parts = {{field = 'public_keys[*]', type = 'string' }}})
+    outputs:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    outputs:create_index('transaction_id', {
+        if_not_exists = true,
+        unique = false,
+        parts = {{ field = 'transaction_id', type = 'string' }}
+    })
+    outputs:create_index('public_keys', {
+        if_not_exists = true,
+        unique = false,
+        parts = {{field = 'public_keys[*]', type = 'string' }}
+    })


    -- Precommits
@@ -56,8 +80,14 @@ function init()
        { name = 'height', type = 'unsigned' },
        { name = 'transaction_ids', type = 'array'}
    })
-    pre_commits:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
-    pre_commits:create_index('height', { parts = {{ field = 'height', type = 'unsigned' }}})
+    pre_commits:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    pre_commits:create_index('height', {
+        if_not_exists = true,
+        parts = {{ field = 'height', type = 'unsigned' }}
+    })


    -- Blocks
@@ -68,9 +98,18 @@ function init()
        { name = 'height', type = 'unsigned' },
        { name = 'transaction_ids', type = 'array' }
    })
-    blocks:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
-    blocks:create_index('height', { parts = {{ field = 'height', type = 'unsigned' }}})
-    blocks:create_index('block_by_transaction_id', { parts = {{ field = 'transaction_ids[*]', type = 'string' }}})
+    blocks:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    blocks:create_index('height', {
+        if_not_exists = true,
+        parts = {{ field = 'height', type = 'unsigned' }}
+    })
+    blocks:create_index('block_by_transaction_id', {
+        if_not_exists = true,
+        parts = {{ field = 'transaction_ids[*]', type = 'string' }}
+    })


    -- UTXO
@@ -81,8 +120,13 @@ function init()
        { name = 'output_index', type = 'unsigned' },
        { name = 'utxo', type = 'map' }
    })
-    utxos:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
-    utxos:create_index('utxo_by_transaction_id_and_output_index', { parts = {
+    utxos:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    utxos:create_index('utxo_by_transaction_id_and_output_index', {
+        if_not_exists = true,
+        parts = {
        { field = 'transaction_id', type = 'string' },
        { field = 'output_index', type = 'unsigned' }
    }})
@@ -95,8 +139,14 @@ function init()
        { name = 'height', type = 'unsigned' },
        { name = 'is_concluded', type = 'boolean' }
    })
-    elections:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
-    elections:create_index('height', { parts = {{ field = 'height', type = 'unsigned' }}})
+    elections:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    elections:create_index('height', {
+        if_not_exists = true,
+        parts = {{ field = 'height', type = 'unsigned' }}
+    })


    -- Validators
@@ -106,10 +156,18 @@ function init()
        { name = 'height', type = 'unsigned' },
        { name = 'set', type = 'array' }
    })
-    validator_sets:create_index('id', { parts = {{ field = 'id', type = 'string' }}})
+    validator_sets:create_index('id', {
+        if_not_exists = true,
+        parts = {{ field = 'id', type = 'string' }}
+    })
+    validator_sets:create_index('height', {
+        if_not_exists = true,
+        parts = {{ field = 'height', type = 'unsigned' }}
+    })
end

function drop()
+    if pcall(function()
        box.space.abci_chains:drop()
        box.space.blocks:drop()
        box.space.elections:drop()
@@ -118,6 +176,9 @@ function drop()
        box.space.validator_sets:drop()
        box.space.transactions:drop()
        box.space.outputs:drop()
+    end) then
+        print("Error: specified space not found")
+    end
end

function indexed_pattern_search(space_name, field_no, pattern)
@@ -8,6 +8,7 @@ import json
from uuid import uuid4
from hashlib import sha256
from operator import itemgetter
+from typing import Union
from tarantool.error import DatabaseError
from planetmint.backend import query
from planetmint.backend.models.keys import Keys
@@ -26,28 +27,32 @@ register_query = module_dispatch_registrar(query)
def _group_transaction_by_ids(connection, txids: list):
    _transactions = []
    for txid in txids:
-        _txobject = connection.run(connection.space(TARANT_TABLE_TRANSACTION).get(txid, index=TARANT_ID_SEARCH))
+        _txobject = connection.run(connection.space(TARANT_TABLE_TRANSACTION).select(txid, index=TARANT_ID_SEARCH))

        if _txobject is None:
-            continue
+            _txobject = _txobject[0]

-        _txinputs = get_inputs_by_tx_id(connection, txid)
+        # _txinputs = get_inputs_by_tx_id(connection, txid)
        _txoutputs = get_outputs_by_tx_id(connection, txid)
-        _txkeys = get_keys_by_tx_id(connection, txid)
-        _txassets = get_assets(connection, [txid])
-        _txmeta = get_metadata_by_tx_id(connection, txid)
-        _txscript = get_script_by_tx_id(connection, txid)
+        # _txkeys = get_keys_by_tx_id(connection, txid)
+        # _txassets = get_assets(connection, [txid])
+        # _txmeta = get_metadata_by_tx_id(connection, txid)
+        # _txscript = get_script_by_tx_id(connection, txid)

+        print('TX_OBJECT', _txobject)

        _transaction = get_transaction(connection, txid)
-        _transaction[TARANT_TABLE_TRANSACTION] = [tx.to_dict for tx in _transactions]
-        _transaction[TARANT_TABLE_INPUT] + [input.to_input_dict() for input in _txinputs]
-        _transaction[TARANT_TABLE_OUTPUT] = [output.to_output_dict() for output in _txoutputs]
-        _transaction[TARANT_TABLE_KEYS] = [key.to_dict() for key in _txkeys]
-        _transaction["assets"] = [asset.data for asset in _txassets]
-        _transaction["metadata"] = _txmeta.metadata

-        if _txscript.script:
-            _transaction[TARANT_TABLE_SCRIPT] = _txscript.script
+        print('Transaction', _transaction)
+        # _transaction[TARANT_TABLE_TRANSACTION] = [tx.to_dict for tx in _transactions]
+        # _transaction[TARANT_TABLE_INPUT] + [input.to_input_dict() for input in _txinputs]
+        # _transaction[TARANT_TABLE_OUTPUT] = [output.to_output_dict() for output in _txoutputs]
+        # _transaction[TARANT_TABLE_KEYS] = [key.to_dict() for key in _txkeys]
+        # _transaction["assets"] = [asset.data for asset in _txassets]
+        # _transaction["metadata"] = _txmeta.metadata

+        # if _txscript.script:
+        #     _transaction[TARANT_TABLE_SCRIPT] = _txscript.script
        _transactions.append(_transaction)
    return _transactions

@@ -141,43 +146,49 @@ def store_transaction_keys(connection, keys: Keys, output_id: str, index: int):
@register_query(TarantoolDBConnection)
def store_transactions(connection, signed_transactions: list):
    for transaction in signed_transactions:
+        store_transaction(connection, transaction)

-        [store_transaction_inputs(connection, Input.from_dict(input, transaction["id"]), index) for
-         index, input in enumerate(transaction[TARANT_TABLE_INPUT])]
+        # [store_transaction_inputs(connection, Input.from_dict(input, transaction["id"]), index) for
+        #  index, input in enumerate(transaction[TARANT_TABLE_INPUT])]

-        [store_transaction_outputs_and_keys(connection, Output.outputs_and_keys_dict(output, transaction["id"]), index)
-         for index, output in
-         enumerate(transaction[TARANT_TABLE_OUTPUT])]
+        # [store_transaction_outputs_and_keys(connection, Output.outputs_and_keys_dict(output, transaction["id"]), index)
+        #  for index, output in
+        #  enumerate(transaction[TARANT_TABLE_OUTPUT])]

-        store_metadatas(connection, [MetaData(transaction["id"], transaction["metadata"])])
+        # store_metadatas(connection, [MetaData(transaction["id"], transaction["metadata"])])

-        assets = []
-        for asset in transaction[TARANT_TABLE_ASSETS]:
-            id = transaction["id"] if "id" not in asset else asset["id"]
-            assets.append(Asset(id, transaction["id"], asset))
-        store_assets(connection, assets)
+        # assets = []
+        # for asset in transaction[TARANT_TABLE_ASSETS]:
+        #     id = transaction["id"] if "id" not in asset else asset["id"]
+        #     assets.append(Asset(id, transaction["id"], asset))
+        # store_assets(connection, assets)

-        if TARANT_TABLE_SCRIPT in transaction:
-            connection.run(
-                connection.space(TARANT_TABLE_SCRIPT).insert((transaction["id"], transaction[TARANT_TABLE_SCRIPT])),
-                only_data=False)
+        # if TARANT_TABLE_SCRIPT in transaction:
+        #     connection.run(
+        #         connection.space(TARANT_TABLE_SCRIPT).insert((transaction["id"], transaction[TARANT_TABLE_SCRIPT])),
+        #         only_data=False)


@register_query(TarantoolDBConnection)
def store_transaction(connection, transaction):
-    tx = Transaction(id=transaction["id"], operation=transaction["operation"], version=transaction["version"])
-    connection.run(connection.space(TARANT_TABLE_TRANSACTION).insert(
+    # tx = Transaction(id=transaction["id"], operation=transaction["operation"], version=transaction["version"])
+    tx = Transaction.from_dict(transaction)
+    connection.run(connection.space(TARANT_TABLE_TRANSACTION).insert((
        tx.id,
        tx.operation,
        tx.version,
-        ),
-        only_data=False)
+        tx.metadata,
+        tx.assets,
+        tx.inputs,
+        tx.scripts
+    )), only_data=False)


@register_query(TarantoolDBConnection)
-def get_transaction(connection, transaction_id: str) -> Transaction:
-    return Transaction.from_tuple(
-        connection.run(connection.space(TARANT_TABLE_TRANSACTION).get(transaction_id, index=TARANT_ID_SEARCH)))
+def get_transaction(connection, transaction_id: str) -> Union[Transaction, None]:
+    tx = connection.run(connection.space(TARANT_TABLE_TRANSACTION).select(transaction_id, index=TARANT_ID_SEARCH))
+    return Transaction.from_tuple(tx[0]) if tx else None



@register_query(TarantoolDBConnection)
@@ -260,29 +271,30 @@ def get_spent(connection, fullfil_transaction_id: str, fullfil_output_index: str

@register_query(TarantoolDBConnection)
def get_latest_block(connection):  # TODO Here is used DESCENDING OPERATOR
-    _all_blocks = connection.run(connection.space("blocks").select())
-    block = {"app_hash": "", "height": 0, TARANT_TABLE_TRANSACTION: []}
+    # NOTE:TARANTOOL THROWS ERROR ON ITERATOR 'REQ'
+    latest_blocks = connection.run(connection.space("blocks").select())

+    if not latest_blocks:
+        return None

+    # TODO: return Block dataclass instance
+    block = {
+        "app_hash": latest_blocks[0][1],
+        "height": latest_blocks[0][2],
+        TARANT_TABLE_TRANSACTION: latest_blocks[0][3]
+    }

-    if _all_blocks is not None:
-        if len(_all_blocks) > 0:
-            _block = sorted(_all_blocks, key=itemgetter(1), reverse=True)[0]
-            _txids = connection.run(connection.space("blocks_tx").select(_block[2], index="block_search"))
-            block["app_hash"] = _block[0]
-            block["height"] = _block[1]
-            block[TARANT_TABLE_TRANSACTION] = [tx[0] for tx in _txids]
-    else:
-        block = None
    return block


@register_query(TarantoolDBConnection)
def store_block(connection, block: dict):

    block_unique_id = uuid4().hex
    connection.run(
-        connection.space("blocks").insert((block["app_hash"], block["height"], block_unique_id)), only_data=False
+        connection.space("blocks").insert((block_unique_id, block["app_hash"], block["height"], block[TARANT_TABLE_TRANSACTION])), only_data=False
    )
-    for txid in block[TARANT_TABLE_TRANSACTION]:
-        connection.run(connection.space("blocks_tx").insert((txid, block_unique_id)), only_data=False)


@register_query(TarantoolDBConnection)
@@ -469,10 +481,10 @@ def get_pre_commit_state(connection):

@register_query(TarantoolDBConnection)
def store_validator_set(conn, validators_update: dict):
-    _validator = conn.run(conn.space("validators").select(validators_update["height"], index="height_search", limit=1))
+    _validator = conn.run(conn.space("validator_sets").select(validators_update["height"], index="height", limit=1))
    unique_id = uuid4().hex if _validator is None or len(_validator) == 0 else _validator[0][0]
    conn.run(
-        conn.space("validators").upsert(
+        conn.space("validator_sets").upsert(
            (unique_id, validators_update["height"], validators_update["validators"]),
            op_list=[("=", 1, validators_update["height"]), ("=", 2, validators_update["validators"])],
            limit=1,
@@ -511,14 +523,14 @@ def store_elections(connection, elections: list):

@register_query(TarantoolDBConnection)
def delete_elections(connection, height: int):
-    _elections = connection.run(connection.space("elections").select(height, index="height_search"))
+    _elections = connection.run(connection.space("elections").select(height, index="height"))
    for _elec in _elections:
        connection.run(connection.space("elections").delete(_elec[0]), only_data=False)


@register_query(TarantoolDBConnection)
def get_validator_set(connection, height: int = None):
-    _validators = connection.run(connection.space("validators").select())
+    _validators = connection.run(connection.space("validator_sets").select())
    if height is not None and _validators is not None:
        _validators = [
            {"height": validator[1], "validators": validator[2]} for validator in _validators if validator[1] <= height
@@ -554,11 +566,10 @@ def get_asset_tokens_for_public_key(

@register_query(TarantoolDBConnection)
def store_abci_chain(connection, height: int, chain_id: str, is_synced: bool = True):
-    hash_id_primarykey = sha256(json.dumps(obj={"height": height}).encode()).hexdigest()
    connection.run(
        connection.space("abci_chains").upsert(
-            (height, is_synced, chain_id, hash_id_primarykey),
-            op_list=[("=", 0, height), ("=", 1, is_synced), ("=", 2, chain_id)],
+            (chain_id, height, is_synced),
+            op_list=[("=", 0, chain_id), ("=", 1, height), ("=", 0, is_synced)],
        ),
        only_data=False,
    )
@@ -576,7 +587,7 @@ def get_latest_abci_chain(connection):
    if _all_chains is None or len(_all_chains) == 0:
        return None
    _chain = sorted(_all_chains, key=itemgetter(0), reverse=True)[0]
-    return {"height": _chain[0], "is_synced": _chain[1], "chain_id": _chain[2]}
+    return {"chain_id": _chain[0], "height": _chain[1], "is_synced": _chain[2]}


@register_query(TarantoolDBConnection)
@@ -144,17 +144,15 @@ SCHEMA_DROP_COMMANDS = {
    "scripts": "box.space.scripts:drop()",
}

+@register_schema(TarantoolDBConnection)
+def init_database(connection, db_name=None):
+    print('init database tarantool schema')
+    connection.connect().call('init')

@register_schema(TarantoolDBConnection)
-def drop_database(connection, not_used=None):
-    for _space in SPACE_NAMES:
-        try:
-            cmd = SCHEMA_DROP_COMMANDS[_space].encode()
-            run_command_with_output(command=cmd)
-            print(f"Space '{_space}' was dropped succesfuly.")
-        except Exception:
-            print(f"Unexpected error while trying to drop space '{_space}'")
+def drop_database(connection, db_name=None):
+    print('drop database tarantool schema')
+    connection.connect().call('drop')


@register_schema(TarantoolDBConnection)
def create_database(connection, dbname):
@@ -182,31 +180,4 @@ def run_command_with_output(command):

@register_schema(TarantoolDBConnection)
def create_tables(connection, dbname):
-    for _space in SPACE_NAMES:
-        try:
-            cmd = SPACE_COMMANDS[_space].encode()
-            run_command_with_output(command=cmd)
-            print(f"Space '{_space}' created.")
-        except Exception as err:
-            print(f"Unexpected error while trying to create '{_space}': {err}")
-        create_schema(space_name=_space)
-        create_indexes(space_name=_space)
+    connection.connect().call('init')
-
-
-def create_indexes(space_name):
-    indexes = INDEX_COMMANDS[space_name]
-    for index_name, index_cmd in indexes.items():
-        try:
-            run_command_with_output(command=index_cmd.encode())
-            print(f"Index '{index_name}' created succesfully.")
-        except Exception as err:
-            print(f"Unexpected error while trying to create index '{index_name}': '{err}'")
-
-
-def create_schema(space_name):
-    try:
-        cmd = SCHEMA_COMMANDS[space_name].encode()
-        run_command_with_output(command=cmd)
-        print(f"Schema created for {space_name} succesfully.")
-    except Exception as unexpected_error:
-        print(f"Got unexpected error when creating index for '{space_name}' Space.\n {unexpected_error}")
@@ -240,7 +240,7 @@ def run_election_show(args, planet):

def _run_init():
    bdb = planetmint.Planetmint()
-    schema.init_database(connection=bdb.connection)
+    schema.init_database(bdb.connection)


@configure_planetmint
@@ -203,6 +203,8 @@ class App(BaseApplication):
        block_txn_hash = calculate_hash(self.block_txn_ids)
        block = self.planetmint_node.get_latest_block()

+        logger.debug('BLOCK: ', block)

        if self.block_txn_ids:
            self.block_txn_hash = calculate_hash([block["app_hash"], block_txn_hash])
        else:
@@ -1,12 +0,0 @@
-[[source]]
-url = "https://pypi.python.org/simple"
-verify_ssl = true
-name = "pypi"
-
-[packages]
-pytest = "*"
-
-[dev-packages]
-
-[requires]
-python_version = "3.8"
tests/backend/tarantool/Pipfile.lock (generated, 78 lines)
@@ -1,78 +0,0 @@
-{
-    "_meta": {
-        "hash": {
-            "sha256": "97a0be44f6d5351e166a90d91c789c8100486c7cc30d922ef7f7e3541838acae"
-        },
-        "pipfile-spec": 6,
-        "requires": {
-            "python_version": "3.8"
-        },
-        "sources": [
-            {
-                "name": "pypi",
-                "url": "https://pypi.python.org/simple",
-                "verify_ssl": true
-            }
-        ]
-    },
-    "default": {
-        "attrs": {
-            "hashes": [
-                "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4",
-                "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"
-            ],
-            "version": "==21.4.0"
-        },
-        "iniconfig": {
-            "hashes": [
-                "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3",
-                "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"
-            ],
-            "version": "==1.1.1"
-        },
-        "packaging": {
-            "hashes": [
-                "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb",
-                "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"
-            ],
-            "version": "==21.3"
-        },
-        "pluggy": {
-            "hashes": [
-                "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159",
-                "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"
-            ],
-            "version": "==1.0.0"
-        },
-        "py": {
-            "hashes": [
-                "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719",
-                "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"
-            ],
-            "version": "==1.11.0"
-        },
-        "pyparsing": {
-            "hashes": [
-                "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea",
-                "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"
-            ],
-            "version": "==3.0.7"
-        },
-        "pytest": {
-            "hashes": [
-                "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db",
-                "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"
-            ],
-            "index": "pypi",
-            "version": "==7.0.1"
-        },
-        "tomli": {
-            "hashes": [
-                "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc",
-                "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"
-            ],
-            "version": "==2.0.1"
-        }
-    },
-    "develop": {}
-}
@@ -1,31 +0,0 @@
-import pytest
-from planetmint.backend.connection import Connection
-
-
-#
-#
-#
-# @pytest.fixture
-# def dummy_db(request):
-#     from planetmint.backend import Connection
-#
-#     conn = Connection()
-#     dbname = request.fixturename
-#     xdist_suffix = getattr(request.config, 'slaveinput', {}).get('slaveid')
-#     if xdist_suffix:
-#         dbname = '{}_{}'.format(dbname, xdist_suffix)
-#
-#     conn.drop_database()
-#     #_drop_db(conn, dbname) # make sure we start with a clean DB
-#     #schema.init_database(conn, dbname)
-#     conn.init_database()
-#     yield dbname
-#
-#     conn.drop_database()
-#     #_drop_db(conn, dbname)
-
-
-@pytest.fixture
-def db_conn():
-    conn = Connection()
-    return conn
@@ -5,7 +5,6 @@

from planetmint.backend.tarantool.connection import TarantoolDBConnection

-
def _check_spaces_by_list(conn, space_names):
    _exists = []
    for name in space_names:
@@ -25,5 +24,6 @@ def test_create_tables(db_conn):

def test_drop(db_conn):  # remove dummy_db as argument
    db_conn.drop_database()
+    db_conn.close()
    actual_spaces = _check_spaces_by_list(conn=db_conn, space_names=db_conn.SPACE_NAMES)
    assert [] == actual_spaces
@@ -120,7 +120,7 @@ def _setup_database(_configure_planetmint):  # TODO Here is located setup databa
    dbname = Config().get()["database"]["name"]
    conn = Connection()

-    _drop_db(conn, dbname)
+    schema.drop_database(conn, dbname)
    schema.init_database(conn, dbname)
    print("Finishing init database")

@@ -128,7 +128,7 @@ def _setup_database(_configure_planetmint):  # TODO Here is located setup databa

    print("Deleting `{}` database".format(dbname))
    conn = Connection()
-    _drop_db(conn, dbname)
+    schema.drop_database(conn, dbname)

    print("Finished deleting `{}`".format(dbname))

@@ -339,32 +339,6 @@ def inputs(user_pk, b, alice):
    b.store_bulk_transactions(transactions)


-# @pytest.fixture
-# def dummy_db(request):
-#     from planetmint.backend import Connection
-#
-#     conn = Connection()
-#     dbname = request.fixturename
-#     xdist_suffix = getattr(request.config, 'slaveinput', {}).get('slaveid')
-#     if xdist_suffix:
-#         dbname = '{}_{}'.format(dbname, xdist_suffix)
-#
-#
-#     _drop_db(conn, dbname)  # make sure we start with a clean DB
-#     schema.init_database(conn, dbname)
-#     yield dbname
-#
-#     _drop_db(conn, dbname)
-
-
-def _drop_db(conn, dbname):
-    print(f"CONNECTION FOR DROPPING {conn}")
-    try:
-        schema.drop_database(conn, dbname)
-    except DatabaseDoesNotExist:
-        pass
-
-
@pytest.fixture
def db_config():
    return Config().get()["database"]
@@ -32,25 +32,7 @@ def flush_localmongo_db(connection, dbname):

@flush_db.register(TarantoolDBConnection)
def flush_tarantool_db(connection, dbname):
-    for s in SPACE_NAMES:
-        _all_data = connection.run(connection.space(s).select([]))
-        if _all_data is None:
-            continue
-        for _id in _all_data:
-            if "assets" == s:
-                connection.run(connection.space(s).delete(_id[1]), only_data=False)
-            elif s == "blocks":
-                connection.run(connection.space(s).delete(_id[2]), only_data=False)
-            elif s == "inputs":
-                connection.run(connection.space(s).delete(_id[-2]), only_data=False)
-            elif s == "outputs":
-                connection.run(connection.space(s).delete(_id[-4]), only_data=False)
-            elif s == "utxos":
-                connection.run(connection.space(s).delete([_id[0], _id[1]]), only_data=False)
-            elif s == "abci_chains":
-                connection.run(connection.space(s).delete(_id[-1]), only_data=False)
-            else:
-                connection.run(connection.space(s).delete(_id[0]), only_data=False)
+    connection.connect().call('drop')


def generate_block(planet):