Memoize data

parent 3092beb995
commit c3d0bca921

@@ -56,6 +56,7 @@ def create_transactions_secondary_index(conn, dbname):
 
     # to query the transactions for a transaction id, this field is unique
     conn.conn[dbname]['transactions'].create_index('id',
+                                                   unique=True,
                                                    name='transaction_id')
 
     # secondary index for asset uuid, this field is unique
@@ -90,7 +91,7 @@ def create_assets_secondary_index(conn, dbname):
 
 def create_blocks_secondary_index(conn, dbname):
     conn.conn[dbname]['blocks']\
-        .create_index([('height', DESCENDING)], name='height')
+        .create_index([('height', DESCENDING)], name='height', unique=True)
 
 
 def create_metadata_secondary_index(conn, dbname):
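
Note: both hunks above tighten existing secondary indexes into unique indexes, so MongoDB itself rejects a second transaction with the same id or a second block at the same height. A minimal sketch of that behaviour with pymongo (database, collection, and field names here are illustrative, not taken from this commit, and a MongoDB server on localhost is assumed):

# Sketch only: assumes a MongoDB server reachable on localhost.
from pymongo import MongoClient, DESCENDING
from pymongo.errors import DuplicateKeyError

blocks = MongoClient('localhost', 27017)['example_db']['blocks']

# Same shape as the index added above: unique, descending on height.
blocks.create_index([('height', DESCENDING)], name='height', unique=True)

blocks.insert_one({'height': 1})
try:
    blocks.insert_one({'height': 1})  # second block at the same height
except DuplicateKeyError:
    print('duplicate height rejected by the unique index')
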
@@ -9,6 +9,8 @@ Attributes:
 from collections import namedtuple
 from copy import deepcopy
 from functools import reduce
+import functools
+import ujson
 
 import base58
 from cryptoconditions import Fulfillment, ThresholdSha256, Ed25519Sha256
@@ -38,6 +40,51 @@ UnspentOutput = namedtuple(
 )
 
 
+def memoize(func):
+    cache = func.cache = {}
+
+    @functools.wraps(func)
+    def memoized_func(*args, **kwargs):
+        key = args[1]['id']
+        if key not in cache:
+            cache[key] = func(*args, **kwargs)
+
+        return cache[key]
+
+    return memoized_func
+
+
+def memoize_class(func):
+    cache = func.cache = {}
+
+    @functools.wraps(func)
+    def memoized_func(*args, **kwargs):
+        key = args[0].id
+        if key not in cache:
+            cache[key] = func(*args, **kwargs)
+
+        return cache[key]
+
+    return memoized_func
+
+
+def memoize_input_valid(func):
+    cache = func.cache = {}
+
+    @functools.wraps(func)
+    def memoized_func(*args, **kwargs):
+        inp_fulfillment = args[1].fulfillment
+        op = args[2]
+        msg = args[3]
+        key = '{}.{}.{}'.format(inp_fulfillment, op, msg)
+        if key not in cache:
+            cache[key] = func(*args, **kwargs)
+
+        return cache[key]
+
+    return memoized_func
+
+
 class Input(object):
     """A Input is used to spend assets locked by an Output.
 
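
Note: all three decorators above cache results in a plain dict attached to the wrapped function, keyed by a transaction id (or, for `memoize_input_valid`, by fulfillment, operation, and message), so the cache is unbounded and lives for the process lifetime. `memoize` in particular keys on `args[1]['id']`, which matches the classmethod it decorates later in the diff (`from_dict(cls, tx, ...)`: `args[0]` is the class, `args[1]` is the transaction dict). A minimal standalone sketch of that interaction, using a toy class rather than the real Transaction:

import functools


def memoize(func):
    # Same pattern as in this commit: a plain dict stored on the function,
    # keyed by the transaction id found in the second positional argument.
    cache = func.cache = {}

    @functools.wraps(func)
    def memoized_func(*args, **kwargs):
        key = args[1]['id']          # args[0] is cls, args[1] is the tx dict
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]

    return memoized_func


class Tx:  # toy stand-in for the real Transaction class
    def __init__(self, tx_id):
        self.id = tx_id

    @classmethod
    @memoize          # applied below @classmethod, so it wraps the plain function
    def from_dict(cls, tx):
        print('building', tx['id'])
        return cls(tx['id'])


a = Tx.from_dict({'id': 'abc'})   # prints "building abc"
b = Tx.from_dict({'id': 'abc'})   # served from the cache, no print
assert a is b
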
@@ -496,7 +543,7 @@ class Transaction(object):
     VERSION = '2.0'
 
     def __init__(self, operation, asset, inputs=None, outputs=None,
-                 metadata=None, version=None, hash_id=None):
+                 metadata=None, version=None, hash_id=None, tx_dict=None):
         """The constructor allows to create a customizable Transaction.
 
         Note:
@@ -549,6 +596,7 @@ class Transaction(object):
         self.outputs = outputs or []
         self.metadata = metadata
         self._id = hash_id
+        self.tx_dict = tx_dict
 
     @property
     def unspent_outputs(self):
@@ -986,7 +1034,7 @@ class Transaction(object):
             raise ValueError('Inputs and '
                              'output_condition_uris must have the same count')
 
-        tx_dict = self.to_dict()
+        tx_dict = self.tx_dict  # self.to_dict()
         tx_dict = Transaction._remove_signatures(tx_dict)
         tx_dict['id'] = None
         tx_serialized = Transaction._to_str(tx_dict)
@@ -999,6 +1047,7 @@ class Transaction(object):
         return all(validate(i, cond)
                    for i, cond in enumerate(output_condition_uris))
 
+    @memoize_input_valid
     def _input_valid(self, input_, operation, message, output_condition_uri=None):
         """Validates a single Input against a single Output.
 
@@ -1044,6 +1093,7 @@ class Transaction(object):
         ffill_valid = parsed_ffill.validate(message=message.digest())
         return output_valid and ffill_valid
 
+    @memoize_class
    def to_dict(self):
        """Transforms the object to a Python dictionary.
 
@@ -1092,7 +1142,8 @@ class Transaction(object):
         return self._id
 
     def to_hash(self):
-        return self.to_dict()['id']
+        return self.id
+        # return self.to_dict()['id']
 
     @staticmethod
     def _to_str(value):
@@ -1146,7 +1197,9 @@ class Transaction(object):
             tx_body (dict): The Transaction to be transformed.
         """
         # NOTE: Remove reference to avoid side effects
-        tx_body = deepcopy(tx_body)
+        # tx_body = deepcopy(tx_body)
+        # tx_body = rapidjson.loads(rapidjson.dumps(tx_body))
+        tx_body = ujson.loads(ujson.dumps(tx_body))
         try:
             proposed_tx_id = tx_body['id']
         except KeyError:
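
Note: replacing `deepcopy` with a serialize/parse round trip is typically faster for plain JSON-shaped dicts, but it only preserves JSON types (tuples become lists, non-serializable values raise). A small sketch of that trade-off, written with the standard-library `json` module instead of `ujson` so it runs without extra dependencies; the sample `tx_body` is illustrative:

import json
from copy import deepcopy

tx_body = {'id': 'abc', 'outputs': [{'amount': '1'}], 'metadata': None}

copy_a = deepcopy(tx_body)                # general-purpose deep copy
copy_b = json.loads(json.dumps(tx_body))  # JSON round trip, same result here

assert copy_a == copy_b
assert copy_b is not tx_body and copy_b['outputs'] is not tx_body['outputs']

# Caveat: the round trip only preserves JSON types.
assert json.loads(json.dumps({'k': (1, 2)})) == {'k': [1, 2]}  # tuple -> list
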
@@ -1163,6 +1216,7 @@ class Transaction(object):
             raise InvalidHash(err_msg.format(proposed_tx_id))
 
     @classmethod
+    @memoize
     def from_dict(cls, tx, skip_schema_validation=True):
         """Transforms a Python dictionary to a Transaction object.
 
@@ -1180,7 +1234,7 @@ class Transaction(object):
         inputs = [Input.from_dict(input_) for input_ in tx['inputs']]
         outputs = [Output.from_dict(output) for output in tx['outputs']]
         return cls(tx['operation'], tx['asset'], inputs, outputs,
-                   tx['metadata'], tx['version'], hash_id=tx['id'])
+                   tx['metadata'], tx['version'], hash_id=tx['id'], tx_dict=tx)
 
     @classmethod
     def from_db(cls, bigchain, tx_dict_list):
@@ -139,7 +139,7 @@ class BigchainDB(object):
         txn_metadatas = []
         for transaction_obj in transactions:
             # self.update_utxoset(transaction)
-            transaction = transaction_obj.to_dict()
+            transaction = transaction_obj.tx_dict
             if transaction['operation'] == transaction_obj.CREATE:
                 asset = transaction.pop('asset')
                 asset['id'] = transaction['id']
@@ -239,6 +239,13 @@ class BigchainDB(object):
         return backend.query.delete_unspent_outputs(
             self.connection, *unspent_outputs)
 
+    def is_commited(self, transaction_id):
+        transaction = backend.query.get_transaction(self.connection, transaction_id)
+        if transaction:
+            return True
+        else:
+            return False
+
     def get_transaction(self, transaction_id):
         transaction = backend.query.get_transaction(self.connection, transaction_id)
 
@@ -23,7 +23,7 @@ class Transaction(Transaction):
 
         if self.operation == Transaction.CREATE:
             duplicates = any(txn for txn in current_transactions if txn.id == self.id)
-            if bigchain.get_transaction(self.to_dict()['id']) or duplicates:
+            if bigchain.is_commited(self.id) or duplicates:
                 raise DuplicateTransaction('transaction `{}` already exists'
                                            .format(self.id))
 