From 09376f35fe08cd42b014280f7ea557c20362fd7d Mon Sep 17 00:00:00 2001 From: Merl111 Date: Fri, 5 Apr 2019 15:15:39 +0200 Subject: [PATCH 01/23] feat, wip, first step towards a generic db interface --- neo/Core/Blockchain.py | 776 +++++++++++++++--- neo/Settings.py | 13 + neo/SmartContract/StateMachine.py | 35 +- neo/Storage/Common/CachedScriptTable.py | 24 + neo/Storage/Common/DBPrefix.py | 18 + neo/Storage/Common/DebugStorage.py | 40 + neo/Storage/Implementation/DBFactory.py | 81 ++ .../LevelDB/LevelDBClassMethods.py | 94 +++ .../Implementation/LevelDB/test_leveldb.py | 8 + neo/Storage/Implementation/__init__.py | 0 neo/Storage/Implementation/test2.py | 4 + neo/Storage/Implementation/test_db_factory.py | 15 + neo/Storage/Interface/AbstractDBInterface.py | 25 + neo/Storage/Interface/DBInterface.py | 242 ++++++ neo/Storage/Interface/__init__.py | 0 neo/Storage/Interface/test_db_interface.py | 1 + neo/Storage/__init__.py | 0 neo/bin/api_server.py | 4 +- neo/bin/prompt.py | 4 +- 19 files changed, 1268 insertions(+), 116 deletions(-) create mode 100644 neo/Storage/Common/CachedScriptTable.py create mode 100644 neo/Storage/Common/DBPrefix.py create mode 100644 neo/Storage/Common/DebugStorage.py create mode 100644 neo/Storage/Implementation/DBFactory.py create mode 100644 neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py create mode 100644 neo/Storage/Implementation/LevelDB/test_leveldb.py create mode 100644 neo/Storage/Implementation/__init__.py create mode 100644 neo/Storage/Implementation/test2.py create mode 100644 neo/Storage/Implementation/test_db_factory.py create mode 100644 neo/Storage/Interface/AbstractDBInterface.py create mode 100644 neo/Storage/Interface/DBInterface.py create mode 100644 neo/Storage/Interface/__init__.py create mode 100644 neo/Storage/Interface/test_db_interface.py create mode 100644 neo/Storage/__init__.py diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index 2a74ad387..04f6a0180 100644 --- a/neo/Core/Blockchain.py +++ 
b/neo/Core/Blockchain.py @@ -1,30 +1,52 @@ import pytz +import binascii from itertools import groupby from datetime import datetime from events import Events +from functools import lru_cache + from neo.Core.Block import Block from neo.Core.TX.Transaction import TransactionOutput from neo.Core.AssetType import AssetType from neo.Core.Cryptography.Crypto import Crypto +from neo.Core.Header import Header from neo.Core.TX.RegisterTransaction import RegisterTransaction from neo.Core.TX.MinerTransaction import MinerTransaction from neo.Core.TX.IssueTransaction import IssueTransaction +from neo.Core.TX.Transaction import Transaction, TransactionType from neo.Core.Witness import Witness -from neo.VM.OpCode import PUSHF, PUSHT -from neo.Core.State.SpentCoinState import SpentCoin -from neo.SmartContract.Contract import Contract +from neo.Core.State.UnspentCoinState import UnspentCoinState +from neo.Core.State.AccountState import AccountState +from neo.Core.State.CoinState import CoinState +from neo.Core.State.ContractState import ContractState +from neo.Core.State.StorageItem import StorageItem +from neo.Core.State.SpentCoinState import SpentCoinState, SpentCoinItem, SpentCoin +from neo.Core.State.ValidatorState import ValidatorState +from neo.EventHub import events +from neo.IO.MemoryStream import StreamManager +from neo.logging import log_manager from neo.Settings import settings from collections import Counter from neo.Core.Fixed8 import Fixed8 from neo.Core.Cryptography.ECCurve import ECDSA from neo.Core.UInt256 import UInt256 -from functools import lru_cache +from neo.Core.IO.BinaryWriter import BinaryWriter +from neo.SmartContract.Contract import Contract +from neo.SmartContract.ApplicationEngine import ApplicationEngine +from neo.SmartContract import TriggerType +from neo.Storage.Common.DBPrefix import DBPrefix +from neo.Storage.Common.CachedScriptTable import CachedScriptTable +from neo.Storage.Interface.DBInterface import DBInterface +from neo.VM.OpCode import 
PUSHF, PUSHT + from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: from neo.Core.State import AssetState +logger = log_manager.getLogger('Blockchain') + class Blockchain: SECONDS_PER_BLOCK = 15 @@ -33,6 +55,8 @@ class Blockchain: GENERATION_AMOUNT = [8, 7, 6, 5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + _db = None + _blockchain = None _validators = [] @@ -45,6 +69,16 @@ class Blockchain: _paused = False + _disposed = False + + _header_index = [] + + _block_cache = {} + + _current_block_height = 0 + + _persisting_block = None + BlockSearchTries = 0 CACHELIM = 4000 @@ -55,6 +89,9 @@ class Blockchain: Notify = Events() + def __init__(self, db): + self._db = db + @staticmethod def StandbyValidators(): if len(Blockchain._validators) < 1: @@ -121,8 +158,12 @@ def GenesisBlock() -> Block: output = TransactionOutput( Blockchain.SystemShare().Hash, Blockchain.SystemShare().Amount, - Crypto.ToScriptHash(Contract.CreateMultiSigRedeemScript(int(len(Blockchain.StandbyValidators()) / 2) + 1, - Blockchain.StandbyValidators())) + Crypto.ToScriptHash( + Contract.CreateMultiSigRedeemScript( + int(len(Blockchain.StandbyValidators()) / 2) + 1, + Blockchain.StandbyValidators() + ) + ) ) it = IssueTransaction([], [output], [], [script]) @@ -138,7 +179,7 @@ def Default() -> 'Blockchain': Get the default registered blockchain instance. Returns: - obj: Currently set to `neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain`. + obj: Blockchain based on the configured database backend. 
""" if Blockchain._instance is None: Blockchain._instance = Blockchain() @@ -148,32 +189,124 @@ def Default() -> 'Blockchain': @property def CurrentBlockHash(self): - pass + try: + return self._header_index[self._current_block_height] + except Exception as e: + logger.info("Could not get current block hash, returning none: %s ", e) + + return None @property def CurrentHeaderHash(self): - pass + return self._header_index[-1] @property def HeaderHeight(self): - pass + height = len(self._header_index) - 1 + return height @property def Height(self): - pass + return self._current_block_height @property def CurrentBlock(self): - pass + if self._persisting_block: + return self._persisting_block + return self.GetBlockByHeight(self.Height) def AddBlock(self, block): - pass + if not block.Hash.ToBytes() in self._block_cache: + self._block_cache[block.Hash.ToBytes()] = block + + header_len = len(self._header_index) + + if block.Index - 1 >= header_len: + return False + + if block.Index == header_len: + + if self._verify_blocks and not block.Verify(): + return False + elif len(block.Transactions) < 1: + return False + self.AddHeader(block.Header) + + return True def AddBlockDirectly(self, block, do_persist_complete=True): - pass + # Adds a block when importing, which skips adding + # the block header + if block.Index != self.Height + 1: + raise Exception("Invalid block") + self.Persist(block) + if do_persist_complete: + self.OnPersistCompleted(block) + + def AddHeader(self, header): + self.AddHeaders([header]) def AddHeaders(self, headers): - pass + newheaders = [] + count = 0 + for header in headers: + + if header.Index - 1 >= len(self._header_index) + count: + logger.info( + "header is greater than header index length: %s %s " % (header.Index, len(self._header_index))) + break + + if header.Index < count + len(self._header_index): + continue + if self._verify_blocks and not header.Verify(): + break + + count = count + 1 + + newheaders.append(header) + + if len(newheaders): + 
self.ProcessNewHeaders(newheaders) + + return True + + def ProcessNewHeaders(self, headers): + lastheader = headers[-1] + hashes = [h.Hash.ToBytes() for h in headers] + self._header_index = self._header_index + hashes + + if lastheader is not None: + self.OnAddHeader(lastheader) + + def OnAddHeader(self, header): + + hHash = header.Hash.ToBytes() + + if hHash not in self._header_index: + self._header_index.append(hHash) + + wb = self._db.getBatch() + while header.Index - 2000 >= self._stored_header_count: + ms = StreamManager.GetStream() + w = BinaryWriter(ms) + headers_to_write = self._header_index[self._stored_header_count:self._stored_header_count + 2000] + w.Write2000256List(headers_to_write) + out = ms.ToArray() + StreamManager.ReleaseStream(ms) + wb.put(DBPrefix.IX_HeaderHashList + self._stored_header_count.to_bytes(4, 'little'), out) + + self._stored_header_count += 2000 + + # clean up + self._db.dropBatch() + + wb = self._db.getBatch() + if self._db.get(DBPrefix.DATA_Block + hHash) is None: + wb.put(DBPrefix.DATA_Block + hHash, bytes(8) + header.ToArray()) + wb.put(DBPrefix.SYS_CurrentHeader, hHash + header.Index.to_bytes(4, 'little')) + + # clean up + self._db.dropBatch() @property def BlockRequests(self): @@ -285,79 +418,343 @@ def CalculateBonusInternal(unclaimed): def OnNotify(self, notification): self.Notify.on_change(notification) - def ContainsBlock(self, hash): - pass + def ContainsBlock(self, index): + if index <= self._current_block_height: + return True + return False def ContainsTransaction(self, hash): - pass + tx = self._db.get(DBPrefix.DATA_Transaction + hash.ToBytes()) + return True if tx is not None else False - def ContainsUnspent(self, hash, index): - pass + def GetHeader(self, hash): + if isinstance(hash, UInt256): + hash = hash.ToString().encode() + + try: + out = bytearray(self._db.get(DBPrefix.DATA_Block + hash)) + out = out[8:] + outhex = binascii.unhexlify(out) + return Header.FromTrimmedData(outhex, 0) + except TypeError as e2: + 
pass + except Exception as e: + logger.info("OTHER ERRROR %s " % e) + return None + + # TODO refactor function + def GetHeaderBy(self, height_or_hash): + hash = None + + intval = None + try: + intval = int(height_or_hash) + except Exception as e: + pass + + if intval is None and len(height_or_hash) == 64: + bhash = height_or_hash.encode('utf-8') + if bhash in self._header_index: + hash = bhash + + elif intval is None and len(height_or_hash) == 66: + bhash = height_or_hash[2:].encode('utf-8') + if bhash in self._header_index: + hash = bhash + + elif intval is not None and self.GetHeaderHash(intval) is not None: + hash = self.GetHeaderHash(intval) + + if hash is not None: + return self.GetHeader(hash) + + return None - def Dispose(self): - pass + def GetHeaderByHeight(self, height): - def GetStates(self, prefix, classref): - pass + if len(self._header_index) <= height: + return False - def GetAccountStateByIndex(self, index): - pass + hash = self._header_index[height] - def GetAccountState(self, address): - pass + return self.GetHeader(hash) - def GetAssetState(self, assetId) -> Optional['AssetState']: - # abstract - pass + def GetHeaderHash(self, height): + if height < len(self._header_index) and height >= 0: + return self._header_index[height] + return None - def SearchAssetState(self, query): - pass + def GetBlockHash(self, height): + """ + Get the block hash by its block height + Args: + height(int): height of the block to retrieve hash from. - def ShowAllAssets(self): - pass + Returns: + bytes: a non-raw block hash (e.g. 
b'6dd83ed8a3fc02e322f91f30431bf3662a8c8e8ebe976c3565f0d21c70620991', but not b'\x6d\xd8...etc' + """ + if self._current_block_height < height: + return - def GetHeaderHash(self, height): - pass + if len(self._header_index) <= height: + return + + return self._header_index[height] + + def GetSysFeeAmount(self, hash): + + if type(hash) is UInt256: + hash = hash.ToBytes() + try: + value = self._db.get(DBPrefix.DATA_Block + hash)[0:8] + amount = int.from_bytes(value, 'little', signed=False) + return amount + except Exception as e: + logger.debug("Could not get sys fee: %s " % e) + + return 0 def GetBlockByHeight(self, height): - pass + """ + Get a block by its height. + Args: + height(int): the height of the block to retrieve. + + Returns: + neo.Core.Block: block instance. + """ + hash = self.GetBlockHash(height) + if hash is not None: + return self.GetBlockByHash(hash) def GetBlock(self, height_or_hash): - pass + + hash = None + + intval = None + try: + intval = int(height_or_hash) + except Exception as e: + pass + + if intval is None and len(height_or_hash) == 64: + if isinstance(height_or_hash, str): + bhash = height_or_hash.encode('utf-8') + else: + bhash = height_or_hash + + if bhash in self._header_index: + hash = bhash + elif intval is None and len(height_or_hash) == 66: + bhash = height_or_hash[2:].encode('utf-8') + if bhash in self._header_index: + hash = bhash + elif intval is not None and self.GetBlockHash(intval) is not None: + hash = self.GetBlockHash(intval) + + if hash is not None: + return self.GetBlockByHash(hash) + + return None def GetBlockByHash(self, hash): - # abstract - pass + try: + out = bytearray(self._db.get(DBPrefix.DATA_Block + hash)) + out = out[8:] + outhex = binascii.unhexlify(out) + return Block.FromTrimmedData(outhex) + except Exception as e: + logger.info("Could not get block %s " % e) + return None - def GetBlockHash(self, height): - # abstract - pass + def GetStates(self, prefix, classref): + return DBInterface(self._db, prefix, 
classref) - def GetSpentCoins(self, tx_hash): - pass + def GetAccountState(self, address, print_all_accounts=False): - def GetAllSpentCoins(self): - pass + if type(address) is str: + try: + address = address.encode('utf-8') + except Exception as e: + logger.info("could not convert argument to bytes :%s " % e) + return None + + accounts = DBInterface(self._db, DBPrefix.ST_Account, AccountState) + acct = accounts.TryGet(keyval=address) + + return acct + + def GetStorageItem(self, storage_key): + storages = DBInterface(self._db, DBPrefix.ST_Storage, StorageItem) + item = storages.TryGet(storage_key.ToArray()) + return item + + def GetAssetState(self, assetId): + if type(assetId) is str: + try: + assetId = assetId.encode('utf-8') + except Exception as e: + logger.info("could not convert argument to bytes :%s " % e) + return None + + assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) + asset = assets.TryGet(assetId) + + return asset + + def ShowAllAssets(self): + + assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) + keys = assets.Keys + return keys + + def GetTransaction(self, hash): + if type(hash) is str: + hash = hash.encode('utf-8') + elif type(hash) is UInt256: + hash = hash.ToBytes() + + out = self._db.get(DBPrefix.DATA_Transaction + hash) + if out is not None: + out = bytearray(out) + height = int.from_bytes(out[:4], 'little') + out = out[4:] + outhex = binascii.unhexlify(out) + return Transaction.DeserializeFromBufer(outhex, 0), height + return None, -1 def SearchContracts(self, query): - pass + res = [] + contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) + keys = contracts.Keys + + query = query.casefold() + + for item in keys: + + contract = contracts.TryGet(keyval=item) + try: + if query in contract.Name.decode('utf-8').casefold(): + res.append(contract) + elif query in contract.Author.decode('utf-8').casefold(): + res.append(contract) + elif query in contract.Description.decode('utf-8').casefold(): + 
res.append(contract) + elif query in contract.Email.decode('utf-8').casefold(): + res.append(contract) + except Exception as e: + logger.info("Could not query contract: %s " % e) + + return res def ShowAllContracts(self): - pass + + contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) + keys = contracts.Keys + return keys def GetContract(self, hash): - # abstract - pass - def GetEnrollments(self): - # abstract - pass + if type(hash) is str: + try: + hash = UInt160.ParseString(hash).ToBytes() + except Exception as e: + logger.info("could not convert argument to bytes :%s " % e) + return None - def GetHeader(self, hash): - # abstract - pass + contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) + contract = contracts.TryGet(keyval=hash) + return contract - def GetHeaderByHeight(self, height): + def GetAllSpentCoins(self): + coins = DBInterface(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) + + return coins.Keys + + def GetUnspent(self, hash, index): + + coins = DBInterface(self._db, DBPrefix.ST_Coin, UnspentCoinState) + + state = coins.TryGet(hash) + + if state is None: + return None + if index >= len(state.Items): + return None + if state.Items[index] & CoinState.Spent > 0: + return None + tx, height = self.GetTransaction(hash) + + return tx.outputs[index] + + def GetSpentCoins(self, tx_hash): + + if type(tx_hash) is not bytes: + tx_hash = bytes(tx_hash.encode('utf-8')) + + coins = DBInterface(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) + result = coins.TryGet(keyval=tx_hash) + + return result + + def GetAllUnspent(self, hash): + + unspents = [] + + unspentcoins = DBInterface(self._db, DBPrefix.ST_Coin, UnspentCoinState) + + state = unspentcoins.TryGet(keyval=hash.ToBytes()) + + if state: + tx, height = self.GetTransaction(hash) + + for index, item in enumerate(state.Items): + if item & CoinState.Spent == 0: + unspents.append(tx.outputs[index]) + return unspents + + def GetUnclaimed(self, hash): + + tx, height = 
self.GetTransaction(hash) + + if tx is None: + return None + + out = {} + coins = DBInterface(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) + + state = coins.TryGet(keyval=hash.ToBytes()) + + if state: + for item in state.Items: + out[item.index] = SpentCoin(tx.outputs[item.index], height, item.height) + + return out + + def SearchAssetState(self, query): + res = [] + assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) + keys = assets.Keys + + if query.lower() == "neo": + query = "AntShare" + + if query.lower() in {"gas", "neogas"}: + query = "AntCoin" + + for item in keys: + asset = assets.TryGet(keyval=item) + if query in asset.Name.decode('utf-8'): + res.append(asset) + elif query in Crypto.ToAddress(asset.Issuer): + res.append(asset) + elif query in Crypto.ToAddress(asset.Admin): + res.append(asset) + + return res + + def GetEnrollments(self): + # abstract pass @staticmethod @@ -377,8 +774,7 @@ def GetConsensusAddress(validators): def GetValidators(self, others): - votes = Counter([len(vs.PublicKeys) for vs in self.GetVotes(others)]).items() - + # votes = Counter([len(vs.PublicKeys) for vs in self.GetVotes(others)]).items() # TODO: Sorting here may cost a lot of memory, considering whether to use other mechanisms # votes = GetVotes(others).OrderBy(p => p.PublicKeys.Length).ToArray() # int validators_count = (int)votes.WeightedFilter(0.25, 0.75, p => p.Count.GetData(), (p, w) => new @@ -402,20 +798,21 @@ def GetValidators(self, others): raise NotImplementedError() def GetNextBlockHash(self, hash): - # abstract - pass + if isinstance(hash, (UInt256, bytes)): + header = self.GetHeader(hash) + else: + # unclear why this branch exists + header = self.GetHeader(hash.ToBytes()) + + if header: + if header.Index + 1 >= len(self._header_index): + return None + return self._header_index[header.Index + 1] + return None def GetScript(self, script_hash): return self.GetContract(script_hash) - def GetStorageItem(self, storage_key): - # abstract - pass - - def 
GetSysFeeAmount(self, hash): - # abstract - pass - def GetSysFeeAmountByHeight(self, height): """ Get the system fee for the specified block. @@ -429,34 +826,20 @@ def GetSysFeeAmountByHeight(self, height): hash = self.GetBlockHash(height) return self.GetSysFeeAmount(hash) - def GetTransaction(self, hash): - return None, 0 - - def GetUnclaimed(self, hash): - # abstract - pass - - def GetUnspent(self, hash, index): - # abstract - pass - - def GetAllUnspent(self, hash): - # abstract - pass - - def GetVotes(self, transactions): - # abstract - pass + # def GetVotes(self, transactions): + # # abstract + # pass - def IsDoubleSpend(self, tx): - # abstract - pass + # def IsDoubleSpend(self, tx): + # # abstract + # pass def OnPersistCompleted(self, block): self.PersistCompleted.on_change(block) + @property def BlockCacheCount(self): - pass + return len(self._block_cache) def Pause(self): self._paused = True @@ -464,6 +847,213 @@ def Pause(self): def Resume(self): self._paused = False + def Persist(self, block): + + self._persisting_block = block + + accounts = DBInterface(self._db, DBPrefix.ST_Account, AccountState) + unspentcoins = DBInterface(self._db, DBPrefix.ST_Coin, UnspentCoinState) + spentcoins = DBInterface(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) + assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) + validators = DBInterface(self._db, DBPrefix.ST_Validator, ValidatorState) + contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(self._db, DBPrefix.ST_Storage, StorageItem) + + amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + block.TotalFees().value + amount_sysfee_bytes = amount_sysfee.to_bytes(8, 'little') + + to_dispatch = [] + + wb = self._db.getBatch() + + wb.put(DBPrefix.DATA_Block + block.Hash.ToBytes(), amount_sysfee_bytes + block.Trim()) + + for tx in block.Transactions: + + wb.put(DBPrefix.DATA_Transaction + tx.Hash.ToBytes(), block.IndexBytes() + tx.ToArray()) + + # go through all outputs 
and add unspent coins to them + + unspentcoinstate = UnspentCoinState.FromTXOutputsConfirmed(tx.outputs) + unspentcoins.Add(tx.Hash.ToBytes(), unspentcoinstate) + + # go through all the accounts in the tx outputs + for output in tx.outputs: + account = accounts.GetAndChange(output.AddressBytes, AccountState(output.ScriptHash)) + + if account.HasBalance(output.AssetId): + account.AddToBalance(output.AssetId, output.Value) + else: + account.SetBalanceFor(output.AssetId, output.Value) + + # go through all tx inputs + unique_tx_input_hashes = [] + for input in tx.inputs: + if input.PrevHash not in unique_tx_input_hashes: + unique_tx_input_hashes.append(input.PrevHash) + + for txhash in unique_tx_input_hashes: + prevTx, height = self.GetTransaction(txhash.ToBytes()) + coin_refs_by_hash = [coinref for coinref in tx.inputs if + coinref.PrevHash.ToBytes() == txhash.ToBytes()] + for input in coin_refs_by_hash: + + uns = unspentcoins.GetAndChange(input.PrevHash.ToBytes()) + uns.OrEqValueForItemAt(input.PrevIndex, CoinState.Spent) + + if prevTx.outputs[input.PrevIndex].AssetId.ToBytes() == Blockchain.SystemShare().Hash.ToBytes(): + sc = spentcoins.GetAndChange(input.PrevHash.ToBytes(), + SpentCoinState(input.PrevHash, height, [])) + sc.Items.append(SpentCoinItem(input.PrevIndex, block.Index)) + + output = prevTx.outputs[input.PrevIndex] + acct = accounts.GetAndChange(prevTx.outputs[input.PrevIndex].AddressBytes, + AccountState(output.ScriptHash)) + assetid = prevTx.outputs[input.PrevIndex].AssetId + acct.SubtractFromBalance(assetid, prevTx.outputs[input.PrevIndex].Value) + + # do a whole lotta stuff with tx here... 
+ if tx.Type == TransactionType.RegisterTransaction: + asset = AssetState(tx.Hash, tx.AssetType, tx.Name, tx.Amount, + Fixed8(0), tx.Precision, Fixed8(0), Fixed8(0), UInt160(data=bytearray(20)), + tx.Owner, tx.Admin, tx.Admin, block.Index + 2 * 2000000, False) + + assets.Add(tx.Hash.ToBytes(), asset) + + elif tx.Type == TransactionType.IssueTransaction: + + txresults = [result for result in tx.GetTransactionResults() if result.Amount.value < 0] + for result in txresults: + asset = assets.GetAndChange(result.AssetId.ToBytes()) + asset.Available = asset.Available - result.Amount + + elif tx.Type == TransactionType.ClaimTransaction: + for input in tx.Claims: + + sc = spentcoins.TryGet(input.PrevHash.ToBytes()) + if sc and sc.HasIndex(input.PrevIndex): + sc.DeleteIndex(input.PrevIndex) + spentcoins.GetAndChange(input.PrevHash.ToBytes()) + + elif tx.Type == TransactionType.EnrollmentTransaction: + newvalidator = ValidatorState(pub_key=tx.PublicKey) + validators.GetAndChange(tx.PublicKey.ToBytes(), newvalidator) + elif tx.Type == TransactionType.StateTransaction: + # @TODO Implement persistence for State Descriptors + pass + + elif tx.Type == TransactionType.PublishTransaction: + contract = ContractState(tx.Code, tx.NeedStorage, tx.Name, tx.CodeVersion, + tx.Author, tx.Email, tx.Description) + + contracts.GetAndChange(tx.Code.ScriptHash().ToBytes(), contract) + elif tx.Type == TransactionType.InvocationTransaction: + from neo.SmartContract.StateMachine import StateMachine + + script_table = CachedScriptTable(contracts) + service = StateMachine(accounts, validators, assets, contracts, storages, wb) + + engine = ApplicationEngine( + trigger_type=TriggerType.Application, + container=tx, + table=script_table, + service=service, + gas=tx.Gas, + testMode=False + ) + + engine.LoadScript(tx.Script) + + try: + success = engine.Execute() + service.ExecutionCompleted(engine, success) + + except Exception as e: + service.ExecutionCompleted(engine, False, e) + + to_dispatch = 
to_dispatch + service.events_to_dispatch + else: + + if tx.Type != b'\x00' and tx.Type != 128: + logger.info("TX Not Found %s " % tx.Type) + + # do save all the accounts, unspent, coins, validators, assets, etc + # now sawe the current sys block + + # filter out accounts to delete then commit + for key, account in accounts.Current.items(): + if not account.IsFrozen and len(account.Votes) == 0 and account.AllBalancesZeroOrLess(): + accounts.Remove(key) + + accounts.Commit() + + # filte out unspent coins to delete then commit + for key, unspent in unspentcoins.Current.items(): + if unspent.IsAllSpent: + unspentcoins.Remove(key) + unspentcoins.Commit() + + # filter out spent coins to delete then commit to db + for key, spent in spentcoins.Current.items(): + if len(spent.Items) == 0: + spentcoins.Remove(key) + + spentcoins.Commit() + validators.Commit() + assets.Commit() + contracts.Commit() + + wb.put(DBPrefix.SYS_CurrentBlock, block.Hash.ToBytes() + block.IndexBytes()) + self._current_block_height = block.Index + self._persisting_block = None + + self.TXProcessed += len(block.Transactions) + + self._db.dropBatch() + + for event in to_dispatch: + events.emit(event.event_type, event) + + def PersistBlocks(self, limit=None): + ctr = 0 + if not self._paused: + # TODO + while not self._disposed: + + if len(self._header_index) <= self._current_block_height + 1: + break + + hash = self._header_index[self._current_block_height + 1] + + if hash not in self._block_cache: + self.BlockSearchTries += 1 + break + + self.BlockSearchTries = 0 + block = self._block_cache[hash] + + try: + self.Persist(block) + del self._block_cache[hash] + except Exception as e: + logger.info(f"Could not persist block {block.Index} reason: {e}") + raise e + + try: + self.OnPersistCompleted(block) + except Exception as e: + logger.debug(f"Failed to broadcast OnPersistCompleted event, reason: {e}") + raise e + + ctr += 1 + # give the reactor the opportunity to preempt + if limit and ctr == limit: + 
break + + def Dispose(self): + self._db.closeDB() + self._disposed = True + @staticmethod def RegisterBlockchain(blockchain): """ diff --git a/neo/Settings.py b/neo/Settings.py index 47cee8ffa..e7af234ed 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -68,6 +68,8 @@ def check_depdendencies(): raise SystemCheckError("Required dependency %s is not installed. Please run 'pip install -e .'" % dep) + + class SettingsHolder: """ This class holds all the settings. Needs to be setup with one of the @@ -143,6 +145,17 @@ def debug_storage_leveldb_path(self): self.check_chain_dir_exists() return os.path.abspath(os.path.join(self.DATA_DIR_PATH, self.DEBUG_STORAGE_PATH)) + def database_properties(self): + return {'blockchain': {'path': self.chain_leveldb_path, + 'backend': 'leveldb'}, + + 'notification': {'path': self.notification_leveldb_path, + 'backend': 'leveldb'}, + + 'debug': {'path': self.debug_storage_leveldb_path, + 'backend': 'leveldb'} + } + # Helpers @property def is_mainnet(self): diff --git a/neo/SmartContract/StateMachine.py b/neo/SmartContract/StateMachine.py index 160b81d39..d6dea9af7 100644 --- a/neo/SmartContract/StateMachine.py +++ b/neo/SmartContract/StateMachine.py @@ -1,7 +1,6 @@ import sys from neo.Core.State.ContractState import ContractState from neo.Core.State.AssetState import AssetState -from neo.Core.Blockchain import Blockchain from neo.Core.FunctionCode import FunctionCode from neo.Core.State.StorageItem import StorageItem from neo.Core.State.StorageKey import StorageKey @@ -25,20 +24,19 @@ class StateMachine(StateReader): _validators = None - _wb = None _contracts_created = {} - def __init__(self, accounts, validators, assets, contracts, storages, wb): + def __init__(self, accounts, validators, assets, contracts, storages, chain): super(StateMachine, self).__init__() + self._chain = chain self._accounts = accounts self._validators = validators self._assets = assets self._contracts = contracts self._storages = storages - self._wb = wb 
self._accounts.MarkForReset() self._validators.MarkForReset() @@ -86,12 +84,11 @@ def ExecutionCompleted(self, engine, success, error=None): super(StateMachine, self).ExecutionCompleted(engine, success, error) def Commit(self): - if self._wb is not None: - self._accounts.Commit(self._wb, False) - self._validators.Commit(self._wb, False) - self._assets.Commit(self._wb, False) - self._contracts.Commit(self._wb, False) - self._storages.Commit(self._wb, False) + self._accounts.Commit(False) + self._validators.Commit(False) + self._assets.Commit(False) + self._contracts.Commit(False) + self._storages.Commit(False) def ResetState(self): self._accounts.Reset() @@ -102,7 +99,7 @@ def ResetState(self): def TestCommit(self): if self._storages.DebugStorage: - self._storages.Commit(self._wb, False) + self._storages.Commit(False) def Deprecated_Method(self, engine): logger.debug("Method No Longer operational") @@ -166,7 +163,7 @@ def Asset_Create(self, engine: ExecutionEngine): asset_id=tx.Hash, asset_type=asset_type, name=name, amount=amount, available=Fixed8.Zero(), precision=precision, fee_mode=0, fee=Fixed8.Zero(), fee_addr=UInt160(), owner=owner, admin=admin, issuer=issuer, - expiration=Blockchain.Default().Height + 1 + 2000000, is_frozen=False + expiration=self._chain.Default().Height + 1 + 2000000, is_frozen=False ) asset = self._assets.ReplaceOrAdd(tx.Hash.ToBytes(), new_asset) @@ -189,8 +186,8 @@ def Asset_Renew(self, engine: ExecutionEngine): asset = self._assets.GetAndChange(current_asset.AssetId.ToBytes()) - if asset.Expiration < Blockchain.Default().Height + 1: - asset.Expiration = Blockchain.Default().Height + 1 + if asset.Expiration < self._chain.Default().Height + 1: + asset.Expiration = self._chain.Default().Height + 1 try: @@ -259,7 +256,7 @@ def Contract_Create(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_CREATED, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, 
Blockchain.Default().Height + 1, + hash, self._chain.Default().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) return True @@ -327,7 +324,7 @@ def Contract_Migrate(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_MIGRATED, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, Blockchain.Default().Height + 1, + hash, self._chain.Default().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) @@ -367,7 +364,7 @@ def Contract_Destroy(self, engine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_DESTROY, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, Blockchain.Default().Height + 1, + hash, self._chain.Default().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) return True @@ -408,7 +405,7 @@ def Storage_Put(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.STORAGE_PUT, ContractParameter(ContractParameterType.String, '%s -> %s' % (keystr, valStr)), - context.ScriptHash, Blockchain.Default().Height + 1, + context.ScriptHash, self._chain.Default().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) @@ -430,7 +427,7 @@ def Storage_Delete(self, engine: ExecutionEngine): keystr = Crypto.ToAddress(UInt160(data=key)) self.events_to_dispatch.append(SmartContractEvent(SmartContractEvent.STORAGE_DELETE, ContractParameter(ContractParameterType.String, keystr), - context.ScriptHash, Blockchain.Default().Height + 1, + context.ScriptHash, self._chain.Default().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) diff --git a/neo/Storage/Common/CachedScriptTable.py b/neo/Storage/Common/CachedScriptTable.py new file mode 
100644 index 000000000..4c73ada35 --- /dev/null +++ b/neo/Storage/Common/CachedScriptTable.py @@ -0,0 +1,24 @@ +from neo.VM.Mixins import ScriptTableMixin + + +class CachedScriptTable(ScriptTableMixin): + + contracts = None + + def __init__(self, contracts): + self.contracts = contracts + + def GetScript(self, script_hash): + + contract = self.contracts.TryGet(script_hash) + + if contract is not None: + return contract.Code.Script + + return None + + def GetContractState(self, script_hash): + + contract = self.contracts.TryGet(script_hash) + + return contract diff --git a/neo/Storage/Common/DBPrefix.py b/neo/Storage/Common/DBPrefix.py new file mode 100644 index 000000000..2ea0a0819 --- /dev/null +++ b/neo/Storage/Common/DBPrefix.py @@ -0,0 +1,18 @@ +class DBPrefix: + + DATA_Block = b'\x01' + DATA_Transaction = b'\x02' + + ST_Account = b'\x40' + ST_Coin = b'\x44' + ST_SpentCoin = b'\x45' + ST_Validator = b'\x48' + ST_Asset = b'\x4c' + ST_Contract = b'\x50' + ST_Storage = b'\x70' + + IX_HeaderHashList = b'\x80' + + SYS_CurrentBlock = b'\xc0' + SYS_CurrentHeader = b'\xc1' + SYS_Version = b'\xf0' diff --git a/neo/Storage/Common/DebugStorage.py b/neo/Storage/Common/DebugStorage.py new file mode 100644 index 000000000..19014e00d --- /dev/null +++ b/neo/Storage/Common/DebugStorage.py @@ -0,0 +1,40 @@ +from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix +from neo.Blockchain import GetBlockchain +import plyvel +from neo.Settings import settings +from neo.logging import log_manager + +logger = log_manager.getLogger('db') + + +class DebugStorage: + __instance = None + + @property + def db(self): + return self._db + + def reset(self): + for key in self._db.iterator(prefix=DBPrefix.ST_Storage, include_value=False): + self._db.delete(key) + + def clone_from_live(self): + clone_db = GetBlockchain()._db.snapshot() + for key, value in clone_db.iterator(prefix=DBPrefix.ST_Storage, include_value=True): + self._db.put(key, value) + + def __init__(self): + + try: + # 
TODO_MERL: generic db support + self._db = plyvel.DB(settings.debug_storage_leveldb_path, create_if_missing=True) + except Exception as e: + logger.info("DEBUG leveldb unavailable, you may already be running this process: %s " % e) + raise Exception('DEBUG Leveldb Unavailable %s ' % e) + + @staticmethod + def instance(): + if not DebugStorage.__instance: + DebugStorage.__instance = DebugStorage() + DebugStorage.__instance.clone_from_live() + return DebugStorage.__instance diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py new file mode 100644 index 000000000..bd1333cc7 --- /dev/null +++ b/neo/Storage/Implementation/DBFactory.py @@ -0,0 +1,81 @@ +from neo.Storage.Interface.AbstractDBInterface import AbstractDBInterface +from neo.Settings import settings +from neo.logging import log_manager + + +"""Module is used to access the different databases. +Import the module and use the getters to access the different databases. +Configuration is done in neo.Settings.DATABASE_PROPS dict. 
+""" + +logger = log_manager.getLogger('DBFactory') + +BC_CONST = 'blockchain' +NOTIF_CONST = 'notification' +DEBUG_CONST = 'debug' + +print('props ', settings.database_properties()) +DATABASE_PROPS = settings.database_properties() + +_blockchain_db_instance = None + +_notif_db_instance = None + +_debug_db_instance = None + + +def getBlockchainDB(): + return _blockchain_db_instance + + +def getNotificationDB(): + return _notif_db_instance + + +def getDebugStorageDB(): + return _debug_db_instance + + +def _dbFactory(dbType, properties): + + if dbType == 'blockchain': + if properties['backend'] == 'leveldb': + + # import what's needed + import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions + + methods = [x for x in dir(functions) if not x.startswith('__')] + + # build attributes dict + attributes = {methods[i]: getattr( + functions, methods[i]) for i in range(0, len(methods))} + + # add __init__ method + attributes['__init__'] = attributes.pop(functions._init_method) + + print(attributes) + + return type( + properties['backend'].title()+'DBImpl'+dbType.title(), + (AbstractDBInterface,), + attributes) + + if dbType == 'notification': + raise Exception('Not yet implemented') + + if dbType == 'debug': + raise Exception('Not yet implemented') + + +BlockchainDB = _dbFactory(BC_CONST, DATABASE_PROPS[BC_CONST]) + +# NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) + +# DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) + + +_blockchain_db_instance = BlockchainDB(DATABASE_PROPS[BC_CONST]['path']) + +# _notif_db_instance = NotificationDB(DATABASE_PROPS[NOTIF_CONST]) + +# _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py new file mode 100644 index 000000000..4be62de6f --- /dev/null +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -0,0 +1,94 @@ +import 
plyvel +from neo.logging import log_manager + +logger = log_manager.getLogger('LevelDB') + +"""Document me""" + +_init_method = '_db_init' + +_path = None + +_db = None + +_iter = None + +_snapshot = None + +_batch = None + + +@property +def Path(self): + return self._path + + +def _db_init(self, path): + try: + self._path = path + print('path:::: ', path) + self._db = plyvel.DB(path, create_if_missing=True) + logger.info("Created Blockchain DB at %s " % self._path) + except Exception as e: + raise Exception("leveldb exception [ %s ]" % e) + + +def write(self, key, value): + self._db.put(key, value) + + +def writeBatch(self, batch: dict): + with self._db.write_batch() as wb: + for key, value in batch.items(): + wb.put(key, value) + + +def get(self, key, default=None): + _value = self._db.get(key, default) + return _value + + +def delete(self, key): + self._db.delete(key) + + +def deleteBatch(self, batch: dict): + with self._db.write_batch() as wb: + for key in batch: + wb.delete(key) + + +def createSnapshot(self): + self._snapshot = self._db.snapshot() + return self._snapshot + + +def dropSnapshot(self): + self._snapshot.close() + self._snapshot = None + + +def openIter(self, properties, start=None, end=None): + # TODO start implement start and end + + self._iter = self._db.iterator( + properties.prefix, + properties.include_value) + return self._iter + + +def getBatch(self): + self._batch = self._db.write_batch() + + +def dropBatch(self): + self._batch = None + + +def closeIter(self): + self._iter.close() + self._iter = None + + +def closeDB(self): + self._db.close() diff --git a/neo/Storage/Implementation/LevelDB/test_leveldb.py b/neo/Storage/Implementation/LevelDB/test_leveldb.py new file mode 100644 index 000000000..98fb747c3 --- /dev/null +++ b/neo/Storage/Implementation/LevelDB/test_leveldb.py @@ -0,0 +1,8 @@ +from neo.Storage.Implementation.LevelDB.LevelDBImpl import LevelDBImpl + +db = 
LevelDBImpl(b"/home/enzensbe/source/neo-python/neo/Storage/Implementation/LevelDB/1") +print(db) +db2 = LevelDBImpl(b"/home/enzensbe/source/neo-python/neo/Storage/Implementation/LevelDB/2") +print(db2) +db3 = LevelDBImpl(b"/home/enzensbe/source/neo-python/neo/Storage/Implementation/LevelDB/2") +print(db3) diff --git a/neo/Storage/Implementation/__init__.py b/neo/Storage/Implementation/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neo/Storage/Implementation/test2.py b/neo/Storage/Implementation/test2.py new file mode 100644 index 000000000..68b2e08d4 --- /dev/null +++ b/neo/Storage/Implementation/test2.py @@ -0,0 +1,4 @@ +from DBFactory import getBlockchainDB + +db2 = getBlockchainDB() +print('db2 ', db2) diff --git a/neo/Storage/Implementation/test_db_factory.py b/neo/Storage/Implementation/test_db_factory.py new file mode 100644 index 000000000..85dbffa71 --- /dev/null +++ b/neo/Storage/Implementation/test_db_factory.py @@ -0,0 +1,15 @@ +from DBFactory import getBlockchainDB +import test2 + + +_db = getBlockchainDB() + +# _db.write(b'1', b'first') + +ret_1 = test2.db2.get(b'1') +ret_default = _db.get(b'2', b'default_value') + +print(ret_1, ret_default) +assert ret_1 == b'first' +assert ret_default == b'default_value' + diff --git a/neo/Storage/Interface/AbstractDBInterface.py b/neo/Storage/Interface/AbstractDBInterface.py new file mode 100644 index 000000000..c7b90e6df --- /dev/null +++ b/neo/Storage/Interface/AbstractDBInterface.py @@ -0,0 +1,25 @@ +from abc import ABC, abstractmethod + + +class AbstractDBInterface(ABC): + + @abstractmethod + def write(self, key, value): raise NotImplementedError + + @abstractmethod + def writeBatch(self, batch): raise NotImplementedError + + @abstractmethod + def get(self, key): raise NotImplementedError + + @abstractmethod + def createSnapshot(self): raise NotImplementedError + + @abstractmethod + def dropSnapshot(self, snapshot): raise NotImplementedError + + @abstractmethod + def openIter(self, 
properties, start=None, end=None): raise NotImplementedError + + @abstractmethod + def closeIter(self): raise NotImplementedError diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py new file mode 100644 index 000000000..b205eff41 --- /dev/null +++ b/neo/Storage/Interface/DBInterface.py @@ -0,0 +1,242 @@ +import binascii +from neo.SmartContract.Iterable import EnumeratorBase +from neo.logging import log_manager + +logger = log_manager.getLogger('DBCollection') + + +class DBProperties: + + prefix = None + include_value = None + include_key = None + + def __init__(self, prefix, include_value=False, include_key=True): + self.prefix = prefix + self.include_value = include_value + self.include_key = include_key + + +class DBInterface(object): + + _dbInstance = None + + Prefix = None + + ClassRef = None + + Collection = {} + + Changed = [] + Deleted = [] + + _built_keys = False + + _ChangedResetState = None + _DeletedResetState = None + + _batch_changed = {} + + def __init__(self, db, prefix, class_ref): + + self.DB = db + + self.Prefix = prefix + + self.ClassRef = class_ref + + self.Collection = {} + self.Changed = [] + self.Deleted = [] + + self._ChangedResetState = None + self._DeletedResetState = None + + self._batch_changed = {} + + @property + def Keys(self): + if not self._built_keys: + self._BuildCollectionKeys() + + return self.Collection.keys() + + def _BuildCollectionKeys(self): + for key in self.DB.openIter(DBProperties(self.Prefix)): + key = key[1:] + if key not in self.Collection.keys(): + self.Collection[key] = None + + # clean up + self.DB.closeIter() + + def Commit(self, destroy=True): + + if self.Changed: + for keyval in self.Changed: + item = self.Collection[keyval] + if item: + self._batch_changed[self.Prefix + keyval] = self.Collection[keyval].ToByteArray() + self.DB.writeBatch(self._batch_changed) + + if self.Deleted: + self.DB.deleteBatch(self.Prefix + keyval for keyval in self.Deleted) + for keyval in 
self.Deleted: + self.Collection[keyval] = None + + if destroy: + self.Destroy() + else: + self.Changed = [] + self.Deleted = [] + self._ChangedResetState = None + self._DeletedResetState = None + + def Reset(self): + self.Changed = self._ChangedResetState + self.Deleted = self._DeletedResetState + + self._ChangedResetState = None + self._DeletedResetState = None + + def GetAndChange(self, keyval, new_instance=None, debug_item=False): + + item = self.TryGet(keyval) + + if item is None: + if new_instance is None: + item = self.ClassRef() + else: + item = new_instance + + self.Add(keyval, item) + + self.MarkChanged(keyval) + + return item + + def ReplaceOrAdd(self, keyval, new_instance): + + item = new_instance + + if keyval in self.Deleted: + self.Deleted.remove(keyval) + + self.Add(keyval, item) + + return item + + def GetOrAdd(self, keyval, new_instance): + + existing = self.TryGet(keyval) + + if existing: + return existing + + item = new_instance + + if keyval in self.Deleted: + self.Deleted.remove(keyval) + + self.Add(keyval, item) + + return item + + def TryGet(self, keyval): + + if keyval in self.Deleted: + return None + + if keyval in self.Collection.keys(): + item = self.Collection[keyval] + if item is None: + item = self._GetItem(keyval) + self.MarkChanged(keyval) + return item + + # otherwise, check in the database + key = self.DB.get(self.Prefix + keyval) + + # if the key is there, get the item + if key is not None: + self.MarkChanged(keyval) + + item = self._GetItem(keyval) + + return item + + return None + + def _GetItem(self, keyval): + if keyval in self.Deleted: + return None + + try: + buffer = self.DB.get(self.Prefix + keyval) + if buffer: + item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(buffer)) + self.Collection[keyval] = item + return item + return None + except Exception as e: + logger.error("Could not deserialize item from key %s : %s" % (keyval, e)) + + return None + + def Add(self, keyval, item): + self.Collection[keyval] = item + 
self.MarkChanged(keyval) + + def Remove(self, keyval): + if keyval not in self.Deleted: + self.Deleted.append(keyval) + + def MarkForReset(self): + self._ChangedResetState = self.Changed + self._DeletedResetState = self.Deleted + + def MarkChanged(self, keyval): + if keyval not in self.Changed: + self.Changed.append(keyval) + + def TryFind(self, key_prefix): + candidates = {} + for keyval in self.Collection.keys(): + # See if we find a partial match in the keys that have not been committed yet, excluding those that are to be deleted + if key_prefix in keyval and keyval not in self.Deleted: + candidates[keyval[20:]] = self.Collection[keyval].Value + + db_results = self.Find(key_prefix) + + # {**x, **y} merges two dictionaries, with the values of y overwriting the vals of x + # without this merge, you sometimes get 2 results for each key + # then take the dict and make a list of tuples + final_collection = [(k, v) for k, v in {**db_results, **candidates}.items()] + + return EnumeratorBase(iter(final_collection)) + + def Find(self, key_prefix): + key_prefix = self.Prefix + key_prefix + res = {} + for key, val in self.DB.openIter(DBProperties(self.Prefix, include_value=True)): + # we want the storage item, not the raw bytes + item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(val)).Value + # also here we need to skip the 1 byte storage prefix + res_key = key[21:] + res[res_key] = item + + # clean up + self.DB.closeIter() + + return res + + def Destroy(self): + self.DB = None + self.Collection = None + self.ClassRef = None + self.Prefix = None + self.Deleted = None + self.Changed = None + self._ChangedResetState = None + self._DeletedResetState = None + logger = None diff --git a/neo/Storage/Interface/__init__.py b/neo/Storage/Interface/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neo/Storage/Interface/test_db_interface.py b/neo/Storage/Interface/test_db_interface.py new file mode 100644 index 000000000..3940df7cd --- /dev/null +++ 
b/neo/Storage/Interface/test_db_interface.py @@ -0,0 +1 @@ +from diff --git a/neo/Storage/__init__.py b/neo/Storage/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neo/bin/api_server.py b/neo/bin/api_server.py index ceb3e1d55..fa4055a76 100755 --- a/neo/bin/api_server.py +++ b/neo/bin/api_server.py @@ -52,7 +52,7 @@ # neo methods and modules from neo.Core.Blockchain import Blockchain -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain +from neo.Storage.Implementation.DBFactory import getBlockchainDB from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB from neo.Wallets.utils import to_aes_key from neo.Implementations.Wallets.peewee.UserWallet import UserWallet @@ -259,7 +259,7 @@ def loopingCallErrorHandler(error): logger.info("Error in loop: %s " % error) # Instantiate the blockchain and subscribe to notifications - blockchain = LevelDBBlockchain(settings.chain_leveldb_path) + blockchain = Blockchain(getBlockchainDB()) Blockchain.RegisterBlockchain(blockchain) start_block_persisting() diff --git a/neo/bin/prompt.py b/neo/bin/prompt.py index ff3dc3908..b6c22e813 100755 --- a/neo/bin/prompt.py +++ b/neo/bin/prompt.py @@ -11,7 +11,7 @@ from twisted.internet import reactor, task from neo import __version__ from neo.Core.Blockchain import Blockchain -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain +from neo.Storage.Implementation.DBFactory import getBlockchainDB from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB from neo.Network.NodeLeader import NodeLeader from neo.Prompt.Commands.Wallet import CommandWallet @@ -298,7 +298,7 @@ def main(): settings.set_max_peers(args.maxpeers) # Instantiate the blockchain and subscribe to notifications - blockchain = LevelDBBlockchain(settings.chain_leveldb_path) + blockchain = Blockchain(getBlockchainDB()) Blockchain.RegisterBlockchain(blockchain) # Try to set up a 
notification db From 76542b8a2eb4ed4af14198a744bcee81fc67fd76 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Sun, 14 Apr 2019 13:07:20 +0200 Subject: [PATCH 02/23] removed old LevelDB implementation --- .../Blockchains/LevelDB/CachedScriptTable.py | 24 - .../Blockchains/LevelDB/DBCollection.py | 236 ----- .../Blockchains/LevelDB/DBPrefix.py | 18 - .../Blockchains/LevelDB/DebugStorage.py | 39 - .../Blockchains/LevelDB/LevelDBBlockchain.py | 868 ------------------ .../LevelDB/TestLevelDBBlockchain.py | 151 --- .../Blockchains/LevelDB/__init__.py | 0 .../LevelDB/test_LevelDBBlockchain.py | 80 -- .../Blockchains/LevelDB/tests/__init__.py | 0 .../LevelDB/tests/test_initial_db.py | 64 -- .../Blockchains/LevelDB/tests/test_leveldb.py | 14 - neo/Implementations/Blockchains/__init__.py | 0 12 files changed, 1494 deletions(-) delete mode 100644 neo/Implementations/Blockchains/LevelDB/CachedScriptTable.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/DBCollection.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/DBPrefix.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/DebugStorage.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/TestLevelDBBlockchain.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/__init__.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/test_LevelDBBlockchain.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/tests/__init__.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/tests/test_initial_db.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/tests/test_leveldb.py delete mode 100644 neo/Implementations/Blockchains/__init__.py diff --git a/neo/Implementations/Blockchains/LevelDB/CachedScriptTable.py b/neo/Implementations/Blockchains/LevelDB/CachedScriptTable.py deleted file mode 100644 index 4c73ada35..000000000 --- 
a/neo/Implementations/Blockchains/LevelDB/CachedScriptTable.py +++ /dev/null @@ -1,24 +0,0 @@ -from neo.VM.Mixins import ScriptTableMixin - - -class CachedScriptTable(ScriptTableMixin): - - contracts = None - - def __init__(self, contracts): - self.contracts = contracts - - def GetScript(self, script_hash): - - contract = self.contracts.TryGet(script_hash) - - if contract is not None: - return contract.Code.Script - - return None - - def GetContractState(self, script_hash): - - contract = self.contracts.TryGet(script_hash) - - return contract diff --git a/neo/Implementations/Blockchains/LevelDB/DBCollection.py b/neo/Implementations/Blockchains/LevelDB/DBCollection.py deleted file mode 100644 index b659b43c1..000000000 --- a/neo/Implementations/Blockchains/LevelDB/DBCollection.py +++ /dev/null @@ -1,236 +0,0 @@ -import binascii -from neo.SmartContract.Iterable import EnumeratorBase -from neo.logging import log_manager - -logger = log_manager.getLogger('db') - - -class DBCollection: - DB = None - Prefix = None - - ClassRef = None - - Collection = {} - - Changed = [] - Deleted = [] - - _built_keys = False - - DebugStorage = False - - _ChangedResetState = None - _DeletedResetState = None - - def __init__(self, db, prefix, class_ref): - - self.DB = db - - self.Prefix = prefix - - self.ClassRef = class_ref - - self.Collection = {} - self.Changed = [] - self.Deleted = [] - - self._ChangedResetState = None - self._DeletedResetState = None - - @property - def Keys(self): - if not self._built_keys: - self._BuildCollectionKeys() - - return self.Collection.keys() - - @property - def Current(self): - try: - ret = {} - for key, val in self.Collection.items(): - if val is not None: - ret[key] = val - return ret - except Exception as e: - logger.error("error getting items %s " % e) - - return {} - - def _BuildCollectionKeys(self): - for key in self.DB.iterator(prefix=self.Prefix, include_value=False): - key = key[1:] - if key not in self.Collection.keys(): - self.Collection[key] = 
None - - def Commit(self, wb, destroy=True): - - for keyval in self.Changed: - item = self.Collection[keyval] - if item: - if not wb: - self.DB.put(self.Prefix + keyval, self.Collection[keyval].ToByteArray()) - else: - wb.put(self.Prefix + keyval, self.Collection[keyval].ToByteArray()) - for keyval in self.Deleted: - if not wb: - self.DB.delete(self.Prefix + keyval) - else: - wb.delete(self.Prefix + keyval) - self.Collection[keyval] = None - if destroy: - self.Destroy() - else: - self.Changed = [] - self.Deleted = [] - self._ChangedResetState = None - self._DeletedResetState = None - - def Reset(self): - self.Changed = self._ChangedResetState - self.Deleted = self._DeletedResetState - - self._ChangedResetState = None - self._DeletedResetState = None - - def GetAndChange(self, keyval, new_instance=None, debug_item=False): - - item = self.TryGet(keyval) - - if item is None: - if new_instance is None: - item = self.ClassRef() - else: - item = new_instance - - self.Add(keyval, item) - - self.MarkChanged(keyval) - - return item - - def ReplaceOrAdd(self, keyval, new_instance): - - item = new_instance - - if keyval in self.Deleted: - self.Deleted.remove(keyval) - - self.Add(keyval, item) - - return item - - def GetOrAdd(self, keyval, new_instance): - - existing = self.TryGet(keyval) - - if existing: - return existing - - item = new_instance - - if keyval in self.Deleted: - self.Deleted.remove(keyval) - - self.Add(keyval, item) - - return item - - def GetItemBy(self, keyval): - return self.GetAndChange(keyval) - - def TryGet(self, keyval): - - if keyval in self.Deleted: - return None - - if keyval in self.Collection.keys(): - item = self.Collection[keyval] - if item is None: - item = self._GetItem(keyval) - self.MarkChanged(keyval) - return item - - # otherwise, chekc in the database - key = self.DB.get(self.Prefix + keyval) - - # if the key is there, get the item - if key is not None: - self.MarkChanged(keyval) - - item = self._GetItem(keyval) - - return item - - return 
None - - def _GetItem(self, keyval): - if keyval in self.Deleted: - return None - - try: - buffer = self.DB.get(self.Prefix + keyval) - if buffer: - item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(buffer)) - self.Collection[keyval] = item - return item - return None - except Exception as e: - logger.error("Could not deserialize item from key %s : %s" % (keyval, e)) - - return None - - def Add(self, keyval, item): - self.Collection[keyval] = item - self.MarkChanged(keyval) - - def Remove(self, keyval): - if keyval not in self.Deleted: - self.Deleted.append(keyval) - - def MarkForReset(self): - self._ChangedResetState = self.Changed - self._DeletedResetState = self.Deleted - - def MarkChanged(self, keyval): - if keyval not in self.Changed: - self.Changed.append(keyval) - - def TryFind(self, key_prefix): - candidates = {} - for keyval in self.Collection.keys(): - # See if we find a partial match in the keys that not have been committed yet, excluding those that are to be deleted - if key_prefix in keyval and keyval not in self.Deleted: - candidates[keyval[20:]] = self.Collection[keyval].Value - - db_results = self.Find(key_prefix) - - # {**x, **y} merges two dictionaries, with the values of y overwriting the vals of x - # withouth this merge, you sometimes get 2 results for each key - # then take the dict and make a list of tuples - final_collection = [(k, v) for k, v in {**db_results, **candidates}.items()] - - return EnumeratorBase(iter(final_collection)) - - def Find(self, key_prefix): - key_prefix = self.Prefix + key_prefix - res = {} - for key, val in self.DB.iterator(prefix=key_prefix): - # we want the storage item, not the raw bytes - item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(val)).Value - # also here we need to skip the 1 byte storage prefix - res_key = key[21:] - res[res_key] = item - return res - - def Destroy(self): - self.DB = None - self.Collection = None - self.ClassRef = None - self.Prefix = None - self.Deleted = None - 
self.Changed = None - self._ChangedResetState = None - self._DeletedResetState = None - logger = None diff --git a/neo/Implementations/Blockchains/LevelDB/DBPrefix.py b/neo/Implementations/Blockchains/LevelDB/DBPrefix.py deleted file mode 100644 index 2ea0a0819..000000000 --- a/neo/Implementations/Blockchains/LevelDB/DBPrefix.py +++ /dev/null @@ -1,18 +0,0 @@ -class DBPrefix: - - DATA_Block = b'\x01' - DATA_Transaction = b'\x02' - - ST_Account = b'\x40' - ST_Coin = b'\x44' - ST_SpentCoin = b'\x45' - ST_Validator = b'\x48' - ST_Asset = b'\x4c' - ST_Contract = b'\x50' - ST_Storage = b'\x70' - - IX_HeaderHashList = b'\x80' - - SYS_CurrentBlock = b'\xc0' - SYS_CurrentHeader = b'\xc1' - SYS_Version = b'\xf0' diff --git a/neo/Implementations/Blockchains/LevelDB/DebugStorage.py b/neo/Implementations/Blockchains/LevelDB/DebugStorage.py deleted file mode 100644 index 61da6cb30..000000000 --- a/neo/Implementations/Blockchains/LevelDB/DebugStorage.py +++ /dev/null @@ -1,39 +0,0 @@ -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix -from neo.Blockchain import GetBlockchain -import plyvel -from neo.Settings import settings -from neo.logging import log_manager - -logger = log_manager.getLogger('db') - - -class DebugStorage: - __instance = None - - @property - def db(self): - return self._db - - def reset(self): - for key in self._db.iterator(prefix=DBPrefix.ST_Storage, include_value=False): - self._db.delete(key) - - def clone_from_live(self): - clone_db = GetBlockchain()._db.snapshot() - for key, value in clone_db.iterator(prefix=DBPrefix.ST_Storage, include_value=True): - self._db.put(key, value) - - def __init__(self): - - try: - self._db = plyvel.DB(settings.debug_storage_leveldb_path, create_if_missing=True) - except Exception as e: - logger.info("DEBUG leveldb unavailable, you may already be running this process: %s " % e) - raise Exception('DEBUG Leveldb Unavailable %s ' % e) - - @staticmethod - def instance(): - if not DebugStorage.__instance: - 
DebugStorage.__instance = DebugStorage() - DebugStorage.__instance.clone_from_live() - return DebugStorage.__instance diff --git a/neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py b/neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py deleted file mode 100644 index 19dbb2e3a..000000000 --- a/neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py +++ /dev/null @@ -1,868 +0,0 @@ -import plyvel -import binascii -from neo.Core.Blockchain import Blockchain -from neo.Core.Header import Header -from neo.Core.Block import Block -from neo.Core.TX.Transaction import Transaction, TransactionType -from neo.Core.IO.BinaryWriter import BinaryWriter -from neo.Core.IO.BinaryReader import BinaryReader -from neo.IO.MemoryStream import StreamManager -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection -from neo.Implementations.Blockchains.LevelDB.CachedScriptTable import CachedScriptTable -from neo.Core.Fixed8 import Fixed8 -from neo.Core.UInt160 import UInt160 -from neo.Core.UInt256 import UInt256 - -from neo.Core.State.UnspentCoinState import UnspentCoinState -from neo.Core.State.AccountState import AccountState -from neo.Core.State.CoinState import CoinState -from neo.Core.State.SpentCoinState import SpentCoinState, SpentCoinItem, SpentCoin -from neo.Core.State.AssetState import AssetState -from neo.Core.State.ValidatorState import ValidatorState -from neo.Core.State.ContractState import ContractState -from neo.Core.State.StorageItem import StorageItem -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix - -from neo.SmartContract.StateMachine import StateMachine -from neo.SmartContract.ApplicationEngine import ApplicationEngine -from neo.SmartContract import TriggerType -from neo.Core.Cryptography.Crypto import Crypto -from neo.Core.BigInteger import BigInteger -from neo.EventHub import events - -from prompt_toolkit import prompt -from neo.logging import log_manager - -logger = log_manager.getLogger('db') - - 
-class LevelDBBlockchain(Blockchain): - _path = None - _db = None - - _header_index = [] - _block_cache = {} - - _current_block_height = 0 - _stored_header_count = 0 - - _disposed = False - - _verify_blocks = False - - # this is the version of the database - # should not be updated for network version changes - _sysversion = b'schema v.0.6.9' - - _persisting_block = None - - TXProcessed = 0 - - @property - def CurrentBlockHash(self): - try: - return self._header_index[self._current_block_height] - except Exception as e: - logger.info("Could not get current block hash, returning none: %s ", ) - - return None - - @property - def CurrentBlockHashPlusOne(self): - try: - return self._header_index[self._current_block_height + 1] - except Exception as e: - pass - return self.CurrentBlockHash - - @property - def CurrentHeaderHash(self): - return self._header_index[-1] - - @property - def HeaderHeight(self): - height = len(self._header_index) - 1 - return height - - @property - def Height(self): - return self._current_block_height - - @property - def CurrentBlock(self): - if self._persisting_block: - return self._persisting_block - return self.GetBlockByHeight(self.Height) - - @property - def Path(self): - return self._path - - def __init__(self, path, skip_version_check=False, skip_header_check=False): - super(LevelDBBlockchain, self).__init__() - self._path = path - - self._header_index = [] - self._header_index.append(Blockchain.GenesisBlock().Header.Hash.ToBytes()) - - self.TXProcessed = 0 - - try: - self._db = plyvel.DB(self._path, create_if_missing=True) - logger.info("Created Blockchain DB at %s " % self._path) - except Exception as e: - logger.info("leveldb unavailable, you may already be running this process: %s " % e) - raise Exception('Leveldb Unavailable') - - version = self._db.get(DBPrefix.SYS_Version) - - if skip_version_check: - self._db.put(DBPrefix.SYS_Version, self._sysversion) - version = self._sysversion - - if version == self._sysversion: # or in the 
future, if version doesn't equal the current version... - - ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0)) - self._current_block_height = int.from_bytes(ba[-4:], 'little') - - if not skip_header_check: - ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0)) - current_header_height = int.from_bytes(ba[-4:], 'little') - current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8') - - hashes = [] - try: - for key, value in self._db.iterator(prefix=DBPrefix.IX_HeaderHashList): - ms = StreamManager.GetStream(value) - reader = BinaryReader(ms) - hlist = reader.Read2000256List() - key = int.from_bytes(key[-4:], 'little') - hashes.append({'k': key, 'v': hlist}) - StreamManager.ReleaseStream(ms) - except Exception as e: - logger.info("Could not get stored header hash list: %s " % e) - - if len(hashes): - hashes.sort(key=lambda x: x['k']) - genstr = Blockchain.GenesisBlock().Hash.ToBytes() - for hlist in hashes: - - for hash in hlist['v']: - if hash != genstr: - self._header_index.append(hash) - self._stored_header_count += 1 - - if self._stored_header_count == 0: - logger.info("Current stored headers empty, re-creating from stored blocks...") - headers = [] - for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block): - dbhash = bytearray(value)[8:] - headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0)) - - headers.sort(key=lambda h: h.Index) - for h in headers: - if h.Index > 0: - self._header_index.append(h.Hash.ToBytes()) - - # this will trigger the write of stored headers - if len(headers): - self.OnAddHeader(headers[-1]) - - elif current_header_height > self._stored_header_count: - - try: - hash = current_header_hash - targethash = self._header_index[-1] - - newhashes = [] - while hash != targethash: - header = self.GetHeader(hash) - newhashes.insert(0, header) - hash = header.PrevHash.ToBytes() - - self.AddHeaders(newhashes) - except Exception as e: - pass - - elif version is None: - 
self.Persist(Blockchain.GenesisBlock()) - self._db.put(DBPrefix.SYS_Version, self._sysversion) - else: - logger.error("\n\n") - logger.warning("Database schema has changed from %s to %s.\n" % (version, self._sysversion)) - logger.warning("You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain.") - - try: - res = prompt("Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> ") - except KeyboardInterrupt: - res = False - if res == 'continue': - - with self._db.write_batch() as wb: - for key, value in self._db.iterator(): - wb.delete(key) - - self.Persist(Blockchain.GenesisBlock()) - self._db.put(DBPrefix.SYS_Version, self._sysversion) - - else: - raise Exception("Database schema changed") - - def GetStates(self, prefix, classref): - return DBCollection(self._db, prefix, classref) - - def GetAccountState(self, address, print_all_accounts=False): - - if type(address) is str: - try: - address = address.encode('utf-8') - except Exception as e: - logger.info("could not convert argument to bytes :%s " % e) - return None - - accounts = DBCollection(self._db, DBPrefix.ST_Account, AccountState) - acct = accounts.TryGet(keyval=address) - - return acct - - def GetStorageItem(self, storage_key): - storages = DBCollection(self._db, DBPrefix.ST_Storage, StorageItem) - item = storages.TryGet(storage_key.ToArray()) - return item - - def SearchContracts(self, query): - res = [] - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - keys = contracts.Keys - - query = query.casefold() - - for item in keys: - - contract = contracts.TryGet(keyval=item) - try: - if query in contract.Name.decode('utf-8').casefold(): - res.append(contract) - elif query in contract.Author.decode('utf-8').casefold(): - res.append(contract) - elif query in contract.Description.decode('utf-8').casefold(): - res.append(contract) - elif query in contract.Email.decode('utf-8').casefold(): - 
res.append(contract) - except Exception as e: - logger.info("Could not query contract: %s " % e) - - return res - - def ShowAllContracts(self): - - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - keys = contracts.Keys - return keys - - def GetContract(self, hash): - - if type(hash) is str: - try: - hash = UInt160.ParseString(hash).ToBytes() - except Exception as e: - logger.info("could not convert argument to bytes :%s " % e) - return None - - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - contract = contracts.TryGet(keyval=hash) - return contract - - def GetAllSpentCoins(self): - coins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - - return coins.Keys - - def GetUnspent(self, hash, index): - - coins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - - state = coins.TryGet(hash) - - if state is None: - return None - if index >= len(state.Items): - return None - if state.Items[index] & CoinState.Spent > 0: - return None - tx, height = self.GetTransaction(hash) - - return tx.outputs[index] - - def GetSpentCoins(self, tx_hash): - - if type(tx_hash) is not bytes: - tx_hash = bytes(tx_hash.encode('utf-8')) - - coins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - result = coins.TryGet(keyval=tx_hash) - - return result - - def GetAllUnspent(self, hash): - - unspents = [] - - unspentcoins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - - state = unspentcoins.TryGet(keyval=hash.ToBytes()) - - if state: - tx, height = self.GetTransaction(hash) - - for index, item in enumerate(state.Items): - if item & CoinState.Spent == 0: - unspents.append(tx.outputs[index]) - return unspents - - def GetUnclaimed(self, hash): - - tx, height = self.GetTransaction(hash) - - if tx is None: - return None - - out = {} - coins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - - state = coins.TryGet(keyval=hash.ToBytes()) - - if state: - for item in state.Items: - 
out[item.index] = SpentCoin(tx.outputs[item.index], height, item.height) - - return out - - def SearchAssetState(self, query): - res = [] - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - keys = assets.Keys - - if query.lower() == "neo": - query = "AntShare" - - if query.lower() in {"gas", "neogas"}: - query = "AntCoin" - - for item in keys: - asset = assets.TryGet(keyval=item) - if query in asset.Name.decode('utf-8'): - res.append(asset) - elif query in Crypto.ToAddress(asset.Issuer): - res.append(asset) - elif query in Crypto.ToAddress(asset.Admin): - res.append(asset) - - return res - - def GetAssetState(self, assetId): - - if type(assetId) is str: - try: - assetId = assetId.encode('utf-8') - except Exception as e: - logger.info("could not convert argument to bytes :%s " % e) - return None - - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - asset = assets.TryGet(assetId) - - return asset - - def ShowAllAssets(self): - - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - keys = assets.Keys - return keys - - def GetTransaction(self, hash): - - if type(hash) is str: - hash = hash.encode('utf-8') - elif type(hash) is UInt256: - hash = hash.ToBytes() - - out = self._db.get(DBPrefix.DATA_Transaction + hash) - if out is not None: - out = bytearray(out) - height = int.from_bytes(out[:4], 'little') - out = out[4:] - outhex = binascii.unhexlify(out) - return Transaction.DeserializeFromBufer(outhex, 0), height - - return None, -1 - - def AddBlockDirectly(self, block, do_persist_complete=True): - # Adds a block when importing, which skips adding - # the block header - if block.Index != self.Height + 1: - raise Exception("Invalid block") - self.Persist(block) - if do_persist_complete: - self.OnPersistCompleted(block) - - def AddBlock(self, block): - - if not block.Hash.ToBytes() in self._block_cache: - self._block_cache[block.Hash.ToBytes()] = block - - header_len = len(self._header_index) - - if block.Index - 1 >= header_len: 
- return False - - if block.Index == header_len: - - if self._verify_blocks and not block.Verify(): - return False - elif len(block.Transactions) < 1: - return False - self.AddHeader(block.Header) - - return True - - def ContainsBlock(self, index): - if index <= self._current_block_height: - return True - return False - - def ContainsTransaction(self, hash): - tx = self._db.get(DBPrefix.DATA_Transaction + hash.ToBytes()) - return True if tx is not None else False - - def GetHeader(self, hash): - if isinstance(hash, UInt256): - hash = hash.ToString().encode() - - try: - out = bytearray(self._db.get(DBPrefix.DATA_Block + hash)) - out = out[8:] - outhex = binascii.unhexlify(out) - return Header.FromTrimmedData(outhex, 0) - except TypeError as e2: - pass - except Exception as e: - logger.info("OTHER ERRROR %s " % e) - return None - - def GetHeaderBy(self, height_or_hash): - hash = None - - intval = None - try: - intval = int(height_or_hash) - except Exception as e: - pass - - if intval is None and len(height_or_hash) == 64: - bhash = height_or_hash.encode('utf-8') - if bhash in self._header_index: - hash = bhash - - elif intval is None and len(height_or_hash) == 66: - bhash = height_or_hash[2:].encode('utf-8') - if bhash in self._header_index: - hash = bhash - - elif intval is not None and self.GetHeaderHash(intval) is not None: - hash = self.GetHeaderHash(intval) - - if hash is not None: - return self.GetHeader(hash) - - return None - - def GetHeaderByHeight(self, height): - - if len(self._header_index) <= height: - return False - - hash = self._header_index[height] - - return self.GetHeader(hash) - - def GetHeaderHash(self, height): - if height < len(self._header_index) and height >= 0: - return self._header_index[height] - return None - - def GetBlockHash(self, height): - """ - Get the block hash by its block height - Args: - height(int): height of the block to retrieve hash from. - - Returns: - bytes: a non-raw block hash (e.g. 
b'6dd83ed8a3fc02e322f91f30431bf3662a8c8e8ebe976c3565f0d21c70620991', but not b'\x6d\xd8...etc' - """ - if self._current_block_height < height: - return - - if len(self._header_index) <= height: - return - - return self._header_index[height] - - def GetSysFeeAmount(self, hash): - - if type(hash) is UInt256: - hash = hash.ToBytes() - try: - value = self._db.get(DBPrefix.DATA_Block + hash)[0:8] - amount = int.from_bytes(value, 'little', signed=False) - return amount - except Exception as e: - logger.debug("Could not get sys fee: %s " % e) - - return 0 - - def GetBlockByHeight(self, height): - """ - Get a block by its height. - Args: - height(int): the height of the block to retrieve. - - Returns: - neo.Core.Block: block instance. - """ - hash = self.GetBlockHash(height) - if hash is not None: - return self.GetBlockByHash(hash) - - def GetBlock(self, height_or_hash): - - hash = None - - intval = None - try: - intval = int(height_or_hash) - except Exception as e: - pass - - if intval is None and len(height_or_hash) == 64: - if isinstance(height_or_hash, str): - bhash = height_or_hash.encode('utf-8') - else: - bhash = height_or_hash - - if bhash in self._header_index: - hash = bhash - elif intval is None and len(height_or_hash) == 66: - bhash = height_or_hash[2:].encode('utf-8') - if bhash in self._header_index: - hash = bhash - elif intval is not None and self.GetBlockHash(intval) is not None: - hash = self.GetBlockHash(intval) - - if hash is not None: - return self.GetBlockByHash(hash) - - return None - - def GetBlockByHash(self, hash): - try: - out = bytearray(self._db.get(DBPrefix.DATA_Block + hash)) - out = out[8:] - outhex = binascii.unhexlify(out) - return Block.FromTrimmedData(outhex) - except Exception as e: - logger.info("Could not get block %s " % e) - return None - - def GetNextBlockHash(self, hash): - if isinstance(hash, (UInt256, bytes)): - header = self.GetHeader(hash) - else: - # unclear why this branch exists - header = self.GetHeader(hash.ToBytes()) - - 
if header: - if header.Index + 1 >= len(self._header_index): - return None - return self._header_index[header.Index + 1] - return None - - def AddHeader(self, header): - self.AddHeaders([header]) - - def AddHeaders(self, headers): - - newheaders = [] - count = 0 - for header in headers: - - if header.Index - 1 >= len(self._header_index) + count: - logger.info( - "header is greater than header index length: %s %s " % (header.Index, len(self._header_index))) - break - - if header.Index < count + len(self._header_index): - continue - if self._verify_blocks and not header.Verify(): - break - - count = count + 1 - - newheaders.append(header) - - if len(newheaders): - self.ProcessNewHeaders(newheaders) - - return True - - def ProcessNewHeaders(self, headers): - - lastheader = headers[-1] - - hashes = [h.Hash.ToBytes() for h in headers] - - self._header_index = self._header_index + hashes - - if lastheader is not None: - self.OnAddHeader(lastheader) - - def OnAddHeader(self, header): - - hHash = header.Hash.ToBytes() - - if hHash not in self._header_index: - self._header_index.append(hHash) - - with self._db.write_batch() as wb: - while header.Index - 2000 >= self._stored_header_count: - ms = StreamManager.GetStream() - w = BinaryWriter(ms) - headers_to_write = self._header_index[self._stored_header_count:self._stored_header_count + 2000] - w.Write2000256List(headers_to_write) - out = ms.ToArray() - StreamManager.ReleaseStream(ms) - wb.put(DBPrefix.IX_HeaderHashList + self._stored_header_count.to_bytes(4, 'little'), out) - - self._stored_header_count += 2000 - - with self._db.write_batch() as wb: - if self._db.get(DBPrefix.DATA_Block + hHash) is None: - wb.put(DBPrefix.DATA_Block + hHash, bytes(8) + header.ToArray()) - wb.put(DBPrefix.SYS_CurrentHeader, hHash + header.Index.to_bytes(4, 'little')) - - @property - def BlockCacheCount(self): - return len(self._block_cache) - - def Persist(self, block): - - self._persisting_block = block - - accounts = DBCollection(self._db, 
DBPrefix.ST_Account, AccountState) - unspentcoins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - spentcoins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - validators = DBCollection(self._db, DBPrefix.ST_Validator, ValidatorState) - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - storages = DBCollection(self._db, DBPrefix.ST_Storage, StorageItem) - - amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + block.TotalFees().value - amount_sysfee_bytes = amount_sysfee.to_bytes(8, 'little') - - to_dispatch = [] - - with self._db.write_batch() as wb: - - wb.put(DBPrefix.DATA_Block + block.Hash.ToBytes(), amount_sysfee_bytes + block.Trim()) - - for tx in block.Transactions: - - wb.put(DBPrefix.DATA_Transaction + tx.Hash.ToBytes(), block.IndexBytes() + tx.ToArray()) - - # go through all outputs and add unspent coins to them - - unspentcoinstate = UnspentCoinState.FromTXOutputsConfirmed(tx.outputs) - unspentcoins.Add(tx.Hash.ToBytes(), unspentcoinstate) - - # go through all the accounts in the tx outputs - for output in tx.outputs: - account = accounts.GetAndChange(output.AddressBytes, AccountState(output.ScriptHash)) - - if account.HasBalance(output.AssetId): - account.AddToBalance(output.AssetId, output.Value) - else: - account.SetBalanceFor(output.AssetId, output.Value) - - # go through all tx inputs - unique_tx_input_hashes = [] - for input in tx.inputs: - if input.PrevHash not in unique_tx_input_hashes: - unique_tx_input_hashes.append(input.PrevHash) - - for txhash in unique_tx_input_hashes: - prevTx, height = self.GetTransaction(txhash.ToBytes()) - coin_refs_by_hash = [coinref for coinref in tx.inputs if - coinref.PrevHash.ToBytes() == txhash.ToBytes()] - for input in coin_refs_by_hash: - - uns = unspentcoins.GetAndChange(input.PrevHash.ToBytes()) - uns.OrEqValueForItemAt(input.PrevIndex, CoinState.Spent) - - if 
prevTx.outputs[input.PrevIndex].AssetId.ToBytes() == Blockchain.SystemShare().Hash.ToBytes(): - sc = spentcoins.GetAndChange(input.PrevHash.ToBytes(), - SpentCoinState(input.PrevHash, height, [])) - sc.Items.append(SpentCoinItem(input.PrevIndex, block.Index)) - - output = prevTx.outputs[input.PrevIndex] - acct = accounts.GetAndChange(prevTx.outputs[input.PrevIndex].AddressBytes, - AccountState(output.ScriptHash)) - assetid = prevTx.outputs[input.PrevIndex].AssetId - acct.SubtractFromBalance(assetid, prevTx.outputs[input.PrevIndex].Value) - - # do a whole lotta stuff with tx here... - if tx.Type == TransactionType.RegisterTransaction: - asset = AssetState(tx.Hash, tx.AssetType, tx.Name, tx.Amount, - Fixed8(0), tx.Precision, Fixed8(0), Fixed8(0), UInt160(data=bytearray(20)), - tx.Owner, tx.Admin, tx.Admin, block.Index + 2 * 2000000, False) - - assets.Add(tx.Hash.ToBytes(), asset) - - elif tx.Type == TransactionType.IssueTransaction: - - txresults = [result for result in tx.GetTransactionResults() if result.Amount.value < 0] - for result in txresults: - asset = assets.GetAndChange(result.AssetId.ToBytes()) - asset.Available = asset.Available - result.Amount - - elif tx.Type == TransactionType.ClaimTransaction: - for input in tx.Claims: - - sc = spentcoins.TryGet(input.PrevHash.ToBytes()) - if sc and sc.HasIndex(input.PrevIndex): - sc.DeleteIndex(input.PrevIndex) - spentcoins.GetAndChange(input.PrevHash.ToBytes()) - - elif tx.Type == TransactionType.EnrollmentTransaction: - newvalidator = ValidatorState(pub_key=tx.PublicKey) - validators.GetAndChange(tx.PublicKey.ToBytes(), newvalidator) - elif tx.Type == TransactionType.StateTransaction: - # @TODO Implement persistence for State Descriptors - pass - - elif tx.Type == TransactionType.PublishTransaction: - contract = ContractState(tx.Code, tx.NeedStorage, tx.Name, tx.CodeVersion, - tx.Author, tx.Email, tx.Description) - - contracts.GetAndChange(tx.Code.ScriptHash().ToBytes(), contract) - elif tx.Type == 
TransactionType.InvocationTransaction: - - script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, wb) - - engine = ApplicationEngine( - trigger_type=TriggerType.Application, - container=tx, - table=script_table, - service=service, - gas=tx.Gas, - testMode=False - ) - - engine.LoadScript(tx.Script) - - try: - success = engine.Execute() - service.ExecutionCompleted(engine, success) - - except Exception as e: - service.ExecutionCompleted(engine, False, e) - - to_dispatch = to_dispatch + service.events_to_dispatch - else: - - if tx.Type != b'\x00' and tx.Type != 128: - logger.info("TX Not Found %s " % tx.Type) - - # do save all the accounts, unspent, coins, validators, assets, etc - # now sawe the current sys block - - # filter out accounts to delete then commit - for key, account in accounts.Current.items(): - if not account.IsFrozen and len(account.Votes) == 0 and account.AllBalancesZeroOrLess(): - accounts.Remove(key) - - accounts.Commit(wb) - - # filte out unspent coins to delete then commit - for key, unspent in unspentcoins.Current.items(): - if unspent.IsAllSpent: - unspentcoins.Remove(key) - unspentcoins.Commit(wb) - - # filter out spent coins to delete then commit to db - for key, spent in spentcoins.Current.items(): - if len(spent.Items) == 0: - spentcoins.Remove(key) - spentcoins.Commit(wb) - - # commit validators - validators.Commit(wb) - - # commit assets - assets.Commit(wb) - - # commit contracts - contracts.Commit(wb) - - wb.put(DBPrefix.SYS_CurrentBlock, block.Hash.ToBytes() + block.IndexBytes()) - self._current_block_height = block.Index - self._persisting_block = None - - self.TXProcessed += len(block.Transactions) - - for event in to_dispatch: - events.emit(event.event_type, event) - - def PersistBlocks(self, limit=None): - ctr = 0 - if not self._paused: - while not self._disposed: - - if len(self._header_index) <= self._current_block_height + 1: - break - - hash = 
self._header_index[self._current_block_height + 1] - - if hash not in self._block_cache: - self.BlockSearchTries += 1 - break - - self.BlockSearchTries = 0 - block = self._block_cache[hash] - - try: - self.Persist(block) - del self._block_cache[hash] - except Exception as e: - logger.info(f"Could not persist block {block.Index} reason: {e}") - raise e - - try: - self.OnPersistCompleted(block) - except Exception as e: - logger.debug(f"Failed to broadcast OnPersistCompleted event, reason: {e}") - raise e - - ctr += 1 - # give the reactor the opportunity to preempt - if limit and ctr == limit: - break - - def Resume(self): - self._currently_persisting = False - super(LevelDBBlockchain, self).Resume() - self.PersistBlocks() - - def Dispose(self): - self._db.close() - self._disposed = True diff --git a/neo/Implementations/Blockchains/LevelDB/TestLevelDBBlockchain.py b/neo/Implementations/Blockchains/LevelDB/TestLevelDBBlockchain.py deleted file mode 100644 index 8cd6d638d..000000000 --- a/neo/Implementations/Blockchains/LevelDB/TestLevelDBBlockchain.py +++ /dev/null @@ -1,151 +0,0 @@ -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain -from neo.Core.Blockchain import Blockchain -from neo.Core.TX.Transaction import TransactionType -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection -from neo.Implementations.Blockchains.LevelDB.CachedScriptTable import CachedScriptTable -from neo.Core.Fixed8 import Fixed8 -from neo.Core.UInt160 import UInt160 - -from neo.Core.State.UnspentCoinState import UnspentCoinState -from neo.Core.State.AccountState import AccountState -from neo.Core.State.CoinState import CoinState -from neo.Core.State.SpentCoinState import SpentCoinState, SpentCoinItem -from neo.Core.State.AssetState import AssetState -from neo.Core.State.ValidatorState import ValidatorState -from neo.Core.State.ContractState import ContractState -from neo.Core.State.StorageItem import StorageItem -from 
neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix - -from neo.SmartContract.StateMachine import StateMachine -from neo.SmartContract.ApplicationEngine import ApplicationEngine -from neo.SmartContract import TriggerType - - -class TestLevelDBBlockchain(LevelDBBlockchain): - - def Persist(self, block): - - accounts = DBCollection(self._db, DBPrefix.ST_Account, AccountState) - unspentcoins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - spentcoins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - validators = DBCollection(self._db, DBPrefix.ST_Validator, ValidatorState) - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - storages = DBCollection(self._db, DBPrefix.ST_Storage, StorageItem) - - amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + block.TotalFees().value - amount_sysfee_bytes = amount_sysfee.to_bytes(8, 'little') - - with self._db.write_batch() as wb: - for tx in block.Transactions: - - unspentcoinstate = UnspentCoinState.FromTXOutputsConfirmed(tx.outputs) - unspentcoins.Add(tx.Hash.ToBytes(), unspentcoinstate) - - # go through all the accounts in the tx outputs - for output in tx.outputs: - account = accounts.GetAndChange(output.AddressBytes, AccountState(output.ScriptHash)) - - if account.HasBalance(output.AssetId): - account.AddToBalance(output.AssetId, output.Value) - else: - account.SetBalanceFor(output.AssetId, output.Value) - - # go through all tx inputs - unique_tx_input_hashes = [] - for input in tx.inputs: - if input.PrevHash not in unique_tx_input_hashes: - unique_tx_input_hashes.append(input.PrevHash) - - for txhash in unique_tx_input_hashes: - prevTx, height = self.GetTransaction(txhash.ToBytes()) - coin_refs_by_hash = [coinref for coinref in tx.inputs if - coinref.PrevHash.ToBytes() == txhash.ToBytes()] - for input in coin_refs_by_hash: - - uns = unspentcoins.GetAndChange(input.PrevHash.ToBytes()) - 
uns.OrEqValueForItemAt(input.PrevIndex, CoinState.Spent) - - if prevTx.outputs[input.PrevIndex].AssetId.ToBytes() == Blockchain.SystemShare().Hash.ToBytes(): - sc = spentcoins.GetAndChange(input.PrevHash.ToBytes(), - SpentCoinState(input.PrevHash, height, [])) - sc.Items.append(SpentCoinItem(input.PrevIndex, block.Index)) - - output = prevTx.outputs[input.PrevIndex] - acct = accounts.GetAndChange(prevTx.outputs[input.PrevIndex].AddressBytes, - AccountState(output.ScriptHash)) - assetid = prevTx.outputs[input.PrevIndex].AssetId - acct.SubtractFromBalance(assetid, prevTx.outputs[input.PrevIndex].Value) - - # do a whole lotta stuff with tx here... - if tx.Type == TransactionType.RegisterTransaction: - - asset = AssetState(tx.Hash, tx.AssetType, tx.Name, tx.Amount, - Fixed8(0), tx.Precision, Fixed8(0), Fixed8(0), UInt160(data=bytearray(20)), - tx.Owner, tx.Admin, tx.Admin, block.Index + 2 * 2000000, False) - - assets.Add(tx.Hash.ToBytes(), asset) - - elif tx.Type == TransactionType.IssueTransaction: - - txresults = [result for result in tx.GetTransactionResults() if result.Amount.value < 0] - for result in txresults: - asset = assets.GetAndChange(result.AssetId.ToBytes()) - asset.Available = asset.Available - result.Amount - - elif tx.Type == TransactionType.ClaimTransaction: - - for input in tx.Claims: - - sc = spentcoins.TryGet(input.PrevHash.ToBytes()) - if sc and sc.HasIndex(input.PrevIndex): - sc.DeleteIndex(input.PrevIndex) - spentcoins.GetAndChange(input.PrevHash.ToBytes()) - - elif tx.Type == TransactionType.EnrollmentTransaction: - - validator = validators.GetAndChange(tx.PublicKey, ValidatorState(pub_key=tx.PublicKey)) - # logger.info("VALIDATOR %s " % validator.ToJson()) - - elif tx.Type == TransactionType.StateTransaction: - # @TODO Implement persistence for State Descriptors - pass - - elif tx.Type == TransactionType.PublishTransaction: - - contract = ContractState(tx.Code, tx.NeedStorage, tx.Name, tx.CodeVersion, - tx.Author, tx.Email, tx.Description) - - 
contracts.GetAndChange(tx.Code.ScriptHash().ToBytes(), contract) - - elif tx.Type == TransactionType.InvocationTransaction: - - script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, wb=wb) - - engine = ApplicationEngine( - trigger_type=TriggerType.Application, - container=tx, - table=script_table, - service=service, - gas=tx.Gas, - testMode=True - ) - - engine.LoadScript(tx.Script) - - # normally, this function does not return true/false - # for testing purposes, we try to execute and if an exception is raised - # we will return false, otherwise if success return true - - # this is different than the 'success' bool returned by engine.Execute() - # the 'success' bool returned by engine.Execute() is a value indicating - # wether or not the invocation was successful, and if so, we then commit - # the changes made by the contract to the database - try: - success = engine.Execute() - # service.ExecutionCompleted(engine, success) - return True - except Exception as e: - # service.ExecutionCompleted(self, False, e) - return False diff --git a/neo/Implementations/Blockchains/LevelDB/__init__.py b/neo/Implementations/Blockchains/LevelDB/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/neo/Implementations/Blockchains/LevelDB/test_LevelDBBlockchain.py b/neo/Implementations/Blockchains/LevelDB/test_LevelDBBlockchain.py deleted file mode 100644 index 84baee683..000000000 --- a/neo/Implementations/Blockchains/LevelDB/test_LevelDBBlockchain.py +++ /dev/null @@ -1,80 +0,0 @@ -from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase -from neo.Core.Blockchain import Blockchain -from neo.Settings import settings -import os - - -class LevelDBBlockchainTest(BlockchainFixtureTestCase): - @classmethod - def leveldb_testpath(cls): - return os.path.join(settings.DATA_DIR_PATH, 'fixtures/test_chain') - - # test need to be updated whenever we change the fixtures - def 
test_initial_setup(self): - self.assertEqual(self._blockchain.Height, 12349) - - def test_GetBlockHash(self): - # test requested block height exceeding blockchain current_height - invalid_bc_height = self._blockchain.Height + 1 - result = self._blockchain.GetBlockHash(invalid_bc_height) - self.assertEqual(result, None) - - # test header index length mismatch - # save index to restore later - saved = self._blockchain._header_index - self._blockchain._header_index = self._blockchain._header_index[:10] - result = self._blockchain.GetBlockHash(100) - self.assertEqual(result, None) - self._blockchain._header_index = saved - - # finally test correct retrieval - result = self._blockchain.GetBlockHash(100) - self.assertEqual(result, self._blockchain._header_index[100]) - - def test_GetBlockByHeight(self): - # test correct retrieval - block = self._blockchain.GetBlockByHeight(100) - self.assertEqual(block.GetHashCode().ToString(), self._blockchain.GetBlockHash(100).decode('utf-8')) - - # and also a invalid retrieval - invalid_bc_height = self._blockchain.Height + 1 - block = self._blockchain.GetBlockByHeight(invalid_bc_height) - self.assertEqual(block, None) - - def test_GetAccountState(self): - # test passing an address - addr = "AK2nJJpJr6o664CWJKi1QRXjqeic2zRp8y" - acct = Blockchain.Default().GetAccountState(addr) - acct = acct.ToJson() - self.assertIn('balances', acct.keys()) - - # test failure - addr = "AK2nJJpJr6o664CWJKi1QRXjqeic2zRp81" - acct = Blockchain.Default().GetAccountState(addr) - self.assertIsNone(acct) - - def test_GetHeaderBy(self): - # test correct retrieval with hash - blockheader = self._blockchain.GetHeaderBy("2b1c78633dae7ab81f64362e0828153079a17b018d779d0406491f84c27b086f") - self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) - - # test correct retrieval with 0x hash - blockheader = self._blockchain.GetHeaderBy("0x2b1c78633dae7ab81f64362e0828153079a17b018d779d0406491f84c27b086f") - 
self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) - - # test correct retrieval with str height - blockheader = self._blockchain.GetHeaderBy("11") - self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) - - # test correct retrieval with int height - blockheader = self._blockchain.GetHeaderBy(11) - self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) - - # test incorrect retrieval - invalid_bc_height = self._blockchain.Height + 1 - block = self._blockchain.GetHeaderBy(invalid_bc_height) - self.assertEqual(block, None) - - def test_ShowAllAssets(self): - assets = Blockchain.Default().ShowAllAssets() - self.assertEqual(len(assets), 2) diff --git a/neo/Implementations/Blockchains/LevelDB/tests/__init__.py b/neo/Implementations/Blockchains/LevelDB/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/neo/Implementations/Blockchains/LevelDB/tests/test_initial_db.py b/neo/Implementations/Blockchains/LevelDB/tests/test_initial_db.py deleted file mode 100644 index d23764d95..000000000 --- a/neo/Implementations/Blockchains/LevelDB/tests/test_initial_db.py +++ /dev/null @@ -1,64 +0,0 @@ -from neo.Utils.NeoTestCase import NeoTestCase -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain -from neo.Core.Blockchain import Blockchain -from neo.IO.Helper import Helper -from neo.Settings import settings -import shutil -import binascii -import os - - -class LevelDBTest(NeoTestCase): - - LEVELDB_TESTPATH = os.path.join(settings.DATA_DIR_PATH, 'UnitTestChain') - - _blockchain = None - - _genesis = None - - block_one_raw = 
b'00000000ef1f8f66a16fba100ed760f4ac6aa5a0d0bb8f4a0e92705b106761ef181718b3d0765298ceb5f57de7d2b0dab00ed25be4134706ada2d90adb8b7e3aba323a8e1abd125901000000d11f7a289214bdaff3812db982f3b0089a21a278988efeec6a027b2501fd450140884037dd265cb5f5a54802f53c2c8593b31d5b8a9c0bad4c7e366b153d878989d168080ac36b930036a9eb966b48c70bb41792e698fa021116f27c09643563b840e83ab14404d964a91dbac45f5460e88ad57196b1779478e3475334af8c1b49cd9f0213257895c60b5b92a4800eb32d785cbb39ae1f022528943909fd37deba63403677848bf98cc9dbd8fbfd7f2e4f34471866ea82ca6bffbf0f778b6931483700c17829b4bd066eb04983d3aac0bd46b9c8d03a73a8e714d3119de93cd9522e314054d16853b22014190063f77d9edf6fbccefcf71fffd1234f688823b4e429ae5fa639d0a664c842fbdfcb4d6e21f39d81c23563b92cffa09696d93c95bc4893a6401a43071d00d3e854f7f1f321afa7d5301d36f2195dc1e2643463f34ae637d2b02ae0eb11d4256c507a4f8304cea6396a7fce640f50acb301c2f6336d27717e84f155210209e7fd41dfb5c2f8dc72eb30358ac100ea8c72da18847befe06eade68cebfcb9210327da12b5c40200e9f65569476bbff2218da4f32548ff43b6387ec1416a231ee821034ff5ceeac41acf22cd5ed2da17a6df4dd8358fcb2bfb1a43208ad0feaab2746b21026ce35b29147ad09e4afe4ec4a7319095f08198fa8babbe3c56e970b143528d2221038dddc06ce687677a53d54f096d2591ba2302068cf123c1f2d75c2dddc542557921039dafd8571a641058ccc832c5e2111ea39b09c0bde36050914384f7a48bce9bf92102d02b1873a0863cd042cc717da31cea0d7cf9db32b74d4c72c01b0011503e2e2257ae010000d11f7a2800000000' - block_one_hash = b'0012f8566567a9d7ddf25acb5cf98286c9703297de675d01ba73fbfe6bcb841c' - - @classmethod - def setUpClass(cls): - settings.setup_unittest_net() - Blockchain.DeregisterBlockchain() - cls._blockchain = LevelDBBlockchain(path=cls.LEVELDB_TESTPATH, skip_version_check=True) - Blockchain.RegisterBlockchain(cls._blockchain) - cls._genesis = Blockchain.GenesisBlock() - - @classmethod - def tearDownClass(cls): - cls._blockchain.Dispose() - shutil.rmtree(cls.LEVELDB_TESTPATH) - - def test__initial_state(self): - - self.assertEqual(self._blockchain.CurrentBlockHash, self._genesis.Hash.ToBytes()) - - 
self.assertEqual(self._blockchain.CurrentHeaderHash, self._genesis.Header.Hash.ToBytes()) - - self.assertEqual(self._blockchain.CurrentHeaderHash, self._genesis.Header.Hash.ToBytes()) - - self.assertEqual(self._blockchain.HeaderHeight, 0) - - self.assertEqual(self._blockchain.Height, 0) - - def test_add_header(self): - hexdata = binascii.unhexlify(self.block_one_raw) - block_one = Helper.AsSerializableWithType(hexdata, 'neo.Core.Block.Block') - - if settings.MAGIC == 56753: - self.assertEqual(self._blockchain.CurrentHeaderHash, b'996e37358dc369912041f966f8c5d8d3a8255ba5dcbd3447f8a82b55db869099') - else: - self.assertEqual(self._blockchain.CurrentHeaderHash, b'd42561e3d30e15be6400b6df2f328e02d2bf6354c41dce433bc57687c82144bf') - - self.assertEqual(self._blockchain.HeaderHeight, 0) - - self._blockchain.AddBlock(block_one) - self.assertEqual(self._blockchain.HeaderHeight, 1) - - def test_sys_block_fees(self): - - block_num = 14103 - fee_should_be = 435 diff --git a/neo/Implementations/Blockchains/LevelDB/tests/test_leveldb.py b/neo/Implementations/Blockchains/LevelDB/tests/test_leveldb.py deleted file mode 100644 index cac3eb902..000000000 --- a/neo/Implementations/Blockchains/LevelDB/tests/test_leveldb.py +++ /dev/null @@ -1,14 +0,0 @@ -from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase -from neo.Settings import settings -import os - - -class LevelDBTest(BlockchainFixtureTestCase): - - @classmethod - def leveldb_testpath(cls): - return os.path.join(settings.DATA_DIR_PATH, 'fixtures/test_chain') - - # test need to be updated whenever we change the fixtures - def test_a_initial_setup(self): - self.assertEqual(self._blockchain.Height, 12349) diff --git a/neo/Implementations/Blockchains/__init__.py b/neo/Implementations/Blockchains/__init__.py deleted file mode 100644 index e69de29bb..000000000 From 4d3946924fcc6ecdc7a1aa006af4e0e17bb1dad4 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Sun, 14 Apr 2019 15:35:44 +0200 Subject: [PATCH 03/23] feat, 
reworked blockchain db layer - still a WIP, tests not yet done - blockchain logic was moved from LevelDBBlockchain to Blockchain class - chain syncs successfully - notification db not yet done --- neo/Core/Blockchain.py | 415 +++++++++++------- neo/Core/Helper.py | 8 +- neo/Network/NodeLeader.py | 4 - neo/Prompt/Commands/Invoke.py | 35 +- neo/Prompt/Commands/SC.py | 2 +- neo/SmartContract/ApplicationEngine.py | 28 +- neo/SmartContract/StateMachine.py | 33 +- neo/SmartContract/StateReader.py | 3 +- neo/Storage/Common/DebugStorage.py | 19 +- neo/Storage/Implementation/DBFactory.py | 57 +-- .../LevelDB/LevelDBClassMethods.py | 49 ++- neo/Storage/Interface/AbstractDBInterface.py | 33 +- neo/Storage/Interface/DBInterface.py | 78 ++-- neo/bin/api_server.py | 3 + neo/bin/prompt.py | 5 +- 15 files changed, 445 insertions(+), 327 deletions(-) diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index 04f6a0180..c60f4055a 100644 --- a/neo/Core/Blockchain.py +++ b/neo/Core/Blockchain.py @@ -21,29 +21,30 @@ from neo.Core.State.ContractState import ContractState from neo.Core.State.StorageItem import StorageItem from neo.Core.State.SpentCoinState import SpentCoinState, SpentCoinItem, SpentCoin +from neo.Core.State.AssetState import AssetState from neo.Core.State.ValidatorState import ValidatorState +from neo.Core.IO.BinaryReader import BinaryReader +# from neo.SmartContract.StateMachine import StateMachine from neo.EventHub import events from neo.IO.MemoryStream import StreamManager from neo.logging import log_manager from neo.Settings import settings -from collections import Counter from neo.Core.Fixed8 import Fixed8 from neo.Core.Cryptography.ECCurve import ECDSA from neo.Core.UInt256 import UInt256 +from neo.Core.UInt160 import UInt160 from neo.Core.IO.BinaryWriter import BinaryWriter from neo.SmartContract.Contract import Contract from neo.SmartContract.ApplicationEngine import ApplicationEngine from neo.SmartContract import TriggerType from 
neo.Storage.Common.DBPrefix import DBPrefix from neo.Storage.Common.CachedScriptTable import CachedScriptTable -from neo.Storage.Interface.DBInterface import DBInterface +from neo.Storage.Interface.DBInterface import DBInterface, DBProperties from neo.VM.OpCode import PUSHF, PUSHT +from prompt_toolkit import prompt +import neo.Storage.Implementation.DBFactory as DBFactory -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from neo.Core.State import AssetState logger = log_manager.getLogger('Blockchain') @@ -71,16 +72,23 @@ class Blockchain: _disposed = False + _verify_blocks = False + _header_index = [] _block_cache = {} _current_block_height = 0 + _stored_header_count = 0 _persisting_block = None + TXProcessed = 0 + BlockSearchTries = 0 + _sysversion = b'schema v.0.6.9' + CACHELIM = 4000 CMISSLIM = 5 LOOPTIME = .1 @@ -89,8 +97,108 @@ class Blockchain: Notify = Events() - def __init__(self, db): + # debug: + _previous_blockid = None + + def __init__(self, db, skip_version_check=False, skip_header_check=False): self._db = db + self._header_index = [] + + self._header_index.append(Blockchain.GenesisBlock().Header.Hash.ToBytes()) + + self.TXProcessed = 0 + version = self._db.get(DBPrefix.SYS_Version) + + if skip_version_check: + self._db.write(DBPrefix.SYS_Version, self._sysversion) + version = self._sysversion + + if version == self._sysversion: # or in the future, if version doesn't equal the current version... 
+ ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0)) + self._current_block_height = int.from_bytes(ba[-4:], 'little') + + if not skip_header_check: + ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0)) + current_header_height = int.from_bytes(ba[-4:], 'little') + current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8') + + hashes = [] + try: + with self._db.openIter(DBProperties(DBPrefix.IX_HeaderHashList)) as iterator: + for key, value in iterator: + ms = StreamManager.GetStream(value) + reader = BinaryReader(ms) + hlist = reader.Read2000256List() + key = int.from_bytes(key[-4:], 'little') + hashes.append({'k': key, 'v': hlist}) + StreamManager.ReleaseStream(ms) + except Exception as e: + logger.info("Could not get stored header hash list: %s " % e) + self._db.closeIter() + + if len(hashes): + hashes.sort(key=lambda x: x['k']) + genstr = Blockchain.GenesisBlock().Hash.ToBytes() + for hlist in hashes: + + for hash in hlist['v']: + if hash != genstr: + self._header_index.append(hash) + self._stored_header_count += 1 + + if self._stored_header_count == 0: + logger.info("Current stored headers empty, re-creating from stored blocks...") + headers = [] + logger.info('Recreate headers') + with self._db.openIter(DBProperties(DBProperties(DBPrefix.DATA_Block))) as iterator: + for key, value in iterator: + dbhash = bytearray(value)[8:] + headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0)) + + headers.sort(key=lambda h: h.Index) + for h in headers: + if h.Index > 0: + self._header_index.append(h.Hash.ToBytes()) + + if len(headers): + self.OnAddHeader(headers[-1]) + + elif current_header_height > self._stored_header_count: + try: + hash = current_header_hash + targethash = self._header_index[-1] + + newhashes = [] + while hash != targethash: + header = self.GetHeader(hash) + newhashes.insert(0, header) + hash = header.PrevHash.ToBytes() + + self.AddHeaders(newhashes) + except Exception as e: + pass + + elif version is None: + 
self.Persist(Blockchain.GenesisBlock()) + self._db.write(DBPrefix.SYS_Version, self._sysversion) + else: + logger.error("\n\n") + logger.warning("Database schema has changed from %s to %s.\n" % (version, self._sysversion)) + logger.warning("You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain.") + + res = prompt("Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> ") + if res == 'continue': + + with self._db.getBatch() as wb: + with self._db.openIter() as iterator: + for key, value in iterator: + wb.delete(key) + + self.Persist(Blockchain.GenesisBlock()) + self._db.write(DBPrefix.SYS_Version, self._sysversion) + + else: + raise Exception("Database schema changed") @staticmethod def StandbyValidators(): @@ -173,6 +281,11 @@ def GenesisBlock() -> Block: [mt, Blockchain.SystemShare(), Blockchain.SystemCoin(), it], True) + def GetDB(self): + if self._db is not None: + return self._db + raise('Database not defined') + @staticmethod def Default() -> 'Blockchain': """ @@ -182,7 +295,7 @@ def Default() -> 'Blockchain': obj: Blockchain based on the configured database backend. 
""" if Blockchain._instance is None: - Blockchain._instance = Blockchain() + Blockchain._instance = Blockchain(DBFactory.getBlockchainDB()) Blockchain.GenesisBlock().RebuildMerkleRoot() return Blockchain._instance @@ -258,6 +371,7 @@ def AddHeaders(self, headers): if header.Index < count + len(self._header_index): continue + if self._verify_blocks and not header.Verify(): break @@ -285,28 +399,21 @@ def OnAddHeader(self, header): if hHash not in self._header_index: self._header_index.append(hHash) - wb = self._db.getBatch() - while header.Index - 2000 >= self._stored_header_count: - ms = StreamManager.GetStream() - w = BinaryWriter(ms) - headers_to_write = self._header_index[self._stored_header_count:self._stored_header_count + 2000] - w.Write2000256List(headers_to_write) - out = ms.ToArray() - StreamManager.ReleaseStream(ms) - wb.put(DBPrefix.IX_HeaderHashList + self._stored_header_count.to_bytes(4, 'little'), out) - - self._stored_header_count += 2000 + with self._db.getBatch() as wb: + while header.Index - 2000 >= self._stored_header_count: + ms = StreamManager.GetStream() + w = BinaryWriter(ms) + headers_to_write = self._header_index[self._stored_header_count:self._stored_header_count + 2000] + w.Write2000256List(headers_to_write) + out = ms.ToArray() + StreamManager.ReleaseStream(ms) + wb.put(DBPrefix.IX_HeaderHashList + self._stored_header_count.to_bytes(4, 'little'), out) - # clean up - self._db.dropBatch() + self._stored_header_count += 2000 - wb = self._db.getBatch() - if self._db.get(DBPrefix.DATA_Block + hHash) is None: - wb.put(DBPrefix.DATA_Block + hHash, bytes(8) + header.ToArray()) - wb.put(DBPrefix.SYS_CurrentHeader, hHash + header.Index.to_bytes(4, 'little')) - - # clean up - self._db.dropBatch() + if self._db.get(DBPrefix.DATA_Block + hHash) is None: + wb.put(DBPrefix.DATA_Block + hHash, bytes(8) + header.ToArray()) + wb.put(DBPrefix.SYS_CurrentHeader, hHash + header.Index.to_bytes(4, 'little')) @property def BlockRequests(self): @@ -442,7 +549,6 
@@ def GetHeader(self, hash): logger.info("OTHER ERRROR %s " % e) return None - # TODO refactor function def GetHeaderBy(self, height_or_hash): hash = None @@ -857,167 +963,138 @@ def Persist(self, block): assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) validators = DBInterface(self._db, DBPrefix.ST_Validator, ValidatorState) contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) - storages = DBInterface(self._db, DBPrefix.ST_Storage, StorageItem) amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + block.TotalFees().value amount_sysfee_bytes = amount_sysfee.to_bytes(8, 'little') - to_dispatch = [] + with self._db.getBatch() as wb: + wb.put(DBPrefix.DATA_Block + block.Hash.ToBytes(), amount_sysfee_bytes + block.Trim()) - wb = self._db.getBatch() + for tx in block.Transactions: - wb.put(DBPrefix.DATA_Block + block.Hash.ToBytes(), amount_sysfee_bytes + block.Trim()) + wb.put(DBPrefix.DATA_Transaction + tx.Hash.ToBytes(), block.IndexBytes() + tx.ToArray()) - for tx in block.Transactions: + # go through all outputs and add unspent coins to them - wb.put(DBPrefix.DATA_Transaction + tx.Hash.ToBytes(), block.IndexBytes() + tx.ToArray()) + unspentcoinstate = UnspentCoinState.FromTXOutputsConfirmed(tx.outputs) + unspentcoins.Add(tx.Hash.ToBytes(), unspentcoinstate) - # go through all outputs and add unspent coins to them + # go through all the accounts in the tx outputs + for output in tx.outputs: + account = accounts.GetAndChange(output.AddressBytes, AccountState(output.ScriptHash)) - unspentcoinstate = UnspentCoinState.FromTXOutputsConfirmed(tx.outputs) - unspentcoins.Add(tx.Hash.ToBytes(), unspentcoinstate) - - # go through all the accounts in the tx outputs - for output in tx.outputs: - account = accounts.GetAndChange(output.AddressBytes, AccountState(output.ScriptHash)) - - if account.HasBalance(output.AssetId): - account.AddToBalance(output.AssetId, output.Value) + if account.HasBalance(output.AssetId): + 
account.AddToBalance(output.AssetId, output.Value) + else: + account.SetBalanceFor(output.AssetId, output.Value) + + # go through all tx inputs + unique_tx_input_hashes = [] + for input in tx.inputs: + if input.PrevHash not in unique_tx_input_hashes: + unique_tx_input_hashes.append(input.PrevHash) + + for txhash in unique_tx_input_hashes: + prevTx, height = self.GetTransaction(txhash.ToBytes()) + coin_refs_by_hash = [coinref for coinref in tx.inputs if + coinref.PrevHash.ToBytes() == txhash.ToBytes()] + for input in coin_refs_by_hash: + + uns = unspentcoins.GetAndChange(input.PrevHash.ToBytes()) + uns.OrEqValueForItemAt(input.PrevIndex, CoinState.Spent) + + if prevTx.outputs[input.PrevIndex].AssetId.ToBytes() == Blockchain.SystemShare().Hash.ToBytes(): + sc = spentcoins.GetAndChange(input.PrevHash.ToBytes(), + SpentCoinState(input.PrevHash, height, [])) + sc.Items.append(SpentCoinItem(input.PrevIndex, block.Index)) + + output = prevTx.outputs[input.PrevIndex] + acct = accounts.GetAndChange(prevTx.outputs[input.PrevIndex].AddressBytes, + AccountState(output.ScriptHash)) + assetid = prevTx.outputs[input.PrevIndex].AssetId + acct.SubtractFromBalance(assetid, prevTx.outputs[input.PrevIndex].Value) + + # do a whole lotta stuff with tx here... 
+ if tx.Type == TransactionType.RegisterTransaction: + asset = AssetState(tx.Hash, tx.AssetType, tx.Name, tx.Amount, + Fixed8(0), tx.Precision, Fixed8(0), + Fixed8(0), UInt160(data=bytearray(20)), + tx.Owner, tx.Admin, tx.Admin, + block.Index + 2 * 2000000, False) + + assets.Add(tx.Hash.ToBytes(), asset) + + elif tx.Type == TransactionType.IssueTransaction: + + txresults = [result for result in tx.GetTransactionResults() if result.Amount.value < 0] + for result in txresults: + asset = assets.GetAndChange(result.AssetId.ToBytes()) + asset.Available = asset.Available - result.Amount + + elif tx.Type == TransactionType.ClaimTransaction: + for input in tx.Claims: + + sc = spentcoins.TryGet(input.PrevHash.ToBytes()) + if sc and sc.HasIndex(input.PrevIndex): + sc.DeleteIndex(input.PrevIndex) + spentcoins.GetAndChange(input.PrevHash.ToBytes()) + + elif tx.Type == TransactionType.EnrollmentTransaction: + newvalidator = ValidatorState(pub_key=tx.PublicKey) + validators.GetAndChange(tx.PublicKey.ToBytes(), newvalidator) + elif tx.Type == TransactionType.StateTransaction: + # @TODO Implement persistence for State Descriptors + pass + + elif tx.Type == TransactionType.PublishTransaction: + contract = ContractState(tx.Code, tx.NeedStorage, tx.Name, tx.CodeVersion, + tx.Author, tx.Email, tx.Description) + + contracts.GetAndChange(tx.Code.ScriptHash().ToBytes(), contract) + elif tx.Type == TransactionType.InvocationTransaction: + ApplicationEngine.Run(tx.Script, tx, False, tx.Gas, False, wb) else: - account.SetBalanceFor(output.AssetId, output.Value) - - # go through all tx inputs - unique_tx_input_hashes = [] - for input in tx.inputs: - if input.PrevHash not in unique_tx_input_hashes: - unique_tx_input_hashes.append(input.PrevHash) - - for txhash in unique_tx_input_hashes: - prevTx, height = self.GetTransaction(txhash.ToBytes()) - coin_refs_by_hash = [coinref for coinref in tx.inputs if - coinref.PrevHash.ToBytes() == txhash.ToBytes()] - for input in coin_refs_by_hash: - - uns = 
unspentcoins.GetAndChange(input.PrevHash.ToBytes()) - uns.OrEqValueForItemAt(input.PrevIndex, CoinState.Spent) - - if prevTx.outputs[input.PrevIndex].AssetId.ToBytes() == Blockchain.SystemShare().Hash.ToBytes(): - sc = spentcoins.GetAndChange(input.PrevHash.ToBytes(), - SpentCoinState(input.PrevHash, height, [])) - sc.Items.append(SpentCoinItem(input.PrevIndex, block.Index)) - - output = prevTx.outputs[input.PrevIndex] - acct = accounts.GetAndChange(prevTx.outputs[input.PrevIndex].AddressBytes, - AccountState(output.ScriptHash)) - assetid = prevTx.outputs[input.PrevIndex].AssetId - acct.SubtractFromBalance(assetid, prevTx.outputs[input.PrevIndex].Value) - - # do a whole lotta stuff with tx here... - if tx.Type == TransactionType.RegisterTransaction: - asset = AssetState(tx.Hash, tx.AssetType, tx.Name, tx.Amount, - Fixed8(0), tx.Precision, Fixed8(0), Fixed8(0), UInt160(data=bytearray(20)), - tx.Owner, tx.Admin, tx.Admin, block.Index + 2 * 2000000, False) - - assets.Add(tx.Hash.ToBytes(), asset) - - elif tx.Type == TransactionType.IssueTransaction: - - txresults = [result for result in tx.GetTransactionResults() if result.Amount.value < 0] - for result in txresults: - asset = assets.GetAndChange(result.AssetId.ToBytes()) - asset.Available = asset.Available - result.Amount - - elif tx.Type == TransactionType.ClaimTransaction: - for input in tx.Claims: - - sc = spentcoins.TryGet(input.PrevHash.ToBytes()) - if sc and sc.HasIndex(input.PrevIndex): - sc.DeleteIndex(input.PrevIndex) - spentcoins.GetAndChange(input.PrevHash.ToBytes()) - - elif tx.Type == TransactionType.EnrollmentTransaction: - newvalidator = ValidatorState(pub_key=tx.PublicKey) - validators.GetAndChange(tx.PublicKey.ToBytes(), newvalidator) - elif tx.Type == TransactionType.StateTransaction: - # @TODO Implement persistence for State Descriptors - pass - - elif tx.Type == TransactionType.PublishTransaction: - contract = ContractState(tx.Code, tx.NeedStorage, tx.Name, tx.CodeVersion, - tx.Author, tx.Email, 
tx.Description) - - contracts.GetAndChange(tx.Code.ScriptHash().ToBytes(), contract) - elif tx.Type == TransactionType.InvocationTransaction: - from neo.SmartContract.StateMachine import StateMachine - - script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, wb) - - engine = ApplicationEngine( - trigger_type=TriggerType.Application, - container=tx, - table=script_table, - service=service, - gas=tx.Gas, - testMode=False - ) - engine.LoadScript(tx.Script) + if tx.Type != b'\x00' and tx.Type != 128: + logger.info("TX Not Found %s " % tx.Type) - try: - success = engine.Execute() - service.ExecutionCompleted(engine, success) + # do save all the accounts, unspent, coins, validators, assets, etc + # now sawe the current sys block - except Exception as e: - service.ExecutionCompleted(engine, False, e) - - to_dispatch = to_dispatch + service.events_to_dispatch - else: - - if tx.Type != b'\x00' and tx.Type != 128: - logger.info("TX Not Found %s " % tx.Type) - - # do save all the accounts, unspent, coins, validators, assets, etc - # now sawe the current sys block + # filter out accounts to delete then commit + for key, account in accounts.Current.items(): + if not account.IsFrozen and len(account.Votes) == 0 and account.AllBalancesZeroOrLess(): + accounts.Remove(key) - # filter out accounts to delete then commit - for key, account in accounts.Current.items(): - if not account.IsFrozen and len(account.Votes) == 0 and account.AllBalancesZeroOrLess(): - accounts.Remove(key) + accounts.Commit(wb) - accounts.Commit() + # filte out unspent coins to delete then commit + for key, unspent in unspentcoins.Current.items(): + if unspent.IsAllSpent: + unspentcoins.Remove(key) + unspentcoins.Commit(wb) - # filte out unspent coins to delete then commit - for key, unspent in unspentcoins.Current.items(): - if unspent.IsAllSpent: - unspentcoins.Remove(key) - unspentcoins.Commit() + # filter out spent coins to delete then commit 
to db + for key, spent in spentcoins.Current.items(): + if len(spent.Items) == 0: + spentcoins.Remove(key) - # filter out spent coins to delete then commit to db - for key, spent in spentcoins.Current.items(): - if len(spent.Items) == 0: - spentcoins.Remove(key) + spentcoins.Commit(wb) + validators.Commit(wb) + assets.Commit(wb) + contracts.Commit(wb) - spentcoins.Commit() - validators.Commit() - assets.Commit() - contracts.Commit() + wb.put(DBPrefix.SYS_CurrentBlock, block.Hash.ToBytes() + block.IndexBytes()) + self._current_block_height = block.Index + self._persisting_block = None - wb.put(DBPrefix.SYS_CurrentBlock, block.Hash.ToBytes() + block.IndexBytes()) - self._current_block_height = block.Index - self._persisting_block = None + self.TXProcessed += len(block.Transactions) - self.TXProcessed += len(block.Transactions) - - self._db.dropBatch() - - for event in to_dispatch: - events.emit(event.event_type, event) + # logger.info('done with block %d ' % block.Index) def PersistBlocks(self, limit=None): ctr = 0 if not self._paused: - # TODO while not self._disposed: if len(self._header_index) <= self._current_block_height + 1: @@ -1033,7 +1110,15 @@ def PersistBlocks(self, limit=None): block = self._block_cache[hash] try: + # logger.info('persist block: %d', block.Index) + if self._previous_blockid is not None and self._previous_blockid + 1 != block.Index: + logger.info('block jump prev: %d current: %d') + if self._previous_blockid is None: + self._previous_blockid = block.Index + self.Persist(block) + + self._previous_blockid = block.Index del self._block_cache[hash] except Exception as e: logger.info(f"Could not persist block {block.Index} reason: {e}") diff --git a/neo/Core/Helper.py b/neo/Core/Helper.py index 44111a806..dde2ad2e3 100644 --- a/neo/Core/Helper.py +++ b/neo/Core/Helper.py @@ -1,9 +1,9 @@ from base58 import b58decode import binascii from neo.Blockchain import GetBlockchain, GetStateReader -from 
neo.Implementations.Blockchains.LevelDB.CachedScriptTable import CachedScriptTable -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix +from neo.Storage.Interface.DBInterface import DBInterface +from neo.Storage.Common.CachedScriptTable import CachedScriptTable +from neo.Storage.Common.DBPrefix import DBPrefix from neo.Core.State.ContractState import ContractState from neo.Core.State.AssetState import AssetState from neo.Core.Cryptography.Crypto import Crypto @@ -201,7 +201,7 @@ def VerifyScripts(verifiable): return False state_reader = GetStateReader() - script_table = CachedScriptTable(DBCollection(blockchain._db, DBPrefix.ST_Contract, ContractState)) + script_table = CachedScriptTable(DBInterface(blockchain._db, DBPrefix.ST_Contract, ContractState)) engine = ApplicationEngine(TriggerType.Verification, verifiable, script_table, state_reader, Fixed8.Zero()) engine.LoadScript(verification) diff --git a/neo/Network/NodeLeader.py b/neo/Network/NodeLeader.py index 2332ca0a7..74e5ddcf3 100644 --- a/neo/Network/NodeLeader.py +++ b/neo/Network/NodeLeader.py @@ -3,7 +3,6 @@ from typing import List from neo.Core.Block import Block from neo.Core.Blockchain import Blockchain as BC -from neo.Implementations.Blockchains.LevelDB.TestLevelDBBlockchain import TestLevelDBBlockchain from neo.Core.TX.Transaction import Transaction from neo.Core.TX.MinerTransaction import MinerTransaction from neo.Network.NeoNode import NeoNode, HEARTBEAT_BLOCKS @@ -542,9 +541,6 @@ def RelayDirectly(self, inventory): relayed |= peer.Relay(inventory) if len(self.Peers) == 0: - if type(BC.Default()) is TestLevelDBBlockchain: - # mock a true result for tests - return True logger.info("no connected peers") diff --git a/neo/Prompt/Commands/Invoke.py b/neo/Prompt/Commands/Invoke.py index dfb1081ab..e8cba894c 100644 --- a/neo/Prompt/Commands/Invoke.py +++ b/neo/Prompt/Commands/Invoke.py @@ -5,10 +5,11 @@ from 
neo.VM.InteropService import InteropInterface from neo.Network.NodeLeader import NodeLeader from neo.Prompt import Utils as PromptUtils -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix -from neo.Implementations.Blockchains.LevelDB.CachedScriptTable import CachedScriptTable -from neo.Implementations.Blockchains.LevelDB.DebugStorage import DebugStorage +from neo.Storage.Interface.DBInterface import DBInterface +from neo.Storage.Common.CachedScriptTable import CachedScriptTable +from neo.Storage.Common.DBPrefix import DBPrefix +from neo.Storage.Common.DebugStorage import DebugStorage + from neo.Core.State.AccountState import AccountState from neo.Core.State.AssetState import AssetState @@ -280,11 +281,11 @@ def test_invoke(script, wallet, outputs, withdrawal_tx=None, bc = GetBlockchain() - accounts = DBCollection(bc._db, DBPrefix.ST_Account, AccountState) - assets = DBCollection(bc._db, DBPrefix.ST_Asset, AssetState) - validators = DBCollection(bc._db, DBPrefix.ST_Validator, ValidatorState) - contracts = DBCollection(bc._db, DBPrefix.ST_Contract, ContractState) - storages = DBCollection(bc._db, DBPrefix.ST_Storage, StorageItem) + accounts = DBInterface(bc._db, DBPrefix.ST_Account, AccountState) + assets = DBInterface(bc._db, DBPrefix.ST_Asset, AssetState) + validators = DBInterface(bc._db, DBPrefix.ST_Validator, ValidatorState) + contracts = DBInterface(bc._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(bc._db, DBPrefix.ST_Storage, StorageItem) # if we are using a withdrawal tx, don't recreate the invocation tx # also, we don't want to reset the inputs / outputs @@ -303,7 +304,7 @@ def test_invoke(script, wallet, outputs, withdrawal_tx=None, tx.Attributes = [] if invoke_attrs is None else deepcopy(invoke_attrs) script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, None) + service = 
StateMachine(accounts, validators, assets, contracts, storages, None, bc) if len(outputs) < 1: contract = wallet.GetDefaultContract() @@ -412,15 +413,15 @@ def test_deploy_and_invoke(deploy_script, invoke_args, wallet, debug_map=None, invoke_attrs=None, owners=None): bc = GetBlockchain() - accounts = DBCollection(bc._db, DBPrefix.ST_Account, AccountState) - assets = DBCollection(bc._db, DBPrefix.ST_Asset, AssetState) - validators = DBCollection(bc._db, DBPrefix.ST_Validator, ValidatorState) - contracts = DBCollection(bc._db, DBPrefix.ST_Contract, ContractState) - storages = DBCollection(bc._db, DBPrefix.ST_Storage, StorageItem) + accounts = DBInterface(bc._db, DBPrefix.ST_Account, AccountState) + assets = DBInterface(bc._db, DBPrefix.ST_Asset, AssetState) + validators = DBInterface(bc._db, DBPrefix.ST_Validator, ValidatorState) + contracts = DBInterface(bc._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(bc._db, DBPrefix.ST_Storage, StorageItem) if settings.USE_DEBUG_STORAGE: debug_storage = DebugStorage.instance() - storages = DBCollection(debug_storage.db, DBPrefix.ST_Storage, StorageItem) + storages = DBInterface(debug_storage.db, DBPrefix.ST_Storage, StorageItem) storages.DebugStorage = True dtx = InvocationTransaction() @@ -443,7 +444,7 @@ def test_deploy_and_invoke(deploy_script, invoke_args, wallet, dtx.scripts = context.GetScripts() script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, None) + service = StateMachine(accounts, validators, assets, contracts, storages, None, bc) contract = wallet.GetDefaultContract() dtx.Attributes = [TransactionAttribute(usage=TransactionAttributeUsage.Script, data=Crypto.ToScriptHash(contract.Script, unhex=False))] diff --git a/neo/Prompt/Commands/SC.py b/neo/Prompt/Commands/SC.py index c15cd6298..cb9a6eefd 100644 --- a/neo/Prompt/Commands/SC.py +++ b/neo/Prompt/Commands/SC.py @@ -10,7 +10,7 @@ from neo.SmartContract.ContractParameterType 
import ContractParameterType from prompt_toolkit import prompt from neo.Core.Fixed8 import Fixed8 -from neo.Implementations.Blockchains.LevelDB.DebugStorage import DebugStorage +from neo.Storage.Common.DebugStorage import DebugStorage from distutils import util from neo.Settings import settings from neo.Prompt.PromptPrinter import prompt_print as print diff --git a/neo/SmartContract/ApplicationEngine.py b/neo/SmartContract/ApplicationEngine.py index 785e43d13..b51aaadf5 100644 --- a/neo/SmartContract/ApplicationEngine.py +++ b/neo/SmartContract/ApplicationEngine.py @@ -11,10 +11,11 @@ from neo.Core.State.ContractState import ContractState from neo.Core.State.StorageItem import StorageItem from neo.Core.State.ValidatorState import ValidatorState -from neo.Implementations.Blockchains.LevelDB.CachedScriptTable import CachedScriptTable -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection +from neo.Storage.Common.CachedScriptTable import CachedScriptTable +from neo.Storage.Interface.DBInterface import DBInterface + # used for ApplicationEngine.Run -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix +from neo.Storage.Common.DBPrefix import DBPrefix from neo.Settings import settings from neo.SmartContract import TriggerType from neo.VM import OpCode @@ -264,7 +265,7 @@ def GetPriceForSysCall(self): return 1 @staticmethod - def Run(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mode=True): + def Run(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mode=True, wb=None): """ Runs a script in a test invoke environment @@ -282,14 +283,14 @@ def Run(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mod bc = Blockchain.Default() - accounts = DBCollection(bc._db, DBPrefix.ST_Account, AccountState) - assets = DBCollection(bc._db, DBPrefix.ST_Asset, AssetState) - validators = DBCollection(bc._db, DBPrefix.ST_Validator, ValidatorState) - contracts = DBCollection(bc._db, 
DBPrefix.ST_Contract, ContractState) - storages = DBCollection(bc._db, DBPrefix.ST_Storage, StorageItem) + accounts = DBInterface(bc._db, DBPrefix.ST_Account, AccountState) + assets = DBInterface(bc._db, DBPrefix.ST_Asset, AssetState) + validators = DBInterface(bc._db, DBPrefix.ST_Validator, ValidatorState) + contracts = DBInterface(bc._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(bc._db, DBPrefix.ST_Storage, StorageItem) script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, None) + service = StateMachine(accounts, validators, assets, contracts, storages, wb, bc) engine = ApplicationEngine( trigger_type=TriggerType.Application, @@ -301,9 +302,12 @@ def Run(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mod exit_on_error=exit_on_error ) - script = binascii.unhexlify(script) + try: + _script = binascii.unhexlify(script) + except: + _script = script - engine.LoadScript(script) + engine.LoadScript(_script) try: success = engine.Execute() diff --git a/neo/SmartContract/StateMachine.py b/neo/SmartContract/StateMachine.py index d6dea9af7..b5f2764ce 100644 --- a/neo/SmartContract/StateMachine.py +++ b/neo/SmartContract/StateMachine.py @@ -27,16 +27,17 @@ class StateMachine(StateReader): _contracts_created = {} - def __init__(self, accounts, validators, assets, contracts, storages, chain): + def __init__(self, accounts, validators, assets, contracts, storages, wb, chain): super(StateMachine, self).__init__() - self._chain = chain self._accounts = accounts self._validators = validators self._assets = assets self._contracts = contracts self._storages = storages + self._wb = wb + self._chain = chain self._accounts.MarkForReset() self._validators.MarkForReset() @@ -84,11 +85,11 @@ def ExecutionCompleted(self, engine, success, error=None): super(StateMachine, self).ExecutionCompleted(engine, success, error) def Commit(self): - self._accounts.Commit(False) - 
self._validators.Commit(False) - self._assets.Commit(False) - self._contracts.Commit(False) - self._storages.Commit(False) + self._accounts.Commit(self._wb, False) + self._validators.Commit(self._wb, False) + self._assets.Commit(self._wb, False) + self._contracts.Commit(self._wb, False) + self._storages.Commit(self._wb, False) def ResetState(self): self._accounts.Reset() @@ -99,7 +100,7 @@ def ResetState(self): def TestCommit(self): if self._storages.DebugStorage: - self._storages.Commit(False) + self._storages.Commit(self._wb, False) def Deprecated_Method(self, engine): logger.debug("Method No Longer operational") @@ -163,7 +164,7 @@ def Asset_Create(self, engine: ExecutionEngine): asset_id=tx.Hash, asset_type=asset_type, name=name, amount=amount, available=Fixed8.Zero(), precision=precision, fee_mode=0, fee=Fixed8.Zero(), fee_addr=UInt160(), owner=owner, admin=admin, issuer=issuer, - expiration=self._chain.Default().Height + 1 + 2000000, is_frozen=False + expiration=self._chain.Height + 1 + 2000000, is_frozen=False ) asset = self._assets.ReplaceOrAdd(tx.Hash.ToBytes(), new_asset) @@ -186,8 +187,8 @@ def Asset_Renew(self, engine: ExecutionEngine): asset = self._assets.GetAndChange(current_asset.AssetId.ToBytes()) - if asset.Expiration < self._chain.Default().Height + 1: - asset.Expiration = self._chain.Default().Height + 1 + if asset.Expiration < self._chain.Height + 1: + asset.Expiration = self._chain.Height + 1 try: @@ -256,7 +257,7 @@ def Contract_Create(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_CREATED, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, self._chain.Default().Height + 1, + hash, self._chain.Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) return True @@ -324,7 +325,7 @@ def Contract_Migrate(self, engine: ExecutionEngine): self.events_to_dispatch.append( 
SmartContractEvent(SmartContractEvent.CONTRACT_MIGRATED, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, self._chain.Default().Height + 1, + hash, self._chain.Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) @@ -364,7 +365,7 @@ def Contract_Destroy(self, engine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_DESTROY, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, self._chain.Default().Height + 1, + hash, self._chain.Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) return True @@ -405,7 +406,7 @@ def Storage_Put(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.STORAGE_PUT, ContractParameter(ContractParameterType.String, '%s -> %s' % (keystr, valStr)), - context.ScriptHash, self._chain.Default().Height + 1, + context.ScriptHash, self._chain.Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) @@ -427,7 +428,7 @@ def Storage_Delete(self, engine: ExecutionEngine): keystr = Crypto.ToAddress(UInt160(data=key)) self.events_to_dispatch.append(SmartContractEvent(SmartContractEvent.STORAGE_DELETE, ContractParameter(ContractParameterType.String, keystr), - context.ScriptHash, self._chain.Default().Height + 1, + context.ScriptHash, self._chain.Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) diff --git a/neo/SmartContract/StateReader.py b/neo/SmartContract/StateReader.py index 78be62c7c..d125cd7c5 100644 --- a/neo/SmartContract/StateReader.py +++ b/neo/SmartContract/StateReader.py @@ -21,7 +21,8 @@ from neo.SmartContract.Iterable.Wrapper import ArrayWrapper, MapWrapper from neo.SmartContract.Iterable import KeysWrapper, ValuesWrapper from neo.SmartContract.Iterable.ConcatenatedEnumerator import ConcatenatedEnumerator 
-from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix +from neo.Storage.Common.DBPrefix import DBPrefix + from neo.Core.State.ContractState import ContractState from neo.Core.State.AccountState import AccountState from neo.Core.State.AssetState import AssetState diff --git a/neo/Storage/Common/DebugStorage.py b/neo/Storage/Common/DebugStorage.py index 19014e00d..7fc8b105b 100644 --- a/neo/Storage/Common/DebugStorage.py +++ b/neo/Storage/Common/DebugStorage.py @@ -1,7 +1,7 @@ -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix from neo.Blockchain import GetBlockchain -import plyvel -from neo.Settings import settings +from neo.Storage.Common.DBPrefix import DBPrefix +import neo.Storage.Implementation.DBFactory as DBFactory +from neo.Storage.Interface.DBInterface import DBProperties from neo.logging import log_manager logger = log_manager.getLogger('db') @@ -15,19 +15,15 @@ def db(self): return self._db def reset(self): - for key in self._db.iterator(prefix=DBPrefix.ST_Storage, include_value=False): + for key in self._db.openIter( + DBProperties(prefix=DBPrefix.ST_Storage, include_value=False)): self._db.delete(key) - def clone_from_live(self): - clone_db = GetBlockchain()._db.snapshot() - for key, value in clone_db.iterator(prefix=DBPrefix.ST_Storage, include_value=True): - self._db.put(key, value) - def __init__(self): try: - # TODO_MERL: generic db support - self._db = plyvel.DB(settings.debug_storage_leveldb_path, create_if_missing=True) + self._db = GetBlockchain().Default().GetDB().cloneDatabase( + DBFactory.getDebugStorageDB()) except Exception as e: logger.info("DEBUG leveldb unavailable, you may already be running this process: %s " % e) raise Exception('DEBUG Leveldb Unavailable %s ' % e) @@ -36,5 +32,4 @@ def __init__(self): def instance(): if not DebugStorage.__instance: DebugStorage.__instance = DebugStorage() - DebugStorage.__instance.clone_from_live() return DebugStorage.__instance diff --git 
a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index bd1333cc7..637d071f6 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -17,6 +17,7 @@ print('props ', settings.database_properties()) DATABASE_PROPS = settings.database_properties() + _blockchain_db_instance = None _notif_db_instance = None @@ -25,6 +26,9 @@ def getBlockchainDB(): + logger.info('Creating BlockchainDB') + BlockchainDB = _dbFactory(BC_CONST, DATABASE_PROPS[BC_CONST]) + _blockchain_db_instance = BlockchainDB(DATABASE_PROPS[BC_CONST]['path']) return _blockchain_db_instance @@ -33,49 +37,34 @@ def getNotificationDB(): def getDebugStorageDB(): + logger.info('Creating DebugDB') + DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) + _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['path']) return _debug_db_instance def _dbFactory(dbType, properties): - if dbType == 'blockchain': - if properties['backend'] == 'leveldb': - - # import what's needed - import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions - - methods = [x for x in dir(functions) if not x.startswith('__')] - - # build attributes dict - attributes = {methods[i]: getattr( - functions, methods[i]) for i in range(0, len(methods))} - - # add __init__ method - attributes['__init__'] = attributes.pop(functions._init_method) - - print(attributes) - - return type( - properties['backend'].title()+'DBImpl'+dbType.title(), - (AbstractDBInterface,), - attributes) - - if dbType == 'notification': - raise Exception('Not yet implemented') - - if dbType == 'debug': - raise Exception('Not yet implemented') - + if properties['backend'] == 'leveldb': -BlockchainDB = _dbFactory(BC_CONST, DATABASE_PROPS[BC_CONST]) + # import what's needed + import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions -# NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) + methods = [x for x in dir(functions) 
if not x.startswith('__')] -# DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) + # build attributes dict + attributes = {methods[i]: getattr( + functions, methods[i]) for i in range(0, len(methods))} + # add __init__ method + attributes['__init__'] = attributes.pop(functions._init_method) -_blockchain_db_instance = BlockchainDB(DATABASE_PROPS[BC_CONST]['path']) + # print(attributes) -# _notif_db_instance = NotificationDB(DATABASE_PROPS[NOTIF_CONST]) + return type( + properties['backend'].title()+'DBImpl'+dbType.title(), + (AbstractDBInterface,), + attributes) -# _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]) + # NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) + # _notif_db_instance = NotificationDB(DATABASE_PROPS[NOTIF_CONST]) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py index 4be62de6f..e72a27eb1 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -1,6 +1,14 @@ import plyvel +import threading + +from contextlib import contextmanager + +from neo.Core.Blockchain import Blockchain +from neo.Storage.Common.DBPrefix import DBPrefix +from neo.Storage.Interface.DBInterface import DBProperties from neo.logging import log_manager + logger = log_manager.getLogger('LevelDB') """Document me""" @@ -17,6 +25,8 @@ _batch = None +_lock = threading.Lock() + @property def Path(self): @@ -26,7 +36,6 @@ def Path(self): def _db_init(self, path): try: self._path = path - print('path:::: ', path) self._db = plyvel.DB(path, create_if_missing=True) logger.info("Created Blockchain DB at %s " % self._path) except Exception as e: @@ -58,36 +67,36 @@ def deleteBatch(self, batch: dict): wb.delete(key) +def cloneDatabase(self, clone_db): + db_snapshot = self.createSnapshot() + for key, value in db_snapshot.iterator(prefix=DBPrefix.ST_Storage, include_value=True): + 
clone_db.write(key, value) + return clone_db + + def createSnapshot(self): self._snapshot = self._db.snapshot() return self._snapshot -def dropSnapshot(self): - self._snapshot.close() - self._snapshot = None - - -def openIter(self, properties, start=None, end=None): +@contextmanager +def openIter(self, properties): # TODO start implement start and end self._iter = self._db.iterator( - properties.prefix, - properties.include_value) - return self._iter + prefix=properties.prefix, + include_value=properties.include_value, + include_key=properties.include_key) + yield self._iter + self._iter.close() +@contextmanager def getBatch(self): - self._batch = self._db.write_batch() - - -def dropBatch(self): - self._batch = None - - -def closeIter(self): - self._iter.close() - self._iter = None + with _lock: + self._batch = self._db.write_batch() + yield self._batch + self._batch.write() def closeDB(self): diff --git a/neo/Storage/Interface/AbstractDBInterface.py b/neo/Storage/Interface/AbstractDBInterface.py index c7b90e6df..e1e6f3960 100644 --- a/neo/Storage/Interface/AbstractDBInterface.py +++ b/neo/Storage/Interface/AbstractDBInterface.py @@ -4,22 +4,41 @@ class AbstractDBInterface(ABC): @abstractmethod - def write(self, key, value): raise NotImplementedError + def write(self, key, value): + raise NotImplementedError @abstractmethod - def writeBatch(self, batch): raise NotImplementedError + def writeBatch(self, batch): + raise NotImplementedError @abstractmethod - def get(self, key): raise NotImplementedError + def get(self, key): + raise NotImplementedError @abstractmethod - def createSnapshot(self): raise NotImplementedError + def delete(self, key): + raise NotImplementedError @abstractmethod - def dropSnapshot(self, snapshot): raise NotImplementedError + def deleteBatch(self, batch: dict): + raise NotImplementedError @abstractmethod - def openIter(self, properties, start=None, end=None): raise NotImplementedError + def cloneDatabase(self, clone_db): + raise 
NotImplementedError @abstractmethod - def closeIter(self): raise NotImplementedError + def createSnapshot(self): + raise NotImplementedError + + @abstractmethod + def openIter(self, properties): + raise NotImplementedError + + @abstractmethod + def getBatch(self): + raise NotImplementedError + + @abstractmethod + def closeDB(self): + raise NotImplementedError diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index b205eff41..ba3ae320b 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -2,7 +2,7 @@ from neo.SmartContract.Iterable import EnumeratorBase from neo.logging import log_manager -logger = log_manager.getLogger('DBCollection') +logger = log_manager.getLogger('DBInterface') class DBProperties: @@ -11,7 +11,7 @@ class DBProperties: include_value = None include_key = None - def __init__(self, prefix, include_value=False, include_key=True): + def __init__(self, prefix, include_value=True, include_key=True): self.prefix = prefix self.include_value = include_value self.include_key = include_key @@ -54,6 +54,19 @@ def __init__(self, db, prefix, class_ref): self._batch_changed = {} + @property + def Current(self): + try: + ret = {} + for key, val in self.Collection.items(): + if val is not None: + ret[key] = val + return ret + except Exception as e: + logger.error("error getting items %s " % e) + + return {} + @property def Keys(self): if not self._built_keys: @@ -62,28 +75,26 @@ def Keys(self): return self.Collection.keys() def _BuildCollectionKeys(self): - for key in self.DB.openIter(DBProperties(self.Prefix)): - key = key[1:] - if key not in self.Collection.keys(): - self.Collection[key] = None - - # clean up - self.DB.closeIter() - - def Commit(self, destroy=True): - - if self.Changed: - for keyval in self.Changed: - item = self.Collection[keyval] - if item: - self._batch_changed[self.Prefix + keyval] = self.Collection[keyval].ToByteArray() - 
self.DB.writeBatch(self._batch_changed) - - if self.Deleted: - self.DB.deleteBatch(self.Prefix + keyval for keyval in self.Deleted) - for keyval in self.Deleted: - self.Collection[keyval] = None - + with self.DB.openIter(DBProperties(self.Prefix)) as iterator: + for key in iterator: + key = key[1:] + if key not in self.Collection.keys(): + self.Collection[key] = None + + def Commit(self, wb, destroy=True): + for keyval in self.Changed: + item = self.Collection[keyval] + if item: + if not wb: + self.DB.write(self.Prefix + keyval, self.Collection[keyval].ToByteArray()) + else: + wb.put(self.Prefix + keyval, self.Collection[keyval].ToByteArray()) + for keyval in self.Deleted: + if not wb: + self.DB.delete(self.Prefix + keyval) + else: + wb.delete(self.Prefix + keyval) + self.Collection[keyval] = None if destroy: self.Destroy() else: @@ -218,15 +229,16 @@ def TryFind(self, key_prefix): def Find(self, key_prefix): key_prefix = self.Prefix + key_prefix res = {} - for key, val in self.DB.openIter(DBProperties(self.Prefix, include_value=True)): - # we want the storage item, not the raw bytes - item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(val)).Value - # also here we need to skip the 1 byte storage prefix - res_key = key[21:] - res[res_key] = item - - # clean up - self.DB.closeIter() + with self.DB.openIter(DBProperties(self.Prefix, include_value=True)) as iterator: + for key, val in iterator: + logger.info('in iterator %s %s ' % key, val) + # we want the storage item, not the raw bytes + item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(val)).Value + # also here we need to skip the 1 byte storage prefix + res_key = key[21:] + res[res_key] = item + + logger.info('finished') return res diff --git a/neo/bin/api_server.py b/neo/bin/api_server.py index fa4055a76..730d6b117 100755 --- a/neo/bin/api_server.py +++ b/neo/bin/api_server.py @@ -103,6 +103,7 @@ def stop_block_persisting(): def persist_done(value): """persist callback. 
Value is unused""" if continue_persisting: + sleep(0.5) start_block_persisting() else: block_deferred.cancel() @@ -155,6 +156,8 @@ def main(): # host parser.add_argument("--host", action="store", type=str, help="Hostname ( for example 127.0.0.1)", default="0.0.0.0") + # rollback + parser.add_argument("--rollback", action="store", type=int, help="Block id to rollback the chain to") # Now parse args = parser.parse_args() # print(args) diff --git a/neo/bin/prompt.py b/neo/bin/prompt.py index b6c22e813..fe307e157 100755 --- a/neo/bin/prompt.py +++ b/neo/bin/prompt.py @@ -26,6 +26,9 @@ from neo.logging import log_manager from neo.Prompt.PromptPrinter import prompt_print, token_style +import neo.Storage.Implementation.DBFactory as DBFactory + + logger = log_manager.getLogger() @@ -298,7 +301,7 @@ def main(): settings.set_max_peers(args.maxpeers) # Instantiate the blockchain and subscribe to notifications - blockchain = Blockchain(getBlockchainDB()) + blockchain = Blockchain(DBFactory.getBlockchainDB()) Blockchain.RegisterBlockchain(blockchain) # Try to set up a notification db From bd02494c8215290d32c05bb468727990c0f98679 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Sun, 14 Apr 2019 18:39:12 +0200 Subject: [PATCH 04/23] feat, NotificationDB now uses the new layer too - still WIP - made linter happy --- .../Notifications/LevelDB/__init__.py | 0 .../{LevelDB => }/NotificationDB.py | 159 +++++++++--------- .../{LevelDB => }/test_notification_db.py | 2 +- neo/Prompt/Commands/Show.py | 2 +- neo/Prompt/Commands/Tokens.py | 2 +- neo/Settings.py | 2 - neo/SmartContract/ApplicationEngine.py | 2 +- neo/Storage/Implementation/DBFactory.py | 11 +- .../LevelDB/LevelDBClassMethods.py | 11 +- neo/Storage/Implementation/test_db_factory.py | 1 - neo/Storage/Interface/AbstractDBInterface.py | 4 + neo/Utils/BlockchainFixtureTestCase.py | 2 +- neo/api/REST/RestApi.py | 2 +- neo/api/REST/test_rest_api.py | 2 +- neo/bin/api_server.py | 2 +- neo/bin/import_blocks.py | 2 +- neo/bin/prompt.py | 
2 +- 17 files changed, 105 insertions(+), 103 deletions(-) delete mode 100644 neo/Implementations/Notifications/LevelDB/__init__.py rename neo/Implementations/Notifications/{LevelDB => }/NotificationDB.py (60%) rename neo/Implementations/Notifications/{LevelDB => }/test_notification_db.py (98%) diff --git a/neo/Implementations/Notifications/LevelDB/__init__.py b/neo/Implementations/Notifications/LevelDB/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/neo/Implementations/Notifications/LevelDB/NotificationDB.py b/neo/Implementations/Notifications/NotificationDB.py similarity index 60% rename from neo/Implementations/Notifications/LevelDB/NotificationDB.py rename to neo/Implementations/Notifications/NotificationDB.py index c4d85344f..4f7476efe 100644 --- a/neo/Implementations/Notifications/LevelDB/NotificationDB.py +++ b/neo/Implementations/Notifications/NotificationDB.py @@ -1,6 +1,8 @@ import plyvel from neo.EventHub import events from neo.SmartContract.SmartContractEvent import SmartContractEvent, NotifyEvent, NotifyType +from neo.Storage.Implementation.DBFactory import getNotificationDB +from neo.Storage.Interface.DBInterface import DBProperties from neo.Core.State.ContractState import ContractState from neo.Settings import settings from neo.Core.Blockchain import Blockchain @@ -50,7 +52,7 @@ def close(): Closes the database if it is open """ if NotificationDB.__instance: - NotificationDB.__instance.db.close() + NotificationDB.__instance.db.closeDB() NotificationDB.__instance = None @property @@ -69,7 +71,7 @@ def current_events(self): def __init__(self, path): try: - self._db = plyvel.DB(path, create_if_missing=True) + self._db = getNotificationDB() logger.info("Created Notification DB At %s " % path) except Exception as e: logger.info("Notification leveldb unavailable, you may already be running this process: %s " % e) @@ -126,89 +128,82 @@ def on_persist_completed(self, block): """ if len(self._events_to_write): - addr_db = 
self.db.prefixed_db(NotificationPrefix.PREFIX_ADDR) - block_db = self.db.prefixed_db(NotificationPrefix.PREFIX_BLOCK) - contract_db = self.db.prefixed_db(NotificationPrefix.PREFIX_CONTRACT) - - block_write_batch = block_db.write_batch() - contract_write_batch = contract_db.write_batch() + addr_db = self.db.getPrefixedDB(NotificationPrefix.PREFIX_ADDR) + block_db = self.db.getPrefixedDB(NotificationPrefix.PREFIX_BLOCK) + contract_db = self.db.getPrefixedDB(NotificationPrefix.PREFIX_CONTRACT) block_count = 0 block_bytes = self._events_to_write[0].block_number.to_bytes(4, 'little') - for evt in self._events_to_write: # type:NotifyEvent - - # write the event for both or one of the addresses involved in the transfer - write_both = True - hash_data = evt.ToByteArray() - - bytes_to = bytes(evt.addr_to.Data) - bytes_from = bytes(evt.addr_from.Data) - - if bytes_to == bytes_from: - write_both = False - - total_bytes_to = addr_db.get(bytes_to + NotificationPrefix.PREFIX_COUNT) - total_bytes_from = addr_db.get(bytes_from + NotificationPrefix.PREFIX_COUNT) - - if not total_bytes_to: - total_bytes_to = b'\x00' - - if not total_bytes_from: - total_bytes_from = b'x\00' - - addr_to_key = bytes_to + total_bytes_to - addr_from_key = bytes_from + total_bytes_from - - with addr_db.write_batch() as b: - b.put(addr_to_key, hash_data) - if write_both: - b.put(addr_from_key, hash_data) - total_bytes_to = int.from_bytes(total_bytes_to, 'little') + 1 - total_bytes_from = int.from_bytes(total_bytes_from, 'little') + 1 - new_bytes_to = total_bytes_to.to_bytes(4, 'little') - new_bytes_from = total_bytes_from.to_bytes(4, 'little') - b.put(bytes_to + NotificationPrefix.PREFIX_COUNT, new_bytes_to) - if write_both: - b.put(bytes_from + NotificationPrefix.PREFIX_COUNT, new_bytes_from) - - # write the event to the per-block database - per_block_key = block_bytes + block_count.to_bytes(4, 'little') - block_write_batch.put(per_block_key, hash_data) - block_count += 1 - - # write the event to the 
per-contract database - contract_bytes = bytes(evt.contract_hash.Data) - count_for_contract = contract_db.get(contract_bytes + NotificationPrefix.PREFIX_COUNT) - if not count_for_contract: - count_for_contract = b'\x00' - contract_event_key = contract_bytes + count_for_contract - contract_count_int = int.from_bytes(count_for_contract, 'little') + 1 - new_contract_count = contract_count_int.to_bytes(4, 'little') - contract_write_batch.put(contract_bytes + NotificationPrefix.PREFIX_COUNT, new_contract_count) - contract_write_batch.put(contract_event_key, hash_data) - - # finish off the per-block write batch and contract write batch - block_write_batch.write() - contract_write_batch.write() + with block_db.getBatch() as block_write_batch: + with contract_db.getBatch() as contract_write_batch: + for evt in self._events_to_write: # type:NotifyEvent + # write the event for both or one of the addresses involved in the transfer + write_both = True + hash_data = evt.ToByteArray() + + bytes_to = bytes(evt.addr_to.Data) + bytes_from = bytes(evt.addr_from.Data) + + if bytes_to == bytes_from: + write_both = False + + total_bytes_to = addr_db.get(bytes_to + NotificationPrefix.PREFIX_COUNT) + total_bytes_from = addr_db.get(bytes_from + NotificationPrefix.PREFIX_COUNT) + + if not total_bytes_to: + total_bytes_to = b'\x00' + + if not total_bytes_from: + total_bytes_from = b'x\00' + + addr_to_key = bytes_to + total_bytes_to + addr_from_key = bytes_from + total_bytes_from + + with addr_db.getBatch() as b: + b.put(addr_to_key, hash_data) + if write_both: + b.put(addr_from_key, hash_data) + total_bytes_to = int.from_bytes(total_bytes_to, 'little') + 1 + total_bytes_from = int.from_bytes(total_bytes_from, 'little') + 1 + new_bytes_to = total_bytes_to.to_bytes(4, 'little') + new_bytes_from = total_bytes_from.to_bytes(4, 'little') + b.put(bytes_to + NotificationPrefix.PREFIX_COUNT, new_bytes_to) + if write_both: + b.put(bytes_from + NotificationPrefix.PREFIX_COUNT, new_bytes_from) + + # 
write the event to the per-block database + per_block_key = block_bytes + block_count.to_bytes(4, 'little') + block_write_batch.put(per_block_key, hash_data) + block_count += 1 + + # write the event to the per-contract database + contract_bytes = bytes(evt.contract_hash.Data) + count_for_contract = contract_db.get(contract_bytes + NotificationPrefix.PREFIX_COUNT) + if not count_for_contract: + count_for_contract = b'\x00' + contract_event_key = contract_bytes + count_for_contract + contract_count_int = int.from_bytes(count_for_contract, 'little') + 1 + new_contract_count = contract_count_int.to_bytes(4, 'little') + contract_write_batch.put(contract_bytes + NotificationPrefix.PREFIX_COUNT, new_contract_count) + contract_write_batch.put(contract_event_key, hash_data) self._events_to_write = [] if len(self._new_contracts_to_write): - token_db = self.db.prefixed_db(NotificationPrefix.PREFIX_TOKEN) + token_db = self.db.getPrefixedDB(NotificationPrefix.PREFIX_TOKEN) token_write_batch = token_db.write_batch() - for token_event in self._new_contracts_to_write: - try: - hash_data = token_event.ToByteArray() # used to fail here - hash_key = token_event.contract.Code.ScriptHash().ToBytes() - token_write_batch.put(hash_key, hash_data) - except Exception as e: - logger.debug(f"Failed to write new contract, reason: {e}") - - token_write_batch.write() + with token_db.getBatch() as token_write_batch: + for token_event in self._new_contracts_to_write: + try: + hash_data = token_event.ToByteArray() # used to fail here + hash_key = token_event.contract.Code.ScriptHash().ToBytes() + token_write_batch.put(hash_key, hash_data) + except Exception as e: + logger.debug(f"Failed to write new contract, reason: {e}") self._new_contracts_to_write = [] @@ -221,10 +216,10 @@ def get_by_block(self, block_number): Returns: list: a list of notifications """ - blocklist_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_BLOCK).snapshot() + blocklist_snapshot = 
self.db.getPrefixedDB(NotificationPrefix.PREFIX_BLOCK).createSnapshot() block_bytes = block_number.to_bytes(4, 'little') results = [] - for val in blocklist_snapshot.iterator(prefix=block_bytes, include_key=False): + for val in blocklist_snapshot.openIter(DBProperties(prefix=block_bytes, include_key=False)): event = SmartContractEvent.FromByteArray(val) results.append(event) @@ -246,10 +241,10 @@ def get_by_addr(self, address): if not isinstance(addr, UInt160): raise Exception("Incorrect address format") - addrlist_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_ADDR).snapshot() + addrlist_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_ADDR).createSnapshot() results = [] - for val in addrlist_snapshot.iterator(prefix=bytes(addr.Data), include_key=False): + for val in addrlist_snapshot.openIter(DBProperties(prefix=bytes(addr.Data), include_key=False)): if len(val) > 4: try: event = SmartContractEvent.FromByteArray(val) @@ -274,10 +269,10 @@ def get_by_contract(self, contract_hash): if not isinstance(hash, UInt160): raise Exception("Incorrect address format") - contractlist_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_CONTRACT).snapshot() + contractlist_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_CONTRACT).createSnapshot() results = [] - for val in contractlist_snapshot.iterator(prefix=bytes(hash.Data), include_key=False): + for val in contractlist_snapshot.openIter(DBProperties(prefix=bytes(hash.Data), include_key=False)): if len(val) > 4: try: event = SmartContractEvent.FromByteArray(val) @@ -292,9 +287,9 @@ def get_tokens(self): Returns: list: A list of smart contract events with contracts that are NEP5 Tokens """ - tokens_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_TOKEN).snapshot() + tokens_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_TOKEN).createSnapshot() results = [] - for val in tokens_snapshot.iterator(include_key=False): + for val in 
tokens_snapshot.openIter(DBProperties(include_key=False)): event = SmartContractEvent.FromByteArray(val) results.append(event) return results @@ -308,7 +303,7 @@ def get_token(self, hash): Returns: SmartContractEvent: A smart contract event with a contract that is an NEP5 Token """ - tokens_snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_TOKEN).snapshot() + tokens_snapshot = self.db.getPrefixedDB(DBProperties(NotificationPrefix.PREFIX_TOKEN)).createSnapshot() try: val = tokens_snapshot.get(hash.ToBytes()) diff --git a/neo/Implementations/Notifications/LevelDB/test_notification_db.py b/neo/Implementations/Notifications/test_notification_db.py similarity index 98% rename from neo/Implementations/Notifications/LevelDB/test_notification_db.py rename to neo/Implementations/Notifications/test_notification_db.py index eb81e073e..8819253b2 100644 --- a/neo/Implementations/Notifications/LevelDB/test_notification_db.py +++ b/neo/Implementations/Notifications/test_notification_db.py @@ -8,7 +8,7 @@ import shutil import os -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Core.BigInteger import BigInteger diff --git a/neo/Prompt/Commands/Show.py b/neo/Prompt/Commands/Show.py index cdd89b8da..e79419517 100644 --- a/neo/Prompt/Commands/Show.py +++ b/neo/Prompt/Commands/Show.py @@ -9,7 +9,7 @@ from neo.Core.UInt160 import UInt160 from neo.IO.MemoryStream import StreamManager from neo.Network.NodeLeader import NodeLeader -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.logging import log_manager from neo.Prompt.PromptPrinter import prompt_print as print import json diff --git a/neo/Prompt/Commands/Tokens.py b/neo/Prompt/Commands/Tokens.py index 415f21781..2bca547e1 100644 --- a/neo/Prompt/Commands/Tokens.py +++ 
b/neo/Prompt/Commands/Tokens.py @@ -10,7 +10,7 @@ from neo.Prompt.PromptData import PromptData from neo.Prompt import Utils as PromptUtils from neo.Implementations.Wallets.peewee.Models import NEP5Token as ModelNEP5Token -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Core.TX.TransactionAttribute import TransactionAttributeUsage from neo.Core.Utils import isValidPublicAddress import peewee diff --git a/neo/Settings.py b/neo/Settings.py index e7af234ed..456770bf0 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -68,8 +68,6 @@ def check_depdendencies(): raise SystemCheckError("Required dependency %s is not installed. Please run 'pip install -e .'" % dep) - - class SettingsHolder: """ This class holds all the settings. Needs to be setup with one of the diff --git a/neo/SmartContract/ApplicationEngine.py b/neo/SmartContract/ApplicationEngine.py index b51aaadf5..9e89654c0 100644 --- a/neo/SmartContract/ApplicationEngine.py +++ b/neo/SmartContract/ApplicationEngine.py @@ -304,7 +304,7 @@ def Run(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mod try: _script = binascii.unhexlify(script) - except: + except Exception as e: _script = script engine.LoadScript(_script) diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index 637d071f6..7222382ae 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -14,10 +14,8 @@ NOTIF_CONST = 'notification' DEBUG_CONST = 'debug' -print('props ', settings.database_properties()) DATABASE_PROPS = settings.database_properties() - _blockchain_db_instance = None _notif_db_instance = None @@ -33,6 +31,9 @@ def getBlockchainDB(): def getNotificationDB(): + logger.info('Creating NotificationDB') + NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) + _notif_db_instance = 
NotificationDB(DATABASE_PROPS[NOTIF_CONST]['path']) return _notif_db_instance @@ -62,9 +63,9 @@ def _dbFactory(dbType, properties): # print(attributes) return type( - properties['backend'].title()+'DBImpl'+dbType.title(), - (AbstractDBInterface,), - attributes) + properties['backend'].title() + 'DBImpl' + dbType.title(), + (AbstractDBInterface,), + attributes) # NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) # _notif_db_instance = NotificationDB(DATABASE_PROPS[NOTIF_CONST]) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py index e72a27eb1..a64dc97cc 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -84,9 +84,10 @@ def openIter(self, properties): # TODO start implement start and end self._iter = self._db.iterator( - prefix=properties.prefix, - include_value=properties.include_value, - include_key=properties.include_key) + prefix=properties.prefix, + include_value=properties.include_value, + include_key=properties.include_key) + yield self._iter self._iter.close() @@ -99,5 +100,9 @@ def getBatch(self): self._batch.write() +def getPrefixedDB(self, prefix): + return self._db.prefixed_db(prefix) + + def closeDB(self): self._db.close() diff --git a/neo/Storage/Implementation/test_db_factory.py b/neo/Storage/Implementation/test_db_factory.py index 85dbffa71..0c3c6cf2a 100644 --- a/neo/Storage/Implementation/test_db_factory.py +++ b/neo/Storage/Implementation/test_db_factory.py @@ -12,4 +12,3 @@ print(ret_1, ret_default) assert ret_1 == b'first' assert ret_default == b'default_value' - diff --git a/neo/Storage/Interface/AbstractDBInterface.py b/neo/Storage/Interface/AbstractDBInterface.py index e1e6f3960..bef764baf 100644 --- a/neo/Storage/Interface/AbstractDBInterface.py +++ b/neo/Storage/Interface/AbstractDBInterface.py @@ -42,3 +42,7 @@ def getBatch(self): @abstractmethod def 
closeDB(self): raise NotImplementedError + + @abstractmethod + def getPrefixedDB(self, prefix): + raise NotImplementedError diff --git a/neo/Utils/BlockchainFixtureTestCase.py b/neo/Utils/BlockchainFixtureTestCase.py index ef30557a7..6904a8267 100644 --- a/neo/Utils/BlockchainFixtureTestCase.py +++ b/neo/Utils/BlockchainFixtureTestCase.py @@ -6,7 +6,7 @@ from neo.Utils.NeoTestCase import NeoTestCase from neo.Implementations.Blockchains.LevelDB.TestLevelDBBlockchain import TestLevelDBBlockchain from neo.Core.Blockchain import Blockchain -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Settings import settings from neo.logging import log_manager from neo.Network.NodeLeader import NodeLeader diff --git a/neo/api/REST/RestApi.py b/neo/api/REST/RestApi.py index caaee5a2c..013294146 100644 --- a/neo/api/REST/RestApi.py +++ b/neo/api/REST/RestApi.py @@ -9,7 +9,7 @@ from logzero import logger from neo.Network.NodeLeader import NodeLeader -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Core.Blockchain import Blockchain from neo.Core.UInt160 import UInt160 from neo.Core.UInt256 import UInt256 diff --git a/neo/api/REST/test_rest_api.py b/neo/api/REST/test_rest_api.py index c5e20b017..ec7f0ee33 100644 --- a/neo/api/REST/test_rest_api.py +++ b/neo/api/REST/test_rest_api.py @@ -8,7 +8,7 @@ from neo.api.REST.RestApi import RestApi -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from klein.test.test_resource import requestMock diff --git a/neo/bin/api_server.py b/neo/bin/api_server.py index 730d6b117..72845758d 100755 --- a/neo/bin/api_server.py +++ b/neo/bin/api_server.py @@ -53,7 +53,7 @@ # neo methods and modules from 
neo.Core.Blockchain import Blockchain from neo.Storage.Implementation.DBFactory import getBlockchainDB -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Wallets.utils import to_aes_key from neo.Implementations.Wallets.peewee.UserWallet import UserWallet diff --git a/neo/bin/import_blocks.py b/neo/bin/import_blocks.py index 6def20ea1..14815c718 100644 --- a/neo/bin/import_blocks.py +++ b/neo/bin/import_blocks.py @@ -14,7 +14,7 @@ import shutil from tqdm import trange from prompt_toolkit import prompt -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB def main(): diff --git a/neo/bin/prompt.py b/neo/bin/prompt.py index fe307e157..63d46e876 100755 --- a/neo/bin/prompt.py +++ b/neo/bin/prompt.py @@ -12,7 +12,7 @@ from neo import __version__ from neo.Core.Blockchain import Blockchain from neo.Storage.Implementation.DBFactory import getBlockchainDB -from neo.Implementations.Notifications.LevelDB.NotificationDB import NotificationDB +from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Network.NodeLeader import NodeLeader from neo.Prompt.Commands.Wallet import CommandWallet from neo.Prompt.Commands.Show import CommandShow From 9ebdde60d5afe0d65ace965cda8d8d2408bffcd9 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Wed, 24 Apr 2019 18:13:41 +0000 Subject: [PATCH 05/23] feat, factory for prefixed db and snapshot. 
DB unit tests, NotificationDB fixes --- .../Notifications/NotificationDB.py | 54 ++++++++----- .../LevelDB/PrefixedDBFactory.py | 21 +++++ neo/Storage/Implementation/test/__init__.py | 0 .../test/test_LevelDBBlockchain.py | 80 +++++++++++++++++++ .../Implementation/test/test_initial_db.py | 65 +++++++++++++++ .../Implementation/test/test_leveldb.py | 14 ++++ 6 files changed, 213 insertions(+), 21 deletions(-) create mode 100644 neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py create mode 100644 neo/Storage/Implementation/test/__init__.py create mode 100644 neo/Storage/Implementation/test/test_LevelDBBlockchain.py create mode 100644 neo/Storage/Implementation/test/test_initial_db.py create mode 100644 neo/Storage/Implementation/test/test_leveldb.py diff --git a/neo/Implementations/Notifications/NotificationDB.py b/neo/Implementations/Notifications/NotificationDB.py index 4f7476efe..c439e0d74 100644 --- a/neo/Implementations/Notifications/NotificationDB.py +++ b/neo/Implementations/Notifications/NotificationDB.py @@ -71,7 +71,7 @@ def current_events(self): def __init__(self, path): try: - self._db = getNotificationDB() + self._db = getNotificationDB(path) logger.info("Created Notification DB At %s " % path) except Exception as e: logger.info("Notification leveldb unavailable, you may already be running this process: %s " % e) @@ -207,6 +207,12 @@ def on_persist_completed(self, block): self._new_contracts_to_write = [] + results = [] + with block_db.openIter(DBProperties(prefix=block_bytes, include_key=False)) as iterator: + for val in iterator: + event = SmartContractEvent.FromByteArray(val) + results.append(event) + def get_by_block(self, block_number): """ Look up notifications for a block @@ -217,11 +223,13 @@ def get_by_block(self, block_number): list: a list of notifications """ blocklist_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_BLOCK).createSnapshot() + block_bytes = block_number.to_bytes(4, 'little') results = [] - for val in 
blocklist_snapshot.openIter(DBProperties(prefix=block_bytes, include_key=False)): - event = SmartContractEvent.FromByteArray(val) - results.append(event) + with blocklist_snapshot.openIter(DBProperties(prefix=block_bytes, include_key=False)) as iterator: + for val in iterator: + event = SmartContractEvent.FromByteArray(val) + results.append(event) return results @@ -244,13 +252,14 @@ def get_by_addr(self, address): addrlist_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_ADDR).createSnapshot() results = [] - for val in addrlist_snapshot.openIter(DBProperties(prefix=bytes(addr.Data), include_key=False)): - if len(val) > 4: - try: - event = SmartContractEvent.FromByteArray(val) - results.append(event) - except Exception as e: - logger.error("could not parse event: %s %s" % (e, val)) + with addrlist_snapshot.openIter(DBProperties(prefix=bytes(addr.Data), include_key=False)) as iterator: + for val in iterator: + if len(val) > 4: + try: + event = SmartContractEvent.FromByteArray(val) + results.append(event) + except Exception as e: + logger.error("could not parse event: %s %s" % (e, val)) return results def get_by_contract(self, contract_hash): @@ -272,13 +281,14 @@ def get_by_contract(self, contract_hash): contractlist_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_CONTRACT).createSnapshot() results = [] - for val in contractlist_snapshot.openIter(DBProperties(prefix=bytes(hash.Data), include_key=False)): - if len(val) > 4: - try: - event = SmartContractEvent.FromByteArray(val) - results.append(event) - except Exception as e: - logger.error("could not parse event: %s %s" % (e, val)) + with contractlist_snapshot.openIter(DBProperties(prefix=bytes(hash.Data), include_key=False)) as iterator: + for val in iterator: + if len(val) > 4: + try: + event = SmartContractEvent.FromByteArray(val) + results.append(event) + except Exception as e: + logger.error("could not parse event: %s %s" % (e, val)) return results def get_tokens(self): @@ -289,9 +299,11 @@ 
def get_tokens(self): """ tokens_snapshot = self.db.getPrefixedDB(NotificationPrefix.PREFIX_TOKEN).createSnapshot() results = [] - for val in tokens_snapshot.openIter(DBProperties(include_key=False)): - event = SmartContractEvent.FromByteArray(val) - results.append(event) + + with tokens_snapshot.openIter(DBProperties(include_key=False)) as iterator: + for val in iterator: + event = SmartContractEvent.FromByteArray(val) + results.append(event) return results def get_token(self, hash): diff --git a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py new file mode 100644 index 000000000..603c4dba6 --- /dev/null +++ b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py @@ -0,0 +1,21 @@ +from neo.Storage.Interface.AbstractDBInterface import AbstractDBInterface + + +def internalDBFactory(classPrefix): + + # import what's needed + import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions + + methods = [x for x in dir(functions) if not x.startswith('__')] + + # build attributes dict + attributes = {methods[i]: getattr( + functions, methods[i]) for i in range(0, len(methods))} + + # add __init__ method + attributes['__init__'] = attributes.pop(functions._prefix_init_method) + + return type( + classPrefix.title() + 'DBImpl', + (AbstractDBInterface,), + attributes) diff --git a/neo/Storage/Implementation/test/__init__.py b/neo/Storage/Implementation/test/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neo/Storage/Implementation/test/test_LevelDBBlockchain.py b/neo/Storage/Implementation/test/test_LevelDBBlockchain.py new file mode 100644 index 000000000..84baee683 --- /dev/null +++ b/neo/Storage/Implementation/test/test_LevelDBBlockchain.py @@ -0,0 +1,80 @@ +from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase +from neo.Core.Blockchain import Blockchain +from neo.Settings import settings +import os + + +class 
LevelDBBlockchainTest(BlockchainFixtureTestCase): + @classmethod + def leveldb_testpath(cls): + return os.path.join(settings.DATA_DIR_PATH, 'fixtures/test_chain') + + # test need to be updated whenever we change the fixtures + def test_initial_setup(self): + self.assertEqual(self._blockchain.Height, 12349) + + def test_GetBlockHash(self): + # test requested block height exceeding blockchain current_height + invalid_bc_height = self._blockchain.Height + 1 + result = self._blockchain.GetBlockHash(invalid_bc_height) + self.assertEqual(result, None) + + # test header index length mismatch + # save index to restore later + saved = self._blockchain._header_index + self._blockchain._header_index = self._blockchain._header_index[:10] + result = self._blockchain.GetBlockHash(100) + self.assertEqual(result, None) + self._blockchain._header_index = saved + + # finally test correct retrieval + result = self._blockchain.GetBlockHash(100) + self.assertEqual(result, self._blockchain._header_index[100]) + + def test_GetBlockByHeight(self): + # test correct retrieval + block = self._blockchain.GetBlockByHeight(100) + self.assertEqual(block.GetHashCode().ToString(), self._blockchain.GetBlockHash(100).decode('utf-8')) + + # and also a invalid retrieval + invalid_bc_height = self._blockchain.Height + 1 + block = self._blockchain.GetBlockByHeight(invalid_bc_height) + self.assertEqual(block, None) + + def test_GetAccountState(self): + # test passing an address + addr = "AK2nJJpJr6o664CWJKi1QRXjqeic2zRp8y" + acct = Blockchain.Default().GetAccountState(addr) + acct = acct.ToJson() + self.assertIn('balances', acct.keys()) + + # test failure + addr = "AK2nJJpJr6o664CWJKi1QRXjqeic2zRp81" + acct = Blockchain.Default().GetAccountState(addr) + self.assertIsNone(acct) + + def test_GetHeaderBy(self): + # test correct retrieval with hash + blockheader = self._blockchain.GetHeaderBy("2b1c78633dae7ab81f64362e0828153079a17b018d779d0406491f84c27b086f") + 
self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) + + # test correct retrieval with 0x hash + blockheader = self._blockchain.GetHeaderBy("0x2b1c78633dae7ab81f64362e0828153079a17b018d779d0406491f84c27b086f") + self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) + + # test correct retrieval with str height + blockheader = self._blockchain.GetHeaderBy("11") + self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) + + # test correct retrieval with int height + blockheader = self._blockchain.GetHeaderBy(11) + self.assertEqual(blockheader.GetHashCode().ToString(), self._blockchain.GetBlockHash(11).decode('utf-8')) + + # test incorrect retrieval + invalid_bc_height = self._blockchain.Height + 1 + block = self._blockchain.GetHeaderBy(invalid_bc_height) + self.assertEqual(block, None) + + def test_ShowAllAssets(self): + assets = Blockchain.Default().ShowAllAssets() + self.assertEqual(len(assets), 2) diff --git a/neo/Storage/Implementation/test/test_initial_db.py b/neo/Storage/Implementation/test/test_initial_db.py new file mode 100644 index 000000000..60f553ba2 --- /dev/null +++ b/neo/Storage/Implementation/test/test_initial_db.py @@ -0,0 +1,65 @@ +from neo.Utils.NeoTestCase import NeoTestCase +from neo.Storage.Implementation.DBFactory import getBlockchainDB +from neo.Core.Blockchain import Blockchain +from neo.IO.Helper import Helper +from neo.Settings import settings +import shutil +import binascii +import os + + +class LevelDBTest(NeoTestCase): + + LEVELDB_TESTPATH = os.path.join(settings.DATA_DIR_PATH, 'UnitTestChain') + + _blockchain = None + + _genesis = None + + block_one_raw = 
b'00000000ef1f8f66a16fba100ed760f4ac6aa5a0d0bb8f4a0e92705b106761ef181718b3d0765298ceb5f57de7d2b0dab00ed25be4134706ada2d90adb8b7e3aba323a8e1abd125901000000d11f7a289214bdaff3812db982f3b0089a21a278988efeec6a027b2501fd450140884037dd265cb5f5a54802f53c2c8593b31d5b8a9c0bad4c7e366b153d878989d168080ac36b930036a9eb966b48c70bb41792e698fa021116f27c09643563b840e83ab14404d964a91dbac45f5460e88ad57196b1779478e3475334af8c1b49cd9f0213257895c60b5b92a4800eb32d785cbb39ae1f022528943909fd37deba63403677848bf98cc9dbd8fbfd7f2e4f34471866ea82ca6bffbf0f778b6931483700c17829b4bd066eb04983d3aac0bd46b9c8d03a73a8e714d3119de93cd9522e314054d16853b22014190063f77d9edf6fbccefcf71fffd1234f688823b4e429ae5fa639d0a664c842fbdfcb4d6e21f39d81c23563b92cffa09696d93c95bc4893a6401a43071d00d3e854f7f1f321afa7d5301d36f2195dc1e2643463f34ae637d2b02ae0eb11d4256c507a4f8304cea6396a7fce640f50acb301c2f6336d27717e84f155210209e7fd41dfb5c2f8dc72eb30358ac100ea8c72da18847befe06eade68cebfcb9210327da12b5c40200e9f65569476bbff2218da4f32548ff43b6387ec1416a231ee821034ff5ceeac41acf22cd5ed2da17a6df4dd8358fcb2bfb1a43208ad0feaab2746b21026ce35b29147ad09e4afe4ec4a7319095f08198fa8babbe3c56e970b143528d2221038dddc06ce687677a53d54f096d2591ba2302068cf123c1f2d75c2dddc542557921039dafd8571a641058ccc832c5e2111ea39b09c0bde36050914384f7a48bce9bf92102d02b1873a0863cd042cc717da31cea0d7cf9db32b74d4c72c01b0011503e2e2257ae010000d11f7a2800000000' + block_one_hash = b'0012f8566567a9d7ddf25acb5cf98286c9703297de675d01ba73fbfe6bcb841c' + + @classmethod + def setUpClass(cls): + settings.setup_unittest_net() + Blockchain.DeregisterBlockchain() + cls._blockchain = Blockchain(getBlockchainDB(cls.LEVELDB_TESTPATH), + skip_version_check=True) + Blockchain.RegisterBlockchain(cls._blockchain) + cls._genesis = Blockchain.GenesisBlock() + + @classmethod + def tearDownClass(cls): + cls._blockchain.Dispose() + shutil.rmtree(cls.LEVELDB_TESTPATH) + + def test__initial_state(self): + + self.assertEqual(self._blockchain.CurrentBlockHash, self._genesis.Hash.ToBytes()) + + 
self.assertEqual(self._blockchain.CurrentHeaderHash, self._genesis.Header.Hash.ToBytes()) + + self.assertEqual(self._blockchain.CurrentHeaderHash, self._genesis.Header.Hash.ToBytes()) + + self.assertEqual(self._blockchain.HeaderHeight, 0) + + self.assertEqual(self._blockchain.Height, 0) + + def test_add_header(self): + hexdata = binascii.unhexlify(self.block_one_raw) + block_one = Helper.AsSerializableWithType(hexdata, 'neo.Core.Block.Block') + + if settings.MAGIC == 56753: + self.assertEqual(self._blockchain.CurrentHeaderHash, b'996e37358dc369912041f966f8c5d8d3a8255ba5dcbd3447f8a82b55db869099') + else: + self.assertEqual(self._blockchain.CurrentHeaderHash, b'd42561e3d30e15be6400b6df2f328e02d2bf6354c41dce433bc57687c82144bf') + + self.assertEqual(self._blockchain.HeaderHeight, 0) + + self._blockchain.AddBlock(block_one) + self.assertEqual(self._blockchain.HeaderHeight, 1) + + def test_sys_block_fees(self): + + block_num = 14103 + fee_should_be = 435 diff --git a/neo/Storage/Implementation/test/test_leveldb.py b/neo/Storage/Implementation/test/test_leveldb.py new file mode 100644 index 000000000..cac3eb902 --- /dev/null +++ b/neo/Storage/Implementation/test/test_leveldb.py @@ -0,0 +1,14 @@ +from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase +from neo.Settings import settings +import os + + +class LevelDBTest(BlockchainFixtureTestCase): + + @classmethod + def leveldb_testpath(cls): + return os.path.join(settings.DATA_DIR_PATH, 'fixtures/test_chain') + + # test need to be updated whenever we change the fixtures + def test_a_initial_setup(self): + self.assertEqual(self._blockchain.Height, 12349) From d3aead87d2e4b75d383e8fc499f2b46ce3e126fb Mon Sep 17 00:00:00 2001 From: Merl111 Date: Wed, 24 Apr 2019 18:15:27 +0000 Subject: [PATCH 06/23] fix, fixed a few things to make UTs pass --- neo/Core/Blockchain.py | 3 +- neo/Network/NodeLeader.py | 2 + .../Commands/tests/test_show_commands.py | 2 +- neo/Settings.py | 1 + 
neo/SmartContract/ApplicationEngine.py | 6 +- neo/SmartContract/StateMachine.py | 16 +- .../tests/test_smart_contract.py | 4 +- .../tests/test_smart_contract2.py | 4 +- .../tests/test_smart_contract3.py | 16 +- neo/Storage/Common/DebugStorage.py | 8 +- neo/Storage/Implementation/DBFactory.py | 25 ++- .../LevelDB/LevelDBClassMethods.py | 49 +++-- .../Implementation/LevelDB/test_leveldb.py | 8 - neo/Storage/Implementation/test2.py | 4 - neo/Storage/Implementation/test_db_factory.py | 14 -- neo/Storage/Interface/DBInterface.py | 13 +- neo/Storage/Interface/test_db_interface.py | 1 - neo/Utils/BlockchainFixtureTestCase.py | 201 +++++++++++++++++- neo/Utils/VerifiableTestCase.py | 4 +- 19 files changed, 290 insertions(+), 91 deletions(-) delete mode 100644 neo/Storage/Implementation/LevelDB/test_leveldb.py delete mode 100644 neo/Storage/Implementation/test2.py delete mode 100644 neo/Storage/Implementation/test_db_factory.py delete mode 100644 neo/Storage/Interface/test_db_interface.py diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index c60f4055a..0e50d20b7 100644 --- a/neo/Core/Blockchain.py +++ b/neo/Core/Blockchain.py @@ -46,7 +46,7 @@ import neo.Storage.Implementation.DBFactory as DBFactory -logger = log_manager.getLogger('Blockchain') +logger = log_manager.getLogger() class Blockchain: @@ -134,7 +134,6 @@ def __init__(self, db, skip_version_check=False, skip_header_check=False): StreamManager.ReleaseStream(ms) except Exception as e: logger.info("Could not get stored header hash list: %s " % e) - self._db.closeIter() if len(hashes): hashes.sort(key=lambda x: x['k']) diff --git a/neo/Network/NodeLeader.py b/neo/Network/NodeLeader.py index 74e5ddcf3..ac2906b3a 100644 --- a/neo/Network/NodeLeader.py +++ b/neo/Network/NodeLeader.py @@ -541,6 +541,8 @@ def RelayDirectly(self, inventory): relayed |= peer.Relay(inventory) if len(self.Peers) == 0: + if BC.Default().UT: + return True logger.info("no connected peers") diff --git 
a/neo/Prompt/Commands/tests/test_show_commands.py b/neo/Prompt/Commands/tests/test_show_commands.py index a753b76e7..63281e219 100644 --- a/neo/Prompt/Commands/tests/test_show_commands.py +++ b/neo/Prompt/Commands/tests/test_show_commands.py @@ -170,7 +170,7 @@ def test_show_notifications(self): wallet_1_addr = 'AJQ6FoaSXDFzA6wLnyZ1nFN7SGSN2oNTc3' # test with no NotificationDB - with patch('neo.Implementations.Notifications.LevelDB.NotificationDB.NotificationDB.instance', return_value=None): + with patch('neo.Implementations.Notifications.NotificationDB.NotificationDB.instance', return_value=None): args = ['notifications', wallet_1_addr] res = CommandShow().execute(args) self.assertFalse(res) diff --git a/neo/Settings.py b/neo/Settings.py index 456770bf0..bac8597ed 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -145,6 +145,7 @@ def debug_storage_leveldb_path(self): def database_properties(self): return {'blockchain': {'path': self.chain_leveldb_path, + 'skip_version_check': False, 'backend': 'leveldb'}, 'notification': {'path': self.notification_leveldb_path, diff --git a/neo/SmartContract/ApplicationEngine.py b/neo/SmartContract/ApplicationEngine.py index 9e89654c0..f19133797 100644 --- a/neo/SmartContract/ApplicationEngine.py +++ b/neo/SmartContract/ApplicationEngine.py @@ -302,9 +302,11 @@ def Run(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mod exit_on_error=exit_on_error ) - try: + # maybe not the best solution + # but one for now + if not wb: _script = binascii.unhexlify(script) - except Exception as e: + else: _script = script engine.LoadScript(_script) diff --git a/neo/SmartContract/StateMachine.py b/neo/SmartContract/StateMachine.py index b5f2764ce..e0c5c1427 100644 --- a/neo/SmartContract/StateMachine.py +++ b/neo/SmartContract/StateMachine.py @@ -24,6 +24,7 @@ class StateMachine(StateReader): _validators = None + _wb = None _contracts_created = {} @@ -85,11 +86,12 @@ def ExecutionCompleted(self, engine, success, 
error=None): super(StateMachine, self).ExecutionCompleted(engine, success, error) def Commit(self): - self._accounts.Commit(self._wb, False) - self._validators.Commit(self._wb, False) - self._assets.Commit(self._wb, False) - self._contracts.Commit(self._wb, False) - self._storages.Commit(self._wb, False) + if self._wb is not None: + self._accounts.Commit(self._wb, False) + self._validators.Commit(self._wb, False) + self._assets.Commit(self._wb, False) + self._contracts.Commit(self._wb, False) + self._storages.Commit(self._wb, False) def ResetState(self): self._accounts.Reset() @@ -360,8 +362,8 @@ def Contract_Destroy(self, engine): if contract.HasStorage: - for pair in self._storages.Find(hash.ToBytes()): - self._storages.Remove(pair.Key) + for key in self._storages.Find(hash.ToBytes()): + self._storages.Remove(key) self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_DESTROY, ContractParameter(ContractParameterType.InteropInterface, contract), diff --git a/neo/SmartContract/tests/test_smart_contract.py b/neo/SmartContract/tests/test_smart_contract.py index 6bb273eb8..7cf32fdc0 100644 --- a/neo/SmartContract/tests/test_smart_contract.py +++ b/neo/SmartContract/tests/test_smart_contract.py @@ -45,6 +45,8 @@ def test_a_run_sc(self): block = Helper.AsSerializableWithType(hexdata, 'neo.Core.Block.Block') - result = self._blockchain.Persist(block) + result = False + with BlockchainFixtureTestCase.MPPersist(): + result = self._blockchain.Persist(block) self.assertTrue(result) diff --git a/neo/SmartContract/tests/test_smart_contract2.py b/neo/SmartContract/tests/test_smart_contract2.py index d86164eef..47033c11e 100644 --- a/neo/SmartContract/tests/test_smart_contract2.py +++ b/neo/SmartContract/tests/test_smart_contract2.py @@ -23,6 +23,8 @@ def test_b_invocation(self): self.assertIsNotNone(json) - result = self._blockchain.Persist(block) + result = False + with BlockchainFixtureTestCase.MPPersist(): + result = self._blockchain.Persist(block) 
self.assertTrue(result) diff --git a/neo/SmartContract/tests/test_smart_contract3.py b/neo/SmartContract/tests/test_smart_contract3.py index f49665385..f90034fb7 100644 --- a/neo/SmartContract/tests/test_smart_contract3.py +++ b/neo/SmartContract/tests/test_smart_contract3.py @@ -3,8 +3,8 @@ from neo.Utils.BlockchainFixtureTestCase import BlockchainFixtureTestCase from neo.IO.Helper import Helper from neo.Core.Blockchain import Blockchain -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix +from neo.Storage.Interface.DBInterface import DBInterface +from neo.Storage.Common.DBPrefix import DBPrefix from neo.Core.State.ContractState import ContractState from neo.Core.State.AssetState import AssetState from neo.Core.UInt256 import UInt256 @@ -31,11 +31,13 @@ def test_contract_create_block(self): self.assertEqual(block.Index, self.contract_block_index) - result = Blockchain.Default().Persist(block) + result = False + with BlockchainFixtureTestCase.MPPersist(): + result = Blockchain.Default().Persist(block) self.assertTrue(result) - contracts = DBCollection(Blockchain.Default()._db, DBPrefix.ST_Contract, ContractState) + contracts = DBInterface(Blockchain.Default()._db, DBPrefix.ST_Contract, ContractState) contract_added = contracts.TryGet(self.contract_hash) @@ -72,12 +74,14 @@ def test_invocation_assetcreate_block(self): self.assertEqual(block.Index, self.asset_create_index) - result = Blockchain.Default().Persist(block) + result = False + with BlockchainFixtureTestCase.MPPersist(): + result = Blockchain.Default().Persist(block) self.assertTrue(result) # now the asset that was created should be there - assets = DBCollection(Blockchain.Default()._db, DBPrefix.ST_Asset, AssetState) + assets = DBInterface(Blockchain.Default()._db, DBPrefix.ST_Asset, AssetState) newasset = assets.TryGet(self.asset_create_id) diff --git a/neo/Storage/Common/DebugStorage.py 
b/neo/Storage/Common/DebugStorage.py index 7fc8b105b..4e33ed6cc 100644 --- a/neo/Storage/Common/DebugStorage.py +++ b/neo/Storage/Common/DebugStorage.py @@ -15,9 +15,11 @@ def db(self): return self._db def reset(self): - for key in self._db.openIter( - DBProperties(prefix=DBPrefix.ST_Storage, include_value=False)): - self._db.delete(key) + with self._db.openIter( + DBProperties(prefix=DBPrefix.ST_Storage, + include_value=False)) as iterator: + for key in iterator: + self._db.delete(key) def __init__(self): diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index 7222382ae..73d7de497 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -8,7 +8,8 @@ Configuration is done in neo.Settings.DATABASE_PROPS dict. """ -logger = log_manager.getLogger('DBFactory') +# logger = log_manager.getLogger('DBFactory') +logger = log_manager.getLogger() BC_CONST = 'blockchain' NOTIF_CONST = 'notification' @@ -23,22 +24,27 @@ _debug_db_instance = None -def getBlockchainDB(): - logger.info('Creating BlockchainDB') +def getBlockchainDB(path=None): + + if not path: + path = DATABASE_PROPS[BC_CONST]['path'] + BlockchainDB = _dbFactory(BC_CONST, DATABASE_PROPS[BC_CONST]) - _blockchain_db_instance = BlockchainDB(DATABASE_PROPS[BC_CONST]['path']) + _blockchain_db_instance = BlockchainDB(path) return _blockchain_db_instance -def getNotificationDB(): - logger.info('Creating NotificationDB') +def getNotificationDB(path=None): + + if not path: + path = DATABASE_PROPS[NOTIF_CONST]['path'] + NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) - _notif_db_instance = NotificationDB(DATABASE_PROPS[NOTIF_CONST]['path']) + _notif_db_instance = NotificationDB(path) return _notif_db_instance def getDebugStorageDB(): - logger.info('Creating DebugDB') DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['path']) return 
_debug_db_instance @@ -66,6 +72,3 @@ def _dbFactory(dbType, properties): properties['backend'].title() + 'DBImpl' + dbType.title(), (AbstractDBInterface,), attributes) - - # NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) - # _notif_db_instance = NotificationDB(DATABASE_PROPS[NOTIF_CONST]) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py index a64dc97cc..92b5c7060 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -1,5 +1,5 @@ import plyvel -import threading +# import threading from contextlib import contextmanager @@ -9,11 +9,12 @@ from neo.logging import log_manager -logger = log_manager.getLogger('LevelDB') +logger = log_manager.getLogger() """Document me""" _init_method = '_db_init' +_prefix_init_method = '_prefix_db_init' _path = None @@ -21,11 +22,9 @@ _iter = None -_snapshot = None - _batch = None -_lock = threading.Lock() +# _lock = threading.Lock() @property @@ -33,11 +32,18 @@ def Path(self): return self._path +def _prefix_db_init(self, _prefixdb): + try: + self._db = _prefixdb + except Exception as e: + raise Exception("leveldb exception [ %s ]" % e) + + def _db_init(self, path): try: self._path = path self._db = plyvel.DB(path, create_if_missing=True) - logger.info("Created Blockchain DB at %s " % self._path) + logger.info("Created DB at %s " % self._path) except Exception as e: raise Exception("leveldb exception [ %s ]" % e) @@ -53,8 +59,7 @@ def writeBatch(self, batch: dict): def get(self, key, default=None): - _value = self._db.get(key, default) - return _value + return self._db.get(key, default) def delete(self, key): @@ -69,20 +74,22 @@ def deleteBatch(self, batch: dict): def cloneDatabase(self, clone_db): db_snapshot = self.createSnapshot() - for key, value in db_snapshot.iterator(prefix=DBPrefix.ST_Storage, include_value=True): - clone_db.write(key, value) + 
with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: + for key, value in iterator: + clone_db.write(key, value) return clone_db def createSnapshot(self): - self._snapshot = self._db.snapshot() - return self._snapshot + # check if snapshot db has to be closed + from neo.Storage.Implementation.LevelDB.PrefixedDBFactory import internalDBFactory + + SnapshotDB = internalDBFactory('Snapshot') + return SnapshotDB(self._db.snapshot()) @contextmanager def openIter(self, properties): - # TODO start implement start and end - self._iter = self._db.iterator( prefix=properties.prefix, include_value=properties.include_value, @@ -94,14 +101,18 @@ def openIter(self, properties): @contextmanager def getBatch(self): - with _lock: - self._batch = self._db.write_batch() - yield self._batch - self._batch.write() + self._batch = self._db.write_batch() + yield self._batch + self._batch.write() def getPrefixedDB(self, prefix): - return self._db.prefixed_db(prefix) + + # check if prefix db has to be closed + from neo.Storage.Implementation.LevelDB.PrefixedDBFactory import internalDBFactory + + PrefixedDB = internalDBFactory('Prefixed') + return PrefixedDB(self._db.prefixed_db(prefix)) def closeDB(self): diff --git a/neo/Storage/Implementation/LevelDB/test_leveldb.py b/neo/Storage/Implementation/LevelDB/test_leveldb.py deleted file mode 100644 index 98fb747c3..000000000 --- a/neo/Storage/Implementation/LevelDB/test_leveldb.py +++ /dev/null @@ -1,8 +0,0 @@ -from neo.Storage.Implementation.LevelDB.LevelDBImpl import LevelDBImpl - -db = LevelDBImpl(b"/home/enzensbe/source/neo-python/neo/Storage/Implementation/LevelDB/1") -print(db) -db2 = LevelDBImpl(b"/home/enzensbe/source/neo-python/neo/Storage/Implementation/LevelDB/2") -print(db2) -db3 = LevelDBImpl(b"/home/enzensbe/source/neo-python/neo/Storage/Implementation/LevelDB/2") -print(db3) diff --git a/neo/Storage/Implementation/test2.py b/neo/Storage/Implementation/test2.py deleted file mode 
100644 index 68b2e08d4..000000000 --- a/neo/Storage/Implementation/test2.py +++ /dev/null @@ -1,4 +0,0 @@ -from DBFactory import getBlockchainDB - -db2 = getBlockchainDB() -print('db2 ', db2) diff --git a/neo/Storage/Implementation/test_db_factory.py b/neo/Storage/Implementation/test_db_factory.py deleted file mode 100644 index 0c3c6cf2a..000000000 --- a/neo/Storage/Implementation/test_db_factory.py +++ /dev/null @@ -1,14 +0,0 @@ -from DBFactory import getBlockchainDB -import test2 - - -_db = getBlockchainDB() - -# _db.write(b'1', b'first') - -ret_1 = test2.db2.get(b'1') -ret_default = _db.get(b'2', b'default_value') - -print(ret_1, ret_default) -assert ret_1 == b'first' -assert ret_default == b'default_value' diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index ba3ae320b..ec7c6c282 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -2,7 +2,7 @@ from neo.SmartContract.Iterable import EnumeratorBase from neo.logging import log_manager -logger = log_manager.getLogger('DBInterface') +logger = log_manager.getLogger() class DBProperties: @@ -11,7 +11,7 @@ class DBProperties: include_value = None include_key = None - def __init__(self, prefix, include_value=True, include_key=True): + def __init__(self, prefix=None, include_value=True, include_key=True): self.prefix = prefix self.include_value = include_value self.include_key = include_key @@ -30,6 +30,8 @@ class DBInterface(object): Changed = [] Deleted = [] + DebugStorage = False + _built_keys = False _ChangedResetState = None @@ -75,7 +77,7 @@ def Keys(self): return self.Collection.keys() def _BuildCollectionKeys(self): - with self.DB.openIter(DBProperties(self.Prefix)) as iterator: + with self.DB.openIter(DBProperties(self.Prefix, include_value=False)) as iterator: for key in iterator: key = key[1:] if key not in self.Collection.keys(): @@ -229,17 +231,14 @@ def TryFind(self, key_prefix): def Find(self, key_prefix): key_prefix = 
self.Prefix + key_prefix res = {} - with self.DB.openIter(DBProperties(self.Prefix, include_value=True)) as iterator: + with self.DB.openIter(DBProperties(key_prefix, include_value=True)) as iterator: for key, val in iterator: - logger.info('in iterator %s %s ' % key, val) # we want the storage item, not the raw bytes item = self.ClassRef.DeserializeFromDB(binascii.unhexlify(val)).Value # also here we need to skip the 1 byte storage prefix res_key = key[21:] res[res_key] = item - logger.info('finished') - return res def Destroy(self): diff --git a/neo/Storage/Interface/test_db_interface.py b/neo/Storage/Interface/test_db_interface.py deleted file mode 100644 index 3940df7cd..000000000 --- a/neo/Storage/Interface/test_db_interface.py +++ /dev/null @@ -1 +0,0 @@ -from diff --git a/neo/Utils/BlockchainFixtureTestCase.py b/neo/Utils/BlockchainFixtureTestCase.py index 6904a8267..515ce7d70 100644 --- a/neo/Utils/BlockchainFixtureTestCase.py +++ b/neo/Utils/BlockchainFixtureTestCase.py @@ -3,17 +3,192 @@ import shutil import os import neo +from contextlib import contextmanager from neo.Utils.NeoTestCase import NeoTestCase -from neo.Implementations.Blockchains.LevelDB.TestLevelDBBlockchain import TestLevelDBBlockchain +from neo.Storage.Implementation.DBFactory import getBlockchainDB +from neo.Storage.Interface.DBInterface import DBInterface +from neo.Storage.Common.DBPrefix import DBPrefix +from neo.SmartContract.ApplicationEngine import ApplicationEngine from neo.Core.Blockchain import Blockchain +from neo.Core.Fixed8 import Fixed8 from neo.Implementations.Notifications.NotificationDB import NotificationDB from neo.Settings import settings from neo.logging import log_manager from neo.Network.NodeLeader import NodeLeader +from neo.Storage.Common.CachedScriptTable import CachedScriptTable +from neo.Core.State.CoinState import CoinState +from neo.Core.State.AccountState import AccountState +from neo.Core.State.UnspentCoinState import UnspentCoinState +from 
neo.Core.State.SpentCoinState import SpentCoinState +from neo.Core.State.AssetState import AssetState +from neo.Core.State.ContractState import ContractPropertyState +from neo.Core.State.ContractState import ContractState +from neo.Core.State.StorageItem import StorageItem +from neo.Core.State.ValidatorState import ValidatorState +from neo.Core.TX.Transaction import Transaction, TransactionType logger = log_manager.getLogger() +def MonkeyPatchPersist(self, block): + + accounts = DBInterface(self._db, DBPrefix.ST_Account, AccountState) + unspentcoins = DBInterface(self._db, DBPrefix.ST_Coin, UnspentCoinState) + spentcoins = DBInterface(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) + assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) + validators = DBInterface(self._db, DBPrefix.ST_Validator, ValidatorState) + contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(self._db, DBPrefix.ST_Storage, StorageItem) + + amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + block.TotalFees().value + amount_sysfee_bytes = amount_sysfee.to_bytes(8, 'little') + + with self._db.getBatch() as wb: + for tx in block.Transactions: + + unspentcoinstate = UnspentCoinState.FromTXOutputsConfirmed(tx.outputs) + unspentcoins.Add(tx.Hash.ToBytes(), unspentcoinstate) + + # go through all the accounts in the tx outputs + for output in tx.outputs: + account = accounts.GetAndChange(output.AddressBytes, AccountState(output.ScriptHash)) + + if account.HasBalance(output.AssetId): + account.AddToBalance(output.AssetId, output.Value) + else: + account.SetBalanceFor(output.AssetId, output.Value) + + # go through all tx inputs + unique_tx_input_hashes = [] + for input in tx.inputs: + if input.PrevHash not in unique_tx_input_hashes: + unique_tx_input_hashes.append(input.PrevHash) + + for txhash in unique_tx_input_hashes: + prevTx, height = self.GetTransaction(txhash.ToBytes()) + coin_refs_by_hash = [coinref for coinref in tx.inputs if + 
coinref.PrevHash.ToBytes() == txhash.ToBytes()] + for input in coin_refs_by_hash: + + uns = unspentcoins.GetAndChange(input.PrevHash.ToBytes()) + uns.OrEqValueForItemAt(input.PrevIndex, CoinState.Spent) + + if prevTx.outputs[input.PrevIndex].AssetId.ToBytes() == Blockchain.SystemShare().Hash.ToBytes(): + sc = spentcoins.GetAndChange(input.PrevHash.ToBytes(), + SpentCoinState(input.PrevHash, height, [])) + sc.Items.append(SpentCoinItem(input.PrevIndex, block.Index)) + + output = prevTx.outputs[input.PrevIndex] + acct = accounts.GetAndChange(prevTx.outputs[input.PrevIndex].AddressBytes, + AccountState(output.ScriptHash)) + assetid = prevTx.outputs[input.PrevIndex].AssetId + acct.SubtractFromBalance(assetid, prevTx.outputs[input.PrevIndex].Value) + + # do a whole lotta stuff with tx here... + if tx.Type == TransactionType.RegisterTransaction: + + asset = AssetState(tx.Hash, tx.AssetType, tx.Name, tx.Amount, + Fixed8(0), tx.Precision, Fixed8(0), Fixed8(0), UInt160(data=bytearray(20)), + tx.Owner, tx.Admin, tx.Admin, block.Index + 2 * 2000000, False) + + assets.Add(tx.Hash.ToBytes(), asset) + + elif tx.Type == TransactionType.IssueTransaction: + + txresults = [result for result in tx.GetTransactionResults() if result.Amount.value < 0] + for result in txresults: + asset = assets.GetAndChange(result.AssetId.ToBytes()) + asset.Available = asset.Available - result.Amount + + elif tx.Type == TransactionType.ClaimTransaction: + + for input in tx.Claims: + + sc = spentcoins.TryGet(input.PrevHash.ToBytes()) + if sc and sc.HasIndex(input.PrevIndex): + sc.DeleteIndex(input.PrevIndex) + spentcoins.GetAndChange(input.PrevHash.ToBytes()) + + elif tx.Type == TransactionType.EnrollmentTransaction: + + validator = validators.GetAndChange(tx.PublicKey, ValidatorState(pub_key=tx.PublicKey)) + # logger.info("VALIDATOR %s " % validator.ToJson()) + + elif tx.Type == TransactionType.StateTransaction: + # @TODO Implement persistence for State Descriptors + pass + + elif tx.Type == 
TransactionType.PublishTransaction: + + contract = ContractState(tx.Code, tx.NeedStorage, tx.Name, tx.CodeVersion, + tx.Author, tx.Email, tx.Description) + + contracts.GetAndChange(tx.Code.ScriptHash().ToBytes(), contract) + + elif tx.Type == TransactionType.InvocationTransaction: + return ApplicationEngine.Run(tx.Script, tx, False, tx.Gas, True, wb) + + +def MonkeyPatchRun(script, container=None, exit_on_error=False, gas=Fixed8.Zero(), test_mode=True, wb=None): + + from neo.Core.Blockchain import Blockchain + from neo.SmartContract.StateMachine import StateMachine + from neo.EventHub import events + from neo.SmartContract import TriggerType + + bc = Blockchain.Default() + + accounts = DBInterface(bc._db, DBPrefix.ST_Account, AccountState) + assets = DBInterface(bc._db, DBPrefix.ST_Asset, AssetState) + validators = DBInterface(bc._db, DBPrefix.ST_Validator, ValidatorState) + contracts = DBInterface(bc._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(bc._db, DBPrefix.ST_Storage, StorageItem) + + script_table = CachedScriptTable(contracts) + service = StateMachine(accounts, validators, assets, contracts, storages, wb, bc) + + engine = ApplicationEngine( + trigger_type=TriggerType.Application, + container=container, + table=script_table, + service=service, + gas=gas, + testMode=test_mode + ) + + try: + _script = binascii.unhexlify(script) + except Exception as e: + _script = script + + engine.LoadScript(_script) + + # normally, this function does not return true/false + # for testing purposes, we try to execute and if an exception is raised + # we will return false, otherwise if success return true + + # this is different than the 'success' bool returned by engine.Execute() + # the 'success' bool returned by engine.Execute() is a value indicating + # wether or not the invocation was successful, and if so, we then commit + # the changes made by the contract to the database + try: + success = engine.Execute() + # service.ExecutionCompleted(engine, 
success) + if test_mode: + return True + else: + engine.testMode = True + service.ExecutionCompleted(engine, success) + except Exception as e: + # service.ExecutionCompleted(self, False, e) + if test_mode: + return False + else: + engine.testMode = True + service.ExecutionCompleted(engine, False, e) + return engine + + class BlockchainFixtureTestCase(NeoTestCase): FIXTURE_REMOTE_LOC = 'https://s3.us-east-2.amazonaws.com/cityofzion/fixtures/fixtures_v8.tar.gz' FIXTURE_FILENAME = os.path.join(settings.DATA_DIR_PATH, 'Chains/fixtures_v8.tar.gz') @@ -26,6 +201,25 @@ class BlockchainFixtureTestCase(NeoTestCase): wallets_folder = os.path.dirname(neo.__file__) + '/Utils/fixtures/' + _old_persist = None + _old_run = None + + @classmethod + @contextmanager + def MPPersist(cls): + # monkey patch Persist for test: + cls._old_persist = Blockchain.Persist + Blockchain.Persist = MonkeyPatchPersist + + # monkey patch Run for test: + cls._old_run = ApplicationEngine.Run + ApplicationEngine.Run = MonkeyPatchRun + + yield + + Blockchain.Persist = cls._old_persist + ApplicationEngine.Run = cls._old_run + @classmethod def leveldb_testpath(cls): return 'Override Me!' 
@@ -66,7 +260,9 @@ def setUpClass(cls): settings.setup_unittest_net() - cls._blockchain = TestLevelDBBlockchain(path=cls.leveldb_testpath(), skip_version_check=True) + cls._blockchain = Blockchain(getBlockchainDB(path=cls.leveldb_testpath()), skip_version_check=True) + + cls._blockchain.UT = True Blockchain.RegisterBlockchain(cls._blockchain) # setup Notification DB @@ -101,6 +297,7 @@ def tearDownClass(cls): # tear down Blockchain DB Blockchain.Default().DeregisterBlockchain() if cls._blockchain is not None: + cls._blockchain.UT = False cls._blockchain.Dispose() shutil.rmtree(cls.leveldb_testpath()) diff --git a/neo/Utils/VerifiableTestCase.py b/neo/Utils/VerifiableTestCase.py index 6b7fc5b1d..2b4f80a55 100644 --- a/neo/Utils/VerifiableTestCase.py +++ b/neo/Utils/VerifiableTestCase.py @@ -1,5 +1,5 @@ from neo.Core.Blockchain import Blockchain -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain +from neo.Storage.Implementation.DBFactory import getBlockchainDB import shutil from neo.Utils.NeoTestCase import NeoTestCase from neo.Settings import settings @@ -18,7 +18,7 @@ def setUpClass(cls): Blockchain.DeregisterBlockchain() os.makedirs(cls.LEVELDB_TESTPATH, exist_ok=True) - cls._blockchain = LevelDBBlockchain(path=cls.LEVELDB_TESTPATH, skip_version_check=True) + cls._blockchain = Blockchain(getBlockchainDB(cls.LEVELDB_TESTPATH), skip_version_check=True) Blockchain.RegisterBlockchain(cls._blockchain) @classmethod From 77711077465f9bf2d5a521b67c44b5fc73398579 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Thu, 25 Apr 2019 05:09:44 +0000 Subject: [PATCH 07/23] fix, added locking, removed unused code --- neo/Implementations/Notifications/NotificationDB.py | 8 -------- .../Implementation/LevelDB/LevelDBClassMethods.py | 11 ++++++----- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/neo/Implementations/Notifications/NotificationDB.py b/neo/Implementations/Notifications/NotificationDB.py index c439e0d74..2cab55190 100644 
--- a/neo/Implementations/Notifications/NotificationDB.py +++ b/neo/Implementations/Notifications/NotificationDB.py @@ -194,8 +194,6 @@ def on_persist_completed(self, block): token_db = self.db.getPrefixedDB(NotificationPrefix.PREFIX_TOKEN) - token_write_batch = token_db.write_batch() - with token_db.getBatch() as token_write_batch: for token_event in self._new_contracts_to_write: try: @@ -207,12 +205,6 @@ def on_persist_completed(self, block): self._new_contracts_to_write = [] - results = [] - with block_db.openIter(DBProperties(prefix=block_bytes, include_key=False)) as iterator: - for val in iterator: - event = SmartContractEvent.FromByteArray(val) - results.append(event) - def get_by_block(self, block_number): """ Look up notifications for a block diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py index 92b5c7060..5169a340e 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -1,5 +1,5 @@ import plyvel -# import threading +import threading from contextlib import contextmanager @@ -24,7 +24,7 @@ _batch = None -# _lock = threading.Lock() +_lock = threading.RLock() @property @@ -101,9 +101,10 @@ def openIter(self, properties): @contextmanager def getBatch(self): - self._batch = self._db.write_batch() - yield self._batch - self._batch.write() + with _lock: + self._batch = self._db.write_batch() + yield self._batch + self._batch.write() def getPrefixedDB(self, prefix): From c912b2be80fc80887e73f403cb9584aa090e993e Mon Sep 17 00:00:00 2001 From: Merl111 Date: Mon, 29 Apr 2019 19:50:06 +0000 Subject: [PATCH 08/23] fixed typo --- neo/Core/Blockchain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index 0e50d20b7..ec021c0a0 100644 --- a/neo/Core/Blockchain.py +++ b/neo/Core/Blockchain.py @@ -149,7 +149,7 @@ def __init__(self, db, 
skip_version_check=False, skip_header_check=False): logger.info("Current stored headers empty, re-creating from stored blocks...") headers = [] logger.info('Recreate headers') - with self._db.openIter(DBProperties(DBProperties(DBPrefix.DATA_Block))) as iterator: + with self._db.openIter(DBProperties(DBPrefix.DATA_Block)) as iterator: for key, value in iterator: dbhash = bytearray(value)[8:] headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0)) From 94017a8eb29c2bdc44b8048007de88e414fc5adb Mon Sep 17 00:00:00 2001 From: Merl111 Date: Mon, 29 Apr 2019 21:25:39 +0000 Subject: [PATCH 09/23] moved AbstractDBInterface to AbstractDBImplementation to avoid confusion --- .../AbstractDBImplementation.py} | 26 ++++++++++++------- 1 file changed, 17 insertions(+), 9 deletions(-) rename neo/Storage/{Interface/AbstractDBInterface.py => Implementation/AbstractDBImplementation.py} (55%) diff --git a/neo/Storage/Interface/AbstractDBInterface.py b/neo/Storage/Implementation/AbstractDBImplementation.py similarity index 55% rename from neo/Storage/Interface/AbstractDBInterface.py rename to neo/Storage/Implementation/AbstractDBImplementation.py index bef764baf..61dc4dc00 100644 --- a/neo/Storage/Interface/AbstractDBInterface.py +++ b/neo/Storage/Implementation/AbstractDBImplementation.py @@ -1,14 +1,26 @@ from abc import ABC, abstractmethod +""" +Description: + Abstract class used to ensure the mandatory methods are overwritten + in everxy new database implementation. +Usage: + The dynamically generated class coming from the database factory inherits + from the abstract class, means the generated class cannot be used if not + all methods defined in this class are overwritten. + If this class is extended, make sure you extend also all other database + implementations. 
-class AbstractDBInterface(ABC): + For a more detailed information on the methods and how to implement a new + database backend check out: + neo.Storage.Implementation.LevelDB.LevelDBClassMethods +""" - @abstractmethod - def write(self, key, value): - raise NotImplementedError + +class AbstractDBImplementation(ABC): @abstractmethod - def writeBatch(self, batch): + def write(self, key, value): raise NotImplementedError @abstractmethod @@ -19,10 +31,6 @@ def get(self, key): def delete(self, key): raise NotImplementedError - @abstractmethod - def deleteBatch(self, batch: dict): - raise NotImplementedError - @abstractmethod def cloneDatabase(self, clone_db): raise NotImplementedError From 7952819118827df48e2e3963ca6b836718142788 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Mon, 29 Apr 2019 21:26:12 +0000 Subject: [PATCH 10/23] Documented new classes and methods to povide information on how to implement a new database backend --- neo/Storage/Implementation/DBFactory.py | 34 +++++-- .../LevelDB/LevelDBClassMethods.py | 98 ++++++++++++++++--- .../LevelDB/PrefixedDBFactory.py | 4 +- neo/Storage/Interface/DBInterface.py | 9 +- 4 files changed, 117 insertions(+), 28 deletions(-) diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index 73d7de497..efa785ba5 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -1,14 +1,20 @@ -from neo.Storage.Interface.AbstractDBInterface import AbstractDBInterface +from neo.Storage.Implementation.AbstractDBImplementation import ( + AbstractDBImplementation +) from neo.Settings import settings from neo.logging import log_manager -"""Module is used to access the different databases. -Import the module and use the getters to access the different databases. +""" +Database factory module + +Note: Module is used to access the different database implementations. +Import the module and use the getters to access the different databases. 
Configuration is done in neo.Settings.DATABASE_PROPS dict. +Each getter returns an instance of the database. + """ -# logger = log_manager.getLogger('DBFactory') logger = log_manager.getLogger() BC_CONST = 'blockchain' @@ -25,6 +31,9 @@ def getBlockchainDB(path=None): + """ + Returns a database instance used with the blockchain class. + """ if not path: path = DATABASE_PROPS[BC_CONST]['path'] @@ -35,6 +44,9 @@ def getBlockchainDB(path=None): def getNotificationDB(path=None): + """ + Returns a database instance used with the notification class. + """ if not path: path = DATABASE_PROPS[NOTIF_CONST]['path'] @@ -45,6 +57,10 @@ def getNotificationDB(path=None): def getDebugStorageDB(): + """ + Returns a database instance used with the debug storage class. + """ + DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['path']) return _debug_db_instance @@ -54,21 +70,21 @@ def _dbFactory(dbType, properties): if properties['backend'] == 'leveldb': - # import what's needed + """ + Module implements the methods used by the dynamically generated class. 
+ """ import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions methods = [x for x in dir(functions) if not x.startswith('__')] - # build attributes dict + # build the dict containing all the attributes (methods + members) attributes = {methods[i]: getattr( functions, methods[i]) for i in range(0, len(methods))} # add __init__ method attributes['__init__'] = attributes.pop(functions._init_method) - # print(attributes) - return type( properties['backend'].title() + 'DBImpl' + dbType.title(), - (AbstractDBInterface,), + (AbstractDBImplementation,), attributes) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py index 5169a340e..61ac176c7 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -11,17 +11,32 @@ logger = log_manager.getLogger() -"""Document me""" - +""" +Description: + All the methods are used within the dynamicallz generated class from the + database factory. +Usage: + For a new database implementation all methods defined in + AbstractDBImplementation have to be implemented. +""" + +# name of the init method for a generic database class _init_method = '_db_init' + +# name of the init method for a prefixed database class _prefix_init_method = '_prefix_db_init' +# path where the data files are stored _path = None +# either the real database class, in this case a LevelDB instance, +# or a prefixed db or snapshot _db = None +# iterator instance created within openIter(self, properties) _iter = None +# batch instance to perform batch operations on the database _batch = None _lock = threading.RLock() @@ -33,6 +48,12 @@ def Path(self): def _prefix_db_init(self, _prefixdb): + """ + init method used within the internalDBFactory, slightly different from the + init method as we don't have to open a new database but store a snapshot or + a prefixed db. 
+ """ + try: self._db = _prefixdb except Exception as e: @@ -40,6 +61,10 @@ def _prefix_db_init(self, _prefixdb): def _db_init(self, path): + """ + init method used within the DBFactory, opens a new or existing database. + """ + try: self._path = path self._db = plyvel.DB(path, create_if_missing=True) @@ -52,12 +77,6 @@ def write(self, key, value): self._db.put(key, value) -def writeBatch(self, batch: dict): - with self._db.write_batch() as wb: - for key, value in batch.items(): - wb.put(key, value) - - def get(self, key, default=None): return self._db.get(key, default) @@ -66,22 +85,29 @@ def delete(self, key): self._db.delete(key) -def deleteBatch(self, batch: dict): - with self._db.write_batch() as wb: - for key in batch: - wb.delete(key) - - def cloneDatabase(self, clone_db): + """ + Clones the current database into "clone_db" + """ + db_snapshot = self.createSnapshot() - with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + include_value=True)) as iterator: for key, value in iterator: clone_db.write(key, value) return clone_db def createSnapshot(self): - # check if snapshot db has to be closed + """ + Creates a snapshot of the current database, used for DebugStorage and + NotificationDB. To keep the snapshot compatible to the current design it's + created through a factory which returns basically the same class we use + for the real database and all the methods that can be used on the real db + can also be used on the snapshot. + """ + + # TODO check if snapshot db has to be closed from neo.Storage.Implementation.LevelDB.PrefixedDBFactory import internalDBFactory SnapshotDB = internalDBFactory('Snapshot') @@ -90,6 +116,20 @@ def createSnapshot(self): @contextmanager def openIter(self, properties): + """ + Opens an iterator within a context manager. 
+ + Usage: + Due to the fact that a context manager is used the returned iterator has + to be used within a with block. It's then closed after it returnes from + the scope it's used in. + Example from cloneDatabase method: + + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + include_value=True)) as iterator: + + """ + self._iter = self._db.iterator( prefix=properties.prefix, include_value=properties.include_value, @@ -101,6 +141,21 @@ def openIter(self, properties): @contextmanager def getBatch(self): + """ + Yields a batch instance which can be used to perform atomic updates on the + database. + As it's used within a context, getBatch has to called within a with block. + + Example: + with self._db.getBatch() as batch: + batch.put(b'key1', b'value') + batch.put(b'key2', b'value') + batch.delete(b'key2') + + If a database backend is implemented that does not support batches you have + to implement an object that mimics a batches the behaviour. + """ + with _lock: self._batch = self._db.write_batch() yield self._batch @@ -108,6 +163,17 @@ def getBatch(self): def getPrefixedDB(self, prefix): + """ + Returns a prefixed db instance, which is basically the same as a real + database but exists only in memory and contains only the data with the + given prefix. + + A prefixed db is currently only used for the NotificationDB. + + If a database backend is implemented that does not support a prefixed + database you have to implement a data structure/class that mimics its + behaviour. 
+ """ # check if prefix db has to be closed from neo.Storage.Implementation.LevelDB.PrefixedDBFactory import internalDBFactory diff --git a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py index 603c4dba6..cfdaae206 100644 --- a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py +++ b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py @@ -1,4 +1,4 @@ -from neo.Storage.Interface.AbstractDBInterface import AbstractDBInterface +from neo.Storage.Implementation.AbstractDBImplementation import AbstractDBImplementation def internalDBFactory(classPrefix): @@ -17,5 +17,5 @@ def internalDBFactory(classPrefix): return type( classPrefix.title() + 'DBImpl', - (AbstractDBInterface,), + (AbstractDBImplementation,), attributes) diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index ec7c6c282..f8d7e4f8b 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -7,10 +7,17 @@ class DBProperties: + """ + Description: + Used to pass the relevant information to + no.Storage.Implementation.[BACKEND].[BACKEND]DBClassMethods.openIter + """ + prefix = None include_value = None include_key = None - + + def __init__(self, prefix=None, include_value=True, include_key=True): self.prefix = prefix self.include_value = include_value From 631228f33950baaa8b0a70db75a2ecaf0b9dce50 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Fri, 10 May 2019 17:55:26 +0200 Subject: [PATCH 11/23] extended doc strings, made linter happy --- neo/Storage/Implementation/DBFactory.py | 56 +++++++++++--- .../LevelDB/LevelDBClassMethods.py | 75 +++++++++++++++---- .../LevelDB/PrefixedDBFactory.py | 14 ++++ neo/Storage/Interface/DBInterface.py | 13 +++- 4 files changed, 129 insertions(+), 29 deletions(-) diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index efa785ba5..afe278c72 100644 --- 
a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -5,13 +5,25 @@ from neo.logging import log_manager -""" -Database factory module +"""Database factory module -Note: Module is used to access the different database implementations. -Import the module and use the getters to access the different databases. -Configuration is done in neo.Settings.DATABASE_PROPS dict. -Each getter returns an instance of the database. +Note: + Module is used to access the different database implementations. + Import the module and use the getters to access the different databases. + Configuration is done in neo.Settings.DATABASE_PROPS dict. + Each getter returns an instance of the database. + +Constants: + BC_CONST (str): defines the key within the settings.database_properties() + to retrieve the blockchain db properties. + + NOTIF_CONST (str): defines the key within the settings.database_properties() + to retrieve the notification db properties. + + DEBUG_CONST (str): defines the key within the settings.database_properties() + to retrieve the debug storage properties. + + DATABASE_PROPS (dict): The properties defined within the settings module. """ @@ -23,16 +35,16 @@ DATABASE_PROPS = settings.database_properties() -_blockchain_db_instance = None - -_notif_db_instance = None - -_debug_db_instance = None - def getBlockchainDB(path=None): """ Returns a database instance used with the blockchain class. + + Args: + path (str, optional): the full path to the blockchain database directory. + + Returns: + _blockchain_db_instance (object): A new blockchain database instance. """ if not path: @@ -46,6 +58,12 @@ def getBlockchainDB(path=None): def getNotificationDB(path=None): """ Returns a database instance used with the notification class. + + Args: + path (str, optional): the full path to the notification database directory. + + Returns: + _notif_db_instance (object): A new notification database instance. 
""" if not path: @@ -59,6 +77,9 @@ def getNotificationDB(path=None): def getDebugStorageDB(): """ Returns a database instance used with the debug storage class. + + Returns: + _debug_db_instance (object): A new debug storage instance. """ DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) @@ -67,6 +88,17 @@ def getDebugStorageDB(): def _dbFactory(dbType, properties): + """ Method to generate a database class. + + Args: + dbType (str): Type of the database (Blockchain, Notification, Debug). + properties (dict): The properties defined within the settings module. + + Returns: + New database class to instantiate a new database. + + + """ if properties['backend'] == 'leveldb': diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py index 61ac176c7..8391ee67a 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py @@ -13,10 +13,10 @@ """ Description: - All the methods are used within the dynamicallz generated class from the + All the methods are used within the dynamically generated class from the database factory. Usage: - For a new database implementation all methods defined in + For a new database implementation all methods defined in AbstractDBImplementation have to be implemented. """ @@ -44,14 +44,19 @@ @property def Path(self): + """str: full path to the database""" return self._path def _prefix_db_init(self, _prefixdb): """ - init method used within the internalDBFactory, slightly different from the + Init method used within the internalDBFactory, slightly different from the init method as we don't have to open a new database but store a snapshot or a prefixed db. + + Args: + _prefixdb (object): the prefixed db instance + """ try: @@ -61,8 +66,17 @@ def _prefix_db_init(self, _prefixdb): def _db_init(self, path): + """ - init method used within the DBFactory, opens a new or existing database. 
+ Init method used within the DBFactory, opens a new or existing database. + + Args: + path (str): full path to the database directory. + + Attributes: + path (str): full path to the database directory. + _db (object): the database instance + """ try: @@ -87,11 +101,18 @@ def delete(self, key): def cloneDatabase(self, clone_db): """ - Clones the current database into "clone_db" + Clones the current database into "clone_db" + + Args: + clone_db (object): the instance of the database to clone to. + + Returns: + clone_db (object): returns a cloned db instance + """ db_snapshot = self.createSnapshot() - with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: for key, value in iterator: clone_db.write(key, value) @@ -100,11 +121,18 @@ def cloneDatabase(self, clone_db): def createSnapshot(self): """ - Creates a snapshot of the current database, used for DebugStorage and - NotificationDB. To keep the snapshot compatible to the current design it's - created through a factory which returns basically the same class we use + Creates a snapshot of the current database, used for DebugStorage and + NotificationDB. To keep the snapshot compatible to the current design it's + created through a factory which returns basically the same class we use for the real database and all the methods that can be used on the real db can also be used on the snapshot. + + Args: + None + + Returns: + SnapshotDB (object): a new instance of a snapshot DB. + """ # TODO check if snapshot db has to be closed @@ -121,13 +149,20 @@ def openIter(self, properties): Usage: Due to the fact that a context manager is used the returned iterator has - to be used within a with block. It's then closed after it returnes from + to be used within a with block. It's then closed after it returnes from the scope it's used in. 
Example from cloneDatabase method: - with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: + Args: + properties (DBProperties): object containing the different properties + used to open an iterator. + + Yields: + _iter (LevelDB iterator): yields an iterator which is closed after the + with block is done. """ self._iter = self._db.iterator( @@ -142,7 +177,7 @@ def openIter(self, properties): @contextmanager def getBatch(self): """ - Yields a batch instance which can be used to perform atomic updates on the + Yields a batch instance which can be used to perform atomic updates on the database. As it's used within a context, getBatch has to called within a with block. @@ -154,6 +189,14 @@ def getBatch(self): If a database backend is implemented that does not support batches you have to implement an object that mimics a batches the behaviour. + + Args: + None + + Yields: + _batch (LevelDB batch): yields a new batch object which is processed after + the with block is done. + """ with _lock: @@ -171,8 +214,14 @@ def getPrefixedDB(self, prefix): A prefixed db is currently only used for the NotificationDB. If a database backend is implemented that does not support a prefixed - database you have to implement a data structure/class that mimics its + database you have to implement a data structure/class that mimics its behaviour. + + Args: + prefix (str): the prefix used to create a new prefixed DB. + + Returns: + PrefixedDB (object): a new instance of a prefixed DB. 
""" # check if prefix db has to be closed diff --git a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py index cfdaae206..da60fc065 100644 --- a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py +++ b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py @@ -2,6 +2,20 @@ def internalDBFactory(classPrefix): + """ Internal database factory method used for prefixed dbs and snapshots. + + The returned class is very similar the the class returned in + neo.Storage.Implementation.DBFactory._dbFactory but has a different + __init__ method. + + Args: + classPrefix (str): Prefix to name the class appropiately. + + Returns: + classPrefix + DBImpl (object): dynamically generated class used for + PrefixedDBs and SnapshotDBs. + + """ # import what's needed import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index f8d7e4f8b..5b932f3fa 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -7,17 +7,22 @@ class DBProperties: - """ - Description: + """ Store the different DB properties Used to pass the relevant information to no.Storage.Implementation.[BACKEND].[BACKEND]DBClassMethods.openIter + + Args: + prefix (str, optional): Prefix to search for. + include_value (bool, optional): include vale used for creating an + iterator. + include_key (bool, optional): include key used for creating an + iterator. 
""" prefix = None include_value = None include_key = None - - + def __init__(self, prefix=None, include_value=True, include_key=True): self.prefix = prefix self.include_value = include_value From 5ac7e58de19b065eaa80d10746362e24d0a70ba6 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Tue, 14 May 2019 22:18:26 +0200 Subject: [PATCH 12/23] fixed typos, changed DBFactory --- neo/Settings.py | 6 +- .../AbstractDBImplementation.py | 12 +- neo/Storage/Implementation/DBFactory.py | 49 +--- .../LevelDB/LevelDBClassMethods.py | 235 ------------------ .../Implementation/LevelDB/LevelDBImpl.py | 190 ++++++++++++++ .../Implementation/LevelDB/LevelDBSnapshot.py | 20 ++ .../LevelDB/PrefixedDBFactory.py | 35 --- .../Implementation/LevelDB/__init__.py | 0 neo/Storage/Interface/DBInterface.py | 10 +- 9 files changed, 228 insertions(+), 329 deletions(-) delete mode 100644 neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py create mode 100644 neo/Storage/Implementation/LevelDB/LevelDBImpl.py create mode 100644 neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py delete mode 100644 neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py create mode 100644 neo/Storage/Implementation/LevelDB/__init__.py diff --git a/neo/Settings.py b/neo/Settings.py index bac8597ed..f49103a2d 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -146,13 +146,13 @@ def debug_storage_leveldb_path(self): def database_properties(self): return {'blockchain': {'path': self.chain_leveldb_path, 'skip_version_check': False, - 'backend': 'leveldb'}, + 'backend': 'neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl'}, 'notification': {'path': self.notification_leveldb_path, - 'backend': 'leveldb'}, + 'backend': 'neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl'}, 'debug': {'path': self.debug_storage_leveldb_path, - 'backend': 'leveldb'} + 'backend': 'neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl'} } # Helpers diff --git a/neo/Storage/Implementation/AbstractDBImplementation.py 
b/neo/Storage/Implementation/AbstractDBImplementation.py index 61dc4dc00..a9fdb03e4 100644 --- a/neo/Storage/Implementation/AbstractDBImplementation.py +++ b/neo/Storage/Implementation/AbstractDBImplementation.py @@ -1,17 +1,17 @@ from abc import ABC, abstractmethod """ Description: - Abstract class used to ensure the mandatory methods are overwritten - in everxy new database implementation. + Alternative backends can be added by implementing the required methods + of this abstract class. Usage: The dynamically generated class coming from the database factory inherits - from the abstract class, means the generated class cannot be used if not + from the abstract class, means the generated class cannot be used if not all methods defined in this class are overwritten. If this class is extended, make sure you extend also all other database implementations. - For a more detailed information on the methods and how to implement a new + For a more detailed information on the methods and how to implement a new database backend check out: neo.Storage.Implementation.LevelDB.LevelDBClassMethods """ @@ -48,9 +48,9 @@ def getBatch(self): raise NotImplementedError @abstractmethod - def closeDB(self): + def getPrefixedDB(self, prefix): raise NotImplementedError @abstractmethod - def getPrefixedDB(self, prefix): + def closeDB(self): raise NotImplementedError diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index afe278c72..c9fabfebd 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -1,9 +1,5 @@ -from neo.Storage.Implementation.AbstractDBImplementation import ( - AbstractDBImplementation -) from neo.Settings import settings -from neo.logging import log_manager - +from neo.Utils.plugin import load_class_from_path """Database factory module @@ -27,8 +23,6 @@ """ -logger = log_manager.getLogger() - BC_CONST = 'blockchain' NOTIF_CONST = 'notification' DEBUG_CONST = 'debug' @@ -50,7 +44,7 @@ 
def getBlockchainDB(path=None): if not path: path = DATABASE_PROPS[BC_CONST]['path'] - BlockchainDB = _dbFactory(BC_CONST, DATABASE_PROPS[BC_CONST]) + BlockchainDB = load_class_from_path(DATABASE_PROPS[BC_CONST]['backend']) _blockchain_db_instance = BlockchainDB(path) return _blockchain_db_instance @@ -69,7 +63,7 @@ def getNotificationDB(path=None): if not path: path = DATABASE_PROPS[NOTIF_CONST]['path'] - NotificationDB = _dbFactory(NOTIF_CONST, DATABASE_PROPS[NOTIF_CONST]) + NotificationDB = load_class_from_path(DATABASE_PROPS[NOTIF_CONST]['backend']) _notif_db_instance = NotificationDB(path) return _notif_db_instance @@ -82,41 +76,6 @@ def getDebugStorageDB(): _debug_db_instance (object): A new debug storage instance. """ - DebugStorageDB = _dbFactory(DEBUG_CONST, DATABASE_PROPS[DEBUG_CONST]) + DebugStorageDB = load_class_from_path(DATABASE_PROPS[DEBUG_CONST]['backend']) _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['path']) return _debug_db_instance - - -def _dbFactory(dbType, properties): - """ Method to generate a database class. - - Args: - dbType (str): Type of the database (Blockchain, Notification, Debug). - properties (dict): The properties defined within the settings module. - - Returns: - New database class to instantiate a new database. - - - """ - - if properties['backend'] == 'leveldb': - - """ - Module implements the methods used by the dynamically generated class. 
- """ - import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions - - methods = [x for x in dir(functions) if not x.startswith('__')] - - # build the dict containing all the attributes (methods + members) - attributes = {methods[i]: getattr( - functions, methods[i]) for i in range(0, len(methods))} - - # add __init__ method - attributes['__init__'] = attributes.pop(functions._init_method) - - return type( - properties['backend'].title() + 'DBImpl' + dbType.title(), - (AbstractDBImplementation,), - attributes) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py b/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py deleted file mode 100644 index 8391ee67a..000000000 --- a/neo/Storage/Implementation/LevelDB/LevelDBClassMethods.py +++ /dev/null @@ -1,235 +0,0 @@ -import plyvel -import threading - -from contextlib import contextmanager - -from neo.Core.Blockchain import Blockchain -from neo.Storage.Common.DBPrefix import DBPrefix -from neo.Storage.Interface.DBInterface import DBProperties -from neo.logging import log_manager - - -logger = log_manager.getLogger() - -""" -Description: - All the methods are used within the dynamically generated class from the - database factory. -Usage: - For a new database implementation all methods defined in - AbstractDBImplementation have to be implemented. 
-""" - -# name of the init method for a generic database class -_init_method = '_db_init' - -# name of the init method for a prefixed database class -_prefix_init_method = '_prefix_db_init' - -# path where the data files are stored -_path = None - -# either the real database class, in this case a LevelDB instance, -# or a prefixed db or snapshot -_db = None - -# iterator instance created within openIter(self, properties) -_iter = None - -# batch instance to perform batch operations on the database -_batch = None - -_lock = threading.RLock() - - -@property -def Path(self): - """str: full path to the database""" - return self._path - - -def _prefix_db_init(self, _prefixdb): - """ - Init method used within the internalDBFactory, slightly different from the - init method as we don't have to open a new database but store a snapshot or - a prefixed db. - - Args: - _prefixdb (object): the prefixed db instance - - """ - - try: - self._db = _prefixdb - except Exception as e: - raise Exception("leveldb exception [ %s ]" % e) - - -def _db_init(self, path): - - """ - Init method used within the DBFactory, opens a new or existing database. - - Args: - path (str): full path to the database directory. - - Attributes: - path (str): full path to the database directory. - _db (object): the database instance - - """ - - try: - self._path = path - self._db = plyvel.DB(path, create_if_missing=True) - logger.info("Created DB at %s " % self._path) - except Exception as e: - raise Exception("leveldb exception [ %s ]" % e) - - -def write(self, key, value): - self._db.put(key, value) - - -def get(self, key, default=None): - return self._db.get(key, default) - - -def delete(self, key): - self._db.delete(key) - - -def cloneDatabase(self, clone_db): - """ - Clones the current database into "clone_db" - - Args: - clone_db (object): the instance of the database to clone to. 
- - Returns: - clone_db (object): returns a cloned db instance - - """ - - db_snapshot = self.createSnapshot() - with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, - include_value=True)) as iterator: - for key, value in iterator: - clone_db.write(key, value) - return clone_db - - -def createSnapshot(self): - """ - Creates a snapshot of the current database, used for DebugStorage and - NotificationDB. To keep the snapshot compatible to the current design it's - created through a factory which returns basically the same class we use - for the real database and all the methods that can be used on the real db - can also be used on the snapshot. - - Args: - None - - Returns: - SnapshotDB (object): a new instance of a snapshot DB. - - """ - - # TODO check if snapshot db has to be closed - from neo.Storage.Implementation.LevelDB.PrefixedDBFactory import internalDBFactory - - SnapshotDB = internalDBFactory('Snapshot') - return SnapshotDB(self._db.snapshot()) - - -@contextmanager -def openIter(self, properties): - """ - Opens an iterator within a context manager. - - Usage: - Due to the fact that a context manager is used the returned iterator has - to be used within a with block. It's then closed after it returnes from - the scope it's used in. - Example from cloneDatabase method: - - with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, - include_value=True)) as iterator: - - Args: - properties (DBProperties): object containing the different properties - used to open an iterator. - - Yields: - _iter (LevelDB iterator): yields an iterator which is closed after the - with block is done. - """ - - self._iter = self._db.iterator( - prefix=properties.prefix, - include_value=properties.include_value, - include_key=properties.include_key) - - yield self._iter - self._iter.close() - - -@contextmanager -def getBatch(self): - """ - Yields a batch instance which can be used to perform atomic updates on the - database. 
- As it's used within a context, getBatch has to called within a with block. - - Example: - with self._db.getBatch() as batch: - batch.put(b'key1', b'value') - batch.put(b'key2', b'value') - batch.delete(b'key2') - - If a database backend is implemented that does not support batches you have - to implement an object that mimics a batches the behaviour. - - Args: - None - - Yields: - _batch (LevelDB batch): yields a new batch object which is processed after - the with block is done. - - """ - - with _lock: - self._batch = self._db.write_batch() - yield self._batch - self._batch.write() - - -def getPrefixedDB(self, prefix): - """ - Returns a prefixed db instance, which is basically the same as a real - database but exists only in memory and contains only the data with the - given prefix. - - A prefixed db is currently only used for the NotificationDB. - - If a database backend is implemented that does not support a prefixed - database you have to implement a data structure/class that mimics its - behaviour. - - Args: - prefix (str): the prefix used to create a new prefixed DB. - - Returns: - PrefixedDB (object): a new instance of a prefixed DB. 
- """ - - # check if prefix db has to be closed - from neo.Storage.Implementation.LevelDB.PrefixedDBFactory import internalDBFactory - - PrefixedDB = internalDBFactory('Prefixed') - return PrefixedDB(self._db.prefixed_db(prefix)) - - -def closeDB(self): - self._db.close() diff --git a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py new file mode 100644 index 000000000..9aa69920a --- /dev/null +++ b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py @@ -0,0 +1,190 @@ +import plyvel +import threading + +from contextlib import contextmanager + +from neo.Storage.Implementation.AbstractDBImplementation import ( + AbstractDBImplementation +) +from neo.Utils.plugin import load_class_from_path +from neo.Storage.Common.DBPrefix import DBPrefix +from neo.Storage.Interface.DBInterface import DBProperties +from neo.logging import log_manager + + +logger = log_manager.getLogger() + +""" +Description: + Backend implementation for the LevelDB database. + It overrides all methods from the `AbstractDBImplementation` class. + The database factory (`DBFactory`) uses these methods to dynamically + generate a conforming database instance for internal usage. + +Usage: + For a new database implementation all methods defined in + AbstractDBImplementation have to be implemented. + +""" + + +class LevelDBImpl(AbstractDBImplementation): + + # the instance of the database + _db = None + + _lock = threading.RLock() + + def __init__(self, path): + + """ + Init method used within the DBFactory, opens a new or existing database. + + Args: + path (str): full path to the database directory. + + Attributes: + path (str): full path to the database directory. 
+ _db (object): the database instance + + """ + + try: + self._path = path + self._db = plyvel.DB(path, create_if_missing=True) + logger.info("Created DB at %s " % self._path) + except Exception as e: + raise Exception("leveldb exception [ %s ]" % e) + + def write(self, key, value): + self._db.put(key, value) + + def get(self, key, default=None): + return self._db.get(key, default) + + def delete(self, key): + self._db.delete(key) + + def cloneDatabase(self, clone_db): + """ + Clones the current database into "clone_db" + + Args: + clone_db (object): the instance of the database to clone to. + + Returns: + clone_db (object): returns a cloned db instance + + """ + + db_snapshot = self.createSnapshot() + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + include_value=True)) as iterator: + for key, value in iterator: + clone_db.write(key, value) + return clone_db + + def createSnapshot(self): + """ + Creates a snapshot of the current database, used for DebugStorage and + NotificationDB. To keep the snapshot compatible to the current design it's + created through a factory which returns basically the same class we use + for the real database and all the methods that can be used on the real db + can also be used on the snapshot. + + Args: + None + + Returns: + SnapshotDB (object): a new instance of a snapshot DB. + + """ + + SnapshotDB = load_class_from_path('neo.Storage.Implementation.LevelDB.LevelDBSnapshot.LevelDBSnapshot') + return SnapshotDB(self._db.snapshot()) + + @contextmanager + def openIter(self, properties): + """ + Opens an iterator within a context manager. + + Usage: + Due to the fact that a context manager is used the returned iterator has + to be used within a with block. It's then closed after it returnes from + the scope it's used in. 
+ Example from cloneDatabase method: + + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + include_value=True)) as iterator: + + Args: + properties (DBProperties): object containing the different properties + used to open an iterator. + + Yields: + _iter (LevelDB iterator): yields an iterator which is closed after the + with block is done. + """ + + _iter = self._db.iterator( + prefix=properties.prefix, + include_value=properties.include_value, + include_key=properties.include_key) + + yield _iter + _iter.close() + + @contextmanager + def getBatch(self): + """ + Yields a batch instance which can be used to perform atomic updates on the + database. + As it's used within a context, getBatch has to called within a with block. + + Example: + with self._db.getBatch() as batch: + batch.put(b'key1', b'value') + batch.put(b'key2', b'value') + batch.delete(b'key2') + + If a database backend is implemented that does not support batches you have + to implement an object that mimics batch behaviour. + + Args: + None + + Yields: + _batch (LevelDB batch): yields a new batch object which is processed after + the with block is done. + + """ + + with self._lock: + _batch = self._db.write_batch() + yield _batch + _batch.write() + + def getPrefixedDB(self, prefix): + """ + Returns a prefixed db instance, which is basically the same as a real + database but exists only in memory and contains only the data with the + given prefix. + + A prefixed db is currently only used for the NotificationDB. + + If a database backend is implemented that does not support a prefixed + database you have to implement a data structure/class that mimics its + behaviour. + + Args: + prefix (str): the prefix used to create a new prefixed DB. + + Returns: + PrefixedDB (object): a new instance of a prefixed DB. 
+ """ + + PrefixedDB = load_class_from_path('neo.Storage.Implementation.LevelDB.LevelDBSnapshot.LevelDBSnapshot') + return PrefixedDB(self._db.prefixed_db(prefix)) + + def closeDB(self): + self._db.close() diff --git a/neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py b/neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py new file mode 100644 index 000000000..67ddc2450 --- /dev/null +++ b/neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py @@ -0,0 +1,20 @@ +from neo.Storage.Implementation.LevelDB.LevelDBImpl import LevelDBImpl + + +class LevelDBSnapshot(LevelDBImpl): + + def __init__(self, _prefixdb): + """ + Init method used with a snapshotDB or prefixedDB, slightly different from the + init method as we don't have to open a new database but store a snapshot or + a prefixed db. + + Args: + _prefixdb (object): the prefixed db instance + + """ + + try: + self._db = _prefixdb + except Exception as e: + raise Exception("leveldb exception [ %s ]" % e) diff --git a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py b/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py deleted file mode 100644 index da60fc065..000000000 --- a/neo/Storage/Implementation/LevelDB/PrefixedDBFactory.py +++ /dev/null @@ -1,35 +0,0 @@ -from neo.Storage.Implementation.AbstractDBImplementation import AbstractDBImplementation - - -def internalDBFactory(classPrefix): - """ Internal database factory method used for prefixed dbs and snapshots. - - The returned class is very similar the the class returned in - neo.Storage.Implementation.DBFactory._dbFactory but has a different - __init__ method. - - Args: - classPrefix (str): Prefix to name the class appropiately. - - Returns: - classPrefix + DBImpl (object): dynamically generated class used for - PrefixedDBs and SnapshotDBs. 
- - """ - - # import what's needed - import neo.Storage.Implementation.LevelDB.LevelDBClassMethods as functions - - methods = [x for x in dir(functions) if not x.startswith('__')] - - # build attributes dict - attributes = {methods[i]: getattr( - functions, methods[i]) for i in range(0, len(methods))} - - # add __init__ method - attributes['__init__'] = attributes.pop(functions._prefix_init_method) - - return type( - classPrefix.title() + 'DBImpl', - (AbstractDBImplementation,), - attributes) diff --git a/neo/Storage/Implementation/LevelDB/__init__.py b/neo/Storage/Implementation/LevelDB/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index 5b932f3fa..a20011fc8 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -9,14 +9,14 @@ class DBProperties: """ Store the different DB properties Used to pass the relevant information to - no.Storage.Implementation.[BACKEND].[BACKEND]DBClassMethods.openIter + neo.Storage.Implementation.[BACKEND].[BACKEND]DBClassMethods.openIter Args: prefix (str, optional): Prefix to search for. - include_value (bool, optional): include vale used for creating an - iterator. - include_key (bool, optional): include key used for creating an - iterator. + include_value (bool, optional): used to define if the value should be + included when opening an iterator. + include_key (bool, optional): used to define if the key value shoud be + included when opening an iterator. 
""" prefix = None From 11e644be1ac2c429974c2c88b48a7bf999348776 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Wed, 15 May 2019 22:05:14 +0200 Subject: [PATCH 13/23] extended docstrings + fixed minor bugs --- neo/Core/Blockchain.py | 4 +- neo/Settings.py | 21 +-- .../AbstractDBImplementation.py | 152 +++++++++++++++++- neo/Storage/Implementation/DBFactory.py | 12 +- .../Implementation/LevelDB/LevelDBImpl.py | 100 ------------ neo/Storage/Interface/DBInterface.py | 14 +- neo/data/protocol.coz.json | 14 +- neo/data/protocol.mainnet.json | 14 +- neo/data/protocol.privnet.json | 14 +- neo/data/protocol.testnet.json | 16 +- neo/data/protocol.unittest-net.json | 14 +- 11 files changed, 237 insertions(+), 138 deletions(-) diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index ec021c0a0..e40f169a1 100644 --- a/neo/Core/Blockchain.py +++ b/neo/Core/Blockchain.py @@ -189,8 +189,8 @@ def __init__(self, db, skip_version_check=False, skip_header_check=False): if res == 'continue': with self._db.getBatch() as wb: - with self._db.openIter() as iterator: - for key, value in iterator: + with self._db.openIter(DBProperties(include_value=False)) as iterator: + for key in iterator: wb.delete(key) self.Persist(Blockchain.GenesisBlock()) diff --git a/neo/Settings.py b/neo/Settings.py index f49103a2d..160d516d7 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -118,6 +118,8 @@ class SettingsHolder: DEFAULT_RPC_SERVER = 'neo.api.JSONRPC.JsonRpcApi.JsonRpcApi' DEFAULT_REST_SERVER = 'neo.api.REST.RestApi.RestApi' + DATABASE_PROPS = None + # Logging settings log_level = None log_smart_contract_events = False @@ -144,16 +146,7 @@ def debug_storage_leveldb_path(self): return os.path.abspath(os.path.join(self.DATA_DIR_PATH, self.DEBUG_STORAGE_PATH)) def database_properties(self): - return {'blockchain': {'path': self.chain_leveldb_path, - 'skip_version_check': False, - 'backend': 'neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl'}, - - 'notification': {'path': 
self.notification_leveldb_path, - 'backend': 'neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl'}, - - 'debug': {'path': self.debug_storage_leveldb_path, - 'backend': 'neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl'} - } + return self.DATABASE_PROPS # Helpers @property @@ -218,7 +211,6 @@ def get_config_and_warn(key, default, abort=False): self.REGISTER_TX_FEE = fees['RegisterTransaction'] config = data['ApplicationConfiguration'] - self.LEVELDB_PATH = config['DataDirectoryPath'] self.RPC_PORT = int(config['RPCPort']) self.NODE_PORT = int(config['NodePort']) self.WS_PORT = config['WsPort'] @@ -230,13 +222,16 @@ def get_config_and_warn(key, default, abort=False): Helper.ADDRESS_VERSION = self.ADDRESS_VERSION self.USE_DEBUG_STORAGE = config.get('DebugStorage', False) - self.DEBUG_STORAGE_PATH = config.get('DebugStoragePath', 'Chains/debugstorage') - self.NOTIFICATION_DB_PATH = config.get('NotificationDataPath', 'Chains/notification_data') self.SERVICE_ENABLED = config.get('ServiceEnabled', self.ACCEPT_INCOMING_PEERS) self.COMPILER_NEP_8 = config.get('CompilerNep8', False) self.REST_SERVER = config.get('RestServer', self.DEFAULT_REST_SERVER) self.RPC_SERVER = config.get('RPCServer', self.DEFAULT_RPC_SERVER) + self.DATABASE_PROPS = config.get('Database') + self.LEVELDB_PATH = self.DATABASE_PROPS['Blockchain']['DataDirectoryPath'] + self.NOTIFICATION_DB_PATH = self.DATABASE_PROPS['Notification']['NotificationDataPath'] + self.DEBUG_STORAGE_PATH = self.DATABASE_PROPS['DebugStorage']['DebugStoragePath'] + def setup_mainnet(self): """ Load settings from the mainnet JSON config file """ self.setup(FILENAME_SETTINGS_MAINNET) diff --git a/neo/Storage/Implementation/AbstractDBImplementation.py b/neo/Storage/Implementation/AbstractDBImplementation.py index a9fdb03e4..dfc618a9a 100644 --- a/neo/Storage/Implementation/AbstractDBImplementation.py +++ b/neo/Storage/Implementation/AbstractDBImplementation.py @@ -11,46 +11,194 @@ If this class is extended, make 
sure you extend also all other database implementations. - For a more detailed information on the methods and how to implement a new + For a more detailed description on the methods and how to implement a new database backend check out: - neo.Storage.Implementation.LevelDB.LevelDBClassMethods + neo.Storage.Implementation.LevelDB.LevelDBImpl """ class AbstractDBImplementation(ABC): + @abstractmethod + def __init__(self, path): + """ + Init method used within the DBFactory, opens a new or existing database. + + Args: + path (str): full path to the database directory. + + Attributes: + path (str): full path to the database directory. + _db (object): the database instance + + """ + raise NotImplementedError + @abstractmethod def write(self, key, value): + """ + Writes the given key/value pair to the database. + + Args: + key (bytearray): has to be a prefixed bytearray, for prefixes check + neo.Storage.Common.DBPrefix + + value (bytearray): the value as bytearray + + Returns: + None + """ raise NotImplementedError @abstractmethod def get(self, key): + """ + Retrieves the value based on the given key from the database. + + Args: + key (bytearray): has to be a prefixed bytearray, for prefixes check + neo.Storage.Common.DBPrefix + + Returns: + bytearray + """ raise NotImplementedError @abstractmethod def delete(self, key): + """ + Deletes a row from the database. + + Args: + key (bytearray): has to be a prefixed bytearray, for prefixes check + neo.Storage.Common.DBPrefix + + Returns: + None + """ raise NotImplementedError @abstractmethod def cloneDatabase(self, clone_db): + """ + Clones the current database into "clone_db" + + Args: + clone_db (object): the instance of the database to clone to. + + Returns: + clone_db (object): returns a cloned db instance + + """ + raise NotImplementedError @abstractmethod def createSnapshot(self): + """ + Creates a snapshot of the current database, used for DebugStorage and + NotificationDB. 
To keep the snapshot compatible to the current design it's + created through a factory which returns basically the same class we use + for the real database and all the methods that can be used on the real db + can also be used on the snapshot. + + Args: + None + + Returns: + SnapshotDB (object): a new instance of a snapshot DB. + + """ + raise NotImplementedError @abstractmethod def openIter(self, properties): + """ + Opens an iterator within a context manager. + + Usage: + Due to the fact that a context manager is used the returned iterator has + to be used within a with block. It's then closed after it returnes from + the scope it's used in. + Example from cloneDatabase method: + + with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, + include_value=True)) as iterator: + + Args: + properties (DBProperties): object containing the different properties + used to open an iterator. + + Yields: + _iter (LevelDB iterator): yields an iterator which is closed after the + with block is done. + """ + raise NotImplementedError @abstractmethod def getBatch(self): + """ + Yields a batch instance which can be used to perform atomic updates on the + database. + As it's used within a context, getBatch has to called within a with block. + + Example: + with self._db.getBatch() as batch: + batch.put(b'key1', b'value') + batch.put(b'key2', b'value') + batch.delete(b'key2') + + If a database backend is implemented that does not support batches you have + to implement an object that mimics batch behaviour. + + Args: + None + + Yields: + _batch (LevelDB batch): yields a new batch object which is processed after + the with block is done. + LevelDB batch: + Methods: + put(key bytearry:, value: bytearray) -> None + Stores the given key/value pair to the database. + + delete(key: bytearray) -> None + Deletes the given key from the database after + the batch was persisted. + + clear(None) -> None + Removes all entries from a batch. 
+ + + """ + raise NotImplementedError @abstractmethod def getPrefixedDB(self, prefix): + """ + Returns a prefixed db instance, which is basically the same as a real + database but exists only in memory and contains only the data with the + given prefix. + + A prefixed db is currently only used for the NotificationDB. + + If a database backend is implemented that does not support a prefixed + database you have to implement a data structure/class that mimics its + behaviour. + + Args: + prefix (str): the prefix used to create a new prefixed DB. + + Returns: + PrefixedDB (object): a new instance of a prefixed DB. + """ + raise NotImplementedError @abstractmethod def closeDB(self): + """Shuts down the database instance.""" raise NotImplementedError diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index c9fabfebd..623a50c0f 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -23,9 +23,9 @@ """ -BC_CONST = 'blockchain' -NOTIF_CONST = 'notification' -DEBUG_CONST = 'debug' +BC_CONST = 'Blockchain' +NOTIF_CONST = 'Notification' +DEBUG_CONST = 'DebugStorage' DATABASE_PROPS = settings.database_properties() @@ -42,7 +42,7 @@ def getBlockchainDB(path=None): """ if not path: - path = DATABASE_PROPS[BC_CONST]['path'] + path = DATABASE_PROPS[BC_CONST]['DataDirectoryPath'] BlockchainDB = load_class_from_path(DATABASE_PROPS[BC_CONST]['backend']) _blockchain_db_instance = BlockchainDB(path) @@ -61,7 +61,7 @@ def getNotificationDB(path=None): """ if not path: - path = DATABASE_PROPS[NOTIF_CONST]['path'] + path = DATABASE_PROPS[NOTIF_CONST]['NotificationDataPath'] NotificationDB = load_class_from_path(DATABASE_PROPS[NOTIF_CONST]['backend']) _notif_db_instance = NotificationDB(path) @@ -77,5 +77,5 @@ def getDebugStorageDB(): """ DebugStorageDB = load_class_from_path(DATABASE_PROPS[DEBUG_CONST]['backend']) - _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['path']) + 
_debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['DebugStoragePath']) return _debug_db_instance diff --git a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py index 9aa69920a..0859ce01c 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py @@ -36,19 +36,6 @@ class LevelDBImpl(AbstractDBImplementation): _lock = threading.RLock() def __init__(self, path): - - """ - Init method used within the DBFactory, opens a new or existing database. - - Args: - path (str): full path to the database directory. - - Attributes: - path (str): full path to the database directory. - _db (object): the database instance - - """ - try: self._path = path self._db = plyvel.DB(path, create_if_missing=True) @@ -66,17 +53,6 @@ def delete(self, key): self._db.delete(key) def cloneDatabase(self, clone_db): - """ - Clones the current database into "clone_db" - - Args: - clone_db (object): the instance of the database to clone to. - - Returns: - clone_db (object): returns a cloned db instance - - """ - db_snapshot = self.createSnapshot() with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: @@ -85,47 +61,11 @@ def cloneDatabase(self, clone_db): return clone_db def createSnapshot(self): - """ - Creates a snapshot of the current database, used for DebugStorage and - NotificationDB. To keep the snapshot compatible to the current design it's - created through a factory which returns basically the same class we use - for the real database and all the methods that can be used on the real db - can also be used on the snapshot. - - Args: - None - - Returns: - SnapshotDB (object): a new instance of a snapshot DB. 
- - """ - SnapshotDB = load_class_from_path('neo.Storage.Implementation.LevelDB.LevelDBSnapshot.LevelDBSnapshot') return SnapshotDB(self._db.snapshot()) @contextmanager def openIter(self, properties): - """ - Opens an iterator within a context manager. - - Usage: - Due to the fact that a context manager is used the returned iterator has - to be used within a with block. It's then closed after it returnes from - the scope it's used in. - Example from cloneDatabase method: - - with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, - include_value=True)) as iterator: - - Args: - properties (DBProperties): object containing the different properties - used to open an iterator. - - Yields: - _iter (LevelDB iterator): yields an iterator which is closed after the - with block is done. - """ - _iter = self._db.iterator( prefix=properties.prefix, include_value=properties.include_value, @@ -136,28 +76,6 @@ def openIter(self, properties): @contextmanager def getBatch(self): - """ - Yields a batch instance which can be used to perform atomic updates on the - database. - As it's used within a context, getBatch has to called within a with block. - - Example: - with self._db.getBatch() as batch: - batch.put(b'key1', b'value') - batch.put(b'key2', b'value') - batch.delete(b'key2') - - If a database backend is implemented that does not support batches you have - to implement an object that mimics batch behaviour. - - Args: - None - - Yields: - _batch (LevelDB batch): yields a new batch object which is processed after - the with block is done. - - """ with self._lock: _batch = self._db.write_batch() @@ -165,24 +83,6 @@ def getBatch(self): _batch.write() def getPrefixedDB(self, prefix): - """ - Returns a prefixed db instance, which is basically the same as a real - database but exists only in memory and contains only the data with the - given prefix. - - A prefixed db is currently only used for the NotificationDB. 
- - If a database backend is implemented that does not support a prefixed - database you have to implement a data structure/class that mimics its - behaviour. - - Args: - prefix (str): the prefix used to create a new prefixed DB. - - Returns: - PrefixedDB (object): a new instance of a prefixed DB. - """ - PrefixedDB = load_class_from_path('neo.Storage.Implementation.LevelDB.LevelDBSnapshot.LevelDBSnapshot') return PrefixedDB(self._db.prefixed_db(prefix)) diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index a20011fc8..21fc67286 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -6,17 +6,15 @@ class DBProperties: - - """ Store the different DB properties - Used to pass the relevant information to - neo.Storage.Implementation.[BACKEND].[BACKEND]DBClassMethods.openIter + """ + Container for holding DB properties + Used to pass the configuration options to the DB iterator initializer. + neo.Storage.Implementation.[BACKEND].[BACKEND]Impl.openIter Args: prefix (str, optional): Prefix to search for. - include_value (bool, optional): used to define if the value should be - included when opening an iterator. - include_key (bool, optional): used to define if the key value shoud be - included when opening an iterator. 
+ include_value (bool, optional): whether to include keys in the returned data + include_key (bool, optional): whether to include values in the returned data """ prefix = None diff --git a/neo/data/protocol.coz.json b/neo/data/protocol.coz.json index 1c597ff3d..c834ee262 100644 --- a/neo/data/protocol.coz.json +++ b/neo/data/protocol.coz.json @@ -43,6 +43,18 @@ "DebugStorage": 1, "CompilerNep8": false, "BootstrapName": "fauxnet", - "BootstrapFiles": "this_does_not_exist_for_this_network" + "BootstrapFiles": "this_does_not_exist_for_this_network", + "Database": { + "Blockchain": { + "skip_version_check": false, + "DataDirectoryPath": "Chains/SC234", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "Notification": { + "NotificationDataPath": "Chains/Test_Notif", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "DebugStorage": { + "DebugStoragePath": "Chains/debugstorage", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"} + } } } diff --git a/neo/data/protocol.mainnet.json b/neo/data/protocol.mainnet.json index d5e769b60..e031e806b 100644 --- a/neo/data/protocol.mainnet.json +++ b/neo/data/protocol.mainnet.json @@ -59,6 +59,18 @@ "AcceptIncomingPeers": false, "CompilerNep8": false, "RestServer": "neo.api.REST.RestApi.RestApi", - "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi" + "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi", + "Database": { + "Blockchain": { + "skip_version_check": false, + "DataDirectoryPath": "Chains/SC234", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "Notification": { + "NotificationDataPath": "Chains/Test_Notif", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "DebugStorage": { + "DebugStoragePath": "Chains/debugstorage", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"} + } } } diff --git a/neo/data/protocol.privnet.json b/neo/data/protocol.privnet.json index 
82fb358b7..e56ec3bc2 100644 --- a/neo/data/protocol.privnet.json +++ b/neo/data/protocol.privnet.json @@ -41,6 +41,18 @@ "AcceptIncomingPeers": false, "CompilerNep8": false, "RestServer": "neo.api.REST.RestApi.RestApi", - "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi" + "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi", + "Database": { + "Blockchain": { + "skip_version_check": false, + "DataDirectoryPath": "Chains/SC234", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "Notification": { + "NotificationDataPath": "Chains/Test_Notif", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "DebugStorage": { + "DebugStoragePath": "Chains/debugstorage", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"} + } } } diff --git a/neo/data/protocol.testnet.json b/neo/data/protocol.testnet.json index 2e1ced108..fe2e9087d 100644 --- a/neo/data/protocol.testnet.json +++ b/neo/data/protocol.testnet.json @@ -1,7 +1,5 @@ { "ApplicationConfiguration": { - "DataDirectoryPath": "Chains/SC234", - "NotificationDataPath": "Chains/Test_Notif", "RPCPort": 20332, "NodePort": 20333, "SslCert": "", @@ -16,7 +14,19 @@ "AcceptIncomingPeers": false, "CompilerNep8": false, "RestServer": "neo.api.REST.RestApi.RestApi", - "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi" + "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi", + "Database": { + "Blockchain": { + "skip_version_check": false, + "DataDirectoryPath": "Chains/SC234", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "Notification": { + "NotificationDataPath": "Chains/Test_Notif", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "DebugStorage": { + "DebugStoragePath": "Chains/debugstorage", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"} + } }, "ProtocolConfiguration": { "AddressVersion": 23, diff --git a/neo/data/protocol.unittest-net.json 
b/neo/data/protocol.unittest-net.json index 83671ee38..56d22b5e2 100644 --- a/neo/data/protocol.unittest-net.json +++ b/neo/data/protocol.unittest-net.json @@ -43,6 +43,18 @@ "BootstrapName": "fauxnet", "BootstrapFiles": "this_does_not_exist_for_this_network", "RestServer": "neo.api.REST.RestApi.RestApi", - "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi" + "RPCServer": "neo.api.JSONRPC.JsonRpcApi.JsonRpcApi", + "Database": { + "Blockchain": { + "skip_version_check": false, + "DataDirectoryPath": "Chains/SC234", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "Notification": { + "NotificationDataPath": "Chains/Test_Notif", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, + "DebugStorage": { + "DebugStoragePath": "Chains/debugstorage", + "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"} + } } } From 22372613e9f9ef8d543ddc419a93895bd75baf3a Mon Sep 17 00:00:00 2001 From: Merl111 Date: Thu, 16 May 2019 11:59:15 +0200 Subject: [PATCH 14/23] added unit test for db factory, default values for db settings --- neo/Settings.py | 9 +- .../Implementation/test/test_db_factory.py | 124 ++++++++++++++++++ 2 files changed, 130 insertions(+), 3 deletions(-) create mode 100644 neo/Storage/Implementation/test/test_db_factory.py diff --git a/neo/Settings.py b/neo/Settings.py index 160d516d7..0fc3e91ce 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -119,6 +119,9 @@ class SettingsHolder: DEFAULT_REST_SERVER = 'neo.api.REST.RestApi.RestApi' DATABASE_PROPS = None + BC_DB = None + NOTIF_DB = None + DEBUG_DB = None # Logging settings log_level = None @@ -228,9 +231,9 @@ def get_config_and_warn(key, default, abort=False): self.RPC_SERVER = config.get('RPCServer', self.DEFAULT_RPC_SERVER) self.DATABASE_PROPS = config.get('Database') - self.LEVELDB_PATH = self.DATABASE_PROPS['Blockchain']['DataDirectoryPath'] - self.NOTIFICATION_DB_PATH = self.DATABASE_PROPS['Notification']['NotificationDataPath'] - 
self.DEBUG_STORAGE_PATH = self.DATABASE_PROPS['DebugStorage']['DebugStoragePath'] + self.LEVELDB_PATH = self.DATABASE_PROPS['Blockchain'].get('DataDirectoryPath', 'Chains/SC234') + self.NOTIFICATION_DB_PATH = self.DATABASE_PROPS['Notification'].get('NotificationDataPath', 'Chains/notification_data') + self.DEBUG_STORAGE_PATH = self.DATABASE_PROPS['DebugStorage'].get('DebugStoragePath', 'Chains/debugstorage') def setup_mainnet(self): """ Load settings from the mainnet JSON config file """ diff --git a/neo/Storage/Implementation/test/test_db_factory.py b/neo/Storage/Implementation/test/test_db_factory.py new file mode 100644 index 000000000..92d132d54 --- /dev/null +++ b/neo/Storage/Implementation/test/test_db_factory.py @@ -0,0 +1,124 @@ +from neo.Storage.Implementation.DBFactory import getBlockchainDB +from neo.Settings import settings +from collections import abc +from unittest import TestCase, skip +import shutil +import os + + +class LevelDBTest(TestCase): + + DB_TESTPATH = os.path.join(settings.DATA_DIR_PATH, 'UnitTestChain') + _db = None + + @classmethod + def setUpClass(cls): + settings.setup_unittest_net() + cls._db = getBlockchainDB(cls.DB_TESTPATH) + + @classmethod + def tearDownClass(cls): + cls._db.closeDB() + shutil.rmtree(cls.DB_TESTPATH) + + def setupDB(self, db): + if self._db: + self._db.closeDB() + shutil.rmtree(self.DB_TESTPATH) + + self._db = getBlockchainDB(self.DB_TESTPATH) + + def test_write_read(self): + + self._db.write(b'00001.x', b'x') + self._db.write(b'00001.y', b'y') + self._db.write(b'00001.z', b'z') + + self.assertEqual(self._db.get(b'00001.x'), b'x') + self.assertEqual(self._db.get(b'00001.y'), b'y') + self.assertEqual(self._db.get(b'00001.z'), b'z') + + def test_delete_default(self): + + self._db.write(b'00001.x', b'x') + self._db.delete(b'00001.z') + + self.assertEqual(self._db.get(b'00001.z'), None) + self.assertEqual(self._db.get(b'00001.z', b'default'), b'default') + + def test_iterator(self): + + self._db.write(b'00001.x', 
b'x') + self._db.write(b'00001.y', b'y') + self._db.write(b'00001.z', b'z') + + self._db.write(b'00002.w', b'w') + self._db.write(b'00002.x', b'x') + self._db.write(b'00002.y', b'y') + self._db.write(b'00002.z', b'z') + + from neo.Storage.Interface.DBInterface import DBProperties + + ''' + Hhas to be converted as leveldb returns a custom iterator object, + rocksdb just uses lists/dicts. Should not matter, still tests the + same. + ''' + def make_compatible(obj, to): + if not isinstance(obj, to): + new_obj = to(obj) + if isinstance(new_obj, dict): + return new_obj.items() + return new_obj + return obj + + with self._db.openIter(DBProperties(prefix=b'00001', include_value=True, include_key=False)) as iterator: + + iterator = make_compatible(iterator, list) + self.assertEqual(len(iterator), 3) + self.assertIsInstance(iterator, list) + + with self._db.openIter(DBProperties(prefix=b'00002', include_value=False, include_key=True)) as iterator: + iterator = make_compatible(iterator, list) + self.assertEqual(len(iterator), 4) + self.assertIsInstance(iterator, list) + + with self._db.openIter(DBProperties(prefix=b'00002', include_value=True, include_key=True)) as iterator: + iterator = make_compatible(iterator, dict) + self.assertEqual(len(iterator), 4) + self.assertIsInstance(iterator, abc.ItemsView) + + with self._db.openIter(DBProperties(prefix=None, include_value=True, include_key=True)) as iterator: + iterator = make_compatible(iterator, dict) + self.assertEqual(len(iterator), 7) + self.assertIsInstance(iterator, abc.ItemsView) + + with self._db.openIter(DBProperties(prefix=None, include_value=False, include_key=True)) as iterator: + iterator = make_compatible(iterator, list) + self.assertEqual(len(iterator), 7) + self.assertIsInstance(iterator, list) + + with self._db.openIter(DBProperties(prefix=None, include_value=True, include_key=False)) as iterator: + iterator = make_compatible(iterator, list) + self.assertEqual(len(iterator), 7) + self.assertIsInstance(iterator, 
list) + + def test_batch(self): + + self._db.write(b'00001.x', b'x') + self._db.write(b'00001.y', b'y') + self._db.write(b'00001.z', b'z') + + self._db.write(b'00002.w', b'w') + self._db.write(b'00002.x', b'x') + self._db.write(b'00002.y', b'y') + self._db.write(b'00002.z', b'z') + + from neo.Storage.Interface.DBInterface import DBProperties + + with self._db.getBatch() as batch: + batch.put(b'00001.x', b'batch_x') + batch.delete(b'00002.x') + + self.assertEqual(self._db.get(b'00001.x'), b'batch_x') + self.assertIsNone(self._db.get(b'00002.x')) From 281264d2776821e6f2bfc533b1d0d5e944cbed8e Mon Sep 17 00:00:00 2001 From: Merl111 Date: Thu, 16 May 2019 14:02:34 +0200 Subject: [PATCH 15/23] corrected database paths in configs --- neo/Settings.py | 2 +- neo/data/protocol.coz.json | 6 ++---- neo/data/protocol.mainnet.json | 6 ++---- neo/data/protocol.privnet.json | 6 ++---- neo/data/protocol.unittest-net.json | 6 ++---- 5 files changed, 9 insertions(+), 17 deletions(-) diff --git a/neo/Settings.py b/neo/Settings.py index 0fc3e91ce..8285cc8bd 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -231,7 +231,7 @@ def get_config_and_warn(key, default, abort=False): self.RPC_SERVER = config.get('RPCServer', self.DEFAULT_RPC_SERVER) self.DATABASE_PROPS = config.get('Database') - self.LEVELDB_PATH = self.DATABASE_PROPS['Blockchain'].get('DataDirectoryPath', 'Chains/SC234') + self.LEVELDB_PATH = self.DATABASE_PROPS['Blockchain'].get('DataDirectoryPath', '-testChains/SC234') self.NOTIFICATION_DB_PATH = self.DATABASE_PROPS['Notification'].get('NotificationDataPath', 'Chains/notification_data') self.DEBUG_STORAGE_PATH = self.DATABASE_PROPS['DebugStorage'].get('DebugStoragePath', 'Chains/debugstorage') diff --git a/neo/data/protocol.coz.json b/neo/data/protocol.coz.json index c834ee262..ebdd9b889 100644 --- a/neo/data/protocol.coz.json +++ b/neo/data/protocol.coz.json @@ -28,8 +28,6 @@ } }, "ApplicationConfiguration": { - "DataDirectoryPath": "Chains/coznet", - 
"NotificationDataPath": "Chains/coz_notif", "RPCPort": 20332, "NodePort": 20333, "WsPort": 20334, @@ -47,10 +45,10 @@ "Database": { "Blockchain": { "skip_version_check": false, - "DataDirectoryPath": "Chains/SC234", + "DataDirectoryPath": "Chains/coznet", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "Notification": { - "NotificationDataPath": "Chains/Test_Notif", + "NotificationDataPath": "Chains/coz_notif", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "DebugStorage": { "DebugStoragePath": "Chains/debugstorage", diff --git a/neo/data/protocol.mainnet.json b/neo/data/protocol.mainnet.json index e031e806b..299013e56 100644 --- a/neo/data/protocol.mainnet.json +++ b/neo/data/protocol.mainnet.json @@ -43,8 +43,6 @@ } }, "ApplicationConfiguration": { - "DataDirectoryPath": "Chains/Main", - "NotificationDataPath": "Chains/Main_Notif", "RPCPort": 10332, "NodePort": 10333, "WsPort": 10334, @@ -63,10 +61,10 @@ "Database": { "Blockchain": { "skip_version_check": false, - "DataDirectoryPath": "Chains/SC234", + "DataDirectoryPath": "Chains/Main", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "Notification": { - "NotificationDataPath": "Chains/Test_Notif", + "NotificationDataPath": "Chains/Main_Notif", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "DebugStorage": { "DebugStoragePath": "Chains/debugstorage", diff --git a/neo/data/protocol.privnet.json b/neo/data/protocol.privnet.json index e56ec3bc2..ecd8f790a 100644 --- a/neo/data/protocol.privnet.json +++ b/neo/data/protocol.privnet.json @@ -25,8 +25,6 @@ } }, "ApplicationConfiguration": { - "DataDirectoryPath": "Chains/privnet", - "NotificationDataPath": "Chains/privnet_notif", "RPCPort": 20332, "NodePort": 20333, "WsPort": 20334, @@ -45,10 +43,10 @@ "Database": { "Blockchain": { "skip_version_check": false, - "DataDirectoryPath": "Chains/SC234", + "DataDirectoryPath": "Chains/privnet", "backend": 
"neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "Notification": { - "NotificationDataPath": "Chains/Test_Notif", + "NotificationDataPath": "Chains/privnet_notif", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "DebugStorage": { "DebugStoragePath": "Chains/debugstorage", diff --git a/neo/data/protocol.unittest-net.json b/neo/data/protocol.unittest-net.json index 56d22b5e2..c05325a49 100644 --- a/neo/data/protocol.unittest-net.json +++ b/neo/data/protocol.unittest-net.json @@ -25,8 +25,6 @@ } }, "ApplicationConfiguration": { - "DataDirectoryPath": "Chains/unittest", - "NotificationDataPath": "Chains/unittest_notif", "RPCPort": 20332, "NodePort": 20333, "WsPort": 20334, @@ -47,10 +45,10 @@ "Database": { "Blockchain": { "skip_version_check": false, - "DataDirectoryPath": "Chains/SC234", + "DataDirectoryPath": "Chains/unittest", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "Notification": { - "NotificationDataPath": "Chains/Test_Notif", + "NotificationDataPath": "Chains/unittest_notif", "backend": "neo.Storage.Implementation.LevelDB.LevelDBImpl.LevelDBImpl"}, "DebugStorage": { "DebugStoragePath": "Chains/debugstorage", From f08dd6b90a9acc7f2f4b73f2977a316c8899e7af Mon Sep 17 00:00:00 2001 From: Merl111 Date: Mon, 20 May 2019 20:37:52 +0200 Subject: [PATCH 16/23] fixed paths to data directories --- neo/Settings.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/neo/Settings.py b/neo/Settings.py index 8285cc8bd..7b16ea924 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -192,6 +192,16 @@ def get_config_and_warn(key, default, abort=False): sys.exit(-1) return value + def update_db_dict(db_dict): + for key in db_dict: + if key == 'Blockchain': + db_dict[key]['DataDirectoryPath'] = self.chain_leveldb_path + if key == 'Notification': + db_dict[key]['NotificationDataPath'] = self.notification_leveldb_path + if key == 'DebugStorage': + 
db_dict[key]['DebugStoragePath'] = self.debug_storage_leveldb_path + return db_dict + if not self.DATA_DIR_PATH: # Setup default data dir self.set_data_dir(None) @@ -230,10 +240,10 @@ def get_config_and_warn(key, default, abort=False): self.REST_SERVER = config.get('RestServer', self.DEFAULT_REST_SERVER) self.RPC_SERVER = config.get('RPCServer', self.DEFAULT_RPC_SERVER) - self.DATABASE_PROPS = config.get('Database') - self.LEVELDB_PATH = self.DATABASE_PROPS['Blockchain'].get('DataDirectoryPath', '-testChains/SC234') - self.NOTIFICATION_DB_PATH = self.DATABASE_PROPS['Notification'].get('NotificationDataPath', 'Chains/notification_data') - self.DEBUG_STORAGE_PATH = self.DATABASE_PROPS['DebugStorage'].get('DebugStoragePath', 'Chains/debugstorage') + self.LEVELDB_PATH = config['Database']['Blockchain'].get('DataDirectoryPath', 'Chains/SC234') + self.NOTIFICATION_DB_PATH = config['Database']['Notification'].get('NotificationDataPath', 'Chains/notification_data') + self.DEBUG_STORAGE_PATH = config['Database']['DebugStorage'].get('DebugStoragePath', 'Chains/debugstorage') + self.DATABASE_PROPS = update_db_dict(config['Database']) def setup_mainnet(self): """ Load settings from the mainnet JSON config file """ From df26c0555d5a404515eb70c93bda27370866ca79 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Tue, 21 May 2019 21:27:21 +0200 Subject: [PATCH 17/23] fix, fixed typos, minor refactoring after review, fixed bug with paths --- neo/Settings.py | 2 +- neo/Storage/Common/DebugStorage.py | 2 +- .../AbstractDBImplementation.py | 14 ++--- neo/Storage/Implementation/DBFactory.py | 5 +- .../Implementation/LevelDB/LevelDBImpl.py | 33 +++++++--- .../Implementation/LevelDB/LevelDBSnapshot.py | 20 ------ .../Implementation/test/test_db_factory.py | 62 ++++++++++++++++++- neo/Storage/Interface/DBInterface.py | 8 +-- neo/bin/api_server.py | 2 - 9 files changed, 100 insertions(+), 48 deletions(-) delete mode 100644 neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py diff --git 
a/neo/Settings.py b/neo/Settings.py index 7b16ea924..23a75457f 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -240,7 +240,7 @@ def update_db_dict(db_dict): self.REST_SERVER = config.get('RestServer', self.DEFAULT_REST_SERVER) self.RPC_SERVER = config.get('RPCServer', self.DEFAULT_RPC_SERVER) - self.LEVELDB_PATH = config['Database']['Blockchain'].get('DataDirectoryPath', 'Chains/SC234') + self.LEVELDB_PATH = config['Database']['Blockchain'].get('DataDirectoryPath', 'Chains/Main') self.NOTIFICATION_DB_PATH = config['Database']['Notification'].get('NotificationDataPath', 'Chains/notification_data') self.DEBUG_STORAGE_PATH = config['Database']['DebugStorage'].get('DebugStoragePath', 'Chains/debugstorage') self.DATABASE_PROPS = update_db_dict(config['Database']) diff --git a/neo/Storage/Common/DebugStorage.py b/neo/Storage/Common/DebugStorage.py index 4e33ed6cc..22c17e71b 100644 --- a/neo/Storage/Common/DebugStorage.py +++ b/neo/Storage/Common/DebugStorage.py @@ -24,7 +24,7 @@ def reset(self): def __init__(self): try: - self._db = GetBlockchain().Default().GetDB().cloneDatabase( + self._db = GetBlockchain().Default().GetDB().cloneDatabaseStorage( DBFactory.getDebugStorageDB()) except Exception as e: logger.info("DEBUG leveldb unavailable, you may already be running this process: %s " % e) diff --git a/neo/Storage/Implementation/AbstractDBImplementation.py b/neo/Storage/Implementation/AbstractDBImplementation.py index dfc618a9a..4ad2f5ca9 100644 --- a/neo/Storage/Implementation/AbstractDBImplementation.py +++ b/neo/Storage/Implementation/AbstractDBImplementation.py @@ -79,9 +79,10 @@ def delete(self, key): raise NotImplementedError @abstractmethod - def cloneDatabase(self, clone_db): + def cloneDatabaseStorage(self, clone_storage): """ - Clones the current database into "clone_db" + Clones the Smart Contract storages of the current database + into "clone_storage" Args: clone_db (object): the instance of the database to clone to. 
@@ -96,11 +97,8 @@ def cloneDatabase(self, clone_db): @abstractmethod def createSnapshot(self): """ - Creates a snapshot of the current database, used for DebugStorage and - NotificationDB. To keep the snapshot compatible to the current design it's - created through a factory which returns basically the same class we use - for the real database and all the methods that can be used on the real db - can also be used on the snapshot. + Creates a read-only snapshot of the current database, used for + DebugStorage and NotificationDB Args: None @@ -121,7 +119,7 @@ def openIter(self, properties): Due to the fact that a context manager is used the returned iterator has to be used within a with block. It's then closed after it returnes from the scope it's used in. - Example from cloneDatabase method: + Example from cloneDatabaseStorage method: with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: diff --git a/neo/Storage/Implementation/DBFactory.py b/neo/Storage/Implementation/DBFactory.py index 623a50c0f..082572758 100644 --- a/neo/Storage/Implementation/DBFactory.py +++ b/neo/Storage/Implementation/DBFactory.py @@ -27,8 +27,6 @@ NOTIF_CONST = 'Notification' DEBUG_CONST = 'DebugStorage' -DATABASE_PROPS = settings.database_properties() - def getBlockchainDB(path=None): """ @@ -41,6 +39,7 @@ def getBlockchainDB(path=None): _blockchain_db_instance (object): A new blockchain database instance. """ + DATABASE_PROPS = settings.database_properties() if not path: path = DATABASE_PROPS[BC_CONST]['DataDirectoryPath'] @@ -60,6 +59,7 @@ def getNotificationDB(path=None): _notif_db_instance (object): A new notification database instance. """ + DATABASE_PROPS = settings.database_properties() if not path: path = DATABASE_PROPS[NOTIF_CONST]['NotificationDataPath'] @@ -76,6 +76,7 @@ def getDebugStorageDB(): _debug_db_instance (object): A new debug storage instance. 
""" + DATABASE_PROPS = settings.database_properties() DebugStorageDB = load_class_from_path(DATABASE_PROPS[DEBUG_CONST]['backend']) _debug_db_instance = DebugStorageDB(DATABASE_PROPS[DEBUG_CONST]['DebugStoragePath']) return _debug_db_instance diff --git a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py index 0859ce01c..7d1596927 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py @@ -6,7 +6,6 @@ from neo.Storage.Implementation.AbstractDBImplementation import ( AbstractDBImplementation ) -from neo.Utils.plugin import load_class_from_path from neo.Storage.Common.DBPrefix import DBPrefix from neo.Storage.Interface.DBInterface import DBProperties from neo.logging import log_manager @@ -52,17 +51,16 @@ def get(self, key, default=None): def delete(self, key): self._db.delete(key) - def cloneDatabase(self, clone_db): + def cloneDatabaseStorage(self, clone_storage): db_snapshot = self.createSnapshot() with db_snapshot.openIter(DBProperties(prefix=DBPrefix.ST_Storage, include_value=True)) as iterator: for key, value in iterator: - clone_db.write(key, value) - return clone_db + clone_storage.write(key, value) + return clone_storage def createSnapshot(self): - SnapshotDB = load_class_from_path('neo.Storage.Implementation.LevelDB.LevelDBSnapshot.LevelDBSnapshot') - return SnapshotDB(self._db.snapshot()) + return LevelDBSnapshot(self._db.snapshot()) @contextmanager def openIter(self, properties): @@ -83,8 +81,27 @@ def getBatch(self): _batch.write() def getPrefixedDB(self, prefix): - PrefixedDB = load_class_from_path('neo.Storage.Implementation.LevelDB.LevelDBSnapshot.LevelDBSnapshot') - return PrefixedDB(self._db.prefixed_db(prefix)) + return LevelDBSnapshot(self._db.prefixed_db(prefix)) def closeDB(self): self._db.close() + + +class LevelDBSnapshot(LevelDBImpl): + + def __init__(self, _prefixdb): + """ + Init method used with a snapshotDB or prefixedDB, 
slightly different from the + init method as we don't have to open a new database but store a snapshot or + a prefixed db. + + Args: + _prefixdb (object): the prefixed db instance + + """ + + try: + self._db = _prefixdb + except Exception as e: + raise Exception("leveldb exception [ %s ]" % e) + diff --git a/neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py b/neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py deleted file mode 100644 index 67ddc2450..000000000 --- a/neo/Storage/Implementation/LevelDB/LevelDBSnapshot.py +++ /dev/null @@ -1,20 +0,0 @@ -from neo.Storage.Implementation.LevelDB.LevelDBImpl import LevelDBImpl - - -class LevelDBSnapshot(LevelDBImpl): - - def __init__(self, _prefixdb): - """ - Init method used with a snapshotDB or prefixedDB, slightly different from the - init method as we don't have to open a new database but store a snapshot or - a prefixed db. - - Args: - _prefixdb (object): the prefixed db instance - - """ - - try: - self._db = _prefixdb - except Exception as e: - raise Exception("leveldb exception [ %s ]" % e) diff --git a/neo/Storage/Implementation/test/test_db_factory.py b/neo/Storage/Implementation/test/test_db_factory.py index 92d132d54..3ca964805 100644 --- a/neo/Storage/Implementation/test/test_db_factory.py +++ b/neo/Storage/Implementation/test/test_db_factory.py @@ -8,7 +8,7 @@ class LevelDBTest(TestCase): - DB_TESTPATH = os.path.join(settings.DATA_DIR_PATH, 'UnitTestChain') + DB_TESTPATH = os.path.join(settings.DATA_DIR_PATH, 'UnitTestChain/') _db = None @classmethod @@ -60,7 +60,7 @@ def test_iterator(self): from neo.Storage.Interface.DBInterface import DBProperties ''' - Hhas to be converted as leveldb returns a custom iterator object, + Has to be converted as leveldb returns a custom iterator object, rocksdb just uses lists/dicts. Should not matter, still tests the same. 
''' @@ -75,34 +75,92 @@ def make_compatible(obj, to): with self._db.openIter(DBProperties(prefix=b'00001', include_value=True, include_key=False)) as iterator: iterator = make_compatible(iterator, list) + + self.assertEqual(iterator[0], b'x') + self.assertEqual(iterator[1], b'y') + self.assertEqual(iterator[2], b'z') + + with self.assertRaises(Exception) as context: + self.assertEqual(iterator[3], b'z') + self.assertTrue('list index out of range' in str(context.exception)) + self.assertEqual(len(iterator), 3) self.assertIsInstance(iterator, list) with self._db.openIter(DBProperties(prefix=b'00002', include_value=False, include_key=True)) as iterator: iterator = make_compatible(iterator, list) + + self.assertEqual(iterator[0], b'00002.w') + self.assertEqual(iterator[1], b'00002.x') + self.assertEqual(iterator[2], b'00002.y') + self.assertEqual(iterator[3], b'00002.z') + + with self.assertRaises(Exception) as context: + self.assertEqual(iterator[4], b'XXX') + + self.assertTrue('list index out of range' in str(context.exception)) self.assertEqual(len(iterator), 4) self.assertIsInstance(iterator, list) with self._db.openIter(DBProperties(prefix=b'00002', include_value=True, include_key=True)) as iterator: iterator = make_compatible(iterator, dict) + + self.assertEqual(dict(iterator).get(b'00002.w'), b'w') + self.assertEqual(dict(iterator).get(b'00002.x'), b'x') + self.assertEqual(dict(iterator).get(b'00002.y'), b'y') + self.assertEqual(dict(iterator).get(b'00002.z'), b'z') + self.assertEqual(dict(iterator).get(b'00002.A'), None) + self.assertEqual(len(iterator), 4) self.assertIsInstance(iterator, abc.ItemsView) with self._db.openIter(DBProperties(prefix=None, include_value=True, include_key=True)) as iterator: iterator = make_compatible(iterator, dict) + self.assertEqual(dict(iterator).get(b'00001.x'), b'x') + self.assertEqual(dict(iterator).get(b'00001.y'), b'y') + self.assertEqual(dict(iterator).get(b'00001.z'), b'z') + 
self.assertEqual(dict(iterator).get(b'00002.w'), b'w') + self.assertEqual(dict(iterator).get(b'00002.x'), b'x') + self.assertEqual(dict(iterator).get(b'00002.y'), b'y') + self.assertEqual(dict(iterator).get(b'00002.z'), b'z') + self.assertEqual(dict(iterator).get(b'00002.A'), None) self.assertEqual(len(iterator), 7) self.assertIsInstance(iterator, abc.ItemsView) with self._db.openIter(DBProperties(prefix=None, include_value=False, include_key=True)) as iterator: iterator = make_compatible(iterator, list) + + self.assertEqual(iterator[0], b'00001.x') + self.assertEqual(iterator[1], b'00001.y') + self.assertEqual(iterator[2], b'00001.z') + self.assertEqual(iterator[3], b'00002.w') + self.assertEqual(iterator[4], b'00002.x') + self.assertEqual(iterator[5], b'00002.y') + self.assertEqual(iterator[6], b'00002.z') + self.assertEqual(len(iterator), 7) self.assertIsInstance(iterator, list) with self._db.openIter(DBProperties(prefix=None, include_value=True, include_key=False)) as iterator: iterator = make_compatible(iterator, list) + + self.assertEqual(iterator[0], b'x') + self.assertEqual(iterator[1], b'y') + self.assertEqual(iterator[2], b'z') + self.assertEqual(iterator[3], b'w') + self.assertEqual(iterator[4], b'x') + self.assertEqual(iterator[5], b'y') + self.assertEqual(iterator[6], b'z') + self.assertEqual(len(iterator), 7) self.assertIsInstance(iterator, list) + with self.assertRaises(Exception) as context: + with self._db.openIter(DBProperties(prefix=None, include_value=False, include_key=False)) as iterator: + pass + self.assertTrue('Either key or value have to be true' in str(context.exception)) + + def test_batch(self): self._db.write(b'00001.x', b'x') diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index 21fc67286..4c624903c 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -17,11 +17,11 @@ class DBProperties: include_key (bool, optional): whether to include values in the 
returned data """ - prefix = None - include_value = None - include_key = None - def __init__(self, prefix=None, include_value=True, include_key=True): + + if not include_value and not include_key: + raise Exception('Either key or value have to be true') + self.prefix = prefix self.include_value = include_value self.include_key = include_key diff --git a/neo/bin/api_server.py b/neo/bin/api_server.py index e7736b58f..d20e758ba 100755 --- a/neo/bin/api_server.py +++ b/neo/bin/api_server.py @@ -156,8 +156,6 @@ def main(): # host parser.add_argument("--host", action="store", type=str, help="Hostname ( for example 127.0.0.1)", default="0.0.0.0") - # rollback - parser.add_argument("--rollback", action="store", type=int, help="Block id to rollback the chain to") # Now parse args = parser.parse_args() # print(args) From 50a8c7da49003929b361d05cbc6641f42b5525fc Mon Sep 17 00:00:00 2001 From: Merl111 Date: Tue, 21 May 2019 21:51:47 +0200 Subject: [PATCH 18/23] made linter happy --- neo/Storage/Implementation/LevelDB/LevelDBImpl.py | 1 - neo/Storage/Implementation/test/test_db_factory.py | 1 - 2 files changed, 2 deletions(-) diff --git a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py index 7d1596927..4bb084d3c 100644 --- a/neo/Storage/Implementation/LevelDB/LevelDBImpl.py +++ b/neo/Storage/Implementation/LevelDB/LevelDBImpl.py @@ -104,4 +104,3 @@ def __init__(self, _prefixdb): self._db = _prefixdb except Exception as e: raise Exception("leveldb exception [ %s ]" % e) - diff --git a/neo/Storage/Implementation/test/test_db_factory.py b/neo/Storage/Implementation/test/test_db_factory.py index 3ca964805..22f6cea65 100644 --- a/neo/Storage/Implementation/test/test_db_factory.py +++ b/neo/Storage/Implementation/test/test_db_factory.py @@ -160,7 +160,6 @@ def make_compatible(obj, to): pass self.assertTrue('Either key or value have to be true' in str(context.exception)) - def test_batch(self): self._db.write(b'00001.x', b'x') From 
71262dd7ae13e8367c926c56160507bdfad5a0fb Mon Sep 17 00:00:00 2001 From: Merl111 Date: Thu, 30 May 2019 14:41:27 +0200 Subject: [PATCH 19/23] removed files slipped in through upstream merge --- .../Blockchains/LevelDB/LevelDBBlockchain.py | 868 ------------------ .../LevelDB/TestLevelDBBlockchain.py | 152 --- 2 files changed, 1020 deletions(-) delete mode 100644 neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py delete mode 100644 neo/Implementations/Blockchains/LevelDB/TestLevelDBBlockchain.py diff --git a/neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py b/neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py deleted file mode 100644 index e75c4b709..000000000 --- a/neo/Implementations/Blockchains/LevelDB/LevelDBBlockchain.py +++ /dev/null @@ -1,868 +0,0 @@ -import plyvel -import binascii -import struct -from neo.Core.Blockchain import Blockchain -from neo.Core.Header import Header -from neo.Core.Block import Block -from neo.Core.TX.Transaction import Transaction, TransactionType -from neo.Core.IO.BinaryWriter import BinaryWriter -from neo.Core.IO.BinaryReader import BinaryReader -from neo.IO.MemoryStream import StreamManager -from neo.Implementations.Blockchains.LevelDB.DBCollection import DBCollection -from neo.Implementations.Blockchains.LevelDB.CachedScriptTable import CachedScriptTable -from neo.Core.Fixed8 import Fixed8 -from neo.Core.UInt160 import UInt160 -from neo.Core.UInt256 import UInt256 - -from neo.Core.State.UnspentCoinState import UnspentCoinState -from neo.Core.State.AccountState import AccountState -from neo.Core.State.CoinState import CoinState -from neo.Core.State.SpentCoinState import SpentCoinState, SpentCoinItem, SpentCoin -from neo.Core.State.AssetState import AssetState -from neo.Core.State.ValidatorState import ValidatorState -from neo.Core.State.ContractState import ContractState -from neo.Core.State.StorageItem import StorageItem -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix - -from 
neo.SmartContract.StateMachine import StateMachine -from neo.SmartContract.ApplicationEngine import ApplicationEngine -from neo.SmartContract import TriggerType -from neo.Core.Cryptography.Crypto import Crypto -from neo.Core.BigInteger import BigInteger -from neo.EventHub import events - -from prompt_toolkit import prompt -from neo.logging import log_manager - -logger = log_manager.getLogger('db') - - -class LevelDBBlockchain(Blockchain): - _path = None - _db = None - - _header_index = [] - _block_cache = {} - - _current_block_height = 0 - _stored_header_count = 0 - - _disposed = False - - _verify_blocks = False - - # this is the version of the database - # should not be updated for network version changes - _sysversion = b'schema v.0.6.9' - - _persisting_block = None - - TXProcessed = 0 - - @property - def CurrentBlockHash(self): - try: - return self._header_index[self._current_block_height] - except Exception as e: - logger.info("Could not get current block hash, returning none: %s ", ) - - return None - - @property - def CurrentBlockHashPlusOne(self): - try: - return self._header_index[self._current_block_height + 1] - except Exception as e: - pass - return self.CurrentBlockHash - - @property - def CurrentHeaderHash(self): - return self._header_index[-1] - - @property - def HeaderHeight(self): - height = len(self._header_index) - 1 - return height - - @property - def Height(self): - return self._current_block_height - - @property - def CurrentBlock(self): - if self._persisting_block: - return self._persisting_block - return self.GetBlockByHeight(self.Height) - - @property - def Path(self): - return self._path - - def __init__(self, path, skip_version_check=False, skip_header_check=False): - super(LevelDBBlockchain, self).__init__() - self._path = path - - self._header_index = [] - self._header_index.append(Blockchain.GenesisBlock().Header.Hash.ToBytes()) - - self.TXProcessed = 0 - - try: - self._db = plyvel.DB(self._path, create_if_missing=True) - 
logger.info("Created Blockchain DB at %s " % self._path) - except Exception as e: - logger.info("leveldb unavailable, you may already be running this process: %s " % e) - raise Exception('Leveldb Unavailable') - - version = self._db.get(DBPrefix.SYS_Version) - - if skip_version_check: - self._db.put(DBPrefix.SYS_Version, self._sysversion) - version = self._sysversion - - if version == self._sysversion: # or in the future, if version doesn't equal the current version... - - ba = bytearray(self._db.get(DBPrefix.SYS_CurrentBlock, 0)) - self._current_block_height = int.from_bytes(ba[-4:], 'little') - - if not skip_header_check: - ba = bytearray(self._db.get(DBPrefix.SYS_CurrentHeader, 0)) - current_header_height = int.from_bytes(ba[-4:], 'little') - current_header_hash = bytes(ba[:64].decode('utf-8'), encoding='utf-8') - - hashes = [] - try: - for key, value in self._db.iterator(prefix=DBPrefix.IX_HeaderHashList): - ms = StreamManager.GetStream(value) - reader = BinaryReader(ms) - hlist = reader.Read2000256List() - key = int.from_bytes(key[-4:], 'little') - hashes.append({'k': key, 'v': hlist}) - StreamManager.ReleaseStream(ms) - except Exception as e: - logger.info("Could not get stored header hash list: %s " % e) - - if len(hashes): - hashes.sort(key=lambda x: x['k']) - genstr = Blockchain.GenesisBlock().Hash.ToBytes() - for hlist in hashes: - - for hash in hlist['v']: - if hash != genstr: - self._header_index.append(hash) - self._stored_header_count += 1 - - if self._stored_header_count == 0: - logger.info("Current stored headers empty, re-creating from stored blocks...") - headers = [] - for key, value in self._db.iterator(prefix=DBPrefix.DATA_Block): - dbhash = bytearray(value)[8:] - headers.append(Header.FromTrimmedData(binascii.unhexlify(dbhash), 0)) - - headers.sort(key=lambda h: h.Index) - for h in headers: - if h.Index > 0: - self._header_index.append(h.Hash.ToBytes()) - - # this will trigger the write of stored headers - if len(headers): - 
self.OnAddHeader(headers[-1]) - - elif current_header_height > self._stored_header_count: - - try: - hash = current_header_hash - targethash = self._header_index[-1] - - newhashes = [] - while hash != targethash: - header = self.GetHeader(hash) - newhashes.insert(0, header) - hash = header.PrevHash.ToBytes() - - self.AddHeaders(newhashes) - except Exception as e: - pass - - elif version is None: - self.Persist(Blockchain.GenesisBlock()) - self._db.put(DBPrefix.SYS_Version, self._sysversion) - else: - logger.error("\n\n") - logger.warning("Database schema has changed from %s to %s.\n" % (version, self._sysversion)) - logger.warning("You must either resync from scratch, or use the np-bootstrap command to bootstrap the chain.") - - try: - res = prompt("Type 'continue' to erase your current database and sync from new. Otherwise this program will exit:\n> ") - except KeyboardInterrupt: - res = False - if res == 'continue': - - with self._db.write_batch() as wb: - for key, value in self._db.iterator(): - wb.delete(key) - - self.Persist(Blockchain.GenesisBlock()) - self._db.put(DBPrefix.SYS_Version, self._sysversion) - - else: - raise Exception("Database schema changed") - - def GetStates(self, prefix, classref): - return DBCollection(self._db, prefix, classref) - - def GetAccountState(self, address, print_all_accounts=False): - - if type(address) is str: - try: - address = address.encode('utf-8') - except Exception as e: - logger.info("could not convert argument to bytes :%s " % e) - return None - - accounts = DBCollection(self._db, DBPrefix.ST_Account, AccountState) - acct = accounts.TryGet(keyval=address) - - return acct - - def GetStorageItem(self, storage_key): - storages = DBCollection(self._db, DBPrefix.ST_Storage, StorageItem) - item = storages.TryGet(storage_key.ToArray()) - return item - - def SearchContracts(self, query): - res = [] - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - keys = contracts.Keys - - query = query.casefold() - - 
for item in keys: - - contract = contracts.TryGet(keyval=item) - try: - if query in contract.Name.decode('utf-8').casefold(): - res.append(contract) - elif query in contract.Author.decode('utf-8').casefold(): - res.append(contract) - elif query in contract.Description.decode('utf-8').casefold(): - res.append(contract) - elif query in contract.Email.decode('utf-8').casefold(): - res.append(contract) - except Exception as e: - logger.info("Could not query contract: %s " % e) - - return res - - def ShowAllContracts(self): - - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - keys = contracts.Keys - return keys - - def GetContract(self, hash): - - if type(hash) is str: - try: - hash = UInt160.ParseString(hash).ToBytes() - except Exception as e: - logger.info("could not convert argument to bytes :%s " % e) - return None - - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - contract = contracts.TryGet(keyval=hash) - return contract - - def GetAllSpentCoins(self): - coins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - - return coins.Keys - - def GetUnspent(self, hash, index): - - coins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - - state = coins.TryGet(hash) - - if state is None: - return None - if index >= len(state.Items): - return None - if state.Items[index] & CoinState.Spent > 0: - return None - tx, height = self.GetTransaction(hash) - - return tx.outputs[index] - - def GetSpentCoins(self, tx_hash): - - if type(tx_hash) is not bytes: - tx_hash = bytes(tx_hash.encode('utf-8')) - - coins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - result = coins.TryGet(keyval=tx_hash) - - return result - - def GetAllUnspent(self, hash): - - unspents = [] - - unspentcoins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - - state = unspentcoins.TryGet(keyval=hash.ToBytes()) - - if state: - tx, height = self.GetTransaction(hash) - - for index, item in 
enumerate(state.Items): - if item & CoinState.Spent == 0: - unspents.append(tx.outputs[index]) - return unspents - - def GetUnclaimed(self, hash): - - tx, height = self.GetTransaction(hash) - - if tx is None: - return None - - out = {} - coins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - - state = coins.TryGet(keyval=hash.ToBytes()) - - if state: - for item in state.Items: - out[item.index] = SpentCoin(tx.outputs[item.index], height, item.height) - - return out - - def SearchAssetState(self, query): - res = [] - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - keys = assets.Keys - - if query.lower() == "neo": - query = "AntShare" - - if query.lower() in {"gas", "neogas"}: - query = "AntCoin" - - for item in keys: - asset = assets.TryGet(keyval=item) - if query in asset.Name.decode('utf-8'): - res.append(asset) - elif query in Crypto.ToAddress(asset.Issuer): - res.append(asset) - elif query in Crypto.ToAddress(asset.Admin): - res.append(asset) - - return res - - def GetAssetState(self, assetId): - - if type(assetId) is str: - try: - assetId = assetId.encode('utf-8') - except Exception as e: - logger.info("could not convert argument to bytes :%s " % e) - return None - - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - asset = assets.TryGet(assetId) - - return asset - - def ShowAllAssets(self): - - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - keys = assets.Keys - return keys - - def GetTransaction(self, hash): - - if type(hash) is str: - hash = hash.encode('utf-8') - elif type(hash) is UInt256: - hash = hash.ToBytes() - - out = self._db.get(DBPrefix.DATA_Transaction + hash) - if out is not None: - out = bytearray(out) - height = int.from_bytes(out[:4], 'little') - out = out[4:] - outhex = binascii.unhexlify(out) - return Transaction.DeserializeFromBufer(outhex, 0), height - - return None, -1 - - def AddBlockDirectly(self, block, do_persist_complete=True): - # Adds a block when importing, which 
skips adding - # the block header - if block.Index != self.Height + 1: - raise Exception("Invalid block") - self.Persist(block) - if do_persist_complete: - self.OnPersistCompleted(block) - - def AddBlock(self, block): - - if not block.Hash.ToBytes() in self._block_cache: - self._block_cache[block.Hash.ToBytes()] = block - - header_len = len(self._header_index) - - if block.Index - 1 >= header_len: - return False - - if block.Index == header_len: - - if self._verify_blocks and not block.Verify(): - return False - elif len(block.Transactions) < 1: - return False - self.AddHeader(block.Header) - - return True - - def ContainsBlock(self, index): - if index <= self._current_block_height: - return True - return False - - def ContainsTransaction(self, hash): - tx = self._db.get(DBPrefix.DATA_Transaction + hash.ToBytes()) - return True if tx is not None else False - - def GetHeader(self, hash): - if isinstance(hash, UInt256): - hash = hash.ToString().encode() - - try: - out = bytearray(self._db.get(DBPrefix.DATA_Block + hash)) - out = out[8:] - outhex = binascii.unhexlify(out) - return Header.FromTrimmedData(outhex, 0) - except TypeError as e2: - pass - except Exception as e: - logger.info("OTHER ERRROR %s " % e) - return None - - def GetHeaderBy(self, height_or_hash): - hash = None - - intval = None - try: - intval = int(height_or_hash) - except Exception as e: - pass - - if intval is None and len(height_or_hash) == 64: - bhash = height_or_hash.encode('utf-8') - if bhash in self._header_index: - hash = bhash - - elif intval is None and len(height_or_hash) == 66: - bhash = height_or_hash[2:].encode('utf-8') - if bhash in self._header_index: - hash = bhash - - elif intval is not None and self.GetHeaderHash(intval) is not None: - hash = self.GetHeaderHash(intval) - - if hash is not None: - return self.GetHeader(hash) - - return None - - def GetHeaderByHeight(self, height): - - if len(self._header_index) <= height: - return False - - hash = self._header_index[height] - - 
return self.GetHeader(hash) - - def GetHeaderHash(self, height): - if height < len(self._header_index) and height >= 0: - return self._header_index[height] - return None - - def GetBlockHash(self, height): - """ - Get the block hash by its block height - Args: - height(int): height of the block to retrieve hash from. - - Returns: - bytes: a non-raw block hash (e.g. b'6dd83ed8a3fc02e322f91f30431bf3662a8c8e8ebe976c3565f0d21c70620991', but not b'\x6d\xd8...etc' - """ - if self._current_block_height < height: - return - - if len(self._header_index) <= height: - return - - return self._header_index[height] - - def GetSysFeeAmount(self, hash): - - if type(hash) is UInt256: - hash = hash.ToBytes() - try: - value = self._db.get(DBPrefix.DATA_Block + hash)[0:8] - amount = struct.unpack("= len(self._header_index): - return None - return self._header_index[header.Index + 1] - return None - - def AddHeader(self, header): - self.AddHeaders([header]) - - def AddHeaders(self, headers): - - newheaders = [] - count = 0 - for header in headers: - - if header.Index - 1 >= len(self._header_index) + count: - logger.info( - "header is greater than header index length: %s %s " % (header.Index, len(self._header_index))) - break - - if header.Index < count + len(self._header_index): - continue - if self._verify_blocks and not header.Verify(): - break - - count = count + 1 - - newheaders.append(header) - - if len(newheaders): - self.ProcessNewHeaders(newheaders) - - return True - - def ProcessNewHeaders(self, headers): - - lastheader = headers[-1] - - hashes = [h.Hash.ToBytes() for h in headers] - - self._header_index = self._header_index + hashes - - if lastheader is not None: - self.OnAddHeader(lastheader) - - def OnAddHeader(self, header): - - hHash = header.Hash.ToBytes() - - if hHash not in self._header_index: - self._header_index.append(hHash) - - with self._db.write_batch() as wb: - while header.Index - 2000 >= self._stored_header_count: - ms = StreamManager.GetStream() - w = 
BinaryWriter(ms) - headers_to_write = self._header_index[self._stored_header_count:self._stored_header_count + 2000] - w.Write2000256List(headers_to_write) - out = ms.ToArray() - StreamManager.ReleaseStream(ms) - wb.put(DBPrefix.IX_HeaderHashList + self._stored_header_count.to_bytes(4, 'little'), out) - - self._stored_header_count += 2000 - - with self._db.write_batch() as wb: - if self._db.get(DBPrefix.DATA_Block + hHash) is None: - wb.put(DBPrefix.DATA_Block + hHash, bytes(8) + header.ToArray()) - wb.put(DBPrefix.SYS_CurrentHeader, hHash + header.Index.to_bytes(4, 'little')) - - @property - def BlockCacheCount(self): - return len(self._block_cache) - - def Persist(self, block): - - self._persisting_block = block - - accounts = DBCollection(self._db, DBPrefix.ST_Account, AccountState) - unspentcoins = DBCollection(self._db, DBPrefix.ST_Coin, UnspentCoinState) - spentcoins = DBCollection(self._db, DBPrefix.ST_SpentCoin, SpentCoinState) - assets = DBCollection(self._db, DBPrefix.ST_Asset, AssetState) - validators = DBCollection(self._db, DBPrefix.ST_Validator, ValidatorState) - contracts = DBCollection(self._db, DBPrefix.ST_Contract, ContractState) - storages = DBCollection(self._db, DBPrefix.ST_Storage, StorageItem) - - amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + (block.TotalFees().value / Fixed8.D) - amount_sysfee_bytes = struct.pack(" Date: Thu, 30 May 2019 23:34:59 +0200 Subject: [PATCH 20/23] made linter happy...again --- neo/Storage/Interface/DBInterface.py | 1 - 1 file changed, 1 deletion(-) diff --git a/neo/Storage/Interface/DBInterface.py b/neo/Storage/Interface/DBInterface.py index f527feaf1..7f7f9c0ba 100644 --- a/neo/Storage/Interface/DBInterface.py +++ b/neo/Storage/Interface/DBInterface.py @@ -29,7 +29,6 @@ def __init__(self, prefix=None, include_value=True, include_key=True): class DBInterface(object): - def __init__(self, db, prefix, class_ref): self._built_keys = False self.DebugStorage = False From 
98c9ed42c1155ff673078ed4b4ba0fad3747f167 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Wed, 5 Jun 2019 21:54:07 +0200 Subject: [PATCH 21/23] updated import_blocks and export_blocks to new database layer, removed call of static methond in Persist() --- neo/Core/Blockchain.py | 37 +++++++++++++++-- neo/SmartContract/StateMachine.py | 8 ++-- neo/SmartContract/StateReader.py | 66 +++++++++++++++---------------- neo/bin/export_blocks.py | 4 +- neo/bin/import_blocks.py | 8 ++-- 5 files changed, 77 insertions(+), 46 deletions(-) diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index a67d79cdc..792987ff1 100644 --- a/neo/Core/Blockchain.py +++ b/neo/Core/Blockchain.py @@ -35,13 +35,18 @@ from neo.Core.UInt256 import UInt256 from neo.Core.UInt160 import UInt160 from neo.Core.IO.BinaryWriter import BinaryWriter + +from neo.SmartContract.StateMachine import StateMachine from neo.SmartContract.Contract import Contract from neo.SmartContract.ApplicationEngine import ApplicationEngine from neo.Storage.Common.DBPrefix import DBPrefix +from neo.Storage.Common.CachedScriptTable import CachedScriptTable from neo.Storage.Interface.DBInterface import DBInterface, DBProperties +from neo.SmartContract import TriggerType from neo.VM.OpCode import PUSHF, PUSHT from functools import lru_cache from neo.Network.common import msgrouter +from neo.EventHub import events from neo.Network.common import blocking_prompt as prompt from neo.Network.common import wait_for @@ -967,9 +972,11 @@ async def Persist(self, block): assets = DBInterface(self._db, DBPrefix.ST_Asset, AssetState) validators = DBInterface(self._db, DBPrefix.ST_Validator, ValidatorState) contracts = DBInterface(self._db, DBPrefix.ST_Contract, ContractState) + storages = DBInterface(self._db, DBPrefix.ST_Storage, StorageItem) amount_sysfee = self.GetSysFeeAmount(block.PrevHash) + (block.TotalFees().value / Fixed8.D) amount_sysfee_bytes = struct.pack(" Tuple[bool, str]: distance = self._current_block_height - block.Index 
@@ -1124,7 +1155,7 @@ def RegisterBlockchain(blockchain): Register the default block chain instance. Args: - blockchain: a blockchain instance. E.g. neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain + blockchain: a blockchain instance. E.g. neo.Storage.Implementation.LevelDB.LevelDBImpl """ if Blockchain._instance is None: Blockchain._instance = blockchain diff --git a/neo/SmartContract/StateMachine.py b/neo/SmartContract/StateMachine.py index 3dae65048..1ecc7f55a 100644 --- a/neo/SmartContract/StateMachine.py +++ b/neo/SmartContract/StateMachine.py @@ -12,7 +12,7 @@ from neo.Core.UInt256 import UInt256 from neo.Core.State.AccountState import AccountState from neo.Core.Fixed8 import Fixed8 -from neo.Core.Blockchain import Blockchain +from neo.Blockchain import GetBlockchain from neo.VM.InteropService import StackItem from neo.VM.ExecutionEngine import ExecutionEngine from neo.SmartContract.StorageContext import StorageContext @@ -250,7 +250,7 @@ def Blockchain_GetAccount(self, engine: ExecutionEngine): return True def Blockchain_GetValidators(self, engine: ExecutionEngine): - validators = Blockchain.Default().GetValidators() + validators = GetBlockchain().GetValidators() items = [StackItem(validator.encode_point(compressed=True)) for validator in validators] @@ -262,7 +262,7 @@ def Blockchain_GetAsset(self, engine: ExecutionEngine): data = engine.CurrentContext.EvaluationStack.Pop().GetByteArray() asset = None - if Blockchain.Default() is not None: + if GetBlockchain() is not None: asset = self.Assets.TryGet(UInt256(data=data)) if asset is None: return False @@ -369,7 +369,7 @@ def Transaction_GetUnspentCoins(self, engine: ExecutionEngine): if tx is None: return False - outputs = Blockchain.Default().GetAllUnspent(tx.Hash) + outputs = GetBlockchain().GetAllUnspent(tx.Hash) if len(outputs) > engine.maxArraySize: return False diff --git a/neo/SmartContract/StateReader.py b/neo/SmartContract/StateReader.py index 8f3192398..86b8aa757 100644 --- 
a/neo/SmartContract/StateReader.py +++ b/neo/SmartContract/StateReader.py @@ -3,7 +3,7 @@ from neo.SmartContract.NotifyEventArgs import NotifyEventArgs from neo.SmartContract.StorageContext import StorageContext from neo.Core.State.StorageKey import StorageKey -from neo.Core.Blockchain import Blockchain +from neo.Blockchain import GetBlockchain from neo.Core.Cryptography.Crypto import Crypto from neo.Core.BigInteger import BigInteger from neo.Core.UInt160 import UInt160 @@ -33,25 +33,25 @@ class StateReader(InteropService): @property def Accounts(self): if not self._accounts: - self._accounts = Blockchain.Default().GetStates(DBPrefix.ST_Account, AccountState) + self._accounts = GetBlockchain().GetStates(DBPrefix.ST_Account, AccountState) return self._accounts @property def Assets(self): if not self._assets: - self._assets = Blockchain.Default().GetStates(DBPrefix.ST_Asset, AssetState) + self._assets = GetBlockchain().GetStates(DBPrefix.ST_Asset, AssetState) return self._assets @property def Contracts(self): if not self._contracts: - self._contracts = Blockchain.Default().GetStates(DBPrefix.ST_Contract, ContractState) + self._contracts = GetBlockchain().GetStates(DBPrefix.ST_Contract, ContractState) return self._contracts @property def Storages(self): if not self._storages: - self._storages = Blockchain.Default().GetStates(DBPrefix.ST_Storage, StorageItem) + self._storages = GetBlockchain().GetStates(DBPrefix.ST_Storage, StorageItem) return self._storages def RegisterWithPrice(self, method, func, price): @@ -123,7 +123,7 @@ def GetPrice(self, hash: int): return self.prices.get(hash, 0) def ExecutionCompleted(self, engine, success, error=None): - height = Blockchain.Default().Height + 1 + height = GetBlockchain().Height + 1 tx_hash = None if engine.ScriptContainer: @@ -248,7 +248,7 @@ def Runtime_Notify(self, engine: ExecutionEngine): if settings.emit_notify_events_on_sc_execution_error: # emit Notify events even if the SC execution might fail. 
tx_hash = engine.ScriptContainer.Hash - height = Blockchain.Default().Height + 1 + height = GetBlockchain().Height + 1 success = None self.events_to_dispatch.append(NotifyEvent(SmartContractEvent.RUNTIME_NOTIFY, payload, args.ScriptHash, height, tx_hash, @@ -271,14 +271,14 @@ def Runtime_Log(self, engine: ExecutionEngine): self.events_to_dispatch.append(SmartContractEvent(SmartContractEvent.RUNTIME_LOG, ContractParameter(ContractParameterType.String, value=message), hash, - Blockchain.Default().Height + 1, + GetBlockchain().Height + 1, tx_hash, test_mode=engine.testMode)) return True def Runtime_GetCurrentTime(self, engine: ExecutionEngine): - BC = Blockchain.Default() + BC = GetBlockchain() header = BC.GetHeaderByHeight(BC.Height) if header is None: header = Blockchain.GenesisBlock() @@ -327,10 +327,10 @@ def Runtime_Deserialize(self, engine: ExecutionEngine): return True def Blockchain_GetHeight(self, engine: ExecutionEngine): - if Blockchain.Default() is None: + if GetBlockchain() is None: engine.CurrentContext.EvaluationStack.PushT(0) else: - engine.CurrentContext.EvaluationStack.PushT(Blockchain.Default().Height) + engine.CurrentContext.EvaluationStack.PushT(GetBlockchain().Height) return True @@ -343,25 +343,25 @@ def Blockchain_GetHeader(self, engine: ExecutionEngine): height = BigInteger.FromBytes(data) - if Blockchain.Default() is not None: + if GetBlockchain() is not None: - header = Blockchain.Default().GetHeaderBy(height_or_hash=height) + header = GetBlockchain().GetHeaderBy(height_or_hash=height) elif height == 0: - header = Blockchain.GenesisBlock().Header + header = GetBlockchain().GenesisBlock().Header elif len(data) == 32: hash = UInt256(data=data) - if Blockchain.Default() is not None: + if GetBlockchain() is not None: - header = Blockchain.Default().GetHeaderBy(height_or_hash=hash) + header = GetBlockchain().GetHeaderBy(height_or_hash=hash) - elif hash == Blockchain.GenesisBlock().Hash: + elif hash == GetBlockchain().GenesisBlock().Hash: - header 
= Blockchain.GenesisBlock().Header + header = GetBlockchain().GenesisBlock().Header engine.CurrentContext.EvaluationStack.PushT(StackItem.FromInterface(header)) return True @@ -379,25 +379,25 @@ def Blockchain_GetBlock(self, engine: ExecutionEngine): if len(data) <= 5: height = BigInteger.FromBytes(data) - if Blockchain.Default() is not None: + if GetBlockchain() is not None: - block = Blockchain.Default().GetBlockByHeight(height) + block = GetBlockchain().GetBlockByHeight(height) elif height == 0: - block = Blockchain.GenesisBlock() + block = GetBlockchain().GenesisBlock() elif len(data) == 32: hash = UInt256(data=data).ToBytes() - if Blockchain.Default() is not None: + if GetBlockchain() is not None: - block = Blockchain.Default().GetBlockByHash(hash=hash) + block = GetBlockchain().GetBlockByHash(hash=hash) - elif hash == Blockchain.GenesisBlock().Hash: + elif hash == GetBlockchain().GenesisBlock().Hash: - block = Blockchain.GenesisBlock().Header + block = GetBlockchain().GenesisBlock().Header engine.CurrentContext.EvaluationStack.PushT(StackItem.FromInterface(block)) return True @@ -406,8 +406,8 @@ def Blockchain_GetTransaction(self, engine: ExecutionEngine): data = engine.CurrentContext.EvaluationStack.Pop().GetByteArray() tx = None - if Blockchain.Default() is not None: - tx, height = Blockchain.Default().GetTransaction(UInt256(data=data)) + if GetBlockchain() is not None: + tx, height = GetBlockchain().GetTransaction(UInt256(data=data)) engine.CurrentContext.EvaluationStack.PushT(StackItem.FromInterface(tx)) return True @@ -416,8 +416,8 @@ def Blockchain_GetTransactionHeight(self, engine: ExecutionEngine): data = engine.CurrentContext.EvaluationStack.Pop().GetByteArray() height = -1 - if Blockchain.Default() is not None: - tx, height = Blockchain.Default().GetTransaction(UInt256(data=data)) + if GetBlockchain() is not None: + tx, height = GetBlockchain().GetTransaction(UInt256(data=data)) engine.CurrentContext.EvaluationStack.PushT(height) return True @@ 
-561,7 +561,7 @@ def Storage_Get(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.STORAGE_GET, ContractParameter(ContractParameterType.String, value='%s -> %s' % (keystr, valStr)), - context.ScriptHash, Blockchain.Default().Height + 1, tx_hash, test_mode=engine.testMode)) + context.ScriptHash, GetBlockchain().Height + 1, tx_hash, test_mode=engine.testMode)) return True @@ -597,7 +597,7 @@ def Storage_Put(self, engine: ExecutionEngine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.STORAGE_PUT, ContractParameter(ContractParameterType.String, '%s -> %s' % (keystr, valStr)), - context.ScriptHash, Blockchain.Default().Height + 1, + context.ScriptHash, GetBlockchain().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=test_mode)) @@ -637,7 +637,7 @@ def Contract_Destroy(self, engine): self.events_to_dispatch.append( SmartContractEvent(SmartContractEvent.CONTRACT_DESTROY, ContractParameter(ContractParameterType.InteropInterface, contract), - hash, Blockchain.Default().Height + 1, + hash, GetBlockchain().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=engine.testMode)) return True @@ -666,7 +666,7 @@ def Storage_Delete(self, engine: ExecutionEngine): else: test_mode = engine.testMode self.events_to_dispatch.append(SmartContractEvent(SmartContractEvent.STORAGE_DELETE, ContractParameter(ContractParameterType.String, keystr), - context.ScriptHash, Blockchain.Default().Height + 1, + context.ScriptHash, GetBlockchain().Height + 1, engine.ScriptContainer.Hash if engine.ScriptContainer else None, test_mode=test_mode)) diff --git a/neo/bin/export_blocks.py b/neo/bin/export_blocks.py index 7e5738ade..3da07e217 100644 --- a/neo/bin/export_blocks.py +++ b/neo/bin/export_blocks.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 from neo.Settings import settings -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import 
LevelDBBlockchain +from neo.Storage.Implementation.DBFactory import getBlockchainDB from neo.Core.Blockchain import Blockchain import argparse from tqdm import trange @@ -45,7 +45,7 @@ def main(): file_path = args.output # Instantiate the blockchain and subscribe to notifications - blockchain = LevelDBBlockchain(settings.chain_leveldb_path) + blockchain = Blockchain(getBlockchainDB(settings.chain_leveldb_path)) Blockchain.DeregisterBlockchain() Blockchain.RegisterBlockchain(blockchain) diff --git a/neo/bin/import_blocks.py b/neo/bin/import_blocks.py index ef06660e4..dff2daaea 100644 --- a/neo/bin/import_blocks.py +++ b/neo/bin/import_blocks.py @@ -2,8 +2,8 @@ from neo.Core.Blockchain import Blockchain from neo.Core.Block import Block -from neo.Implementations.Blockchains.LevelDB.LevelDBBlockchain import LevelDBBlockchain -from neo.Implementations.Blockchains.LevelDB.DBPrefix import DBPrefix +from neo.Storage.Implementation.DBFactory import getBlockchainDB +from neo.Storage.Common.DBPrefix import DBPrefix from neo.Settings import settings from neo.Core.IO.BinaryReader import BinaryReader from neo.Core.IO.BinaryWriter import BinaryWriter @@ -94,7 +94,7 @@ async def _main(): notif_target_dir = os.path.join(settings.DATA_DIR_PATH, settings.NOTIFICATION_DB_PATH) if append: - blockchain = LevelDBBlockchain(settings.chain_leveldb_path, skip_header_check=True) + blockchain = Blockchain(getBlockchainDB(settings.chain_leveldb_path), skip_header_check=True) Blockchain.DeregisterBlockchain() Blockchain.RegisterBlockchain(blockchain) @@ -122,7 +122,7 @@ async def _main(): return False # Instantiate the blockchain and subscribe to notifications - blockchain = LevelDBBlockchain(settings.chain_leveldb_path) + blockchain = Blockchain(getBlockchainDB(settings.chain_leveldb_path)) Blockchain.DeregisterBlockchain() Blockchain.RegisterBlockchain(blockchain) From c5bb6ceaca3ed8c7ca31ae02913ec9a6b636e398 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Wed, 5 Jun 2019 21:54:07 +0200 Subject: 
[PATCH 22/23] fix, minor fixes --- neo/Core/Blockchain.py | 2 +- neo/Settings.py | 2 +- neo/SmartContract/StateReader.py | 6 ++++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/neo/Core/Blockchain.py b/neo/Core/Blockchain.py index 792987ff1..479c8c713 100644 --- a/neo/Core/Blockchain.py +++ b/neo/Core/Blockchain.py @@ -1065,7 +1065,7 @@ async def Persist(self, block): elif tx.Type == TransactionType.InvocationTransaction: script_table = CachedScriptTable(contracts) - service = StateMachine(accounts, validators, assets, contracts, storages, wb) + service = StateMachine(accounts, validators, assets, contracts, storages, wb, self) engine = ApplicationEngine( trigger_type=TriggerType.Application, diff --git a/neo/Settings.py b/neo/Settings.py index db23a7b9a..79d680522 100644 --- a/neo/Settings.py +++ b/neo/Settings.py @@ -127,7 +127,7 @@ class SettingsHolder: # Logging settings log_level = None log_smart_contract_events = False - log_vm_instructions = True + log_vm_instructions = False # Emit Notify events when smart contract execution failed. Use for debugging purposes only. 
emit_notify_events_on_sc_execution_error = False diff --git a/neo/SmartContract/StateReader.py b/neo/SmartContract/StateReader.py index 86b8aa757..f82758eb2 100644 --- a/neo/SmartContract/StateReader.py +++ b/neo/SmartContract/StateReader.py @@ -279,11 +279,13 @@ def Runtime_Log(self, engine: ExecutionEngine): def Runtime_GetCurrentTime(self, engine: ExecutionEngine): BC = GetBlockchain() + print(BC) header = BC.GetHeaderByHeight(BC.Height) + print(header, GetBlockchain().SECONDS_PER_BLOCK) if header is None: - header = Blockchain.GenesisBlock() + header = GetBlockchain().GenesisBlock() - engine.CurrentContext.EvaluationStack.PushT(header.Timestamp + Blockchain.SECONDS_PER_BLOCK) + engine.CurrentContext.EvaluationStack.PushT(header.Timestamp + GetBlockchain().SECONDS_PER_BLOCK) return True def Runtime_Serialize(self, engine: ExecutionEngine): From 939749d8545814983c22e1156e6d386a3d203cb4 Mon Sep 17 00:00:00 2001 From: Merl111 Date: Wed, 12 Jun 2019 13:28:52 +0200 Subject: [PATCH 23/23] fix, take --datadir intou account when starting np=prompt, removed duplicate log --- neo/Implementations/Notifications/NotificationDB.py | 1 - neo/bin/prompt.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/neo/Implementations/Notifications/NotificationDB.py b/neo/Implementations/Notifications/NotificationDB.py index 5a44ce115..eb470cc37 100644 --- a/neo/Implementations/Notifications/NotificationDB.py +++ b/neo/Implementations/Notifications/NotificationDB.py @@ -72,7 +72,6 @@ def __init__(self, path): try: self._db = getNotificationDB(path) - logger.info("Created Notification DB At %s " % path) except Exception as e: logger.info("Notification leveldb unavailable, you may already be running this process: %s " % e) raise Exception('Notification Leveldb Unavailable %s ' % e) diff --git a/neo/bin/prompt.py b/neo/bin/prompt.py index 47d80d685..368bf8c1a 100755 --- a/neo/bin/prompt.py +++ b/neo/bin/prompt.py @@ -354,7 +354,7 @@ def set_max_peers(num_peers) -> bool: 
use_asyncio_event_loop() # Instantiate the blockchain and subscribe to notifications - blockchain = Blockchain(DBFactory.getBlockchainDB()) + blockchain = Blockchain(DBFactory.getBlockchainDB(settings.chain_leveldb_path)) Blockchain.RegisterBlockchain(blockchain) # Try to set up a notification db