Merged latest develop changes.

This commit is contained in:
Adrian Tiberius 2015-07-04 01:54:45 +02:00
parent 8199b8a2df
commit a3b331e7f0
35 changed files with 644 additions and 236 deletions

View File

@ -52,7 +52,9 @@ android {
}
dependencies {
compile project(':ethereumj-core-android')
compile (project(':ethereumj-core-android')) {
exclude group: "org.hibernate", module: "hibernate-core"
}
compile 'com.android.support:multidex:1.0.0'
compile fileTree(include: ['*.jar'], dir: '../libraries')
compile 'com.android.support:support-v4:22.2.0'

View File

@ -51,7 +51,6 @@ dependencies {
exclude group: "org.apache.commons", module: "commons-pool2"
exclude group: "org.slf4j", module: "slf4j-log4j12"
exclude group: "log4j", module: "apache-log4j-extras"
exclude group: "org.hibernate", module: "hibernate-core"
exclude group: "org.hibernate", module: "hibernate-entitymanager"
exclude group: "redis.clients", module: "jedis"
exclude group: "org.antlr", module: "antlr4-runtime"

View File

@ -47,7 +47,7 @@
<appender-ref ref="logcat" />
</logger>
<logger name="net" level="ERROR">
<logger name="net" level="DEBUG">
<appender-ref ref="logcat" />
</logger>

View File

@ -4,6 +4,7 @@ import org.ethereum.core.Block;
import org.ethereum.core.TransactionReceipt;
import org.ethereum.db.BlockStore;
import org.ethereum.util.ByteUtil;
import org.hibernate.SessionFactory;
import java.math.BigInteger;
import java.util.ArrayList;
@ -125,4 +126,9 @@ public class BlockStoreImpl implements BlockStore {
public void flush() {
}
@Override
public void setSessionFactory(SessionFactory sessionFactory) {
}
}

View File

@ -5,6 +5,7 @@ import org.ethereum.core.Block;
import org.ethereum.core.TransactionReceipt;
import org.ethereum.db.BlockStore;
import org.ethereum.db.ByteArrayWrapper;
import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -210,4 +211,8 @@ public class InMemoryBlockStore implements BlockStore {
logger.info("Loaded db in: {} ms", ((float)(t_ - t) / 1_000_000));
}
@Override
public void setSessionFactory(SessionFactory sessionFactory) {
}
}

View File

@ -7,7 +7,6 @@ buildscript {
}
}
dependencies {
classpath 'me.champeau.gradle:antlr4-gradle-plugin:0.1'
classpath 'org.jfrog.buildinfo:build-info-extractor-gradle:3.0.1'
}
}
@ -105,7 +104,7 @@ dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
compile('io.netty:netty-all:4.0.28.Final')
compile "io.netty:netty-all:4.0.28.Final"
compile "com.madgag.spongycastle:core:${scastleVersion}" // for SHA3 and SECP256K1
compile "com.madgag.spongycastle:prov:${scastleVersion}" // for SHA3 and SECP256K1

View File

@ -15,8 +15,8 @@ import java.util.*;
public class SystemProperties {
private static Logger logger = LoggerFactory.getLogger("general");
private final static int DEFAULT_TX_APPROVE_TIMEOUT = 10;
private final static String DEFAULT_DISCOVERY_PEER_LIST = "poc-9.ethdev.com:30303";
private final static String DEFAULT_ACTIVE_PEER_NODEID = ""; // FIXME
private final static String DEFAULT_ACTIVE_PEER_IP = "poc-9.ethdev.com";
@ -49,6 +49,8 @@ public class SystemProperties {
private static final String DEFAULT_BLOCKS_LOADER = "";
private static final int DEFAULT_FLUSH_BATCH_SIZE = 5_000;
private static final boolean DEFAULT_FLUSH_IGNORE_CONSENSUS = false;
private static final int DEFAULT_DETAILS_INMEMORY_STORAGE_LIMIT = 1_000;
private static final int DEFAULT_FLUSH_REPO_SIZE = 128_000_000;
/* Testing */
@ -260,14 +262,21 @@ public class SystemProperties {
private int intProperty(String key, int defaultValue) {
return Integer.parseInt(prop.getProperty(key, String.valueOf(defaultValue)));
}
public int flushBatchSize() {
return intProperty("flush.batch.size", DEFAULT_FLUSH_BATCH_SIZE);
public int detailsInMemoryStorageLimit() {
return intProperty("details.inmemory.storage.limit", DEFAULT_DETAILS_INMEMORY_STORAGE_LIMIT);
}
public boolean flushIgnoreConsensus() {
return boolProperty("flush.ignore.consensus", DEFAULT_FLUSH_IGNORE_CONSENSUS);
public int flushBlocksBatchSize() {
return intProperty("flush.blocks.batch.size", DEFAULT_FLUSH_BATCH_SIZE);
}
public int flushBlocksRepoSize() {
return intProperty("flush.blocks.repo.size", DEFAULT_FLUSH_REPO_SIZE);
}
public boolean flushBlocksIgnoreConsensus() {
return boolProperty("flush.blocks.ignore.consensus", DEFAULT_FLUSH_IGNORE_CONSENSUS);
}
public String vmTraceDir() {

View File

@ -4,6 +4,7 @@ import org.ethereum.config.Constants;
import org.ethereum.config.SystemProperties;
import org.ethereum.crypto.HashUtil;
import org.ethereum.db.BlockStore;
import org.ethereum.db.RepositoryImpl;
import org.ethereum.facade.Blockchain;
import org.ethereum.facade.Repository;
import org.ethereum.listener.EthereumListener;
@ -321,10 +322,14 @@ public class BlockchainImpl implements Blockchain {
}
private boolean needFlush(Block block) {
boolean isBatchReached = block.getNumber() % CONFIG.flushBatchSize() == 0;
boolean isConsensus = CONFIG.flushIgnoreConsensus() || adminInfo.isConsensus();
if (CONFIG.flushBlocksRepoSize() > 0 && repository.getClass().isAssignableFrom(RepositoryImpl.class)) {
return ((RepositoryImpl) repository).getAllocatedMemorySize() > CONFIG.flushBlocksRepoSize();
} else {
boolean isBatchReached = block.getNumber() % CONFIG.flushBlocksBatchSize() == 0;
boolean isConsensus = CONFIG.flushBlocksIgnoreConsensus() || adminInfo.isConsensus();
return isConsensus && isBatchReached;
return isConsensus && isBatchReached;
}
}
private byte[] calcReceiptsTrie(List<TransactionReceipt> receipts){

View File

@ -0,0 +1,21 @@
package org.ethereum.datasource;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Process-wide registry of named LevelDB-backed data sources.
 *
 * Ensures that each database name maps to exactly one opened
 * {@link KeyValueDataSource}, so callers share the same underlying store
 * instead of opening it twice.
 */
public class DataSourcePool {

    // Opened data sources keyed by database name.
    private static Map<String, KeyValueDataSource> pool = new ConcurrentHashMap<>();

    /**
     * Returns the pooled LevelDB data source for {@code name}, creating and
     * initializing it on first request.
     *
     * Synchronized to close a check-then-act race in the original code: two
     * threads could both see a missing entry, each construct and init() a
     * LevelDbDataSource for the same name, and one opened database would be
     * silently leaked (ConcurrentHashMap only makes individual get/put
     * atomic, not the compound lookup-create-insert sequence).
     *
     * @param name database name (also used as the on-disk location)
     * @return the shared, initialized data source for {@code name}
     */
    public static synchronized KeyValueDataSource levelDbByName(String name) {
        KeyValueDataSource dataSource = pool.get(name);
        if (dataSource == null) {
            dataSource = new LevelDbDataSource(name);
            dataSource.init();
            pool.put(name, dataSource);
        }
        return dataSource;
    }
}

View File

@ -1,7 +1,6 @@
package org.ethereum.datasource;
import org.ethereum.db.ByteArrayWrapper;
import org.ethereum.util.ByteUtil;
import org.iq80.leveldb.DBException;
import java.util.HashMap;
@ -54,7 +53,6 @@ public class HashMapDB implements KeyValueDataSource {
@Override
public Set<byte[]> keys() {
Set<byte[]> keys = new HashSet<>();
for (ByteArrayWrapper key : storage.keySet()){
keys.add(key.getData());
@ -64,10 +62,8 @@ public class HashMapDB implements KeyValueDataSource {
@Override
public void updateBatch(Map<byte[], byte[]> rows) {
for (byte[] key : rows.keySet()){
byte[] value = rows.get(key);
storage.put(wrap(key), value);
storage.put(wrap(key), rows.get(key));
}
}

View File

@ -1,6 +1,5 @@
package org.ethereum.datasource;
import org.ethereum.config.SystemProperties;
import org.iq80.leveldb.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -11,10 +10,10 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static java.lang.System.getProperty;
import static org.ethereum.config.SystemProperties.CONFIG;
import static org.fusesource.leveldbjni.JniDBFactory.factory;
//import static org.iq80.leveldb.impl.Iq80DBFactory.factory;
/**
* @author Roman Mandeleil
* @since 18.01.2015
@ -45,20 +44,19 @@ public class LevelDbDataSource implements KeyValueDataSource {
options.writeBufferSize(10 * 1024);
options.cacheSize(0);
try {
logger.debug("Opening database");
File dbLocation = new File(System.getProperty("user.dir") + "/" +
SystemProperties.CONFIG.databaseDir() + "/");
File fileLocation = new File(dbLocation, name);
File fileLocation = new File(getProperty("user.dir") + "/" + CONFIG.databaseDir() + "/" + name);
File dbLocation = fileLocation.getParentFile();
if (!dbLocation.exists()) dbLocation.mkdirs();
if (SystemProperties.CONFIG.databaseReset()) {
if (CONFIG.databaseReset()) {
destroyDB(fileLocation);
}
logger.debug("Initializing new or existing database: '{}'", name);
db = factory.open(fileLocation, options);
} catch (IOException ioe) {
logger.error(ioe.getMessage(), ioe);
throw new RuntimeException("Can't initialize database");
@ -76,7 +74,6 @@ public class LevelDbDataSource implements KeyValueDataSource {
}
}
@Override
public void setName(String name) {
this.name = name;
@ -100,15 +97,12 @@ public class LevelDbDataSource implements KeyValueDataSource {
@Override
public Set<byte[]> keys() {
try {
try (DBIterator dbIterator = db.iterator()) {
Set<byte[]> keys = new HashSet<>();
while (dbIterator.hasNext()) {
keys.add(dbIterator.next().getKey());
}
return keys;
try (DBIterator iterator = db.iterator()) {
Set<byte[]> result = new HashSet<>();
for (iterator.seekToFirst(); iterator.hasNext(); iterator.next()) {
result.add(iterator.peekNext().getKey());
}
return result;
} catch (IOException e) {
throw new RuntimeException(e);
}
@ -116,14 +110,12 @@ public class LevelDbDataSource implements KeyValueDataSource {
@Override
public void updateBatch(Map<byte[], byte[]> rows) {
try {
try (WriteBatch batch = db.createWriteBatch()) {
for (Map.Entry<byte[], byte[]> row : rows.entrySet()) {
batch.put(row.getKey(), row.getValue());
}
db.write(batch);
try (WriteBatch batch = db.createWriteBatch()) {
for (Map.Entry<byte[], byte[]> entry : rows.entrySet()) {
batch.put(entry.getKey(), entry.getValue());
}
db.write(batch);
} catch (IOException e) {
throw new RuntimeException(e);
}

View File

@ -22,13 +22,11 @@ public class MapDBDataSource implements KeyValueDataSource {
@Override
public void init() {
File dbLocation = new File(getProperty("user.dir") + "/" + SystemProperties.CONFIG.databaseDir() + "/");
if (!dbLocation.exists()) {
dbLocation.mkdirs();
}
File dbFile = new File(getProperty("user.dir") + "/" + SystemProperties.CONFIG.databaseDir() + "/" + name);
if (!dbFile.getParentFile().exists()) dbFile.getParentFile().mkdirs();
db = DBMaker.fileDB(new File(dbLocation, name))
db = DBMaker.fileDB(dbFile)
.transactionDisable()
.closeOnJvmShutdown()
.make();

View File

@ -43,7 +43,7 @@ public interface BlockStore {
public void flush();
public void load();
//public void setSessionFactory(SessionFactory sessionFactory);
public void setSessionFactory(SessionFactory sessionFactory);
}

View File

@ -85,7 +85,7 @@ public class BlockStoreDummy implements BlockStore {
public void load() {
}
//@Override
@Override
public void setSessionFactory(SessionFactory sessionFactory) {
}

View File

@ -180,10 +180,8 @@ public class BlockStoreImpl implements BlockStore {
public void load() {
}
/*
@Override
public void setSessionFactory(SessionFactory sessionFactory) {
}
*/
}

View File

@ -6,7 +6,7 @@ import java.util.List;
import java.util.Map;
public interface ContractDetails {
void put(DataWord key, DataWord value);
DataWord get(DataWord key);
@ -35,7 +35,15 @@ public interface ContractDetails {
void setStorage(Map<DataWord, DataWord> storage);
byte[] getAddress();
void setAddress(byte[] address);
ContractDetails clone();
String toString();
void syncStorage();
int getAllocatedMemorySize();
}

View File

@ -22,7 +22,6 @@ public class ContractDetailsCacheImpl implements ContractDetails {
ContractDetails origContract = new ContractDetailsImpl();
private byte[] code = EMPTY_BYTE_ARRAY;
private boolean dirty = false;
@ -174,6 +173,15 @@ public class ContractDetailsCacheImpl implements ContractDetails {
this.storage = storage;
}
@Override
public byte[] getAddress() {
return (origContract == null) ? null : origContract.getAddress();
}
@Override
public void setAddress(byte[] address) {
if (origContract != null) origContract.setAddress(address);
}
@Override
public ContractDetails clone() {
@ -196,6 +204,18 @@ public class ContractDetailsCacheImpl implements ContractDetails {
return ret;
}
@Override
public void syncStorage() {
if (origContract != null) origContract.syncStorage();
}
@Override
public int getAllocatedMemorySize() {
return (origContract == null)
? code.length + storage.size() * 32 * 2
: origContract.getAllocatedMemorySize();
}
public void commit(){
if (origContract == null) return;

View File

@ -1,6 +1,7 @@
package org.ethereum.db;
import org.ethereum.datasource.HashMapDB;
import org.ethereum.config.SystemProperties;
import org.ethereum.datasource.KeyValueDataSource;
import org.ethereum.trie.SecureTrie;
import org.ethereum.util.RLP;
import org.ethereum.util.RLPElement;
@ -12,8 +13,8 @@ import org.spongycastle.util.encoders.Hex;
import java.util.*;
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
import static org.ethereum.util.ByteUtil.wrap;
import static org.ethereum.datasource.DataSourcePool.levelDbByName;
import static org.ethereum.util.ByteUtil.*;
/**
* @author Roman Mandeleil
@ -23,13 +24,16 @@ public class ContractDetailsImpl implements ContractDetails {
private byte[] rlpEncoded;
private byte[] address = EMPTY_BYTE_ARRAY;
private byte[] code = EMPTY_BYTE_ARRAY;
private Set<ByteArrayWrapper> keys = new HashSet<>();
private SecureTrie storageTrie = new SecureTrie(null);
private boolean dirty = false;
private boolean deleted = false;
private SecureTrie storageTrie = new SecureTrie(new HashMapDB());
private Set<ByteArrayWrapper> keys = new HashSet<>(); // FIXME: sync to the disk
private boolean externalStorage;
private KeyValueDataSource externalStorageDataSource;
private int keysSize;
public ContractDetailsImpl() {
}
@ -38,39 +42,50 @@ public class ContractDetailsImpl implements ContractDetails {
decode(rlpCode);
}
public ContractDetailsImpl(SecureTrie storageTrie, byte[] code) {
public ContractDetailsImpl(byte[] address, SecureTrie storageTrie, byte[] code) {
this.address = address;
this.storageTrie = storageTrie;
this.code = code;
}
private void addKey(byte[] key) {
keys.add(wrap(key));
keysSize += key.length;
}
private void removeKey(byte[] key) {
if (keys.remove(wrap(key))) {
keysSize -= key.length;
}
}
@Override
public void put(DataWord key, DataWord value) {
if (value.equals(DataWord.ZERO)){
if (value.equals(DataWord.ZERO)) {
storageTrie.delete(key.getData());
keys.remove(wrap(key.getData()));
} else{
removeKey(key.getData());
} else {
storageTrie.update(key.getData(), RLP.encodeElement(value.getNoLeadZeroesData()));
keys.add(wrap(key.getData()));
addKey(key.getData());
}
this.setDirty(true);
this.rlpEncoded = null;
externalStorage = (keys.size() > SystemProperties.CONFIG.detailsInMemoryStorageLimit()) || externalStorage;
}
@Override
public DataWord get(DataWord key) {
DataWord result = null;
byte[] data = storageTrie.get(key.getData());
if (data.length == 0)
return null;
else{
if (data.length > 0) {
byte[] dataDecoded = RLP.decode2(data).get(0).getRLPData();
return new DataWord(dataDecoded);
result = new DataWord(dataDecoded);
}
return result;
}
@Override
@ -94,16 +109,24 @@ public class ContractDetailsImpl implements ContractDetails {
RLPList data = RLP.decode2(rlpCode);
RLPList rlpList = (RLPList) data.get(0);
RLPItem storage = (RLPItem) rlpList.get(0);
RLPElement code = rlpList.get(1);
RLPList keys = (RLPList) rlpList.get(2);
RLPItem address = (RLPItem) rlpList.get(0);
RLPItem isExternalStorage = (RLPItem) rlpList.get(1);
RLPItem storage = (RLPItem) rlpList.get(2);
RLPElement code = rlpList.get(3);
RLPList keys = (RLPList) rlpList.get(4);
RLPItem storageRoot = (RLPItem) rlpList.get(5);
this.address = address.getRLPData();
this.externalStorage = (isExternalStorage.getRLPData() != null);
this.storageTrie.deserialize(storage.getRLPData());
this.code = (code.getRLPData() == null) ? EMPTY_BYTE_ARRAY : code.getRLPData();
for (int i = 0; i < keys.size(); ++i){
byte[] key = keys.get(i).getRLPData();
this.keys.add(wrap(key));
for (RLPElement key : keys) {
addKey(key.getRLPData());
}
if (externalStorage) {
storageTrie.setRoot(storageRoot.getRLPData());
storageTrie.getCache().setDB(getExternalStorageDataSource());
}
this.rlpEncoded = rlpCode;
@ -111,15 +134,18 @@ public class ContractDetailsImpl implements ContractDetails {
@Override
public byte[] getEncoded() {
if (rlpEncoded == null) {
byte[] storage = RLP.encodeElement(storageTrie.serialize());
byte[] rlpAddress = RLP.encodeElement(address);
byte[] rlpIsExternalStorage = RLP.encodeByte((byte) (externalStorage ? 1 : 0));
byte[] rlpStorageRoot = RLP.encodeElement(externalStorage ? storageTrie.getRootHash() : EMPTY_BYTE_ARRAY );
byte[] rlpStorage = RLP.encodeElement(storageTrie.serialize());
byte[] rlpCode = RLP.encodeElement(code);
byte[] rlpKeys = RLP.encodeSet(keys);
this.rlpEncoded = RLP.encodeList(storage, rlpCode, rlpKeys);
this.rlpEncoded = RLP.encodeList(rlpAddress, rlpIsExternalStorage, rlpStorage, rlpCode, rlpKeys, rlpStorageRoot);
}
return rlpEncoded;
}
@ -144,14 +170,12 @@ public class ContractDetailsImpl implements ContractDetails {
return deleted;
}
@Override
public Map<DataWord, DataWord> getStorage() {
Map<DataWord, DataWord> storage = new HashMap<>();
for (ByteArrayWrapper keyBytes : keys){
for (ByteArrayWrapper keyBytes : keys) {
DataWord key = new DataWord(keyBytes);
DataWord value = get(key);
@ -170,14 +194,40 @@ public class ContractDetailsImpl implements ContractDetails {
@Override
public void setStorage(Map<DataWord, DataWord> storage) {
for (DataWord key : storage.keySet()) {
DataWord value = storage.get(key);
put(key, value);
put(key, storage.get(key));
}
}
@Override
public byte[] getAddress() {
return address;
}
@Override
public void setAddress(byte[] address) {
this.address = address;
this.rlpEncoded = null;
}
@Override
public void syncStorage() {
    // Nothing to flush while storage is still purely in-memory; only the
    // external (on-disk) mode needs the trie cache pointed at the LevelDB
    // source and synced.
    if (!externalStorage) return;
    storageTrie.getCache().setDB(getExternalStorageDataSource());
    storageTrie.sync();
}
private KeyValueDataSource getExternalStorageDataSource() {
if (externalStorageDataSource == null) {
externalStorageDataSource = levelDbByName("details-storage/" + toHexString(address));
}
return externalStorageDataSource;
}
public void setExternalStorageDataSource(KeyValueDataSource dataSource) {
this.externalStorageDataSource = dataSource;
}
@Override
public ContractDetails clone() {
@ -189,7 +239,7 @@ public class ContractDetailsImpl implements ContractDetails {
storageTrie.getRoot();
return new ContractDetailsImpl(null, cloneCode);
return new ContractDetailsImpl(address, null, cloneCode);
}
@Override
@ -200,6 +250,15 @@ public class ContractDetailsImpl implements ContractDetails {
return ret;
}
@Override
public int getAllocatedMemorySize() {
int result = rlpEncoded == null ? 0 : rlpEncoded.length;
result += address.length;
result += code.length;
result += storageTrie.getCache().getAllocatedMemorySize();
return result;
}
}

View File

@ -29,7 +29,7 @@ public class DatabaseImpl implements Database {
this.keyValueDataSource = keyValueDataSource;
}
public DatabaseImpl(String name) {
keyValueDataSource.setName(name);

View File

@ -51,6 +51,8 @@ public class DetailsDataStore {
}
public void update(byte[] key, ContractDetails contractDetails) {
contractDetails.setAddress(key);
ByteArrayWrapper wrappedKey = wrap(key);
cache.put(wrappedKey, contractDetails);
removes.remove(wrappedKey);
@ -79,9 +81,12 @@ public class DetailsDataStore {
Map<byte[], byte[]> batch = new HashMap<>();
for (Map.Entry<ByteArrayWrapper, ContractDetails> entry : cache.entrySet()) {
ContractDetails details = entry.getValue();
details.syncStorage();
byte[] key = entry.getKey().getData();
byte[] value = entry.getValue().getEncoded();
byte[] value = details.getEncoded();
batch.put(key, value);
totalSize += value.length;
}
@ -116,4 +121,13 @@ public class DetailsDataStore {
System.out.println("drafted: " + addr);
} catch (IOException e) {e.printStackTrace();}
}
// Estimated total in-memory footprint of all cached contract details,
// obtained by summing each entry's own estimate.
public int getAllocatedMemorySize() {
    int total = 0;
    for (ContractDetails entry : cache.values()) {
        total += entry.getAllocatedMemorySize();
    }
    return total;
}
}

View File

@ -251,7 +251,7 @@ public class InMemoryBlockStore implements BlockStore{
logger.info("Loaded db in: {} ms", ((float)(t_ - t) / 1_000_000));
}
//@Override
@Override
public void setSessionFactory(SessionFactory sessionFactory) {
this.sessionFactory = sessionFactory;
}

View File

@ -10,6 +10,7 @@ import org.ethereum.json.EtherObjectMapper;
import org.ethereum.json.JSONHelper;
import org.ethereum.trie.SecureTrie;
import org.ethereum.trie.Trie;
import org.ethereum.trie.TrieImpl;
import org.ethereum.vm.DataWord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -134,19 +135,21 @@ public class RepositoryImpl implements Repository {
} else {
if (!contractDetails.isDirty()) continue;
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl)contractDetails;
if (contractDetailsCache.origContract == null){
contractDetailsCache.origContract = new ContractDetailsImpl();
contractDetailsCache.commit();
}
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl)contractDetails;
if (contractDetailsCache.origContract == null){
contractDetailsCache.origContract = new ContractDetailsImpl();
contractDetailsCache.origContract.setAddress(hash.getData());
contractDetailsCache.commit();
}
contractDetails = contractDetailsCache.origContract;
contractDetails = contractDetailsCache.origContract;
dds.update(hash.getData(), contractDetails);
dds.update(hash.getData(), contractDetails);
accountState.setStateRoot(contractDetails.getStorageHash());
accountState.setCodeHash(sha3(contractDetails.getCode()));
worldState.update(hash.getData(), accountState.getEncoded());
accountState.setStateRoot(contractDetails.getStorageHash());
accountState.setCodeHash(sha3(contractDetails.getCode()));
worldState.update(hash.getData(), accountState.getEncoded());
if (logger.isDebugEnabled()) {
logger.debug("update: [{}],nonce: [{}] balance: [{}] \n [{}]",
@ -175,6 +178,9 @@ public class RepositoryImpl implements Repository {
worldState.sync();
}
public int getAllocatedMemorySize() {
return dds.getAllocatedMemorySize() + ((TrieImpl) worldState).getCache().getAllocatedMemorySize();
}
@Override
public void rollback() {
@ -455,9 +461,7 @@ public class RepositoryImpl implements Repository {
AccountState accountState = new AccountState();
worldState.update(addr, accountState.getEncoded());
ContractDetails contractDetails = new ContractDetailsImpl();
dds.update(addr, contractDetails);
dds.update(addr, new ContractDetailsImpl());
return accountState;
}
@ -475,19 +479,13 @@ public class RepositoryImpl implements Repository {
AccountState account = getAccountState(addr);
ContractDetails details = getContractDetails(addr);
if (account == null)
account = new AccountState();
else
account = account.clone();
account = (account == null) ? new AccountState() : account.clone();
details = new ContractDetailsCacheImpl(details);
// details.setAddress(addr);
if (details == null) {
details = new ContractDetailsCacheImpl(null);
}
else
details = new ContractDetailsCacheImpl(details);
cacheAccounts.put(wrap(addr), account);
cacheDetails.put(wrap(addr), details);
ByteArrayWrapper wrappedAddress = wrap(addr);
cacheAccounts.put(wrappedAddress, account);
cacheDetails.put(wrappedAddress, details);
}
@Override

View File

@ -4,6 +4,7 @@ import org.ethereum.config.SystemProperties;
import org.ethereum.core.Transaction;
import org.ethereum.datasource.KeyValueDataSource;
import org.ethereum.datasource.LevelDbDataSource;
import org.ethereum.datasource.mapdb.MapDBFactory;
import org.ethereum.datasource.redis.RedisConnection;
import org.ethereum.db.RepositoryImpl;
import org.hibernate.SessionFactory;
@ -33,6 +34,8 @@ public class CommonConfig {
if ("redis".equals(dataSource) && redisConnection.isAvailable()) {
// Name will be defined before initialization
return redisConnection.createDataSource("");
} else if ("mapdb".equals(dataSource)) {
return mapDBFactory.createDataSource();
}
dataSource = "leveldb";

View File

@ -435,7 +435,11 @@ public class EthHandler extends SimpleChannelInboundHandler<EthMessage> {
Vector<Block> blocks = new Vector<>();
for (byte[] hash : hashes) {
Block block = blockchain.getBlockByHash(hash);
blocks.add(block);
if (block != null) {
blocks.add(block);
} else {
logger.error("Could not retrieve block by hash: " + hash.toString());
}
}
BlocksMessage bm = new BlocksMessage(blocks);

View File

@ -13,7 +13,6 @@ import org.spongycastle.util.encoders.Hex;
import java.util.ArrayList;
import java.util.List;
import static org.ethereum.net.p2p.P2pMessageCodes.HELLO;
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
/**
@ -65,28 +64,33 @@ public class HelloMessage extends P2pMessage {
byte[] p2pVersionBytes = paramsList.get(0).getRLPData();
this.p2pVersion = p2pVersionBytes != null ? p2pVersionBytes[0] : 0;
byte[] clientIdBytes = paramsList.get(1).getRLPData();
this.clientId = new String(clientIdBytes != null ? clientIdBytes : EMPTY_BYTE_ARRAY);
try {
byte[] clientIdBytes = paramsList.get(1).getRLPData();
this.clientId = new String(clientIdBytes != null ? clientIdBytes : EMPTY_BYTE_ARRAY);
RLPList capabilityList = (RLPList) paramsList.get(2);
this.capabilities = new ArrayList<>();
for (Object aCapabilityList : capabilityList) {
RLPList capabilityList = (RLPList) paramsList.get(2);
this.capabilities = new ArrayList<>();
for (Object aCapabilityList : capabilityList) {
RLPElement capId = ((RLPList) aCapabilityList).get(0);
RLPElement capVersion = ((RLPList) aCapabilityList).get(1);
RLPElement capId = ((RLPList) aCapabilityList).get(0);
RLPElement capVersion = ((RLPList) aCapabilityList).get(1);
String name = new String(capId.getRLPData());
byte version = capVersion.getRLPData() == null ? 0 : capVersion.getRLPData()[0];
String name = new String(capId.getRLPData());
byte version = capVersion.getRLPData() == null ? 0 : capVersion.getRLPData()[0];
Capability cap = new Capability(name, version);
this.capabilities.add(cap);
Capability cap = new Capability(name, version);
this.capabilities.add(cap);
}
byte[] peerPortBytes = paramsList.get(3).getRLPData();
this.listenPort = ByteUtil.byteArrayToInt(peerPortBytes);
byte[] peerIdBytes = paramsList.get(4).getRLPData();
this.peerId = Hex.toHexString(peerIdBytes);
}
catch ( Exception e ) {
System.out.println(e.getMessage());
}
byte[] peerPortBytes = paramsList.get(3).getRLPData();
this.listenPort = ByteUtil.byteArrayToInt(peerPortBytes);
byte[] peerIdBytes = paramsList.get(4).getRLPData();
this.peerId = Hex.toHexString(peerIdBytes);
this.parsed = true;
}

View File

@ -12,6 +12,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static java.lang.String.format;
import static org.ethereum.util.ByteUtil.length;
import static org.ethereum.util.ByteUtil.wrap;
import static org.ethereum.util.Value.fromRlpEncoded;
@ -23,9 +24,11 @@ public class Cache {
private static final Logger logger = LoggerFactory.getLogger("general");
private final KeyValueDataSource dataSource;
private KeyValueDataSource dataSource;
private Map<ByteArrayWrapper, Node> nodes = new ConcurrentHashMap<>();
private boolean isDirty;
private int allocatedMemorySize;
public Cache(KeyValueDataSource dataSource) {
this.dataSource = dataSource;
@ -44,6 +47,9 @@ public class Cache {
byte[] sha = value.hash();
this.nodes.put(wrap(sha), new Node(value, true));
this.isDirty = true;
allocatedMemorySize += length(sha, enc);
return sha;
}
return value;
@ -54,20 +60,29 @@ public class Cache {
// First check if the key is the cache
Node node = this.nodes.get(wrappedKey);
if (node == null) {
byte[] data = this.dataSource.get(key);
byte[] data = (this.dataSource == null) ? null : this.dataSource.get(key);
node = new Node(fromRlpEncoded(data), false);
this.nodes.put(wrappedKey, node);
allocatedMemorySize += length(key, data);
}
return node.getValue();
}
public void delete(byte[] key) {
this.nodes.remove(wrap(key));
ByteArrayWrapper wrappedKey = wrap(key);
if (dataSource == null) return;
this.dataSource.delete(key);
Node node = this.nodes.get(wrappedKey);
if (node != null) {
this.allocatedMemorySize -= length(key, node.getValue().encode());
}
this.nodes.remove(wrappedKey);
if (dataSource != null) {
this.dataSource.delete(key);
}
}
public void commit() {
@ -76,7 +91,6 @@ public class Cache {
long start = System.nanoTime();
long totalSize = 0;
Map<byte[], byte[]> batch = new HashMap<>();
for (ByteArrayWrapper key : this.nodes.keySet()) {
Node node = this.nodes.get(key);
@ -86,20 +100,20 @@ public class Cache {
byte[] value = node.getValue().encode();
batch.put(key.getData(), value);
totalSize += value.length;
}
}
dataSource.updateBatch(batch);
this.dataSource.updateBatch(batch);
this.isDirty = false;
this.nodes.clear();
long finish = System.nanoTime();
float flushSize = (float) totalSize / 1048576;
float flushSize = (float) this.allocatedMemorySize / 1048576;
float flushTime = (float) (finish - start) / 1_000_000;
logger.info(format("Flush state in: %02.2f ms, %d nodes, %02.2fMB", flushTime, batch.size(), flushSize));
this.allocatedMemorySize = 0;
}
public void undo() {
@ -129,17 +143,41 @@ public class Cache {
}
// Renders every cached node that carries a value as one "<key> : <value>"
// line; nodes with a null value are skipped. Intended for debugging.
public String cacheDump() {
    StringBuilder dump = new StringBuilder();
    for (ByteArrayWrapper key : nodes.keySet()) {
        Node entry = nodes.get(key);
        if (entry.getValue() == null) continue;
        dump.append(key.toString()).append(" : ").append(entry.getValue().toString()).append("\n");
    }
    return dump.toString();
}
// Re-points this cache at a new backing data source, migrating existing
// contents so nothing is lost in the switch. NOTE(review): the statement
// order matters — the snapshot must be built (and the old source closed)
// before the bulk load and the field swap.
public void setDB(KeyValueDataSource dataSource) {
// Same backing source already installed; nothing to migrate.
if (this.dataSource == dataSource) return;
Map<byte[], byte[]> rows = new HashMap<>();
if (this.dataSource == null) {
// No previous store: seed the new one from the in-memory nodes,
// persisting only the clean (non-dirty) entries.
for (ByteArrayWrapper key : nodes.keySet()) {
Node node = nodes.get(key);
if (!node.isDirty()) {
rows.put(key.getData(), node.getValue().encode());
}
}
} else {
// Previous store exists: copy every row out of it, then release it.
for (byte[] key : this.dataSource.keys()) {
rows.put(key, this.dataSource.get(key));
}
this.dataSource.close();
}
// Bulk-load the migrated rows into the new store and adopt it.
dataSource.updateBatch(rows);
this.dataSource = dataSource;
}
public int getAllocatedMemorySize() {
return allocatedMemorySize;
}
}

View File

@ -5,18 +5,16 @@ import org.ethereum.datasource.KeyValueDataSource;
import static org.ethereum.crypto.SHA3Helper.sha3;
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
public class SecureTrie extends TrieImpl implements Trie{
public class SecureTrie extends TrieImpl implements Trie {
public SecureTrie(KeyValueDataSource db) {
super(db, "");
this(db, "");
}
public SecureTrie(KeyValueDataSource db, Object root) {
super(db, root);
}
@Override
public byte[] get(byte[] key) {
return super.get(sha3(key));
@ -33,53 +31,10 @@ public class SecureTrie extends TrieImpl implements Trie{
}
@Override
public byte[] getRootHash() {
return super.getRootHash();
}
@Override
public void setRoot(byte[] root) {
super.setRoot(root);
}
@Override
public void sync() {
super.sync();
}
@Override
public void undo() {
super.undo();
}
@Override
public String getTrieDump() {
return super.getTrieDump();
}
@Override
public boolean validate() {
return super.validate();
}
@Override
public byte[] serialize() {
return super.serialize();
}
@Override
public void deserialize(byte[] data) {
super.deserialize(data);
}
@Override
public SecureTrie clone(){
public SecureTrie clone() {
this.getCache();
this.getRoot();
return null;
}
}

View File

@ -444,4 +444,12 @@ public class ByteUtil {
return result;
}
/**
 * Sums the lengths of the given arrays; a null array contributes zero.
 *
 * @param bytes arrays to measure (individual elements may be null)
 * @return combined length of all non-null arrays
 */
public static int length(byte[]... bytes) {
    int total = 0;
    for (byte[] chunk : bytes) {
        if (chunk != null) {
            total += chunk.length;
        }
    }
    return total;
}
}

View File

@ -86,6 +86,7 @@ public class Memory implements ProgramTraceListenerAware {
if (traceListener != null) traceListener.onMemoryWrite(address, data, dataSize);
}
public void extendAndWrite(int address, int allocSize, byte[] data) {
extend(address, allocSize);
write(address, data, data.length, false);

View File

@ -26,14 +26,17 @@ peer.discovery.ip.list = poc-7.ethdev.com:30303,\
#peer.active.port = 30300
#peer.active.nodeid = 4e94cab3e9a85a22b59f69a2ad1f10ff1eaff5f8d94a0025df18c936a687b6ac99b3fb655677e8b9d08087319bca69ad2ab0b80a9d0ab47296bdc54c8cb09853
#peer.active.ip = 139.162.13.89
#peer.active.port = 30303
#peer.active.nodeid = bf01b54b6bc7faa203286dfb8359ce11d7b1fe822968fb4991f508d6f5a36ab7d9ae8af9b0d61c0467fb08567e0fb71cfb9925a370b69f9ede97927db473d1f5
peer.active.ip = 192.168.122.90
peer.active.ip = 46.101.244.204
peer.active.port = 30303
peer.active.nodeid = 4a531abc51448e584faae944d7e244e3f1ac6a629579a937b8c16ed98efb2a7aff29f6ab8c73c60041d3b078533bca842ec61d9a85f12ea9e6c3c7657e85f062
peer.active.nodeid = 8f4dd2cc9b97143985ed129493069b253a570a6f2e55bb61004316b3db9639d8bac77a7d59188f87c747c9984f94e7b999aea285b772a3f8ca5743accb1d3927
# peer.active.ip = 139.162.13.89
# peer.active.port = 30303
# peer.active.nodeid = bf01b54b6bc7faa203286dfb8359ce11d7b1fe822968fb4991f508d6f5a36ab7d9ae8af9b0d61c0467fb08567e0fb71cfb9925a370b69f9ede97927db473d1f5
#peer.active.ip = 192.168.122.90
#peer.active.port = 30303
#peer.active.nodeid = 4a531abc51448e584faae944d7e244e3f1ac6a629579a937b8c16ed98efb2a7aff29f6ab8c73c60041d3b078533bca842ec61d9a85f12ea9e6c3c7657e85f062
# peer.active.ip = 52.4.40.229
# peer.active.port = 30303
@ -64,19 +67,19 @@ peer.discovery.workers = 3
# connection timeout for trying to
# connect to a peer [seconds]
peer.connection.timeout = 2
peer.connection.timeout = 300
# the time we wait to the network
# to approve the transaction, the
# transaction got approved when
# include into a transactions msg
# retrieved from the peer [seconds]
transaction.approve.timeout = 15
transaction.approve.timeout = 300
# the parameter specifies how much
# time we will wait for a message
# to come before closing the channel
peer.channel.read.timeout = 30
peer.channel.read.timeout = 300
# default directory where we keep
# basic Serpent samples relative
@ -151,7 +154,7 @@ max.hashes.ask = 10000
# sequentially sending GET_BLOCKS msg
# we specify number of blocks we want
# to get, recommended value [1..120]
max.blocks.ask = 500
max.blocks.ask = 1
# the network layer will ask for
@ -203,5 +206,7 @@ blockchain.only=false
#blocks.loader=E:\\temp\\_poc-9-blocks\\poc-9-st-530k.dmp
#blocks.loader=E:\\temp\\_poc-9-blocks\\poc-9-619k.dmp
flush.batch.size=10000
flush.ignore.consensus=false
flush.blocks.batch.size=10000
flush.blocks.repo.size=256000000
flush.blocks.ignore.consensus=false
details.inmemory.storage.limit=1000

View File

@ -0,0 +1,25 @@
package org.ethereum;
import org.ethereum.vm.DataWord;
import java.util.Random;
/** Random-data helpers for tests. Not suitable for any security-sensitive use. */
public final class TestUtils {

    // Shared RNG: creating a new Random per call re-seeds from the clock each
    // time, which is wasteful and can yield correlated values in tight loops.
    private static final Random RANDOM = new Random();

    private TestUtils() {
    }

    /** Returns a freshly generated array of {@code length} random bytes. */
    public static byte[] randomBytes(int length) {
        byte[] result = new byte[length];
        RANDOM.nextBytes(result);
        return result;
    }

    /** Returns a DataWord backed by 32 random bytes. */
    public static DataWord randomDataWord() {
        return new DataWord(randomBytes(32));
    }

    /** Returns a random 20-byte value shaped like an Ethereum address. */
    public static byte[] randomAddress() {
        return randomBytes(20);
    }
}

View File

@ -0,0 +1,53 @@
package org.ethereum.datasource;
import org.junit.Ignore;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.ethereum.TestUtils.randomBytes;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@Ignore
public class LevelDbDataSourceTest {

    @Test
    public void testBatchUpdating() {
        // Both tests assert on keys().size(), which is only valid against a freshly
        // created database. The original shared name "test" lets state leak between
        // tests and between runs (LevelDB persists on disk), so each test now uses
        // a unique name.
        // NOTE(review): assumes each name maps to its own on-disk database — confirm.
        LevelDbDataSource dataSource = new LevelDbDataSource(uniqueName("batch"));
        dataSource.init();

        final int batchSize = 100;
        Map<byte[], byte[]> batch = createBatch(batchSize);

        dataSource.updateBatch(batch);
        assertEquals(batchSize, dataSource.keys().size());

        dataSource.close();
    }

    @Test
    public void testPutting() {
        // Unique name per test run; see note in testBatchUpdating().
        LevelDbDataSource dataSource = new LevelDbDataSource(uniqueName("put"));
        dataSource.init();

        byte[] key = randomBytes(32);
        dataSource.put(key, randomBytes(32));

        assertNotNull(dataSource.get(key));
        assertEquals(1, dataSource.keys().size());

        dataSource.close();
    }

    /** Builds a db name unlikely to collide with earlier test runs. */
    private static String uniqueName(String tag) {
        return "test-" + tag + "-" + System.nanoTime();
    }

    /** Creates {@code batchSize} random 32-byte key/value pairs. */
    private static Map<byte[], byte[]> createBatch(int batchSize) {
        Map<byte[], byte[]> result = new HashMap<>();
        for (int i = 0; i < batchSize; i++) {
            result.put(randomBytes(32), randomBytes(32));
        }
        return result;
    }
}

View File

@ -1,13 +1,26 @@
package org.ethereum.db;
import org.ethereum.config.SystemProperties;
import org.ethereum.datasource.HashMapDB;
import org.ethereum.datasource.KeyValueDataSource;
import org.ethereum.vm.DataWord;
import org.junit.Test;
import org.spongycastle.util.encoders.Hex;
import java.util.HashMap;
import java.util.Map;
import static org.ethereum.TestUtils.randomAddress;
import static org.ethereum.TestUtils.randomBytes;
import static org.ethereum.TestUtils.randomDataWord;
import static org.ethereum.util.ByteUtil.toHexString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ContractDetailsTest {
private static final int IN_MEMORY_STORAGE_LIMIT = SystemProperties.CONFIG.detailsInMemoryStorageLimit();
@Test
public void test_1(){
@ -36,13 +49,13 @@ public class ContractDetailsTest {
assertEquals(Hex.toHexString(val_2),
Hex.toHexString(contractDetails_.get(new DataWord(key_2)).getNoLeadZeroesData()));
}
@Test
public void test_2(){
byte[] code = Hex.decode("7c0100000000000000000000000000000000000000000000000000000000600035046333d546748114610065578063430fe5f01461007c5780634d432c1d1461008d578063501385b2146100b857806357eb3b30146100e9578063dbc7df61146100fb57005b6100766004356024356044356102f0565b60006000f35b61008760043561039e565b60006000f35b610098600435610178565b8073ffffffffffffffffffffffffffffffffffffffff1660005260206000f35b6100c96004356024356044356101a0565b8073ffffffffffffffffffffffffffffffffffffffff1660005260206000f35b6100f1610171565b8060005260206000f35b610106600435610133565b8360005282602052816040528073ffffffffffffffffffffffffffffffffffffffff1660605260806000f35b5b60006020819052908152604090208054600182015460028301546003909301549192909173ffffffffffffffffffffffffffffffffffffffff1684565b5b60015481565b5b60026020526000908152604090205473ffffffffffffffffffffffffffffffffffffffff1681565b73ffffffffffffffffffffffffffffffffffffffff831660009081526020819052604081206002015481908302341080156101fe575073ffffffffffffffffffffffffffffffffffffffff8516600090815260208190526040812054145b8015610232575073ffffffffffffffffffffffffffffffffffffffff85166000908152602081905260409020600101548390105b61023b57610243565b3391506102e8565b6101966103ca60003973ffffffffffffffffffffffffffffffffffffffff3381166101965285166101b68190526000908152602081905260408120600201546101d6526101f68490526102169080f073ffffffffffffffffffffffffffffffffffffffff8616600090815260208190526040902060030180547fffffffffffffffffffffffff0000000000000000000000000000000000000000168217905591508190505b509392505050565b73ffffffffffffffffffffffffffffffffffffffff33166000908152602081905260408120548190821461032357610364565b60018054808201909155600090815260026020526040902080547fffffffffffffffffffffffff000000000000000000000000000000000000000016331790555b50503373ffffffffffffffffffffffffffffffffffffffff1660009081526020819052604090209081556001810192909255600290910155565b3373ffffffffffffffffffffffffffffffffffffffff1660009081526020819052604090206002015556006080610196600439600480516024516044
51606451600080547fffffffffffffffffffffffff0000000000000000000000000000000000000000908116909517815560018054909516909317909355600355915561013390819061006390396000f3007c0100000000000000000000000000000000000000000000000000000000600035046347810fe381146100445780637e4a1aa81461005557806383d2421b1461006957005b61004f6004356100ab565b60006000f35b6100636004356024356100fc565b60006000f35b61007460043561007a565b60006000f35b6001543373ffffffffffffffffffffffffffffffffffffffff9081169116146100a2576100a8565b60078190555b50565b73ffffffffffffffffffffffffffffffffffffffff8116600090815260026020526040902080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0016600117905550565b6001543373ffffffffffffffffffffffffffffffffffffffff9081169116146101245761012f565b600582905560068190555b505056");
byte[] address = randomBytes(32);
byte[] key_0 = Hex.decode("39a2338cbc13ff8523a9b1c9bc421b7518d63b70aa690ad37cb50908746c9a55");
byte[] val_0 = Hex.decode("0000000000000000000000000000000000000000000000000000000000000064");
@ -89,6 +102,7 @@ public class ContractDetailsTest {
ContractDetailsImpl contractDetails = new ContractDetailsImpl();
contractDetails.setCode(code);
contractDetails.setAddress(address);
contractDetails.put(new DataWord(key_0), new DataWord(val_0));
contractDetails.put(new DataWord(key_1), new DataWord(val_1));
contractDetails.put(new DataWord(key_2), new DataWord(val_2));
@ -111,6 +125,9 @@ public class ContractDetailsTest {
assertEquals(Hex.toHexString(code),
Hex.toHexString(contractDetails_.getCode()));
assertEquals(Hex.toHexString(address),
Hex.toHexString(contractDetails_.getAddress()));
assertEquals(Hex.toHexString(val_1),
Hex.toHexString(contractDetails_.get(new DataWord(key_1)).getData()));
@ -151,5 +168,102 @@ public class ContractDetailsTest {
Hex.toHexString(contractDetails_.get(new DataWord(key_13)).getData()));
}
@Test
// Round-trips a contract whose storage exceeds the in-memory limit through
// RLP encode/decode and verifies address, code, and every storage entry survive.
public void testExternalStorageSerialization() {
byte[] address = randomAddress();
byte[] code = randomBytes(512);
Map<DataWord, DataWord> elements = new HashMap<>();
// Backing store that receives entries once the in-memory limit is crossed.
HashMapDB externalStorage = new HashMapDB();
ContractDetailsImpl original = new ContractDetailsImpl();
original.setExternalStorageDataSource(externalStorage);
original.setAddress(address);
original.setCode(code);
// Deliberately overflow the in-memory limit so external storage is exercised.
for (int i = 0; i < IN_MEMORY_STORAGE_LIMIT + 10; i++) {
DataWord key = randomDataWord();
DataWord value = randomDataWord();
elements.put(key, value);
original.put(key, value);
}
original.syncStorage();
byte[] rlp = original.getEncoded();
ContractDetailsImpl deserialized = new ContractDetailsImpl();
deserialized.setExternalStorageDataSource(externalStorage);
deserialized.decode(rlp);
assertEquals(toHexString(address), toHexString(deserialized.getAddress()));
assertEquals(toHexString(code), toHexString(deserialized.getCode()));
Map<DataWord, DataWord> storage = deserialized.getStorage();
assertEquals(elements.size(), storage.size());
for (DataWord key : elements.keySet()) {
assertEquals(elements.get(key), storage.get(key));
}
// NOTE(review): the two puts below have no assertions after them — presumably
// putting DataWord.ZERO is meant to delete/ignore entries, but nothing verifies
// it. Either assert the resulting state or drop these lines; TODO confirm.
DataWord deletedKey = elements.keySet().iterator().next();
deserialized.put(deletedKey, DataWord.ZERO);
deserialized.put(randomDataWord(), DataWord.ZERO);
}
/**
 * Starts a contract just under the in-memory storage limit (nothing should hit
 * the external source), then pushes it over the limit after a decode round-trip
 * and verifies storage migrates to the external data source with no lost entries.
 */
@Test
public void testExternalStorageTransition() {
    byte[] contractAddress = randomAddress();
    byte[] contractCode = randomBytes(512);
    Map<DataWord, DataWord> expected = new HashMap<>();
    HashMapDB diskStore = new HashMapDB();

    ContractDetailsImpl details = new ContractDetailsImpl();
    details.setExternalStorageDataSource(diskStore);
    details.setAddress(contractAddress);
    details.setCode(contractCode);

    // Stay one entry below the limit so everything remains in memory.
    int belowLimit = IN_MEMORY_STORAGE_LIMIT - 1;
    for (int added = 0; added < belowLimit; added++) {
        DataWord slot = randomDataWord();
        DataWord word = randomDataWord();
        expected.put(slot, word);
        details.put(slot, word);
    }

    details.syncStorage();
    assertTrue(diskStore.getAddedItems() == 0);

    ContractDetails reloaded = deserialize(details.getEncoded(), diskStore);

    // Adding ten more entries crosses the in-memory storage limit.
    for (int added = 0; added < 10; added++) {
        DataWord slot = randomDataWord();
        DataWord word = randomDataWord();
        expected.put(slot, word);
        reloaded.put(slot, word);
    }

    reloaded.syncStorage();
    assertTrue(diskStore.getAddedItems() > 0);

    reloaded = deserialize(reloaded.getEncoded(), diskStore);

    Map<DataWord, DataWord> actual = reloaded.getStorage();
    assertEquals(expected.size(), actual.size());
    for (Map.Entry<DataWord, DataWord> entry : expected.entrySet()) {
        assertEquals(entry.getValue(), actual.get(entry.getKey()));
    }
}
/** Decodes {@code rlp} into a fresh ContractDetailsImpl wired to the given external storage. */
private static ContractDetails deserialize(byte[] rlp, KeyValueDataSource externalStorage) {
    ContractDetailsImpl details = new ContractDetailsImpl();
    details.setExternalStorageDataSource(externalStorage);
    details.decode(rlp);
    return details;
}
}

View File

@ -1,16 +1,20 @@
package org.ethereum.db;
import org.ethereum.config.SystemProperties;
import org.ethereum.datasource.HashMapDB;
import org.ethereum.datasource.KeyValueDataSource;
import org.ethereum.vm.DataWord;
import org.junit.Test;
import org.spongycastle.util.encoders.Hex;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import javax.annotation.Nullable;
import java.util.Map;
import static org.ethereum.TestUtils.*;
import static org.junit.Assert.*;
public class DetailsDataStoreTest {
@Test
public void test1(){
@ -24,6 +28,7 @@ public class DetailsDataStoreTest {
byte[] value = Hex.decode("aa");
ContractDetails contractDetails = new ContractDetailsImpl();
contractDetails.setAddress(randomAddress());
contractDetails.setCode(code);
contractDetails.put(new DataWord(key), new DataWord(value));
@ -57,6 +62,7 @@ public class DetailsDataStoreTest {
ContractDetails contractDetails = new ContractDetailsImpl();
contractDetails.setCode(code);
contractDetails.setAddress(randomAddress());
contractDetails.put(new DataWord(key), new DataWord(value));
dds.update(c_key, contractDetails);
@ -130,6 +136,69 @@ public class DetailsDataStoreTest {
ContractDetails contractDetails = dds.get(c_key);
assertNull(contractDetails);
}
@Test
// Verifies DetailsDataStore routes oversized contract storage to an external
// data source while small contracts stay fully in-memory, and that both
// round-trip through flush/get and RLP encode/decode.
public void testExternalStorage() {
DatabaseImpl db = new DatabaseImpl(new HashMapDB());
DetailsDataStore dds = new DetailsDataStore();
dds.setDB(db);
byte[] addrWithExternalStorage = randomAddress();
byte[] addrWithInternalStorage = randomAddress();
final int inMemoryStorageLimit = SystemProperties.CONFIG.detailsInMemoryStorageLimit();
HashMapDB externalStorage = new HashMapDB();
HashMapDB internalStorage = new HashMapDB();
// One contract just over the limit (should spill to externalStorage),
// one just under it (internalStorage should stay untouched).
ContractDetails detailsWithExternalStorage = randomContractDetails(512, inMemoryStorageLimit + 1, externalStorage);
ContractDetails detailsWithInternalStorage = randomContractDetails(512, inMemoryStorageLimit - 1, internalStorage);
dds.update(addrWithExternalStorage, detailsWithExternalStorage);
dds.update(addrWithInternalStorage, detailsWithInternalStorage);
dds.flush();
assertTrue(externalStorage.getAddedItems() > 0);
assertFalse(internalStorage.getAddedItems() > 0);
detailsWithExternalStorage = dds.get(addrWithExternalStorage);
assertNotNull(detailsWithExternalStorage);
Map<DataWord, DataWord> storage = detailsWithExternalStorage.getStorage();
assertNotNull(storage);
assertEquals(inMemoryStorageLimit + 1, storage.size());
byte[] withExternalStorageRlp = detailsWithExternalStorage.getEncoded();
ContractDetailsImpl decoded = new ContractDetailsImpl();
decoded.setExternalStorageDataSource(externalStorage);
decoded.decode(withExternalStorageRlp);
assertEquals(inMemoryStorageLimit + 1, decoded.getStorage().size());
// Externalized storage keeps the RLP smaller than the fully in-memory contract's.
assertTrue(withExternalStorageRlp.length < detailsWithInternalStorage.getEncoded().length);
detailsWithInternalStorage = dds.get(addrWithInternalStorage);
assertNotNull(detailsWithInternalStorage);
storage = detailsWithInternalStorage.getStorage();
assertNotNull(storage);
assertEquals(inMemoryStorageLimit - 1, storage.size());
// NOTE(review): this "in-memory to on-disk transition" check mutates state but
// asserts nothing afterwards — presumably it should verify externalStorage
// received items (or at least that put() does not throw). TODO confirm intent.
// from inmemory to ondisk transition checking
externalStorage = new HashMapDB();
((ContractDetailsImpl) detailsWithInternalStorage).setExternalStorageDataSource(externalStorage);
detailsWithInternalStorage.put(randomDataWord(), randomDataWord());
}
/**
 * Builds contract details with {@code codeSize} random code bytes and
 * {@code storageSize} random storage entries, optionally backed by the given
 * external key/value source.
 */
private static ContractDetails randomContractDetails(int codeSize, int storageSize, @Nullable KeyValueDataSource storageDataSource) {
    ContractDetailsImpl details = new ContractDetailsImpl();
    details.setCode(randomBytes(codeSize));
    if (storageDataSource != null) {
        details.setExternalStorageDataSource(storageDataSource);
    }
    int remaining = storageSize;
    while (remaining-- > 0) {
        details.put(randomDataWord(), randomDataWord());
    }
    return details;
}
}

View File

@ -67,14 +67,14 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
@Test
public void testEmpty(){
BlockStore blockStore = new InMemoryBlockStore();
//blockStore.setSessionFactory(sessionFactory());
blockStore.setSessionFactory(sessionFactory());
assertNull(blockStore.getBestBlock());
}
@Test
public void testFlush(){
BlockStore blockStore = new InMemoryBlockStore();
//blockStore.setSessionFactory(sessionFactory());
blockStore.setSessionFactory(sessionFactory());
for( Block block : blocks ){
blockStore.saveBlock(block, null);
@ -87,17 +87,17 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
public void testSimpleLoad(){
BlockStore blockStore = new InMemoryBlockStore();
//SessionFactory sessionFactory = sessionFactory();
SessionFactory sessionFactory = sessionFactory();
for( Block block : blocks ){
blockStore.saveBlock(block, null);
}
//blockStore.setSessionFactory(sessionFactory);
blockStore.setSessionFactory(sessionFactory);
blockStore.flush();
blockStore = new InMemoryBlockStore();
//blockStore.setSessionFactory(sessionFactory);
blockStore.setSessionFactory(sessionFactory);
blockStore.load();
@ -108,8 +108,8 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
public void testFlushEach1000(){
InMemoryBlockStore blockStore = new InMemoryBlockStore();
//SessionFactory sessionFactory = sessionFactory();
//blockStore.setSessionFactory(sessionFactory);
SessionFactory sessionFactory = sessionFactory();
blockStore.setSessionFactory(sessionFactory);
for( int i = 0; i < blocks.size(); ++i ){
@ -126,7 +126,7 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
public void testBlockHashByNumber(){
BlockStore blockStore = new InMemoryBlockStore();
//SessionFactory sessionFactory = sessionFactory();
SessionFactory sessionFactory = sessionFactory();
for( Block block : blocks ){
blockStore.saveBlock(block, null);
@ -141,7 +141,7 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
hash = Hex.toHexString(blockStore.getBlockHashByNumber(5000));
assertTrue(hash.startsWith("820aa7"));
//blockStore.setSessionFactory(sessionFactory);
blockStore.setSessionFactory(sessionFactory);
blockStore.flush();
hash = Hex.toHexString(blockStore.getBlockHashByNumber(7000));
@ -158,7 +158,7 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
public void testBlockByNumber(){
BlockStore blockStore = new InMemoryBlockStore();
//SessionFactory sessionFactory = sessionFactory();
SessionFactory sessionFactory = sessionFactory();
for( Block block : blocks ){
blockStore.saveBlock(block, null);
@ -173,7 +173,7 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
hash = Hex.toHexString(blockStore.getBlockByNumber(5000).getHash());
assertTrue(hash.startsWith("820aa7"));
//blockStore.setSessionFactory(sessionFactory);
blockStore.setSessionFactory(sessionFactory);
blockStore.flush();
hash = Hex.toHexString(blockStore.getBlockByNumber(7000).getHash());
@ -191,8 +191,8 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
public void testGetBlockByNumber() {
BlockStore blockStore = new InMemoryBlockStore();
//SessionFactory sessionFactory = sessionFactory();
//blockStore.setSessionFactory(sessionFactory);
SessionFactory sessionFactory = sessionFactory();
blockStore.setSessionFactory(sessionFactory);
for( Block block : blocks ){
blockStore.saveBlock(block, null);
@ -209,8 +209,8 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
public void testDbGetBlockByHash(){
BlockStore blockStore = new InMemoryBlockStore();
//SessionFactory sessionFactory = sessionFactory();
//blockStore.setSessionFactory(sessionFactory);
SessionFactory sessionFactory = sessionFactory();
blockStore.setSessionFactory(sessionFactory);
for( Block block : blocks ){
blockStore.saveBlock(block, null);
@ -241,7 +241,7 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
Scanner scanner = new Scanner(inputStream, "UTF-8");
BlockStore blockStore = new InMemoryBlockStore();
//blockStore.setSessionFactory(sessionFactory());
blockStore.setSessionFactory(sessionFactory());
while (scanner.hasNextLine()) {