Merged latest develop changes

parent cec379db59
commit 97417aa147

@@ -29,8 +29,9 @@ public class LevelDbDataSource implements KeyValueDataSource {

     private static final Logger logger = LoggerFactory.getLogger("db");

-    String name;
+    private String name;
     private DB db;
+    private boolean alive;

     public LevelDbDataSource() {
     }

@@ -42,14 +43,17 @@ public class LevelDbDataSource implements KeyValueDataSource {

     @Override
     public void init() {

+        if (isAlive()) return;
         if (name == null) throw new NullPointerException("no name set to the db");

         Options options = new Options();
         options.createIfMissing(true);
         options.compressionType(CompressionType.NONE);
-        options.blockSize(10 * 1024);
-        options.writeBufferSize(10 * 1024);
+        options.blockSize(10 * 1024 * 1024);
+        options.writeBufferSize(10 * 1024 * 1024);
         options.cacheSize(0);
         options.paranoidChecks(true);
         options.verifyChecksums(true);

         try {
             logger.debug("Opening database");

@@ -65,13 +69,17 @@ public class LevelDbDataSource implements KeyValueDataSource {

             db = factory.open(fileLocation, options);

+            alive = true;
         } catch (IOException ioe) {
             logger.error(ioe.getMessage(), ioe);
             throw new RuntimeException("Can't initialize database");
         }
     }

+    @Override
+    public boolean isAlive() {
+        return alive;
+    }
+
     public void destroyDB(File fileLocation) {
         logger.debug("Destroying existing database");

@@ -136,9 +144,13 @@ public class LevelDbDataSource implements KeyValueDataSource {

     @Override
     public void close() {
+        if (!isAlive()) return;
+
         try {
-            logger.info("Close db: {}", name);
+            logger.debug("Close db: {}", name);
             db.close();
+
+            alive = false;
         } catch (IOException e) {
             logger.error("Failed to find the db file on the close: {} ", name);
         }

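Note: the new alive flag makes the data source lifecycle idempotent: init() short-circuits when the store is already open, and close() returns early on a closed store. A minimal usage sketch (hypothetical harness, not part of this commit):

    LevelDbDataSource ds = new LevelDbDataSource();
    ds.setName("blocks");   // name must be set before init(), otherwise init() throws
    ds.init();              // opens the LevelDB store, alive = true
    ds.init();              // no-op: the isAlive() guard returns early
    ds.close();             // closes the store, alive = false
    ds.close();             // no-op: the !isAlive() guard returns early
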
@@ -6,6 +6,7 @@ import org.ethereum.db.ByteArrayWrapper;
 import org.mapdb.DB;
 import org.mapdb.DBMaker;
 import org.mapdb.HTreeMap;
+import org.mapdb.Serializer;

 import java.io.File;
 import java.util.HashSet;

@@ -19,26 +20,32 @@ public class MapDBDataSource implements KeyValueDataSource {
     private static final int BATCH_SIZE = 1024 * 1000 * 10;

     private DB db;
-    private HTreeMap<ByteArrayWrapper, byte[]> map;
+    private Map<byte[], byte[]> map;
     private String name;
+    private boolean alive;

     @Override
     public void init() {
-        File dbLocation = new File(SystemProperties.CONFIG.databaseDir() + "/");
-        if (!dbLocation.exists()) {
-            dbLocation.mkdirs();
-        }
+        File dbFile = new File(SystemProperties.CONFIG.databaseDir() + "/" + name);
+        if (!dbFile.getParentFile().exists()) dbFile.getParentFile().mkdirs();

-        db = DBMaker.newFileDB(new File(dbLocation, name))
-                .asyncWriteEnable()
-                .mmapFileEnableIfSupported()
-                // .compressionEnable()
-                .cacheDisable()
-                // .asyncWriteFlushDelay(1000)
+        db = DBMaker.fileDB(dbFile)
+                .transactionDisable()
+                .closeOnJvmShutdown()
                 .make();

-        this.map = db.createHashMap(name).makeOrGet();
+        this.map = db.hashMapCreate(name)
+                .keySerializer(Serializer.BYTE_ARRAY)
+                .valueSerializer(Serializer.BYTE_ARRAY)
+                .makeOrGet();
+
+        alive = true;
+    }
+
+    @Override
+    public boolean isAlive() {
+        return alive;
     }

     @Override

@@ -53,13 +60,13 @@ public class MapDBDataSource implements KeyValueDataSource {

     @Override
     public byte[] get(byte[] key) {
-        return map.get(wrap(key));
+        return map.get(key);
     }

     @Override
     public byte[] put(byte[] key, byte[] value) {
         try {
-            return map.put(wrap(key), value);
+            return map.put(key, value);
         } finally {
             db.commit();
         }

@@ -76,11 +83,7 @@ public class MapDBDataSource implements KeyValueDataSource {

     @Override
     public Set<byte[]> keys() {
-        HashSet<byte[]> result = new HashSet<>();
-        for (ByteArrayWrapper key : map.keySet()) {
-            result.add(key.getData());
-        }
-        return result;
+        return map.keySet();
     }

     @Override

@@ -90,8 +93,8 @@ public class MapDBDataSource implements KeyValueDataSource {
         for (byte[] key : rows.keySet()) {
             byte[] value = rows.get(key);
             savedSize += value.length;
-            map.put(wrap(key), value);
+            map.put(key, value);
             if (savedSize > BATCH_SIZE) {
                 db.commit();
                 savedSize = 0;

@@ -105,5 +108,6 @@ public class MapDBDataSource implements KeyValueDataSource {
     @Override
     public void close() {
         db.close();
+        alive = false;
     }
 }

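Note: this file tracks the MapDB 1.x to 2.x API rename (DBMaker.newFileDB becomes DBMaker.fileDB, DB.createHashMap becomes DB.hashMapCreate) and switches the map to raw byte[] keys with explicit serializers; Serializer.BYTE_ARRAY hashes key contents, which is why the ByteArrayWrapper indirection (wrap(key)) can be dropped in get/put/updateBatch. A sketch of the new map construction in isolation (path is illustrative):

    DB db = DBMaker.fileDB(new File("/tmp/example-db"))
            .transactionDisable()
            .closeOnJvmShutdown()
            .make();
    Map<byte[], byte[]> map = db.hashMapCreate("example")
            .keySerializer(Serializer.BYTE_ARRAY)   // content-based hashing for byte[] keys
            .valueSerializer(Serializer.BYTE_ARRAY)
            .makeOrGet();
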
@@ -46,7 +46,7 @@ public class BlockStoreImpl implements BlockStore {
         return new Block(vo.rlp);
     }

-    public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
+    public List<byte[]> getListHashesEndWith(byte[] hash, long qty) {

         List<byte[]> hashes = new ArrayList<byte[]>();

@@ -78,7 +78,7 @@ public class InMemoryBlockStore implements BlockStore {
     }

     @Override
-    public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
+    public List<byte[]> getListHashesEndWith(byte[] hash, long qty) {

         Block startBlock = hashIndex.get(wrap(hash));

@@ -95,12 +95,6 @@ public class InMemoryBlockStore implements BlockStore {
         return hashes;
     }

-    @Override
-    public void deleteBlocksSince(long number) {
-
-        // todo: delete blocks sinse
-    }
-
     @Override
     public void saveBlock(Block block, List<TransactionReceipt> receipts) {
         ByteArrayWrapper wHash = wrap(block.getHash());

@@ -110,14 +104,6 @@ public class InMemoryBlockStore implements BlockStore {
         totalDifficulty = totalDifficulty.add(block.getCumulativeDifficulty());
     }

-    @Override
-    public BigInteger getTotalDifficultySince(long number) {
-
-        // todo calculate from db + from cache
-
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public BigInteger getTotalDifficulty() {
         return totalDifficulty;

@@ -129,23 +115,6 @@ public class InMemoryBlockStore implements BlockStore {
         return blocks.get(blocks.size() - 1);
     }

-    @Override
-    public List<Block> getAllBlocks() {
-        return blocks;
-    }
-
-    @Override
-    public void reset() {
-        blocks.clear();
-        hashIndex.clear();
-        numberIndex.clear();
-    }
-
-    @Override
-    public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
-        return null;
-    }
-
     // FIXME: wrap from here in to db class

     public byte[] dbGetBlockHashByNumber(long blockNumber) {

@@ -258,7 +258,7 @@ public class SystemProperties {
     private boolean boolProperty(String key, Boolean defaultValue) {
         return Boolean.parseBoolean(prop.getProperty(key, String.valueOf(defaultValue)));
     }

     private int intProperty(String key, int defaultValue) {
         return Integer.parseInt(prop.getProperty(key, String.valueOf(defaultValue)));
     }

@@ -266,11 +266,11 @@ public class SystemProperties {
     public int detailsInMemoryStorageLimit() {
         return intProperty("details.inmemory.storage.limit", DEFAULT_DETAILS_INMEMORY_STORAGE_LIMIT);
     }

     public int flushBlocksBatchSize() {
         return intProperty("flush.blocks.batch.size", DEFAULT_FLUSH_BATCH_SIZE);
     }

     public int flushBlocksRepoSize() {
         return intProperty("flush.blocks.repo.size", DEFAULT_FLUSH_REPO_SIZE);
     }

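Note: the flush knobs follow the existing *Property helper pattern: a value is read from the properties file with a compiled-in fallback. A hedged sketch of how one lookup resolves (the default constant's value is not shown in this diff):

    // intProperty("flush.blocks.batch.size", DEFAULT_FLUSH_BATCH_SIZE) expands to:
    int batch = Integer.parseInt(prop.getProperty("flush.blocks.batch.size",
            String.valueOf(DEFAULT_FLUSH_BATCH_SIZE)));  // default used when the key is absent
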
@@ -144,8 +144,7 @@ public class BlockchainImpl implements Blockchain {

     @Override
     public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
-
-        return blockStore.getTransactionReceiptByHash(hash);
+        throw new UnsupportedOperationException("TODO: will be implemented soon "); // FIXME: go and fix me
     }

     @Override

@@ -155,7 +154,7 @@ public class BlockchainImpl implements Blockchain {

     @Override
     public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
-        return blockStore.getListOfHashesStartFrom(hash, qty);
+        return blockStore.getListHashesEndWith(hash, qty);
     }

     private byte[] calcTxTrie(List<Transaction> transactions){

@@ -201,43 +200,6 @@ public class BlockchainImpl implements Blockchain {
         return NO_PARENT;
     }

-        //TODO POC9 add rollback support
-        if (1 == 1)
-            return SUCCESS; // todo: temporary cancel the rollback
-
-        // cut on the chain got lastBlock + 1 > n
-        if (block.getNumber() > bestBlock.getNumber() + 1) {
-            channelManager.ethSync();
-        }
-
-        if (!hasParentOnTheChain(block) && block.getNumber() > bestBlock.getNumber()) {
-
-            if (1 == 1)
-                return SUCCESS; // todo: temporary cancel the rollback
-
-            logger.info("*** Blockchain will rollback and resynchronise now ");
-
-            long rollbackIdx = bestBlock.getNumber() - 30;
-            if (rollbackIdx <= 0) rollbackIdx = bestBlock.getNumber() - bestBlock.getNumber() / 10;
-
-            Block rollbackBlock = blockStore.getBlockByNumber(rollbackIdx);
-            repository.syncToRoot(rollbackBlock.getStateRoot());
-
-            BigInteger deltaTD = blockStore.getTotalDifficultySince(rollbackBlock.getNumber());
-            totalDifficulty = totalDifficulty.subtract(deltaTD);
-            bestBlock = rollbackBlock;
-
-            blockStore.deleteBlocksSince(rollbackBlock.getNumber());
-
-            blockQueue.clear();
-            channelManager.ethSync();
-            return SUCCESS;
-        }
-
-        // provisional, by the garbage will be
-        // defined how to deal with it in the
-        // future.
         garbage.add(block);
         return SUCCESS;
     }

@@ -295,11 +257,9 @@ public class BlockchainImpl implements Blockchain {
         track.commit();
         storeBlock(block, receipts);

-        if (block.getNumber() == 650_000){
-            repository.flush();
-            blockStore.flush();
-            System.exit(-1);
-        }
+        // if (block.getNumber() == 708_461){
+        //     System.exit(-1);
+        // }

         if (needFlush(block)) {
             repository.flush();

@@ -319,7 +279,7 @@ public class BlockchainImpl implements Blockchain {
         if (blockQueue != null &&
             blockQueue.size() == 0 &&
             !syncDoneCalled &&
-                channelManager.isAllSync()) {
+            channelManager.isAllSync()) {

             logger.info("Sync done");
             syncDoneCalled = true;

@@ -645,13 +605,6 @@ public class BlockchainImpl implements Blockchain {
         return bestBlock;
     }

-    @Override
-    public void reset() {
-        blockStore.reset();
-        altChains = new ArrayList<>();
-        garbage = new ArrayList<>();
-    }
-
     @Override
     public void close() {
         blockQueue.close();

@@ -6,11 +6,13 @@ package org.ethereum.datasource;
  */
 public interface DataSource {

+    void init();
+
     void setName(String name);

     String getName();

-    void init();
+    boolean isAlive();

     void close();
 }

@@ -2,6 +2,7 @@ package org.ethereum.datasource;

 import org.slf4j.Logger;

+import javax.annotation.Nonnull;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;

@@ -9,25 +10,27 @@ import static org.slf4j.LoggerFactory.getLogger;

 public class DataSourcePool {

-    private static Logger logger = getLogger("db");
+    private static final Logger logger = getLogger("db");
     private static ConcurrentMap<String, DataSource> pool = new ConcurrentHashMap<>();

     public static KeyValueDataSource levelDbByName(String name) {
-        return (KeyValueDataSource) getDataSourceFromPool(name, new LevelDbDataSource(name));
+        return (KeyValueDataSource) getDataSourceFromPool(name, new LevelDbDataSource());
     }

-    private static DataSource getDataSourceFromPool(String name, DataSource dataSource) {
+    private static DataSource getDataSourceFromPool(String name, @Nonnull DataSource dataSource) {
         dataSource.setName(name);
         DataSource result = pool.putIfAbsent(name, dataSource);
         if (result == null) {
-            synchronized (dataSource) {
-                dataSource.init();
-                result = dataSource;
-            }
-            logger.info("Data source '{}' created and added to pool.", dataSource.getName());
+            result = dataSource;
+            logger.debug("Data source '{}' created and added to pool.", name);
         } else {
-            logger.info("Data source '{}' returned from pool.", dataSource.getName());
+            logger.debug("Data source '{}' returned from pool.", name);
         }

+        synchronized (result) {
+            if (!result.isAlive()) result.init();
+        }
+
         return result;
     }

@@ -36,7 +39,7 @@ public class DataSourcePool {
         if (dataSource != null){
             synchronized (dataSource) {
                 dataSource.close();
-                logger.info("Data source '{}' closed and removed from pool.", dataSource.getName());
+                logger.debug("Data source '%s' closed and removed from pool.\n", dataSource.getName());
             }
         }
     }

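Note: the reworked pool separates registration from initialization: putIfAbsent picks a single winner under concurrency, and every caller then passes through the synchronized isAlive()/init() check, so a source closed via closeDataSource() is transparently re-opened on the next lookup. One review observation: the new close log line uses '%s' and '\n' with an slf4j logger, which substitutes only '{}' placeholders, so that name will not be interpolated (the created/returned messages use '{}' correctly). Illustrative calls, using names from this diff:

    KeyValueDataSource details = DataSourcePool.levelDbByName("details");  // created and init'd on first call
    KeyValueDataSource again   = DataSourcePool.levelDbByName("details");  // same pooled instance, re-init'd only if closed
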
@@ -46,6 +46,11 @@ public class HashMapDB implements KeyValueDataSource {

     }

+    @Override
+    public boolean isAlive() {
+        return true;
+    }
+
     @Override
     public void setName(String name) {

@@ -6,6 +6,9 @@ import org.slf4j.LoggerFactory;

 import java.io.File;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;

@@ -22,8 +25,9 @@ public class LevelDbDataSource implements KeyValueDataSource {

     private static final Logger logger = LoggerFactory.getLogger("db");

-    String name;
+    private String name;
     private DB db;
+    private boolean alive;

     public LevelDbDataSource() {
     }

@@ -34,31 +38,38 @@ public class LevelDbDataSource implements KeyValueDataSource {

     @Override
     public void init() {

+        if (isAlive()) return;
+
         if (name == null) throw new NullPointerException("no name set to the db");

         Options options = new Options();
         options.createIfMissing(true);
         options.compressionType(CompressionType.NONE);
-        options.blockSize(10 * 1024);
-        options.writeBufferSize(10 * 1024);
+        options.blockSize(10 * 1024 * 1024);
+        options.writeBufferSize(10 * 1024 * 1024);
         options.cacheSize(0);

         options.paranoidChecks(true);
         options.verifyChecksums(true);

         try {
             logger.debug("Opening database");
-            File fileLocation = new File(getProperty("user.dir") + "/" + CONFIG.databaseDir() + "/" + name);
-            File dbLocation = fileLocation.getParentFile();
-            if (!dbLocation.exists()) dbLocation.mkdirs();
+            Path dbPath = Paths.get(getProperty("user.dir"), CONFIG.databaseDir(), name);
+            Files.createDirectories(dbPath.getParent());

             logger.debug("Initializing new or existing database: '{}'", name);
-            db = factory.open(fileLocation, options);
+            db = factory.open(dbPath.toFile(), options);
+
+            alive = true;
         } catch (IOException ioe) {
             logger.error(ioe.getMessage(), ioe);
             throw new RuntimeException("Can't initialize database");
         }
     }

+    @Override
+    public boolean isAlive() {
+        return alive;
+    }
+
     public void destroyDB(File fileLocation) {
         logger.debug("Destroying existing database");

@@ -124,9 +135,13 @@ public class LevelDbDataSource implements KeyValueDataSource {

     @Override
     public void close() {
+        if (!isAlive()) return;
+
         try {
-            logger.info("Close db: {}", name);
+            logger.debug("Close db: {}", name);
             db.close();
+
+            alive = false;
         } catch (IOException e) {
             logger.error("Failed to find the db file on the close: {} ", name);
         }

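Note: init() now builds the database path with Paths.get and creates parent directories via Files.createDirectories, which, unlike the old File.mkdirs() call, reports failure by throwing IOException instead of silently returning false; that exception is already routed into the existing catch block. Equivalent behavior in isolation (directory layout is illustrative):

    Path dbPath = Paths.get(System.getProperty("user.dir"), "database", "blocks");
    Files.createDirectories(dbPath.getParent());   // no-op when the directory already exists
    // factory.open(dbPath.toFile(), options) then opens or creates the LevelDB store
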
@@ -1,22 +0,0 @@
-package org.ethereum.datasource;
-
-/**
- * @author Mikhail Kalinin
- * @since 07.07.2015
- */
-public interface QueueDataSource extends DataSource {
-
-    void offerFirst(byte[] e);
-
-    byte[] peekFirst();
-
-    byte[] pollFirst();
-
-    void offerLast(byte[] e);
-
-    byte[] peekLast();
-
-    byte[] pollLast();
-
-    boolean isEmpty();
-}

@@ -19,6 +19,7 @@ public class MapDBDataSource implements KeyValueDataSource {
     private DB db;
     private Map<byte[], byte[]> map;
     private String name;
+    private boolean alive;

     @Override
     public void init() {

@@ -35,6 +36,13 @@ public class MapDBDataSource implements KeyValueDataSource {
                 .keySerializer(Serializer.BYTE_ARRAY)
                 .valueSerializer(Serializer.BYTE_ARRAY)
                 .makeOrGet();
+
+        alive = true;
+    }
+
+    @Override
+    public boolean isAlive() {
+        return alive;
     }

     @Override

@@ -97,5 +105,6 @@ public class MapDBDataSource implements KeyValueDataSource {
     @Override
     public void close() {
         db.close();
+        alive = false;
     }
 }

@@ -1,8 +1,20 @@
 package org.ethereum.datasource.mapdb;

+import org.ethereum.core.Block;
 import org.ethereum.datasource.KeyValueDataSource;
+import org.mapdb.DB;
+
+import java.util.Map;

 public interface MapDBFactory {

     KeyValueDataSource createDataSource();

+    Map<Long, byte[]> createHashStoreMap();
+
+    Map<Long, Block> createBlockQueueMap();
+
+    void destroy(Object resource);
+
+    DB createDB(String name);
 }

@@ -1,11 +1,73 @@
 package org.ethereum.datasource.mapdb;

+import org.ethereum.config.SystemProperties;
+import org.ethereum.core.Block;
 import org.ethereum.datasource.KeyValueDataSource;
+import org.mapdb.DB;
+import org.mapdb.DBMaker;
+import org.mapdb.Serializer;
+
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+
+import static java.lang.System.getProperty;

 public class MapDBFactoryImpl implements MapDBFactory {

+    private final static String HASH_STORE_NAME = "hash_store";
+    private final static String BLOCK_QUEUE_NAME = "block_queue";
+
+    private Map<Integer, DB> allocated = new HashMap<>();
+
     @Override
     public KeyValueDataSource createDataSource() {
         return new MapDBDataSource();
     }
+
+    @Override
+    public Map<Long, byte[]> createHashStoreMap() {
+        DB db = createDB(HASH_STORE_NAME);
+        Map<Long, byte[]> map = db.hashMapCreate(HASH_STORE_NAME)
+                .keySerializer(Serializer.LONG)
+                .valueSerializer(Serializer.BYTE_ARRAY)
+                .makeOrGet();
+        allocate(map, db);
+        return map;
+    }
+
+    @Override
+    public Map<Long, Block> createBlockQueueMap() {
+        DB db = createDB(BLOCK_QUEUE_NAME);
+        Map<Long, Block> map = db.hashMapCreate(BLOCK_QUEUE_NAME)
+                .keySerializer(Serializer.LONG)
+                .valueSerializer(Serializers.BLOCK)
+                .makeOrGet();
+        allocate(map, db);
+        return map;
+    }
+
+    @Override
+    public void destroy(Object resource) {
+        int hashCode = System.identityHashCode(resource);
+        DB db = allocated.get(hashCode);
+        if(db != null) {
+            db.close();
+            allocated.remove(hashCode);
+        }
+    }
+
+    @Override
+    public DB createDB(String name) {
+        File dbFile = new File(getProperty("user.dir") + "/" + SystemProperties.CONFIG.databaseDir() + "/" + name);
+        if (!dbFile.getParentFile().exists()) dbFile.getParentFile().mkdirs();
+        return DBMaker.fileDB(dbFile)
+                .transactionDisable()
+                .closeOnJvmShutdown()
+                .make();
+    }
+
+    private void allocate(Object resource, DB db) {
+        allocated.put(System.identityHashCode(resource), db);
+    }
 }

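Note: the factory hands out plain Map views while keeping ownership of the backing DB handles; it keys them by System.identityHashCode(resource) so destroy(map) can later find and close the right DB. Since identityHashCode is not guaranteed unique, an IdentityHashMap<Object, DB> would express the same intent without that caveat. A possible alternative sketch, not what this commit does:

    // same bookkeeping with identity semantics made explicit
    private final Map<Object, DB> allocated = new IdentityHashMap<>();
    private void allocate(Object resource, DB db) { allocated.put(resource, db); }
    public void destroy(Object resource) {
        DB db = allocated.remove(resource);   // looked up by reference identity
        if (db != null) db.close();
    }
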
@@ -1,117 +0,0 @@
-package org.ethereum.datasource.mapdb;
-
-import org.ethereum.config.SystemProperties;
-import org.ethereum.datasource.QueueDataSource;
-import org.mapdb.BTreeMap;
-import org.mapdb.DB;
-import org.mapdb.DBMaker;
-import org.mapdb.Serializer;
-
-import java.io.File;
-
-import static java.lang.System.getProperty;
-
-/**
- * @author Mikhail Kalinin
- * @since 07.07.2015
- */
-public class MapDBQueueDataSource implements QueueDataSource {
-
-    private DB db;
-    private BTreeMap<Long, byte[]> map;
-    private String name;
-
-    @Override
-    public void init() {
-        File dbFile = new File(getProperty("user.dir") + "/" + SystemProperties.CONFIG.databaseDir() + "/" + name);
-        if (!dbFile.getParentFile().exists()) dbFile.getParentFile().mkdirs();
-
-        db = DBMaker.fileDB(dbFile)
-                .transactionDisable()
-                .closeOnJvmShutdown()
-                .make();
-
-        map = db.treeMapCreate(name)
-                .keySerializer(Serializer.LONG)
-                .valueSerializer(Serializer.BYTE_ARRAY)
-                .makeOrGet();
-    }
-
-    @Override
-    public void setName(String name) {
-        this.name = name;
-    }
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-    @Override
-    public void close() {
-        db.close();
-    }
-
-    @Override
-    public synchronized void offerFirst(byte[] e) {
-        if(map.isEmpty()) {
-            offerEmpty(e);
-        } else {
-            map.put(map.firstKey() - 1, e);
-        }
-    }
-
-    @Override
-    public synchronized byte[] peekFirst() {
-        if(map.isEmpty()) {
-            return null;
-        } else {
-            return map.firstEntry().getValue();
-        }
-    }
-
-    @Override
-    public synchronized byte[] pollFirst() {
-        if(map.isEmpty()) {
-            return null;
-        } else {
-            return map.pollFirstEntry().getValue();
-        }
-    }
-
-    @Override
-    public synchronized void offerLast(byte[] e) {
-        if(map.isEmpty()) {
-            offerEmpty(e);
-        } else {
-            map.put(map.lastKey() + 1, e);
-        }
-    }
-
-    @Override
-    public synchronized byte[] peekLast() {
-        if(map.isEmpty()) {
-            return null;
-        } else {
-            return map.lastEntry().getValue();
-        }
-    }
-
-    @Override
-    public synchronized byte[] pollLast() {
-        if(map.isEmpty()) {
-            return null;
-        } else {
-            return map.pollLastEntry().getValue();
-        }
-    }
-
-    @Override
-    public boolean isEmpty() {
-        return map.isEmpty();
-    }
-
-    private void offerEmpty(byte[] e) {
-        map.put(0L, e);
-    }
-}

@@ -0,0 +1,35 @@
+package org.ethereum.datasource.mapdb;
+
+import org.ethereum.core.Block;
+import org.mapdb.Serializer;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Arrays;
+
+/**
+ * @author Mikhail Kalinin
+ * @since 09.07.2015
+ */
+public class Serializers {
+
+    public static final Serializer<Block> BLOCK = new Serializer<Block>() {
+
+        @Override
+        public void serialize(DataOutput out, Block block) throws IOException {
+            byte[] bytes = getBytes(block);
+            BYTE_ARRAY.serialize(out, bytes);
+        }
+
+        @Override
+        public Block deserialize(DataInput in, int available) throws IOException {
+            byte[] bytes = BYTE_ARRAY.deserialize(in, available);
+            return bytes.length > 0 ? new Block(bytes) : null;
+        }
+    };
+
+    private static byte[] getBytes(Block block) {
+        return block == null ? new byte[0] : block.getEncoded();
+    }
+}

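Note: the BLOCK serializer delegates to Serializer.BYTE_ARRAY and encodes null as an empty byte array, decoding a zero-length payload back to null; the java.util.Arrays import appears unused as shown. A hedged round-trip sketch (in-memory streams, illustrative only):

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Serializers.BLOCK.serialize(new DataOutputStream(bos), block);   // writes block.getEncoded() length-prefixed
    DataInput in = new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
    Block copy = Serializers.BLOCK.deserialize(in, bos.size());      // null when the payload was empty
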
@@ -48,6 +48,11 @@ public class RedisDataSource extends RedisMap<byte[], byte[]> implements KeyValueDataSource {
         }
     }

+    @Override
+    public boolean isAlive() {
+        return true;
+    }
+
     @Override
     public void setName(String name) {
         super.setName(name);

@@ -0,0 +1,24 @@
+package org.ethereum.db;
+
+import org.ethereum.core.Block;
+
+import java.util.Collection;
+
+/**
+ * @author Mikhail Kalinin
+ * @since 09.07.2015
+ */
+public interface BlockQueue extends DiskStore {
+
+    void addAll(Collection<Block> blockList);
+
+    void add(Block block);
+
+    Block poll();
+
+    Block peek();
+
+    int size();
+
+    boolean isEmpty();
+}

@@ -0,0 +1,89 @@
+package org.ethereum.db;
+
+import org.ethereum.core.Block;
+import org.ethereum.datasource.mapdb.MapDBFactory;
+
+import java.util.*;
+
+/**
+ * @author Mikhail Kalinin
+ * @since 09.07.2015
+ */
+public class BlockQueueImpl implements BlockQueue {
+
+    private MapDBFactory mapDBFactory;
+
+    private Map<Long, Block> blocks;
+    private List<Long> index;
+
+    @Override
+    public void open() {
+        blocks = mapDBFactory.createBlockQueueMap();
+        index = new ArrayList<>(blocks.keySet());
+        sortIndex();
+    }
+
+    @Override
+    public void close() {
+        mapDBFactory.destroy(blocks);
+    }
+
+    @Override
+    public synchronized void addAll(Collection<Block> blockList) {
+        List<Long> numbers = new ArrayList<>(blockList.size());
+        for(Block b : blockList) {
+            blocks.put(b.getNumber(), b);
+            numbers.add(b.getNumber());
+        }
+        index.addAll(numbers);
+        sortIndex();
+    }
+
+    @Override
+    public synchronized void add(Block block) {
+        blocks.put(block.getNumber(), block);
+        index.add(block.getNumber());
+        sortIndex();
+    }
+
+    @Override
+    public synchronized Block poll() {
+        if(!index.isEmpty()) {
+            Long idx = index.get(0);
+            Block block = blocks.get(idx);
+            blocks.remove(idx);
+            index.remove(0);
+            return block;
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public synchronized Block peek() {
+        if(!index.isEmpty()) {
+            Long idx = index.get(0);
+            return blocks.get(idx);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public int size() {
+        return index.size();
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return index.isEmpty();
+    }
+
+    private void sortIndex() {
+        Collections.sort(index);
+    }
+
+    public void setMapDBFactory(MapDBFactory mapDBFactory) {
+        this.mapDBFactory = mapDBFactory;
+    }
+}

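Note: BlockQueueImpl keeps block bodies in the MapDB-backed map keyed by block number and orders them through an in-memory index that is re-sorted on every insert; open() rebuilds that index from whatever survived on disk, which is what makes the queue restart-safe. Wiring sketch using the factory from this commit (manual wiring shown for illustration; actual bean configuration is assumed):

    BlockQueueImpl queue = new BlockQueueImpl();
    queue.setMapDBFactory(new MapDBFactoryImpl());
    queue.open();                  // loads persisted entries, rebuilds and sorts the index
    queue.add(block);              // persists under block.getNumber()
    Block next = queue.poll();     // lowest block number first
    queue.close();                 // closes the backing DB via factory.destroy(...)
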
@@ -15,35 +15,23 @@ import java.util.List;
  */
 public interface BlockStore {

-    public byte[] getBlockHashByNumber(long blockNumber);
+    byte[] getBlockHashByNumber(long blockNumber);

     Block getBlockByNumber(long blockNumber);

     Block getBlockByHash(byte[] hash);

-    @SuppressWarnings("unchecked")
-    List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty);
-
-    void deleteBlocksSince(long number);
+    List<byte[]> getListHashesEndWith(byte[] hash, long qty);

     void saveBlock(Block block, List<TransactionReceipt> receipts);

-    BigInteger getTotalDifficultySince(long number);
-
     BigInteger getTotalDifficulty();

     Block getBestBlock();

-    @SuppressWarnings("unchecked")
-    List<Block> getAllBlocks();
-
-    void reset();
-
-    TransactionReceipt getTransactionReceiptByHash(byte[] hash);
-
-    public void flush();
-    public void load();
-    public void setSessionFactory(SessionFactory sessionFactory);
+    void flush();
+    void load();
+    void setSessionFactory(SessionFactory sessionFactory);

 }

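Note: the interface shrinks to what the new sync path actually needs: getListOfHashesStartFrom(hash, int) becomes getListHashesEndWith(hash, long), walking back from a hash rather than forward from it, and the rollback-era methods (deleteBlocksSince, getTotalDifficultySince, getAllBlocks, reset, getTransactionReceiptByHash) are dropped along with the redundant public modifiers. Callers change accordingly, for example:

    List<byte[]> hashes = blockStore.getListHashesEndWith(bestHash, 256);  // up to qty hashes, newest first
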
@@ -33,23 +33,12 @@ public class BlockStoreDummy implements BlockStore {
     }

     @Override
-    public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
+    public List<byte[]> getListHashesEndWith(byte[] hash, long qty) {
         return null;
     }

     @Override
-    public void deleteBlocksSince(long number) {
-
-    }
-
-    @Override
     public void saveBlock(Block block, List<TransactionReceipt> receipts) {

     }

-    @Override
-    public BigInteger getTotalDifficultySince(long number) {
-        return null;
-    }
-
     @Override

@@ -62,20 +51,6 @@ public class BlockStoreDummy implements BlockStore {
         return null;
     }

-    @Override
-    public List<Block> getAllBlocks() {
-        return null;
-    }
-
-    @Override
-    public void reset() {
-
-    }
-
-    @Override
-    public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
-        return null;
-    }
-
     @Override
     public void flush() {

@@ -88,5 +63,5 @@ public class BlockStoreDummy implements BlockStore {
     @Override
     public void setSessionFactory(SessionFactory sessionFactory) {
     }

 }

@@ -57,7 +57,7 @@ public class BlockStoreImpl implements BlockStore {

     @Override
     @SuppressWarnings("unchecked")
-    public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
+    public List<byte[]> getListHashesEndWith(byte[] hash, long qty){

         List<byte[]> hashes = new ArrayList<>();

@@ -69,7 +69,7 @@ public class BlockStoreImpl implements BlockStore {
                 createQuery("select hash from BlockVO where number <= :number and number >= :limit order by number desc").
                 setParameter("number", block.getNumber()).
                 setParameter("limit", block.getNumber() - qty).
-                setMaxResults(qty).list();
+                setMaxResults((int)qty).list();

         for (byte[] h : result)
             hashes.add(h);

@@ -77,15 +77,6 @@ public class BlockStoreImpl implements BlockStore {
         return hashes;
     }

-    @Override
-    public void deleteBlocksSince(long number) {
-
-        sessionFactory.getCurrentSession().
-                createQuery("delete from BlockVO where number > :number").
-                setParameter("number", number).
-                executeUpdate();
-    }
-
     @Override
     public void saveBlock(Block block, List<TransactionReceipt> receipts) {

@@ -105,15 +96,6 @@ public class BlockStoreImpl implements BlockStore {
         sessionFactory.getCurrentSession().persist(blockVO);
     }

-    @Override
-    public BigInteger getTotalDifficultySince(long number) {
-
-        return (BigInteger) sessionFactory.getCurrentSession().
-                createQuery("select sum(cumulativeDifficulty) from BlockVO where number > :number").
-                setParameter("number", number).
-                uniqueResult();
-    }
-
     @Override
     public BigInteger getTotalDifficulty() {

@@ -137,41 +119,6 @@ public class BlockStoreImpl implements BlockStore {
         return new Block(vo.rlp);
     }

-    @Override
-    @SuppressWarnings("unchecked")
-    public List<Block> getAllBlocks() {
-
-        List<BlockVO> result = sessionFactory.getCurrentSession().
-                createQuery("from BlockVO").list();
-
-        ArrayList<Block> blocks = new ArrayList<>();
-        for (BlockVO blockVO : result) {
-            blocks.add(new Block(blockVO.getRlp()));
-        }
-
-        return blocks;
-    }
-
-    @Override
-    public void reset() {
-        sessionFactory.getCurrentSession().
-                createQuery("delete from BlockVO").executeUpdate();
-    }
-
-    @Override
-    public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
-
-        List result = sessionFactory.getCurrentSession().
-                createQuery("from TransactionReceiptVO where hash = :hash").
-                setParameter("hash", hash).list();
-
-        if (result.size() == 0) return null;
-        TransactionReceiptVO vo = (TransactionReceiptVO) result.get(0);
-
-        return new TransactionReceipt(vo.rlp);
-    }
-
     @Override
     public void flush() {
     }

@@ -183,5 +130,5 @@ public class BlockStoreImpl implements BlockStore {
     @Override
     public void setSessionFactory(SessionFactory sessionFactory) {
     }

 }

@@ -260,6 +260,7 @@ public class ContractDetailsImpl implements ContractDetails {
         result += address.length;
         result += code.length;
         result += storageTrie.getCache().getAllocatedMemorySize();
+        result += keysSize;

         return result;
     }

@@ -10,6 +10,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;

 import static java.lang.String.format;
 import static org.ethereum.util.ByteUtil.wrap;

@@ -19,7 +20,7 @@ public class DetailsDataStore {
     private static final Logger gLogger = LoggerFactory.getLogger("general");

     private DatabaseImpl db = null;
-    private HashMap<ByteArrayWrapper, ContractDetails> cache = new HashMap<>();
+    private Map<ByteArrayWrapper, ContractDetails> cache = new ConcurrentHashMap<>();
     private Set<ByteArrayWrapper> removes = new HashSet<>();

     public void setDB(DatabaseImpl db) {

@@ -0,0 +1,12 @@
+package org.ethereum.db;
+
+/**
+ * @author Mikhail Kalinin
+ * @since 09.07.2015
+ */
+public interface DiskStore {
+
+    void open();
+
+    void close();
+}

@@ -1,67 +1,22 @@
 package org.ethereum.db;

-import org.ethereum.datasource.QueueDataSource;
-import org.ethereum.datasource.mapdb.MapDBQueueDataSource;
-
 import java.util.Collection;
 import java.util.Set;

 /**
  * @author Mikhail Kalinin
- * @since 07.07.2015
+ * @since 09.07.2015
  */
-public class HashStore {
+public interface HashStore extends DiskStore {

-    private static final String DEFAULT_NAME = "hashstore";
+    void add(byte[] hash);

-    private QueueDataSource hashes;
+    void addFirst(byte[] hash);

-    private HashStore() {
-    }
+    byte[] peek();

-    public void add(byte[] hash) {
-        hashes.offerLast(hash);
-    }
+    byte[] poll();

-    public byte[] peek() {
-        return hashes.peekFirst();
-    }
+    boolean isEmpty();

-    public byte[] poll() {
-        return hashes.pollFirst();
-    }
-
-    public void addFirst(byte[] hash) {
-        hashes.offerFirst(hash);
-    }
-
-    public boolean isEmpty() {
-        return hashes.isEmpty();
-    }
-
-    public void close() {
-        hashes.close();
-    }
-
-    static class Builder {
-        private String name = DEFAULT_NAME;
-        private Class<? extends QueueDataSource> dataSourceClass = MapDBQueueDataSource.class;
-
-        public Builder withDataSource(Class<? extends QueueDataSource> dataSourceClass) {
-            this.dataSourceClass = dataSourceClass;
-            return this;
-        }
-
-        public Builder withName(String name) {
-            this.name = name;
-            return this;
-        }
-
-        public HashStore build() throws IllegalAccessException, InstantiationException {
-            HashStore store = new HashStore();
-            store.hashes = dataSourceClass.newInstance();
-            store.hashes.setName(name);
-            store.hashes.init();
-            return store;
-        }
-    }
+    Set<Long> getKeys();
 }

@@ -0,0 +1,100 @@
+package org.ethereum.db;
+
+import org.ethereum.datasource.mapdb.MapDBFactory;
+
+import java.util.*;
+
+/**
+ * @author Mikhail Kalinin
+ * @since 07.07.2015
+ */
+public class HashStoreImpl implements HashStore {
+
+    private MapDBFactory mapDBFactory;
+
+    private Map<Long, byte[]> hashes;
+    private List<Long> index;
+
+    @Override
+    public void open() {
+        hashes = mapDBFactory.createHashStoreMap();
+        index = new ArrayList<>(hashes.keySet());
+        sortIndex();
+    }
+
+    @Override
+    public void close() {
+        mapDBFactory.destroy(hashes);
+    }
+
+    @Override
+    public synchronized void add(byte[] hash) {
+        addInner(false, hash);
+    }
+
+    @Override
+    public synchronized void addFirst(byte[] hash) {
+        addInner(true, hash);
+    }
+
+    private void addInner(boolean first, byte[] hash) {
+        Long idx = createIndex(first);
+        hashes.put(idx, hash);
+    }
+
+    @Override
+    public synchronized byte[] peek() {
+        if(!index.isEmpty()) {
+            Long idx = index.get(0);
+            return hashes.get(idx);
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public synchronized byte[] poll() {
+        if(!index.isEmpty()) {
+            Long idx = index.get(0);
+            byte[] hash = hashes.get(idx);
+            hashes.remove(idx);
+            index.remove(0);
+            return hash;
+        } else {
+            return null;
+        }
+    }
+
+    @Override
+    public boolean isEmpty() {
+        return index.isEmpty();
+    }
+
+    @Override
+    public Set<Long> getKeys() {
+        return hashes.keySet();
+    }
+
+    private Long createIndex(boolean first) {
+        Long idx;
+        if(index.isEmpty()) {
+            idx = 0L;
+            index.add(idx);
+        } else if(first) {
+            idx = index.get(0) - 1;
+            index.add(0, idx);
+        } else {
+            idx = index.get(index.size() - 1) + 1;
+            index.add(idx);
+        }
+        return idx;
+    }
+
+    private void sortIndex() {
+        Collections.sort(index);
+    }
+
+    public void setMapDBFactory(MapDBFactory mapDBFactory) {
+        this.mapDBFactory = mapDBFactory;
+    }
+}

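Note: HashStoreImpl is effectively a persistent deque: hashes live in the MapDB-backed map under synthetic Long keys, and createIndex() extends the key range downward for addFirst (head - 1) or upward for add (tail + 1), so peek/poll always take the smallest key. Behavior sketch (the keys shown are what createIndex would produce from an empty store):

    store.add(a);        // index [0],         map {0=a}
    store.add(b);        // index [0, 1],      map {0=a, 1=b}
    store.addFirst(c);   // index [-1, 0, 1],  map {-1=c, 0=a, 1=b}
    store.poll();        // returns c; index [0, 1]
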
@@ -70,7 +70,8 @@ public class InMemoryBlockStore implements BlockStore{
     }

     @Override
-    public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
+    public List<byte[]> getListHashesEndWith(byte[] hash, long qty){
+
         Block startBlock = hashIndex.get(wrap(hash));

@@ -87,12 +88,6 @@ public class InMemoryBlockStore implements BlockStore{
         return hashes;
     }

-    @Override
-    public void deleteBlocksSince(long number) {
-
-        // todo: delete blocks sinse
-    }
-
     @Override
     public void saveBlock(Block block, List<TransactionReceipt> receipts) {
         ByteArrayWrapper wHash = wrap(block.getHash());

@@ -102,13 +97,6 @@ public class InMemoryBlockStore implements BlockStore{
         totalDifficulty = totalDifficulty.add(block.getCumulativeDifficulty());
     }

-    @Override
-    public BigInteger getTotalDifficultySince(long number) {
-
-        // todo calculate from db + from cache
-
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public BigInteger getTotalDifficulty() {

@@ -121,23 +109,6 @@ public class InMemoryBlockStore implements BlockStore{
         return blocks.get(blocks.size() - 1);
     }

-    @Override
-    public List<Block> getAllBlocks() {
-        return blocks;
-    }
-
-    @Override
-    public void reset() {
-        blocks.clear();
-        hashIndex.clear();
-        numberIndex.clear();
-    }
-
-    @Override
-    public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
-        return null;
-    }
-
     // FIXME: wrap from here in to db class

     public byte[] dbGetBlockHashByNumber(long blockNumber) {

@@ -11,7 +11,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

-public class IndexedBlockStore {
+public class IndexedBlockStore{

     IndexedBlockStore cache;
     Map<Long, List<BlockInfo>> index;

@@ -26,6 +26,15 @@ public class IndexedBlockStore{
         this.blocks = blocks;
     }

+    public Block getBestBlock(){
+        return getChainBlockByNumber(getMaxNumber());
+    }
+
+    public byte[] getBlockHashByNumber(long blockNumber){
+        return getChainBlockByNumber(blockNumber).getHash(); // FIXME: can be improved by accessing the hash directly in the index
+    }
+
+
     public void flush(){

         for (byte[] hash : cache.blocks.keys()){

@@ -39,11 +48,11 @@ public class IndexedBlockStore{
     }


-    public void addBlock(Block block, BigInteger cummDifficulty, boolean mainChain){
+    public void saveBlock(Block block, BigInteger cummDifficulty, boolean mainChain){
         if (cache == null)
             addInternalBlock(block, cummDifficulty, mainChain);
         else
-            cache.addBlock(block, cummDifficulty, mainChain);
+            cache.saveBlock(block, cummDifficulty, mainChain);
     }

     private void addInternalBlock(Block block, BigInteger cummDifficulty, boolean mainChain){

@@ -164,11 +173,11 @@ public class IndexedBlockStore{
         return bestIndex - 1L;
     }

-    public List<byte[]> getNHashesEndWith(byte[] hash, long number){
+    public List<byte[]> getListHashesEndWith(byte[] hash, long number){

         List<byte[]> cachedHashes = new ArrayList<>();
         if (cache != null)
-            cachedHashes = cache.getNHashesEndWith(hash, number);
+            cachedHashes = cache.getListHashesEndWith(hash, number);

         byte[] rlp = blocks.get(hash);
         if (rlp == null) return cachedHashes;

@@ -184,8 +193,8 @@ public class IndexedBlockStore{
         return cachedHashes;
     }

-    public List<byte[]> getNHashesStartWith(long number, long maxBlocks){
+    public List<byte[]> getListHashesStartWith(long number, long maxBlocks){

         List<byte[]> result = new ArrayList<>();

@@ -205,7 +214,7 @@ public class IndexedBlockStore{
         maxBlocks -= i;

         if (cache != null)
-            result.addAll( cache.getNHashesStartWith(number, maxBlocks) );
+            result.addAll( cache.getListHashesStartWith(number, maxBlocks) );

         return result;
     }

@ -11,12 +11,14 @@ import org.ethereum.json.JSONHelper;
|
|||
import org.ethereum.trie.SecureTrie;
|
||||
import org.ethereum.trie.Trie;
|
||||
import org.ethereum.trie.TrieImpl;
|
||||
import org.ethereum.util.Functional;
|
||||
import org.ethereum.vm.DataWord;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
|
@ -26,7 +28,10 @@ import java.util.HashMap;
|
|||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
|
||||
import static java.lang.Thread.sleep;
|
||||
import static org.ethereum.config.SystemProperties.CONFIG;
|
||||
import static org.ethereum.crypto.SHA3Helper.sha3;
|
||||
import static org.ethereum.util.ByteUtil.wrap;
|
||||
|
@ -50,14 +55,18 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
private DatabaseImpl stateDB = null;
|
||||
|
||||
KeyValueDataSource detailsDS = null;
|
||||
KeyValueDataSource stateDS = null;
|
||||
private KeyValueDataSource detailsDS = null;
|
||||
private KeyValueDataSource stateDS = null;
|
||||
|
||||
private final ReentrantLock lock = new ReentrantLock();
|
||||
private final AtomicInteger accessCounter = new AtomicInteger();
|
||||
|
||||
|
||||
public RepositoryImpl() {
|
||||
this(DETAILS_DB, STATE_DB);
|
||||
}
|
||||
|
||||
public RepositoryImpl(boolean createDb){
|
||||
public RepositoryImpl(boolean createDb) {
|
||||
}
|
||||
|
||||
public RepositoryImpl(KeyValueDataSource detailsDS, KeyValueDataSource stateDS) {
|
||||
|
@ -88,26 +97,36 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
@Override
|
||||
public void reset() {
|
||||
close();
|
||||
doWithLockedAccess(new Functional.InvokeWrapper() {
|
||||
@Override
|
||||
public void invoke() {
|
||||
close();
|
||||
|
||||
detailsDS.init();
|
||||
detailsDB = new DatabaseImpl(detailsDS);
|
||||
detailsDS.init();
|
||||
detailsDB = new DatabaseImpl(detailsDS);
|
||||
|
||||
stateDS.init();
|
||||
stateDB = new DatabaseImpl(stateDS);
|
||||
worldState = new SecureTrie(stateDB.getDb());
|
||||
stateDS.init();
|
||||
stateDB = new DatabaseImpl(stateDS);
|
||||
worldState = new SecureTrie(stateDB.getDb());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
if (this.detailsDB != null) {
|
||||
detailsDB.close();
|
||||
detailsDB = null;
|
||||
}
|
||||
if (this.stateDB != null) {
|
||||
stateDB.close();
|
||||
stateDB = null;
|
||||
}
|
||||
doWithLockedAccess(new Functional.InvokeWrapper() {
|
||||
@Override
|
||||
public void invoke() {
|
||||
if (detailsDB != null) {
|
||||
detailsDB.close();
|
||||
detailsDB = null;
|
||||
}
|
||||
if (stateDB != null) {
|
||||
stateDB.close();
|
||||
stateDB = null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -127,8 +146,7 @@ public class RepositoryImpl implements Repository {
|
|||
ContractDetails contractDetails = detailsCache.get(hash);
|
||||
|
||||
if (accountState.isDeleted()) {
|
||||
worldState.delete(hash.getData());
|
||||
dds.remove(hash.getData());
|
||||
delete(hash.getData());
|
||||
logger.debug("delete: [{}]",
|
||||
Hex.toHexString(hash.getData()));
|
||||
|
||||
|
@ -136,8 +154,8 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
if (!contractDetails.isDirty()) continue;
|
||||
|
||||
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl)contractDetails;
|
||||
if (contractDetailsCache.origContract == null){
|
||||
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl) contractDetails;
|
||||
if (contractDetailsCache.origContract == null) {
|
||||
contractDetailsCache.origContract = new ContractDetailsImpl();
|
||||
contractDetailsCache.origContract.setAddress(hash.getData());
|
||||
contractDetailsCache.commit();
|
||||
|
@ -145,21 +163,20 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
contractDetails = contractDetailsCache.origContract;
|
||||
|
||||
dds.update(hash.getData(), contractDetails);
|
||||
byte[] data = hash.getData();
|
||||
updateContractDetails(data, contractDetails);
|
||||
|
||||
accountState.setStateRoot(contractDetails.getStorageHash());
|
||||
accountState.setCodeHash(sha3(contractDetails.getCode()));
|
||||
worldState.update(hash.getData(), accountState.getEncoded());
|
||||
updateAccountState(hash.getData(), accountState);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("update: [{}],nonce: [{}] balance: [{}] \n [{}]",
|
||||
Hex.toHexString(hash.getData()),
|
||||
accountState.getNonce(),
|
||||
accountState.getBalance(),
|
||||
contractDetails.getStorage());
|
||||
}
|
||||
|
||||
|
||||
logger.debug("update: [{}],nonce: [{}] balance: [{}] \n [{}]",
|
||||
Hex.toHexString(hash.getData()),
|
||||
accountState.getNonce(),
|
||||
accountState.getBalance(),
|
||||
contractDetails.getStorage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -170,30 +187,53 @@ public class RepositoryImpl implements Repository {
|
|||
detailsCache.clear();
|
||||
}
|
||||
|
||||
private void updateContractDetails(final byte[] address, final ContractDetails contractDetails) {
|
||||
doWithAccessCounting(new Functional.InvokeWrapper() {
|
||||
@Override
|
||||
public void invoke() {
|
||||
dds.update(address, contractDetails);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flushNoReconnect(){
|
||||
public void flushNoReconnect() {
|
||||
doWithLockedAccess(new Functional.InvokeWrapper() {
|
||||
@Override
|
||||
public void invoke() {
|
||||
gLogger.info("flushing to disk");
|
||||
|
||||
gLogger.info("flushing to disk");
|
||||
|
||||
dds.flush();
|
||||
worldState.sync();
|
||||
dds.flush();
|
||||
worldState.sync();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void flush() {
|
||||
gLogger.info("flushing to disk");
|
||||
doWithLockedAccess(new Functional.InvokeWrapper() {
|
||||
@Override
|
||||
public void invoke() {
|
||||
gLogger.info("flushing to disk");
|
||||
|
||||
dds.flush();
|
||||
worldState.sync();
|
||||
dds.flush();
|
||||
worldState.sync();
|
||||
|
||||
byte[] root = worldState.getRootHash();
|
||||
reset();
|
||||
worldState.setRoot(root);
|
||||
byte[] root = worldState.getRootHash();
|
||||
reset();
|
||||
worldState.setRoot(root);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public int getAllocatedMemorySize() {
|
||||
return dds.getAllocatedMemorySize() + ((TrieImpl) worldState).getCache().getAllocatedMemorySize();
|
||||
return doWithAccessCounting(new Functional.InvokeWrapperWithResult<Integer>() {
|
||||
@Override
|
||||
public Integer invoke() {
|
||||
return dds.getAllocatedMemorySize() + ((TrieImpl) worldState).getCache().getAllocatedMemorySize();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -207,8 +247,13 @@ public class RepositoryImpl implements Repository {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void syncToRoot(byte[] root) {
|
||||
worldState.setRoot(root);
|
||||
public void syncToRoot(final byte[] root) {
|
||||
doWithAccessCounting(new Functional.InvokeWrapper() {
|
||||
@Override
|
||||
public void invoke() {
|
||||
worldState.setRoot(root);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -279,8 +324,13 @@ public class RepositoryImpl implements Repository {
|
|||
}
|
||||
}
|
||||
|
||||
public String getTrieDump(){
|
||||
return worldState.getTrieDump();
|
||||
public String getTrieDump() {
|
||||
return doWithAccessCounting(new Functional.InvokeWrapperWithResult<String>() {
|
||||
@Override
|
||||
public String invoke() {
|
||||
return worldState.getTrieDump();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void dumpTrie(Block block) {
|
||||
|
@ -294,7 +344,7 @@ public class RepositoryImpl implements Repository {
|
|||
FileWriter fw = null;
|
||||
BufferedWriter bw = null;
|
||||
|
||||
String dump = this.worldState.getTrieDump();
|
||||
String dump = getTrieDump();
|
||||
|
||||
try {
|
||||
|
||||
|
@ -321,56 +371,45 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
@Override
|
||||
public Set<byte[]> getAccountsKeys() {
|
||||
return doWithAccessCounting(new Functional.InvokeWrapperWithResult<Set<byte[]>>() {
|
||||
@Override
|
||||
public Set<byte[]> invoke() {
|
||||
Set<byte[]> result = new HashSet<>();
|
||||
for (ByteArrayWrapper key : dds.keys()) {
|
||||
result.add(key.getData());
|
||||
}
|
||||
|
||||
Set<byte[]> result = new HashSet<>();
|
||||
for (ByteArrayWrapper key : dds.keys() ){
|
||||
result.add(key.getData());
|
||||
}
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public BigInteger addBalance(byte[] addr, BigInteger value) {
|
||||
|
||||
AccountState account = getAccountState(addr);
|
||||
|
||||
if (account == null)
|
||||
account = createAccount(addr);
|
||||
AccountState account = getAccountStateOrCreateNew(addr);
|
||||
|
||||
BigInteger result = account.addToBalance(value);
|
||||
worldState.update(addr, account.getEncoded());
|
||||
        updateAccountState(addr, account);

        return result;
    }

    @Override
    public BigInteger getBalance(byte[] addr) {

        AccountState account = getAccountState(addr);

        if (account == null)
            return BigInteger.ZERO;

        return account.getBalance();
        return (account == null) ? BigInteger.ZERO : account.getBalance();
    }

    @Override
    public DataWord getStorageValue(byte[] addr, DataWord key) {

        ContractDetails details = getContractDetails(addr);

        if (details == null)
            return null;

        return details.get(key);
        return (details == null) ? null : details.get(key);
    }

    @Override
    public void addStorageRow(byte[] addr, DataWord key, DataWord value) {

        ContractDetails details = getContractDetails(addr);

        if (details == null) {
            createAccount(addr);
            details = getContractDetails(addr);

@@ -378,18 +417,13 @@ public class RepositoryImpl implements Repository {

        details.put(key, value);

        dds.update(addr, details);
        updateContractDetails(addr, details);
    }

    @Override
    public byte[] getCode(byte[] addr) {

        ContractDetails details = getContractDetails(addr);

        if (details == null)
            return null;

        return details.getCode();
        return (details == null) ? null : details.getCode();
    }

    @Override

@@ -403,79 +437,92 @@ public class RepositoryImpl implements Repository {

        details.setCode(code);

        dds.update(addr, details);
        updateContractDetails(addr, details);
    }

    @Override
    public BigInteger getNonce(byte[] addr) {
        return getAccountStateOrCreateNew(addr).getNonce();
    }

    @Nonnull
    private AccountState getAccountStateOrCreateNew(byte[] addr) {
        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);

        return account.getNonce();
        return (account == null) ? createAccount(addr) : account;
    }

    @Override
    public BigInteger increaseNonce(byte[] addr) {

        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);
        AccountState account = getAccountStateOrCreateNew(addr);

        account.incrementNonce();
        worldState.update(addr, account.getEncoded());
        updateAccountState(addr, account);

        return account.getNonce();
    }

    public BigInteger setNonce(byte[] addr, BigInteger nonce) {
    private void updateAccountState(final byte[] addr, final AccountState accountState) {
        doWithAccessCounting(new Functional.InvokeWrapper() {
            @Override
            public void invoke() {
                worldState.update(addr, accountState.getEncoded());
            }
        });
    }

        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);
    public BigInteger setNonce(final byte[] addr, final BigInteger nonce) {
        AccountState account = getAccountStateOrCreateNew(addr);

        account.setNonce(nonce);
        worldState.update(addr, account.getEncoded());
        updateAccountState(addr, account);

        return account.getNonce();
    }

    @Override
    public void delete(byte[] addr) {
        worldState.delete(addr);
        dds.remove(addr);
    public void delete(final byte[] addr) {
        doWithAccessCounting(new Functional.InvokeWrapper() {
            @Override
            public void invoke() {
                worldState.delete(addr);
                dds.remove(addr);
            }
        });
    }

    @Override
    public ContractDetails getContractDetails(byte[] addr) {
        return dds.get(addr);
    public ContractDetails getContractDetails(final byte[] addr) {
        return doWithAccessCounting(new Functional.InvokeWrapperWithResult<ContractDetails>() {
            @Override
            public ContractDetails invoke() {
                return dds.get(addr);
            }
        });
    }

    @Override
    public AccountState getAccountState(byte[] addr) {
    public AccountState getAccountState(final byte[] addr) {
        return doWithAccessCounting(new Functional.InvokeWrapperWithResult<AccountState>() {
            @Override
            public AccountState invoke() {
                AccountState result = null;
                byte[] accountData = worldState.get(addr);

        AccountState result = null;
        byte[] accountData = worldState.get(addr);
                if (accountData.length != 0)
                    result = new AccountState(accountData);

        if (accountData.length != 0)
            result = new AccountState(accountData);

        return result;
                return result;
            }
        });
    }

    @Override
    public AccountState createAccount(byte[] addr) {

    public AccountState createAccount(final byte[] addr) {
        AccountState accountState = new AccountState();
        worldState.update(addr, accountState.getEncoded());

        dds.update(addr, new ContractDetailsImpl());
        updateAccountState(addr, accountState);
        updateContractDetails(addr, new ContractDetailsImpl());

        return accountState;
    }

@@ -506,4 +553,53 @@ public class RepositoryImpl implements Repository {
    public byte[] getRoot() {
        return worldState.getRootHash();
    }

    private void doWithLockedAccess(Functional.InvokeWrapper wrapper) {
        lock.lock();
        try {
            while (accessCounter.get() > 0) {
                if (logger.isDebugEnabled()) {
                    logger.debug("waiting for access ...");
                }
                try {
                    sleep(100);
                } catch (InterruptedException e) {
                    logger.error("Error occurred during access waiting: ", e);
                }
            }

            wrapper.invoke();
        } finally {
            lock.unlock();
        }
    }

    public <R> R doWithAccessCounting(Functional.InvokeWrapperWithResult<R> wrapper) {
        while (lock.isLocked()) {
            if (logger.isDebugEnabled()) {
                logger.debug("waiting for lock releasing ...");
            }
            try {
                sleep(100);
            } catch (InterruptedException e) {
                logger.error("Error occurred during locked access waiting: ", e);
            }
        }
        accessCounter.incrementAndGet();
        try {
            return wrapper.invoke();
        } finally {
            accessCounter.decrementAndGet();
        }
    }

    public void doWithAccessCounting(final Functional.InvokeWrapper wrapper) {
        doWithAccessCounting(new Functional.InvokeWrapperWithResult<Object>() {
            @Override
            public Object invoke() {
                wrapper.invoke();
                return null;
            }
        });
    }
}
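The two helpers at the bottom of RepositoryImpl form a simple flush gate: doWithLockedAccess takes the exclusive lock and spins until the in-flight access counter drains to zero, while doWithAccessCounting spins while the lock is held and then registers itself on the counter for the duration of the call. A self-contained sketch of the same gate follows; the class and method names are illustrative, not part of the commit, and the 100 ms sleep mirrors the polling interval above. One caveat worth noting: the isLocked() check and the counter increment are two separate steps, so a reader that passes the check just as a writer acquires the lock can still slip in; the gate throttles concurrent access rather than strictly excluding it.

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;

// Illustrative flush gate in the style of RepositoryImpl's helpers.
public class AccessGate {

    private final ReentrantLock lock = new ReentrantLock();
    private final AtomicInteger accessCounter = new AtomicInteger();

    // Exclusive section: take the lock, then wait for in-flight accesses to drain.
    public void withLockedAccess(Runnable action) {
        lock.lock();
        try {
            while (accessCounter.get() > 0) {
                sleepQuietly();
            }
            action.run();
        } finally {
            lock.unlock();
        }
    }

    // Counted section: wait out any writer, then register this access.
    public void withCountedAccess(Runnable action) {
        while (lock.isLocked()) {
            sleepQuietly();
        }
        accessCounter.incrementAndGet();
        try {
            action.run();
        } finally {
            accessCounter.decrementAndGet();
        }
    }

    private static void sleepQuietly() {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}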
@@ -4,7 +4,6 @@ import org.ethereum.core.*;
import org.ethereum.net.BlockQueue;

import java.math.BigInteger;

import java.util.List;
import java.util.Set;

@@ -30,9 +29,7 @@ public interface Blockchain {

    public boolean hasParentOnTheChain(Block block);

    public void reset();

    public void close();
    void close();

    public void updateTotalDifficulty(Block block);

@@ -32,7 +32,6 @@ public class BlockLoader {

        String fileSrc = CONFIG.blocksLoader();
        try {

            FileInputStream inputStream = null;
            inputStream = new FileInputStream(fileSrc);
            scanner = new Scanner(inputStream, "UTF-8");

@@ -70,8 +69,4 @@ public class BlockLoader {
        System.out.println(" * Done * ");
        System.exit(0);
    }



}

@@ -201,13 +201,6 @@ public class WorldManager {
     */
    }

    public void reset() {
        logger.info("Resetting blockchain ");
        repository.reset();
        blockchain.reset();
        loadBlockchain();
    }

    @PreDestroy
    public void close() {

@@ -111,7 +111,7 @@ public class Cache {

        float flushSize = (float) this.allocatedMemorySize / 1048576;
        float flushTime = (float) (finish - start) / 1_000_000;
        logger.info(format("Flush state in: %02.2f ms, %d nodes, %02.2fMB", flushTime, batch.size(), flushSize));
        logger.info(format("Flush '%s' in: %02.2f ms, %d nodes, %02.2fMB", dataSource.getName(), flushTime, batch.size(), flushSize));

        this.allocatedMemorySize = 0;
    }
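The reworked log line tags the flush with the data source name; the surrounding arithmetic converts a nanosecond interval to milliseconds (divide by 1,000,000) and a byte count to mebibytes (divide by 1,048,576, i.e. 2^20). A hypothetical helper that mirrors those conversions:

// Hypothetical helper mirroring the unit conversions used by the log line above.
static String flushSummary(String sourceName, long elapsedNanos, int nodes, long allocatedBytes) {
    float flushTime = (float) elapsedNanos / 1_000_000;   // ns -> ms
    float flushSize = (float) allocatedBytes / 1048576;   // bytes -> MB (2^20)
    return String.format("Flush '%s' in: %02.2f ms, %d nodes, %02.2fMB",
            sourceName, flushTime, nodes, flushSize);
}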
@@ -59,5 +59,15 @@ public interface Functional {
     */
        R apply(T t);
    }

    public static interface InvokeWrapper {

        void invoke();
    }

    public static interface InvokeWrapperWithResult<R> {

        R invoke();
    }

}
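InvokeWrapper and InvokeWrapperWithResult are single-method callback types; on a pre-lambda Java this is the standard way to pass a block of code, and RepositoryImpl above supplies them as anonymous classes. A hedged usage sketch follows; the measured helper is invented for illustration, and it assumes the Functional interface above is on the classpath.

// Illustrative only: a caller that accepts the callback types declared above.
public class WrapperDemo {

    static <R> R measured(String label, Functional.InvokeWrapperWithResult<R> body) {
        long start = System.nanoTime();
        try {
            return body.invoke();
        } finally {
            System.out.println(label + " took " + (System.nanoTime() - start) + " ns");
        }
    }

    public static void main(String[] args) {
        String value = measured("lookup", new Functional.InvokeWrapperWithResult<String>() {
            @Override
            public String invoke() {
                return "result";
            }
        });
        System.out.println(value);
    }
}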
@@ -759,7 +759,7 @@ public class RLP {
        if (isNullOrZeroArray(srcData))
            return new byte[]{(byte) OFFSET_SHORT_ITEM};
        else if (isSingleZero(srcData))
        return srcData;
            return srcData;
        else if (srcData.length == 1 && (srcData[0] & 0xFF) < 0x80) {
            return srcData;
        } else if (srcData.length < SIZE_THRESHOLD) {
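The touched branches preserve RLP's single-item rules: a null or empty array encodes as the bare prefix 0x80 (OFFSET_SHORT_ITEM), and a single byte below 0x80 is its own encoding. A minimal standalone sketch of just the single-byte case (not the library's full encodeElement):

// Single-byte RLP rule, in isolation:
static byte[] encodeSingleByte(byte b) {
    if ((b & 0xFF) < 0x80) {
        return new byte[]{b};              // the byte is its own encoding
    }
    return new byte[]{(byte) 0x81, b};     // short-item prefix (0x80 + length 1), then the byte
}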
@@ -3,6 +3,7 @@ package org.ethereum.vm;
import org.ethereum.crypto.ECKey;
import org.ethereum.crypto.HashUtil;
import org.ethereum.util.ByteUtil;
import org.spongycastle.util.encoders.Hex;

/**
 * @author Roman Mandeleil

@@ -121,7 +122,9 @@ public class PrecompiledContracts {
            System.arraycopy(data, 0, h, 0, 32);
            System.arraycopy(data, 32, v, 0, 32);
            System.arraycopy(data, 64, r, 0, 32);
            System.arraycopy(data, 96, s, 0, 32);

            int sLength = data.length < 128 ? data.length - 96 : 32;
            System.arraycopy(data, 96, s, 0, sLength);

            ECKey.ECDSASignature signature = ECKey.ECDSASignature.fromComponents(r, s, v[31]);
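The added sLength guard fixes an out-of-bounds read in the ecrecover precompile: the input layout is h[0..32), v[32..64), r[64..96), s[96..128), and the old code copied a full 32 bytes of s even when the call supplied fewer than 128 bytes of data. The committed guard still assumes at least 96 bytes are present. A generalized form of the same idea, with an invented helper name, would bound every slice:

// Hypothetical generalization of the guard above: copy at most what the input
// provides, leaving the missing tail of the destination zero-filled.
static byte[] sliceSafely(byte[] data, int offset, int size) {
    byte[] out = new byte[size];
    int available = Math.min(size, Math.max(0, data.length - offset));
    System.arraycopy(data, offset, out, 0, available);
    return out;
}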
@@ -16,6 +16,7 @@ import java.math.BigInteger;
import java.util.*;

import static java.lang.String.format;
import static java.math.BigInteger.*;
import static org.ethereum.util.BIUtil.*;
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;

@@ -343,7 +344,7 @@ public class Program {

        // [4] TRANSFER THE BALANCE
        track.addBalance(senderAddress, endowment.negate());
        BigInteger newBalance = BigInteger.ZERO;
        BigInteger newBalance = ZERO;
        if (!invokeData.byTestingSuite()) {
            newBalance = track.addBalance(newAddress, endowment);
        }

@@ -458,7 +459,7 @@ public class Program {

        trackRepository.addBalance(senderAddress, endowment.negate());

        BigInteger contextBalance = BigInteger.ZERO;
        BigInteger contextBalance = ZERO;
        if (!invokeData.byTestingSuite()) {
            contextBalance = trackRepository.addBalance(contextAddress, endowment);
        }

@@ -896,6 +897,9 @@ public class Program {

        byte[] senderAddress = this.getOwnerAddress().getLast20Bytes();
        byte[] codeAddress = msg.getCodeAddress().getLast20Bytes();
        byte[] contextAddress = msg.getType() == MsgType.STATELESS ? senderAddress : codeAddress;

        BigInteger endowment = msg.getEndowment().value();
        BigInteger senderBalance = track.getBalance(senderAddress);
        if (senderBalance.compareTo(endowment) < 0) {

@@ -907,7 +911,8 @@ public class Program {
        byte[] data = this.memoryChunk(msg.getInDataOffs().intValue(),
                msg.getInDataSize().intValue());

        transfer(track, senderAddress, codeAddress, msg.getEndowment().value());
        // Charge for endowment - is not reversible by rollback
        transfer(track, senderAddress, contextAddress, msg.getEndowment().value());

        if (invokeData.byTestingSuite()) {
            // This keeps track of the calls created for a test
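The last hunk is the substantive fix here: for a STATELESS (CALLCODE-style) message the callee's code runs against the caller's own account, so the endowment must be credited to contextAddress rather than to codeAddress; for an ordinary call the two coincide with the callee. A toy ledger makes the difference visible (addresses and amounts are invented):

import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;

// Toy ledger: with a stateless call the value never leaves the caller's account.
public class ContextTransferDemo {

    static Map<String, BigInteger> balances = new HashMap<String, BigInteger>();

    static void transfer(String from, String to, BigInteger value) {
        balances.put(from, balances.get(from).subtract(value));
        balances.put(to, balances.get(to).add(value));
    }

    public static void main(String[] args) {
        balances.put("caller", BigInteger.valueOf(100));
        balances.put("callee", BigInteger.ZERO);

        boolean stateless = true; // CALLCODE-style message
        String contextAddress = stateless ? "caller" : "callee";

        transfer("caller", contextAddress, BigInteger.TEN);
        // stateless: caller stays at 100, callee at 0;
        // a plain call would have moved 10 to the callee instead
        System.out.println(balances);
    }
}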
@@ -58,7 +58,7 @@ public class VM {
    private static final Logger logger = LoggerFactory.getLogger("VM");
    private static final Logger dumpLogger = LoggerFactory.getLogger("dump");
    private static BigInteger _32_ = BigInteger.valueOf(32);
    private static String logString = "[{}]\t Op: [{}] Gas: [{}] Deep: [{}] Hint: [{}]";
    private static String logString = "{} Op: [{}] Gas: [{}] Deep: [{}] Hint: [{}]";

    private static BigInteger MAX_GAS = BigInteger.valueOf(Long.MAX_VALUE);

@@ -1037,7 +1037,7 @@ public class VM {
                        DataWord inSize = program.stackPop();

                        if (logger.isInfoEnabled())
                            logger.info(logString, program.getPC(),
                            logger.info(logString, String.format("%5s", "[" + program.getPC() + "]"),
                                    String.format("%-12s", op.name()),
                                    program.getGas().value(),
                                    program.invokeData.getCallDeep(), hint);

@@ -1068,7 +1068,7 @@ public class VM {
                                + " gas: " + gas.shortHex()
                                + " inOff: " + inDataOffs.shortHex()
                                + " inSize: " + inDataSize.shortHex();
                        logger.info(logString, program.getPC(),
                        logger.info(logString, String.format("%5s", "[" + program.getPC() + "]"),
                                String.format("%-12s", op.name()),
                                program.getGas().value(),
                                program.invokeData.getCallDeep(), hint);

@@ -1125,8 +1125,10 @@ public class VM {
            program.setPreviouslyExecutedOp(op.val());

            if (logger.isInfoEnabled() && !op.equals(CALL)
                    && !op.equals(CALLCODE)
                    && !op.equals(CREATE))
                logger.info(logString, stepBefore, String.format("%-12s",
                logger.info(logString, String.format("%5s", "[" + program.getPC() + "]"),
                        String.format("%-12s",
                        op.name()), program.getGas().longValue(),
                        program.invokeData.getCallDeep(), hint);
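All three logging hunks make the same change: the program counter is pre-formatted as a right-aligned five-character "[n]" token, and a bare {} placeholder replaces the bracketed one, so trace lines stay column-aligned regardless of PC width. A small demonstration of the padding (opcode names here are arbitrary examples):

// Shows the alignment produced by String.format("%5s", "[" + pc + "]").
public class LogAlignDemo {
    public static void main(String[] args) {
        int[] pcs = {7, 42, 1037};
        String[] ops = {"PUSH1", "CALL", "SSTORE"};
        for (int i = 0; i < pcs.length; i++) {
            String pcField = String.format("%5s", "[" + pcs[i] + "]");
            String opField = String.format("%-12s", ops[i]);
            System.out.println(pcField + " Op: [" + opField + "]");
        }
    }
}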
@@ -26,13 +26,13 @@ peer.discovery.ip.list = poc-7.ethdev.com:30303,\
#peer.active.port = 30300
#peer.active.nodeid = 4e94cab3e9a85a22b59f69a2ad1f10ff1eaff5f8d94a0025df18c936a687b6ac99b3fb655677e8b9d08087319bca69ad2ab0b80a9d0ab47296bdc54c8cb09853

peer.active.ip = 46.101.244.204
peer.active.port = 30303
peer.active.nodeid = 8f4dd2cc9b97143985ed129493069b253a570a6f2e55bb61004316b3db9639d8bac77a7d59188f87c747c9984f94e7b999aea285b772a3f8ca5743accb1d3927
#peer.active.ip = 46.101.244.204
#peer.active.port = 30303
#peer.active.nodeid = 8f4dd2cc9b97143985ed129493069b253a570a6f2e55bb61004316b3db9639d8bac77a7d59188f87c747c9984f94e7b999aea285b772a3f8ca5743accb1d3927

# peer.active.ip = 139.162.13.89
# peer.active.port = 30303
# peer.active.nodeid = bf01b54b6bc7faa203286dfb8359ce11d7b1fe822968fb4991f508d6f5a36ab7d9ae8af9b0d61c0467fb08567e0fb71cfb9925a370b69f9ede97927db473d1f5
peer.active.ip = 139.162.13.89
peer.active.port = 30303
peer.active.nodeid = bf01b54b6bc7faa203286dfb8359ce11d7b1fe822968fb4991f508d6f5a36ab7d9ae8af9b0d61c0467fb08567e0fb71cfb9925a370b69f9ede97927db473d1f5

#peer.active.ip = 192.168.122.90
#peer.active.port = 30303

@@ -0,0 +1,187 @@
package org.ethereum.db;

import org.ethereum.config.SystemProperties;
import org.ethereum.core.Block;
import org.ethereum.core.Genesis;
import org.ethereum.datasource.mapdb.MapDBFactory;
import org.ethereum.datasource.mapdb.MapDBFactoryImpl;
import org.ethereum.util.FileUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;

import java.io.File;
import java.io.IOException;
import java.math.BigInteger;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.*;

import static org.junit.Assert.*;

/**
 * @author Mikhail Kalinin
 * @since 09.07.2015
 */
public class BlockQueueTest {

    private static final Logger logger = LoggerFactory.getLogger("test");

    private BlockQueue blockQueue;
    private List<Block> blocks = new ArrayList<>();
    private String testDb;

    @Before
    public void setup() throws InstantiationException, IllegalAccessException, URISyntaxException, IOException {
        URL scenario1 = ClassLoader
                .getSystemResource("blockstore/load.dmp");

        File file = new File(scenario1.toURI());
        List<String> strData = Files.readAllLines(file.toPath(), StandardCharsets.UTF_8);

        Block genesis = Genesis.getInstance();
        blocks.add(genesis);

        for (String blockRLP : strData) {
            Block block = new Block(
                    Hex.decode(blockRLP)
            );

            if (block.getNumber() % 1000 == 0)
                logger.info("adding block.hash: [{}] block.number: [{}]",
                        block.getShortHash(),
                        block.getNumber());

            blocks.add(block);
        }

        logger.info("total blocks loaded: {}", blocks.size());

        BigInteger bi = new BigInteger(32, new Random());
        testDb = "test_db_" + bi;
        SystemProperties.CONFIG.setDataBaseDir(testDb);

        MapDBFactory mapDBFactory = new MapDBFactoryImpl();
        blockQueue = new BlockQueueImpl();
        ((BlockQueueImpl)blockQueue).setMapDBFactory(mapDBFactory);
        blockQueue.open();
    }

    @After
    public void cleanup() {
        blockQueue.close();
        FileUtil.recursiveDelete(testDb);
    }

    @Test // basic checks
    public void test1() {
        blockQueue.add(blocks.get(0));

        // testing: peek()
        Block block = blockQueue.peek();

        assertNotNull(block);

        // testing: validity of loaded block
        assertArrayEquals(blocks.get(0).getEncoded(), block.getEncoded());

        blockQueue.poll();

        // testing: addAll(), close(), open()
        blockQueue.addAll(blocks);

        blockQueue.close();
        blockQueue.open();

        assertEquals(blocks.size(), blockQueue.size());

        // testing: poll()
        long prevNumber = -1;
        for(int i = 0; i < blocks.size(); i++) {
            block = blockQueue.poll();
            assertTrue(block.getNumber() > prevNumber);
            prevNumber = block.getNumber();
        }

        assertNull(blockQueue.peek());
        assertNull(blockQueue.poll());
        assertTrue(blockQueue.isEmpty());

        // testing: add()
        for(Block b : blocks) {
            blockQueue.add(b);
        }

        prevNumber = -1;
        for(int i = 0; i < blocks.size(); i++) {
            block = blockQueue.poll();
            assertTrue(block.getNumber() > prevNumber);
            prevNumber = block.getNumber();
        }
    }

    @Test // concurrency
    public void test2() throws InterruptedException {
        new Thread(new Writer(1)).start();
        new Thread(new Reader(1)).start();
        Thread r2 = new Thread(new Reader(2));
        r2.start();
        r2.join();
    }

    private class Reader implements Runnable {

        private int index;

        public Reader(int index) {
            this.index = index;
        }

        @Override
        public void run() {
            try {
                int nullsCount = 0;
                while (nullsCount < 10) {
                    Block b = blockQueue.poll();
                    logger.info("reader {}: {}", index, b == null ? null : b.getShortHash());
                    if(b == null) {
                        nullsCount++;
                    } else {
                        nullsCount = 0;
                    }
                    Thread.sleep(50);
                }
            } catch (InterruptedException e) {
                logger.error(e.getMessage());
            }
        }
    }

    private class Writer implements Runnable {

        private int index;

        public Writer(int index) {
            this.index = index;
        }

        @Override
        public void run() {
            try {
                for(int i = 0; i < 50; i++) {
                    Block b = blocks.get(i);
                    blockQueue.add(b);
                    logger.info("writer {}: {}", index, b.getShortHash());
                    Thread.sleep(50);
                }
            } catch (InterruptedException e) {
                logger.error(e.getMessage());
            }
        }
    }
}
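The Reader/Writer pair above exercises the MapDB-backed queue from multiple threads; a reader treats ten consecutive empty polls as end-of-stream. A hedged consumer helper in the same style (takeWithRetry is invented; BlockQueue and Block are the types from this test):

// Polls with backoff; returns null once the queue has looked empty
// maxEmptyPolls times in a row.
static Block takeWithRetry(BlockQueue queue, int maxEmptyPolls) throws InterruptedException {
    int emptyPolls = 0;
    while (emptyPolls < maxEmptyPolls) {
        Block block = queue.poll();
        if (block != null) {
            return block;
        }
        emptyPolls++;
        Thread.sleep(50); // same backoff as the Reader above
    }
    return null; // caller treats null as "producer is done"
}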
@@ -0,0 +1,143 @@
package org.ethereum.db;

import org.ethereum.config.SystemProperties;
import org.ethereum.core.Block;
import org.ethereum.datasource.mapdb.MapDBFactory;
import org.ethereum.datasource.mapdb.MapDBFactoryImpl;
import org.ethereum.datasource.mapdb.Serializers;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mapdb.DB;
import org.mapdb.Serializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Map;
import java.util.Set;

/**
 * @author Mikhail Kalinin
 * @since 10.07.2015
 */
@Ignore("long stress test")
public class BlockStressTest {

    private static final Logger logger = LoggerFactory.getLogger("test");

    private static final String TEST_DB_DIR = "test_db/block_stress/";
    private static final String BLOCK_SOURCE = "block_src";
    private Map<byte[], Block> blockSource;
    private DB blockSourceDB;
    private MapDBFactory mapDBFactory;

    @Before
    public void setup() {
        SystemProperties.CONFIG.setDataBaseDir(TEST_DB_DIR);

        mapDBFactory = new MapDBFactoryImpl();
        blockSourceDB = mapDBFactory.createDB(BLOCK_SOURCE);
        blockSource = blockSourceDB.hashMapCreate(BLOCK_SOURCE)
                .keySerializer(Serializer.BYTE_ARRAY)
                .valueSerializer(Serializers.BLOCK)
                .makeOrGet();
    }

    @After
    public void cleanup() {
        blockSourceDB.close();
    }

    @Test // loads blocks from file and store them into disk DB
    public void prepareData() throws URISyntaxException, IOException {
        URL dataURL = ClassLoader.getSystemResource("blockstore/big_data.dmp");

        File file = new File(dataURL.toURI());

        BufferedReader reader = new BufferedReader(new FileReader(file));
        String blockRLP;
        while(null != (blockRLP = reader.readLine())) {
            Block block = new Block(
                    Hex.decode(blockRLP)
            );
            blockSource.put(block.getHash(), block);

            if (block.getNumber() % 10000 == 0)
                logger.info(
                        "adding block.hash: [{}] block.number: [{}]",
                        block.getShortHash(),
                        block.getNumber()
                );
        }
        logger.info("total blocks loaded: {}", blockSource.size());
    }

    @Test // interesting how much time will take reading block by its hash
    public void testBlockSource() {
        long start, end;

        start = System.currentTimeMillis();
        Set<byte[]> hashes = blockSource.keySet();
        end = System.currentTimeMillis();

        logger.info("getKeys: {} ms", end - start);

        start = System.currentTimeMillis();
        int counter = 0;
        for(byte[] hash : hashes) {
            blockSource.get(hash);
            if(++counter % 10000 == 0) {
                logger.info("reading: {} done from {}", counter, hashes.size());
            }
        }
        end = System.currentTimeMillis();

        logger.info("reading: total time {} ms", end - start);
        logger.info("reading: time per block {} ms", (end - start) / (float)hashes.size());
    }

    @Test // benchmarking block queue, writing and reading
    public void testBlockQueue() {
        long start, end;

        BlockQueue blockQueue = new BlockQueueImpl();
        ((BlockQueueImpl)blockQueue).setMapDBFactory(mapDBFactory);
        blockQueue.open();

        Set<byte[]> hashes = blockSource.keySet();

        start = System.currentTimeMillis();
        int counter = 0;
        for(byte[] hash : hashes) {
            Block block = blockSource.get(hash);
            blockQueue.add(block);
            if(++counter % 10000 == 0) {
                logger.info("writing: {} done from {}", counter, hashes.size());
            }
        }
        end = System.currentTimeMillis();

        logger.info("writing: total time {} ms", end - start);
        logger.info("writing: time per block {} ms", (end - start) / (float)hashes.size());

        start = System.currentTimeMillis();
        counter = 0;
        while(null != blockQueue.poll()) {
            if(++counter % 10000 == 0) {
                logger.info("reading: {} done from {}", counter, hashes.size());
            }
        }
        end = System.currentTimeMillis();

        logger.info("reading: total time {} ms", end - start);
        logger.info("reading: time per block {} ms", (end - start) / (float)hashes.size());
    }
}
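Both stress tests time each phase with System.currentTimeMillis() around the loop and then report total and per-item cost. The reporting arithmetic, factored into a hypothetical helper:

// Hypothetical reporting helper matching the benchmark output above.
static void report(String phase, long startMillis, long endMillis, int items) {
    long total = endMillis - startMillis;
    System.out.println(phase + ": total time " + total + " ms");
    System.out.println(phase + ": time per item " + (total / (float) items) + " ms");
}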
@@ -1,9 +1,12 @@
package org.ethereum.db;

import org.ethereum.config.SystemProperties;
import org.ethereum.datasource.mapdb.MapDBFactory;
import org.ethereum.datasource.mapdb.MapDBFactoryImpl;
import org.ethereum.util.FileUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -13,6 +16,7 @@ import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.Set;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

@@ -42,7 +46,11 @@ public class HashStoreTest {
        BigInteger bi = new BigInteger(32, new Random());
        testDb = "test_db_" + bi;
        SystemProperties.CONFIG.setDataBaseDir(testDb);
        hashStore = new HashStore.Builder().build();

        MapDBFactory mapDBFactory = new MapDBFactoryImpl();
        hashStore = new HashStoreImpl();
        ((HashStoreImpl)hashStore).setMapDBFactory(mapDBFactory);
        hashStore.open();
    }

    @After

@@ -57,7 +65,11 @@ public class HashStoreTest {
            hashStore.add(hash);
        }

        // testing peek and poll
        // testing: closing and opening again
        hashStore.close();
        hashStore.open();

        // testing: peek() and poll()
        assertArrayEquals(hashes.get(0), hashStore.peek());
        for(byte[] hash : hashes) {
            assertArrayEquals(hash, hashStore.poll());

@@ -66,7 +78,7 @@ public class HashStoreTest {
        assertNull(hashStore.peek());
        assertNull(hashStore.poll());

        // testing addFirst
        // testing: addFirst()
        for(int i = 0; i < 10; i++) {
            hashStore.add(hashes.get(i));
        }

@@ -89,6 +101,61 @@ public class HashStoreTest {
        r2.join();
    }

    @Ignore("long stress test")
    @Test // big data
    public void test3() {
        int itemsCount = 1_000_000;
        int iterCount = 2;
        Random rnd = new Random(System.currentTimeMillis());
        long start, end;

        for(int i = 0; i < iterCount; i++) {
            logger.info("ITERATION {}", i+1);
            logger.info("====================");

            start = System.currentTimeMillis();
            for(int k = 0; k < itemsCount; k++) {
                BigInteger val = new BigInteger(32*8, rnd);
                hashStore.add(val.toByteArray());
                if(k > 0 && k % (itemsCount / 20) == 0) {
                    logger.info("writing: {}% done", 100 * k / itemsCount);
                }
            }
            end = System.currentTimeMillis();
            logger.info("writing: 100% done");

            logger.info("writing: total time {} ms", end - start);
            logger.info("writing: time per item {} ms", (end - start) / (float)itemsCount);

            start = System.currentTimeMillis();
            Set<Long> keys = hashStore.getKeys();
            assertEquals(keys.size(), itemsCount);
            end = System.currentTimeMillis();
            logger.info("getKeys time: {} ms", end - start);

            int part = itemsCount / 10;
            for(int k = 0; k < itemsCount - part; k++) {
                hashStore.poll();
                if(k > 0 && k % (itemsCount / 20) == 0) {
                    logger.info("reading: {}% done", 100 * k / itemsCount);
                }
            }
            start = System.currentTimeMillis();
            for(int k = 0; k < part; k++) {
                hashStore.poll();
                if(k % (itemsCount / 20) == 0) {
                    logger.info("reading: {}% done", 100 * (itemsCount - part + k) / itemsCount);
                }
            }
            end = System.currentTimeMillis();
            logger.info("reading: 100% done");

            logger.info("reading: total time {} ms", end - start);
            logger.info("reading: time per item {} ms", (end - start) / (float)part);
            logger.info("====================");
        }
    }

    private class Reader implements Runnable {

        private int index;

@@ -1,7 +1,6 @@
package org.ethereum.db;

import org.ethereum.core.Block;
import org.h2.engine.Session;
import org.hibernate.SessionFactory;
import org.junit.Before;
import org.junit.Ignore;

@@ -260,9 +259,4 @@ public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
        blockStore.flush();
    }

    private SessionFactory sessionFactory() {

        return null;
    }

}

@@ -76,7 +76,7 @@ public class IndexedBlockStoreTest {
        BigInteger cummDiff = BigInteger.ZERO;
        for (Block block : blocks){
            cummDiff = cummDiff.add( block.getCumulativeDifficulty() );
            indexedBlockStore.addBlock(block, cummDiff, true);
            indexedBlockStore.saveBlock(block, cummDiff, true);
        }

        // testing: getTotalDifficulty()

@@ -149,10 +149,10 @@ public class IndexedBlockStoreTest {
        int blocksNum = indexedBlockStore.getBlocksByNumber(10000).size();
        assertEquals(0, blocksNum);

        // testing: getNHashesEndWith(byte[], long)
        // testing: getListHashesEndWith(byte[], long)

        block = blocks.get(8003);
        List<byte[]> hashList = indexedBlockStore.getNHashesEndWith(block.getHash(), 100);
        List<byte[]> hashList = indexedBlockStore.getListHashesEndWith(block.getHash(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(8003 - i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -160,10 +160,10 @@ public class IndexedBlockStoreTest {
            assertEquals(hash_, hash);
        }

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -185,7 +185,7 @@ public class IndexedBlockStoreTest {
        BigInteger cummDiff = BigInteger.ZERO;
        for (Block block : blocks){
            cummDiff = cummDiff.add( block.getCumulativeDifficulty() );
            indexedBlockStore.addBlock(block, cummDiff, true);
            indexedBlockStore.saveBlock(block, cummDiff, true);
        }

        // testing: getTotalDifficulty()

@@ -258,10 +258,10 @@ public class IndexedBlockStoreTest {
        int blocksNum = indexedBlockStore.getBlocksByNumber(10000).size();
        assertEquals(0, blocksNum);

        // testing: getNHashesEndWith(byte[], long)
        // testing: getListHashesEndWith(byte[], long)

        block = blocks.get(8003);
        List<byte[]> hashList = indexedBlockStore.getNHashesEndWith(block.getHash(), 100);
        List<byte[]> hashList = indexedBlockStore.getListHashesEndWith(block.getHash(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(8003 - i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -269,10 +269,10 @@ public class IndexedBlockStoreTest {
            assertEquals(hash_, hash);
        }

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -294,7 +294,7 @@ public class IndexedBlockStoreTest {
        BigInteger cummDiff = BigInteger.ZERO;
        for (Block block : blocks){
            cummDiff = cummDiff.add( block.getCumulativeDifficulty() );
            indexedBlockStore.addBlock(block, cummDiff, true);
            indexedBlockStore.saveBlock(block, cummDiff, true);
        }

        indexedBlockStore.flush();

@@ -369,10 +369,10 @@ public class IndexedBlockStoreTest {
        int blocksNum = indexedBlockStore.getBlocksByNumber(10000).size();
        assertEquals(0, blocksNum);

        // testing: getNHashesEndWith(byte[], long)
        // testing: getListHashesEndWith(byte[], long)

        block = blocks.get(8003);
        List<byte[]> hashList = indexedBlockStore.getNHashesEndWith(block.getHash(), 100);
        List<byte[]> hashList = indexedBlockStore.getListHashesEndWith(block.getHash(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(8003 - i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -380,10 +380,10 @@ public class IndexedBlockStoreTest {
            assertEquals(hash_, hash);
        }

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -415,7 +415,7 @@ public class IndexedBlockStoreTest {
        BigInteger cummDiff = BigInteger.ZERO;
        for (Block block : blocks){
            cummDiff = cummDiff.add( block.getCumulativeDifficulty() );
            indexedBlockStore.addBlock(block, cummDiff, true);
            indexedBlockStore.saveBlock(block, cummDiff, true);
        }

        // testing: getTotalDifficulty()

@@ -488,10 +488,10 @@ public class IndexedBlockStoreTest {
        int blocksNum = indexedBlockStore.getBlocksByNumber(10000).size();
        assertEquals(0, blocksNum);

        // testing: getNHashesEndWith(byte[], long)
        // testing: getListHashesEndWith(byte[], long)

        block = blocks.get(8003);
        List<byte[]> hashList = indexedBlockStore.getNHashesEndWith(block.getHash(), 100);
        List<byte[]> hashList = indexedBlockStore.getListHashesEndWith(block.getHash(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(8003 - i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -499,10 +499,10 @@ public class IndexedBlockStoreTest {
            assertEquals(hash_, hash);
        }

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -513,6 +513,7 @@ public class IndexedBlockStoreTest {
        blocksDB.close();
        db.close();

        // testing after: REOPEN

        db = createMapDB(testDir);

@@ -524,10 +525,10 @@ public class IndexedBlockStoreTest {
        indexedBlockStore = new IndexedBlockStore();
        indexedBlockStore.init(indexDB, blocksDB, null);

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -535,6 +536,8 @@ public class IndexedBlockStoreTest {
            assertEquals(hash_, hash);
        }

        blocksDB.close();
        db.close();
        FileUtil.recursiveDelete(testDir);
    }

@@ -565,7 +568,7 @@ public class IndexedBlockStoreTest {
        for (int i = 0; i < preloadSize; ++i){
            Block block = blocks.get(i);
            cummDiff = cummDiff.add( block.getCumulativeDifficulty() );
            indexedBlockStore.addBlock(block, cummDiff, true);
            indexedBlockStore.saveBlock(block, cummDiff, true);
        }

        indexedBlockStore.flush();

@@ -573,7 +576,7 @@ public class IndexedBlockStoreTest {
        for (int i = preloadSize; i < blocks.size(); ++i){
            Block block = blocks.get(i);
            cummDiff = cummDiff.add( block.getCumulativeDifficulty() );
            indexedBlockStore.addBlock(block, cummDiff, true);
            indexedBlockStore.saveBlock(block, cummDiff, true);
        }

        // testing: getTotalDifficulty()

@@ -646,10 +649,10 @@ public class IndexedBlockStoreTest {
        int blocksNum = indexedBlockStore.getBlocksByNumber(10000).size();
        assertEquals(0, blocksNum);

        // testing: getNHashesEndWith(byte[], long)
        // testing: getListHashesEndWith(byte[], long)

        block = blocks.get(8003);
        List<byte[]> hashList = indexedBlockStore.getNHashesEndWith(block.getHash(), 100);
        List<byte[]> hashList = indexedBlockStore.getListHashesEndWith(block.getHash(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(8003 - i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -657,10 +660,10 @@ public class IndexedBlockStoreTest {
            assertEquals(hash_, hash);
        }

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -672,7 +675,6 @@ public class IndexedBlockStoreTest {
        indexedBlockStore.flush();
        blocksDB.close();
        db.close();

        // testing after: REOPEN

        db = createMapDB(testDir);

@@ -685,10 +687,10 @@ public class IndexedBlockStoreTest {
        indexedBlockStore.init(indexDB, blocksDB, null);

        // testing: getNHashesStartWith(long, long)
        // testing: getListHashesStartWith(long, long)

        block = blocks.get(7003);
        hashList = indexedBlockStore.getNHashesStartWith(block.getNumber(), 100);
        hashList = indexedBlockStore.getListHashesStartWith(block.getNumber(), 100);
        for (int i = 0; i < 100; ++i){
            block = blocks.get(7003 + i);
            String hash = Hex.toHexString(hashList.get(i));

@@ -703,7 +705,8 @@ public class IndexedBlockStoreTest {

    }

    // todo: test this
    // public Block getBestBlock()
    // public byte[] getBlockHashByNumber(long blockNumber)

}

@@ -26,10 +26,8 @@ public class GitHubStateTest {
    @Test // this method is mostly for hands-on convenient testing
    public void stSingleTest() throws ParseException, IOException {

        // String shacommit = "cfae68e67aa922e08428c274d1ddbbc2741a975b";

        String json = JSONReader.loadJSONFromCommit("StateTests/stCallCreateCallCodeTest.json", shacommit);
        GitHubJSONTestSuite.runStateTest(json, "CallRecursiveBombPreCall");
        String json = JSONReader.loadJSONFromCommit("StateTests/stPreCompiledContracts.json", shacommit);
        GitHubJSONTestSuite.runStateTest(json, "CallRipemd160_5");
    }

    @Test

@@ -71,7 +69,7 @@ public class GitHubStateTest {

    @Test
    public void stPreCompiledContracts() throws ParseException, IOException {
        String shacommit = "baf4b8479c0b524560137d27e61d7e573dc4ab17";

        Set<String> excluded = new HashSet<>();
        String json = JSONReader.loadJSONFromCommit("StateTests/stPreCompiledContracts.json", shacommit);
        GitHubJSONTestSuite.runStateTest(json, excluded);