Merged latest develop changes.

Adrian Tiberius 2015-07-08 14:23:41 +02:00
parent 805234deb7
commit 74eca228dc
38 changed files with 283 additions and 135 deletions

View File

@ -89,6 +89,11 @@ public class LevelDbDataSource implements KeyValueDataSource {
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public byte[] get(byte[] key) {
return db.get(key);

View File

@ -46,6 +46,11 @@ public class MapDBDataSource implements KeyValueDataSource {
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public byte[] get(byte[] key) {
return map.get(wrap(key));

View File

@ -129,7 +129,7 @@ dependencies {
compile "com.h2database:h2:1.4.187"
// compile "org.mapdb:mapdb:1.0.7"
compile "org.mapdb:mapdb:2.0-alpha3"
compile "org.mapdb:mapdb:2.0-beta1"
compile "org.slf4j:slf4j-log4j12:${slf4jVersion}"
compile "log4j:apache-log4j-extras:${log4jVersion}"

View File

@ -295,6 +295,12 @@ public class BlockchainImpl implements Blockchain {
track.commit();
storeBlock(block, receipts);
if (block.getNumber() == 650_000){
repository.flush();
blockStore.flush();
System.exit(-1);
}
if (needFlush(block)) {
repository.flush();
blockStore.flush();
@ -322,13 +328,16 @@ public class BlockchainImpl implements Blockchain {
}
private boolean needFlush(Block block) {
boolean possibleFlush = CONFIG.flushBlocksIgnoreConsensus() || adminInfo.isConsensus();
if (!possibleFlush)return false;
if (CONFIG.flushBlocksRepoSize() > 0 && repository.getClass().isAssignableFrom(RepositoryImpl.class)) {
return ((RepositoryImpl) repository).getAllocatedMemorySize() > CONFIG.flushBlocksRepoSize();
} else {
boolean isBatchReached = block.getNumber() % CONFIG.flushBlocksBatchSize() == 0;
boolean isConsensus = CONFIG.flushBlocksIgnoreConsensus() || adminInfo.isConsensus();
return isConsensus && isBatchReached;
return isBatchReached;
}
}

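The hunk above interleaves the removed and the added lines, so the resulting flush policy is hard to read in one pass. The following is a self-contained restatement of that policy as a toy method, not the committed code: the parameters stand in for CONFIG.flushBlocksIgnoreConsensus(), adminInfo.isConsensus(), CONFIG.flushBlocksRepoSize(), the repository's allocated memory size and CONFIG.flushBlocksBatchSize(), and the values in main() are purely illustrative.

public class FlushPolicySketch {

    // Mirrors needFlush(Block) as it reads after this change.
    static boolean needFlush(long blockNumber, boolean ignoreConsensus, boolean inConsensus,
                             long repoSizeThreshold, long allocatedRepoBytes, long batchSize) {
        boolean possibleFlush = ignoreConsensus || inConsensus;
        if (!possibleFlush) return false;
        if (repoSizeThreshold > 0) {
            // A configured repository-size threshold takes priority over batch counting.
            return allocatedRepoBytes > repoSizeThreshold;
        }
        // Otherwise flush on every batchSize-th block.
        return blockNumber % batchSize == 0;
    }

    public static void main(String[] args) {
        System.out.println(needFlush(20_000, false, true, 0, 0, 10_000));  // true: batch boundary, in consensus
        System.out.println(needFlush(20_001, false, true, 0, 0, 10_000));  // false: mid-batch
        System.out.println(needFlush(20_000, false, false, 0, 0, 10_000)); // false: not in consensus
    }
}
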
View File

@ -8,6 +8,7 @@ import org.ethereum.util.ByteUtil;
import org.ethereum.util.RLP;
import org.ethereum.util.RLPList;
import org.ethereum.vm.GasCost;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -108,6 +109,22 @@ public class Transaction {
this.signature = signature;
}
public long transactionCost(){
if (!parsed) rlpParse();
long nonZeroes = nonZeroDataBytes();
long zeroVals = getDataSize() - nonZeroes;
return GasCost.TRANSACTION + zeroVals * GasCost.TX_ZERO_DATA + nonZeroes * GasCost.TX_NO_ZERO_DATA;
}
private int getDataSize(){
if (data == null)
return 0;
else
return data.length;
}
public void rlpParse() {

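transactionCost() above charges a flat base fee plus a per-byte fee that prices zero and non-zero payload bytes differently. The sketch below works the arithmetic through with standalone constants; the values 21000 / 4 / 68 are the Frontier-era defaults and are an assumption here, since GasCost itself is not part of this diff.

public class TransactionCostSketch {

    // Assumed Frontier-era values; the real constants live in org.ethereum.vm.GasCost.
    static final long TRANSACTION     = 21_000; // base cost of any transaction
    static final long TX_ZERO_DATA    = 4;      // per zero data byte
    static final long TX_NO_ZERO_DATA = 68;     // per non-zero data byte

    static long transactionCost(byte[] data) {
        if (data == null) return TRANSACTION;
        long nonZeroes = 0;
        for (byte b : data) if (b != 0) nonZeroes++;
        long zeroes = data.length - nonZeroes;
        return TRANSACTION + zeroes * TX_ZERO_DATA + nonZeroes * TX_NO_ZERO_DATA;
    }

    public static void main(String[] args) {
        // A 36-byte payload: 5 non-zero bytes and 31 zero bytes.
        byte[] data = new byte[36];
        data[0] = (byte) 0xa9; data[1] = 0x05; data[2] = (byte) 0x9c; data[3] = (byte) 0xbb;
        data[35] = 0x2a;
        // 21000 + 31*4 + 5*68 = 21464
        System.out.println(transactionCost(data));
    }
}
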
View File

@ -237,6 +237,10 @@ public class TransactionExecutor {
if (vm == null) return;
try {
// Charge basic cost of the transaction
program.spendGas(tx.transactionCost(), "TRANSACTION COST");
if (CONFIG.playVM())
vm.play(program);

View File

@ -1,21 +1,43 @@
package org.ethereum.datasource;
import java.util.Map;
import org.slf4j.Logger;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static org.slf4j.LoggerFactory.getLogger;
public class DataSourcePool {
private static Map<String, KeyValueDataSource> pool = new ConcurrentHashMap<>();
private static Logger logger = getLogger("db");
private static ConcurrentMap<String, DataSource> pool = new ConcurrentHashMap<>();
public static KeyValueDataSource levelDbByName(String name) {
KeyValueDataSource dataSource = pool.get(name);
if (dataSource == null) {
dataSource = new LevelDbDataSource(name);
dataSource.init();
pool.put(name, dataSource);
return (KeyValueDataSource) getDataSourceFromPool(name, new LevelDbDataSource(name));
}
return dataSource;
private static DataSource getDataSourceFromPool(String name, DataSource dataSource) {
DataSource result = pool.putIfAbsent(name, dataSource);
if (result == null) {
synchronized (dataSource) {
dataSource.init();
result = dataSource;
}
logger.info("Data source '{}' created and added to pool.", dataSource.getName());
} else {
logger.info("Data source '{}' returned from pool.", dataSource.getName());
}
return result;
}
public static void closeDataSource(String name){
DataSource dataSource = pool.remove(name);
if (dataSource != null){
synchronized (dataSource) {
dataSource.close();
logger.info("Data source '{}' closed and removed from pool.", dataSource.getName());
}
}
}
}

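A minimal usage sketch of the reworked pool, assuming a working LevelDB setup; the "details-storage/example" name is illustrative and mirrors the "details-storage/" + address keys that ContractDetailsImpl passes to the pool later in this commit.

import org.ethereum.datasource.DataSourcePool;
import org.ethereum.datasource.KeyValueDataSource;

public class DataSourcePoolSketch {
    public static void main(String[] args) {
        // The first call creates, init()s and pools the source; later calls return the pooled instance.
        KeyValueDataSource details = DataSourcePool.levelDbByName("details-storage/example");
        byte[] value = details.get(new byte[]{0x01}); // null when the key is absent
        System.out.println(value == null ? "no value" : value.length + " bytes");

        // Closes the underlying source and evicts it from the pool.
        DataSourcePool.closeDataSource("details-storage/example");
    }
}
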
View File

@ -51,6 +51,11 @@ public class HashMapDB implements KeyValueDataSource {
}
@Override
public String getName() {
return "in-memory";
}
@Override
public Set<byte[]> keys() {
Set<byte[]> keys = new HashSet<>();
@ -69,6 +74,6 @@ public class HashMapDB implements KeyValueDataSource {
@Override
public void close() {
this.storage.clear();
}
}

View File

@ -7,11 +7,7 @@ import java.util.Set;
* @author Roman Mandeleil
* @since 18.01.2015
*/
public interface KeyValueDataSource {
void init();
void setName(String name);
public interface KeyValueDataSource extends DataSource {
byte[] get(byte[] key);
@ -22,6 +18,4 @@ public interface KeyValueDataSource {
Set<byte[]> keys();
void updateBatch(Map<byte[], byte[]> rows);
void close();
}

View File

@ -51,10 +51,6 @@ public class LevelDbDataSource implements KeyValueDataSource {
File dbLocation = fileLocation.getParentFile();
if (!dbLocation.exists()) dbLocation.mkdirs();
if (CONFIG.databaseReset()) {
destroyDB(fileLocation);
}
logger.debug("Initializing new or existing database: '{}'", name);
db = factory.open(fileLocation, options);
} catch (IOException ioe) {
@ -79,6 +75,11 @@ public class LevelDbDataSource implements KeyValueDataSource {
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public byte[] get(byte[] key) {
return db.get(key);

View File

@ -42,6 +42,11 @@ public class MapDBDataSource implements KeyValueDataSource {
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public byte[] get(byte[] key) {
return map.get(key);

View File

@ -53,6 +53,11 @@ public class RedisDataSource extends RedisMap<byte[], byte[]> implements KeyValu
super.setName(name);
}
@Override
public String getName() {
return new String(getNameBytes());
}
@Override
public byte[] get(byte[] key) {
return super.get(key);

View File

@ -33,7 +33,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
return pooledWithResult(new Function<Jedis, Integer>() {
@Override
public Integer apply(Jedis jedis) {
return jedis.hlen(getName()).intValue();
return jedis.hlen(getNameBytes()).intValue();
}
});
}
@ -48,7 +48,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
return pooledWithResult(new Function<Jedis, Boolean>() {
@Override
public Boolean apply(Jedis jedis) {
return jedis.hexists(getName(), serializeKey((K) key));
return jedis.hexists(getNameBytes(), serializeKey((K) key));
}
});
}
@ -63,7 +63,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
return pooledWithResult(new Function<Jedis, V>() {
@Override
public V apply(Jedis jedis) {
byte[] value = jedis.hget(getName(), serializeKey((K) key));
byte[] value = jedis.hget(getNameBytes(), serializeKey((K) key));
return deserialize(value);
}
});
@ -75,8 +75,8 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
@Override
public V apply(Jedis jedis) {
byte[] serializedKey = serializeKey(key);
byte[] oldValue = jedis.hget(getName(), serializedKey);
jedis.hset(getName(), serializedKey, serialize(value));
byte[] oldValue = jedis.hget(getNameBytes(), serializedKey);
jedis.hset(getNameBytes(), serializedKey, serialize(value));
return deserialize(oldValue);
}
});
@ -88,8 +88,8 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
@Override
public V apply(Jedis jedis) {
byte[] serializedKey = serializeKey((K) key);
byte[] oldValue = jedis.hget(getName(), serializedKey);
jedis.hdel(getName(), serializedKey);
byte[] oldValue = jedis.hget(getNameBytes(), serializedKey);
jedis.hdel(getNameBytes(), serializedKey);
return deserialize(oldValue);
}
});
@ -104,7 +104,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
for (Entry<? extends K, ? extends V> entry : m.entrySet()) {
map.put(serializeKey(entry.getKey()), serialize(entry.getValue()));
}
jedis.hmset(getName(), map);
jedis.hmset(getNameBytes(), map);
}
});
}
@ -114,7 +114,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
pooled(new Consumer<Jedis>() {
@Override
public void accept(Jedis jedis) {
jedis.del(getName());
jedis.del(getNameBytes());
}
});
}
@ -125,7 +125,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
@Override
public Set<K> apply(Jedis jedis) {
Set<K> result = new HashSet<K>();
collect(jedis.hkeys(getName()), new Transformer<byte[], K>() {
collect(jedis.hkeys(getNameBytes()), new Transformer<byte[], K>() {
@Override
public K transform(byte[] input) {
return deserializeKey(input);
@ -141,7 +141,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
return pooledWithResult(new Function<Jedis, Collection<V>>() {
@Override
public Collection<V> apply(Jedis jedis) {
return deserialize(jedis.hvals(getName()));
return deserialize(jedis.hvals(getNameBytes()));
}
});
}
@ -152,7 +152,7 @@ public class RedisMap<K, V> extends RedisStorage<V> implements Map<K, V> {
@Override
public Set<Entry<K, V>> apply(Jedis jedis) {
Set<Entry<K, V>> result = new HashSet<Entry<K, V>>();
collect(jedis.hgetAll(getName()).entrySet(), new Transformer<Entry<byte[], byte[]>, Entry<K, V>>() {
collect(jedis.hgetAll(getNameBytes()).entrySet(), new Transformer<Entry<byte[], byte[]>, Entry<K, V>>() {
@Override
public Entry<K, V> transform(Entry<byte[], byte[]> input) {
K key = deserializeKey(input.getKey());

View File

@ -23,7 +23,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
return pooledWithResult(new Function<Jedis, Integer>() {
@Override
public Integer apply(Jedis jedis) {
return jedis.scard(getName()).intValue();
return jedis.scard(getNameBytes()).intValue();
}
});
}
@ -38,7 +38,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
return pooledWithResult(new Function<Jedis, Boolean>() {
@Override
public Boolean apply(Jedis jedis) {
return jedis.sismember(getName(), serialize((T) o));
return jedis.sismember(getNameBytes(), serialize((T) o));
}
});
}
@ -52,7 +52,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
Set<byte[]> members = pooledWithResult(new Function<Jedis, Set<byte[]>>() {
@Override
public Set<byte[]> apply(Jedis jedis) {
return jedis.smembers(getName());
return jedis.smembers(getNameBytes());
}
});
return deserialize(members);
@ -88,7 +88,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
return pooledWithResult(new Function<Jedis, Boolean>() {
@Override
public Boolean apply(Jedis jedis) {
return jedis.sadd(getName(), serialize(c)) == c.size();
return jedis.sadd(getNameBytes(), serialize(c)) == c.size();
}
});
}
@ -101,7 +101,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
byte[] tempName = temporaryName();
try {
jedis.sadd(tempName, serialize(c));
return jedis.scard(getName()) != jedis.sinterstore(getName(), getName(), tempName);
return jedis.scard(getNameBytes()) != jedis.sinterstore(getNameBytes(), getNameBytes(), tempName);
} finally {
jedis.del(tempName);
}
@ -114,7 +114,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
return CollectionUtils.isEmpty(c) || pooledWithResult(new Function<Jedis, Boolean>() {
@Override
public Boolean apply(Jedis jedis) {
return jedis.srem(getName(), serialize(c)) == c.size();
return jedis.srem(getNameBytes(), serialize(c)) == c.size();
}
});
}
@ -124,7 +124,7 @@ public class RedisSet<T> extends RedisStorage<T> implements Set<T> {
pooled(new Consumer<Jedis>() {
@Override
public void accept(Jedis jedis) {
jedis.del(getName());
jedis.del(getNameBytes());
}
});
}

View File

@ -27,7 +27,7 @@ public abstract class RedisStorage<T> {
this.serializer = serializer;
}
protected byte[] getName() {
protected byte[] getNameBytes() {
return name;
}
@ -36,7 +36,7 @@ public abstract class RedisStorage<T> {
}
protected byte[] formatName(String suffix) {
return Bytes.concat(getName(), suffix.getBytes());
return Bytes.concat(getNameBytes(), suffix.getBytes());
}
protected byte[] temporaryName() {

View File

@ -1,6 +1,7 @@
package org.ethereum.db;
import org.ethereum.config.SystemProperties;
import org.ethereum.datasource.DataSourcePool;
import org.ethereum.datasource.KeyValueDataSource;
import org.ethereum.trie.SecureTrie;
import org.ethereum.util.RLP;
@ -215,6 +216,8 @@ public class ContractDetailsImpl implements ContractDetails {
if (externalStorage) {
storageTrie.getCache().setDB(getExternalStorageDataSource());
storageTrie.sync();
DataSourcePool.closeDataSource("details-storage/" + toHexString(address));
}
}

View File

@ -170,12 +170,26 @@ public class RepositoryImpl implements Repository {
detailsCache.clear();
}
@Override
public void flushNoReconnect(){
gLogger.info("flushing to disk");
dds.flush();
worldState.sync();
}
@Override
public void flush() {
gLogger.info("flushing to disk");
dds.flush();
worldState.sync();
byte[] root = worldState.getRootHash();
reset();
worldState.setRoot(root);
}
public int getAllocatedMemorySize() {

View File

@ -244,6 +244,10 @@ public class RepositoryTrack implements Repository {
throw new UnsupportedOperationException();
}
@Override
public void flushNoReconnect() {
throw new UnsupportedOperationException();
}
@Override
public void commit() {

View File

@ -24,6 +24,8 @@ public class CommonConfig {
private RedisConnection redisConnection;
private MapDBFactory mapDBFactory;
Repository repository() {
return new RepositoryImpl(keyValueDataSource(), keyValueDataSource());
}

View File

@ -6,9 +6,11 @@ import org.ethereum.di.components.DaggerEthereumComponent;
import org.ethereum.net.eth.EthHandler;
import org.ethereum.net.shh.ShhHandler;
import org.ethereum.util.FileUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.ethereum.config.SystemProperties.CONFIG;
/**
* @author Roman Mandeleil
@ -22,6 +24,16 @@ public class EthereumFactory {
public static Ethereum createEthereum() {
if (CONFIG.databaseReset()){
FileUtil.recursiveDelete(CONFIG.databaseDir());
logger.info("Database reset done");
}
return createEthereum(null);
}
public static Ethereum createEthereum(Class clazz) {
logger.info("capability eth version: [{}]", EthHandler.VERSION);
logger.info("capability shh version: [{}]", ShhHandler.VERSION);

View File

@ -11,6 +11,8 @@ import org.hibernate.SessionFactory;
*/
public class RemoteConfig {
//@Autowired
CommonConfig commonConfig;
public BlockStore blockStore(SessionFactory sessionFactory){

View File

@ -151,6 +151,7 @@ public interface Repository {
public Repository startTracking();
public void flush();
public void flushNoReconnect();
/**

View File

@ -71,7 +71,8 @@ public class TestProgramInvokeFactory implements ProgramInvokeFactory {
/*** CALLDATALOAD op ***/
/*** CALLDATACOPY op ***/
/*** CALLDATASIZE op ***/
byte[] data = tx.getData() == null ? ByteUtil.EMPTY_BYTE_ARRAY : tx.getData();
byte[] data = tx.isContractCreation() ? ByteUtil.EMPTY_BYTE_ARRAY :( tx.getData() == null ? ByteUtil.EMPTY_BYTE_ARRAY : tx.getData() );
// byte[] data = tx.getData() == null ? ByteUtil.EMPTY_BYTE_ARRAY : tx.getData() ;
/*** PREVHASH op ***/
byte[] lastHash = env.getPreviousHash();

View File

@ -21,6 +21,8 @@ public class Utils {
data = data.substring(2);
if (data.equals("")) return EMPTY_BYTE_ARRAY;
if (data.length() % 2 == 1) data = "0" + data;
return Hex.decode(data);
}

View File

@ -36,7 +36,7 @@ public class RepositoryBuilder {
RepositoryImpl repositoryDummy = new RepositoryImpl(new HashMapDB(), new HashMapDB());
repositoryDummy.updateBatch(stateBatch, detailsBatch);
repositoryDummy.flush();
repositoryDummy.flushNoReconnect();
return repositoryDummy;
}

View File

@ -63,7 +63,7 @@ public class StateTestRunner {
}
track.commit();
repository.flush();
repository.flushNoReconnect();
List<LogInfo> origLogs = executor.getResult().getLogInfoList();
List<LogInfo> postLogs = LogBuilder.build(stateTestCase2.getLogs());

View File

@ -1,13 +1,11 @@
package org.ethereum.net.p2p;
import com.google.common.base.Joiner;
import org.ethereum.net.client.Capability;
import org.ethereum.util.ByteUtil;
import org.ethereum.util.RLP;
import org.ethereum.util.RLPElement;
import org.ethereum.util.RLPList;
import com.google.common.base.Joiner;
import org.spongycastle.util.encoders.Hex;
import java.util.ArrayList;
@ -64,7 +62,6 @@ public class HelloMessage extends P2pMessage {
byte[] p2pVersionBytes = paramsList.get(0).getRLPData();
this.p2pVersion = p2pVersionBytes != null ? p2pVersionBytes[0] : 0;
try {
byte[] clientIdBytes = paramsList.get(1).getRLPData();
this.clientId = new String(clientIdBytes != null ? clientIdBytes : EMPTY_BYTE_ARRAY);
@ -87,10 +84,6 @@ public class HelloMessage extends P2pMessage {
byte[] peerIdBytes = paramsList.get(4).getRLPData();
this.peerId = Hex.toHexString(peerIdBytes);
}
catch ( Exception e ) {
System.out.println(e.getMessage());
}
this.parsed = true;
}
@ -102,7 +95,7 @@ public class HelloMessage extends P2pMessage {
Capability capability = this.capabilities.get(i);
capabilities[i] = RLP.encodeList(
RLP.encodeElement(capability.getName().getBytes()),
RLP.encodeElement(new byte[]{capability.getVersion()}));
RLP.encodeInt(capability.getVersion()));
}
byte[] capabilityList = RLP.encodeList(capabilities);
byte[] peerPort = RLP.encodeInt(this.listenPort);

View File

@ -759,7 +759,7 @@ public class RLP {
if (isNullOrZeroArray(srcData))
return new byte[]{(byte) OFFSET_SHORT_ITEM};
else if (isSingleZero(srcData))
return new byte[]{00};
return srcData;
else if (srcData.length == 1 && (srcData[0] & 0xFF) < 0x80) {
return srcData;
} else if (srcData.length < SIZE_THRESHOLD) {

View File

@ -29,8 +29,6 @@ public interface ProgramInvoke {
public byte[] getDataCopy(DataWord offsetData, DataWord lengthData);
public int countNonZeroData();
public DataWord getPrevHash();
public DataWord getCoinbase();

View File

@ -66,7 +66,7 @@ public class ProgramInvokeFactoryImpl implements ProgramInvokeFactory {
/*** CALLDATALOAD op ***/
/*** CALLDATACOPY op ***/
/*** CALLDATASIZE op ***/
byte[] data = tx.getData() == null ? ByteUtil.EMPTY_BYTE_ARRAY : tx.getData();
byte[] data = tx.isContractCreation() ? ByteUtil.EMPTY_BYTE_ARRAY :( tx.getData() == null ? ByteUtil.EMPTY_BYTE_ARRAY : tx.getData() );
/*** PREVHASH op ***/
byte[] lastHash = lastBlock.getHash();

View File

@ -194,17 +194,6 @@ public class ProgramInvokeImpl implements ProgramInvoke {
}
@Override
public int countNonZeroData() {
int counter = 0;
for (byte aMsgData : msgData) {
if (aMsgData != 0) ++counter;
}
return counter;
}
/* PREVHASH op */
public DataWord getPrevHash() {
return prevHash;

View File

@ -157,18 +157,6 @@ public class ProgramInvokeMockImpl implements ProgramInvoke {
return new DataWord(prevHash);
}
@Override
public int countNonZeroData() {
int counter = 0;
for (byte aMsgData : msgData) {
if (aMsgData != 0) ++counter;
}
return counter;
}
@Override
public DataWord getCoinbase() {
byte[] coinBase = Hex.decode("E559DE5527492BCB42EC68D07DF0742A98EC3F1E");

View File

@ -119,6 +119,12 @@ public class Storage implements Repository, ProgramTraceListenerAware {
repository.flush();
}
@Override
public void flushNoReconnect() {
throw new UnsupportedOperationException();
}
@Override
public void commit() {
repository.commit();

View File

@ -1144,19 +1144,6 @@ public class VM {
public void play(Program program) {
try {
// In case the program invoked by wire got
// transaction, this will be the gas cost,
// otherwise the call done by other contract
// charged by CALL op
if (program.invokeData.byTransaction()) {
program.spendGas(GasCost.TRANSACTION, "TRANSACTION");
int dataSize = program.invokeData.getDataSize().intValue();
int nonZeroesVals = program.invokeData.countNonZeroData();
int zeroVals = dataSize - nonZeroesVals;
program.spendGas(GasCost.TX_NO_ZERO_DATA * nonZeroesVals, "DATA");
program.spendGas(GasCost.TX_ZERO_DATA * zeroVals, "DATA");
}
if (program.invokeData.byTestingSuite()) return;

View File

@ -1,9 +1,18 @@
package org.ethereum;
import org.ethereum.db.IndexedBlockStore;
import org.ethereum.vm.DataWord;
import org.mapdb.DB;
import org.mapdb.DBMaker;
import org.mapdb.Serializer;
import java.io.File;
import java.util.List;
import java.util.Map;
import java.util.Random;
import static org.ethereum.db.IndexedBlockStore.BLOCK_INFO_SERIALIZER;
public final class TestUtils {
private TestUtils() {
@ -22,4 +31,30 @@ public final class TestUtils {
public static byte[] randomAddress() {
return randomBytes(20);
}
public static Map<Long, List<IndexedBlockStore.BlockInfo>> createIndexMap(DB db){
Map<Long, List<IndexedBlockStore.BlockInfo>> index = db.hashMapCreate("index")
.keySerializer(Serializer.LONG)
.valueSerializer(BLOCK_INFO_SERIALIZER)
.makeOrGet();
return index;
}
public static DB createMapDB(String testDBDir){
String blocksIndexFile = testDBDir + "/blocks/index";
File dbFile = new File(blocksIndexFile);
if (!dbFile.getParentFile().exists()) dbFile.getParentFile().mkdirs();
DB db = DBMaker.fileDB(dbFile)
.transactionDisable()
.closeOnJvmShutdown()
.make();
return db;
}
}

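A hypothetical caller of the two new test helpers; the "target/test-db" directory is illustrative.

import org.ethereum.TestUtils;
import org.ethereum.db.IndexedBlockStore;
import org.mapdb.DB;

import java.util.List;
import java.util.Map;

public class BlockIndexSetupSketch {
    public static void main(String[] args) {
        DB db = TestUtils.createMapDB("target/test-db"); // backing file lands under target/test-db/blocks/index
        Map<Long, List<IndexedBlockStore.BlockInfo>> index = TestUtils.createIndexMap(db);
        System.out.println("index entries: " + index.size());
        db.close(); // transactionDisable() was used, so no explicit commit() is needed
    }
}
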
View File

@ -19,19 +19,22 @@ import static org.ethereum.jsontestsuite.JSONReader.getFileNamesForTreeSha;
public class GitHubStateTest {
//SHACOMMIT of tested commit, ethereum/tests.git
public String shacommit = "baf4b8479c0b524560137d27e61d7e573dc4ab17";
public String shacommit = "cfae68e67aa922e08428c274d1ddbbc2741a975b";
@Ignore
@Test // this method is mostly for hands-on convenient testing
public void stSingleTest() throws ParseException, IOException {
String json = JSONReader.loadJSONFromCommit("StateTests/stPreCompiledContracts.json", shacommit);
GitHubJSONTestSuite.runStateTest(json, "CallEcrecover0_BonusGas");
// String shacommit = "cfae68e67aa922e08428c274d1ddbbc2741a975b";
String json = JSONReader.loadJSONFromCommit("StateTests/stCallCreateCallCodeTest.json", shacommit);
GitHubJSONTestSuite.runStateTest(json, "CallRecursiveBombPreCall");
}
@Test
public void stExample() throws ParseException, IOException {
Set<String> excluded = new HashSet<>();
String json = JSONReader.loadJSONFromCommit("StateTests/stExample.json", shacommit);
GitHubJSONTestSuite.runStateTest(json, excluded);
@ -39,6 +42,7 @@ public class GitHubStateTest {
@Test
public void stCallCreateCallCodeTest() throws ParseException, IOException {
Set<String> excluded = new HashSet<>();
excluded.add("createJS_ExampleContract"); //FIXME Bug on CPP testrunner, storage/SSTORE
excluded.add("Callcode1024OOG");
@ -46,7 +50,7 @@ public class GitHubStateTest {
excluded.add("callcodeWithHighValue");
excluded.add("callWithHighValue");
excluded.add("Call1024PreCalls");
excluded.add("CallRecursiveBombPreCall");
excluded.add("CallRecursiveBombPreCall"); // FIXME gas not BI limit
String json = JSONReader.loadJSONFromCommit("StateTests/stCallCreateCallCodeTest.json", shacommit);
GitHubJSONTestSuite.runStateTest(json, excluded);
}
@ -67,7 +71,7 @@ public class GitHubStateTest {
@Test
public void stPreCompiledContracts() throws ParseException, IOException {
String shacommit = "baf4b8479c0b524560137d27e61d7e573dc4ab17";
Set<String> excluded = new HashSet<>();
String json = JSONReader.loadJSONFromCommit("StateTests/stPreCompiledContracts.json", shacommit);
GitHubJSONTestSuite.runStateTest(json, excluded);
@ -100,7 +104,6 @@ public class GitHubStateTest {
GitHubJSONTestSuite.runStateTest(json, excluded);
}
@Ignore
@Test
public void stSolidityTest() throws ParseException, IOException {
Set<String> excluded = new HashSet<>();
@ -126,11 +129,9 @@ public class GitHubStateTest {
@Test
public void stSpecialTest() throws ParseException, IOException {
Set<String> excluded = new HashSet<>();
excluded.add("txfrom0_deja"); // (!!!) FIXME fix them as soon as possible
excluded.add("JUMPDEST_AttackwithJump"); // (!!!) FIXME fix them as soon as possible
excluded.add("JUMPDEST_Attack"); // (!!!) FIXME fix them as soon as possible
String json = JSONReader.loadJSONFromCommit("StateTests/stSpecialTest.json", shacommit);
GitHubJSONTestSuite.runStateTest(json, excluded);
}
@ -141,7 +142,6 @@ public class GitHubStateTest {
GitHubJSONTestSuite.runStateTest(json);
}
//@Ignore
@Test
public void stSystemOperationsTest() throws IOException {
@ -155,6 +155,7 @@ public class GitHubStateTest {
@Test
public void stTransactionTest() throws ParseException, IOException {
Set<String> excluded = new HashSet<>();
excluded.add("OverflowGasRequire"); //FIXME wont work until we use gaslimit as long
excluded.add("EmptyTransaction2"); // Buggy testcase

View File

@ -5,6 +5,8 @@ import org.ethereum.crypto.HashUtil;
import com.cedarsoftware.util.DeepEquals;
import org.ethereum.db.ByteArrayWrapper;
import org.ethereum.net.client.Capability;
import org.ethereum.net.p2p.HelloMessage;
import org.junit.Ignore;
import org.junit.Test;
@ -1052,4 +1054,40 @@ public class RLPTest {
}
@Test // capabilities: (eth:60, bzz:0, shh:2)
public void testEncodeHelloMessageCap0(){
List<Capability> capabilities = new ArrayList<>();
capabilities.add(new Capability("eth", (byte) 0x60));
capabilities.add(new Capability("shh", (byte) 0x02));
capabilities.add(new Capability("bzz", (byte) 0x00));
HelloMessage helloMessage = new HelloMessage((byte)4,
"Geth/v0.9.29-4182e20e/windows/go1.4.2",
capabilities , 30303,
"a52205ce10b39be86507e28f6c3dc08ab4c3e8250e062ec47c6b7fa13cf4a4312d68d6c340315ef953ada7e19d69123a1b902ea84ec00aa5386e5d550e6c550e");
byte[] rlp = helloMessage.getEncoded();
HelloMessage helloMessage_ = new HelloMessage(rlp);
String eth = helloMessage_.getCapabilities().get(0).getName();
byte eth_60 = helloMessage_.getCapabilities().get(0).getVersion();
assertEquals("eth", eth);
assertEquals(0x60, eth_60);
String shh = helloMessage_.getCapabilities().get(1).getName();
byte shh_02 = helloMessage_.getCapabilities().get(1).getVersion();
assertEquals("shh", shh);
assertEquals(0x02, shh_02);
String bzz = helloMessage_.getCapabilities().get(2).getName();
byte bzz_00 = helloMessage_.getCapabilities().get(2).getVersion();
assertEquals("bzz", bzz);
assertEquals(0x00, bzz_00);
}
}