Merged latest develop changes.
parent b36e784bd5
commit 0250ffbd65
@@ -13,6 +13,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.ethereum.android.jsonrpc.JsonRpcServer;

import static org.ethereum.config.SystemProperties.CONFIG;

public class EthereumManager {

    private static final Logger logger = LoggerFactory.getLogger("manager");

@@ -38,10 +40,13 @@ public class EthereumManager {

    public void connect() {

        ethereum.connect(SystemProperties.CONFIG.activePeerIP(),
                SystemProperties.CONFIG.activePeerPort(),
                SystemProperties.CONFIG.activePeerNodeid());
        //ethereum.getBlockchain();
        if (CONFIG.blocksLoader().equals("")) {
            ethereum.connect(SystemProperties.CONFIG.activePeerIP(),
                    SystemProperties.CONFIG.activePeerPort(),
                    SystemProperties.CONFIG.activePeerNodeid());
        } else {
            ethereum.getBlockLoader().loadBlocks();
        }
    }

    public void startPeerDiscovery() {

@@ -110,4 +110,14 @@ public class BlockStoreImpl implements BlockStore {
        return database.getTransactionReceiptByHash(hash);
    }

    @Override
    public void load() {

    }

    @Override
    public void flush() {

    }
}

@@ -32,7 +32,7 @@ repositories {
sourceCompatibility = 1.7

mainClassName = 'org.ethereum.Start'
applicationDefaultJvmArgs = ["-server", "-Xms2g"]
applicationDefaultJvmArgs = ["-server", "-Xms3g", "-Xss32m"]

ext.generatedSrcDir = file('src/gen/java')

@@ -155,7 +155,7 @@ dependencies {
    compile 'org.apache.commons:commons-collections4:4.0'
    compile 'commons-io:commons-io:2.4'
    compile "commons-codec:commons-codec:1.10"
    compile 'org.hsqldb:hsqldb:1.8.0.10' // best performance - do not upgrade!
    compile "com.h2database:h2:1.4.187"
    compile "org.hibernate:hibernate-core:${hibernateVersion}"
    compile "org.hibernate:hibernate-entitymanager:${hibernateVersion}"

@@ -168,4 +168,4 @@ dependencies {
    testCompile "junit:junit:${junitVersion}"
    testCompile 'com.google.dagger:dagger:2.1-SNAPSHOT'
    testCompile 'com.google.dagger:dagger-compiler:2.0'
}
}

@@ -71,6 +71,7 @@ public class AccountState {
    }

    public void setNonce(BigInteger nonce) {
        rlpEncoded = null;
        this.nonce = nonce;
    }

@@ -56,7 +56,6 @@ public class Block {
    public Block(byte[] rawData) {
        logger.debug("new from [" + Hex.toHexString(rawData) + "]");
        this.rlpEncoded = rawData;
        this.parsed = false;
    }

    public Block(BlockHeader header, List<Transaction> transactionsList, List<BlockHeader> uncleList) {

@@ -360,7 +359,7 @@ public class Block {
        parseTxs(txTransactions);
        String calculatedRoot = Hex.toHexString(txsState.getRootHash());
        if (!calculatedRoot.equals(Hex.toHexString(expectedRoot)))
            logger.error("Added tx receipts don't match the given txsStateRoot");
            logger.error("Transactions trie root validation failed for block #{}", this.header.getNumber());
    }

    /**

@@ -1,6 +1,7 @@
package org.ethereum.core;

import org.ethereum.config.Constants;
import org.ethereum.config.SystemProperties;
import org.ethereum.crypto.HashUtil;
import org.ethereum.db.BlockStore;
import org.ethereum.facade.Blockchain;

@@ -24,8 +25,8 @@ import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.math.BigInteger;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.*;

import javax.inject.Inject;

@@ -34,9 +35,7 @@ import javax.inject.Singleton;
import static org.ethereum.config.Constants.*;
import static org.ethereum.config.SystemProperties.CONFIG;
import static org.ethereum.core.Denomination.SZABO;
import static org.ethereum.core.ImportResult.EXIST;
import static org.ethereum.core.ImportResult.NO_PARENT;
import static org.ethereum.core.ImportResult.SUCCESS;
import static org.ethereum.core.ImportResult.*;

/**
 * The Ethereum blockchain is in many ways similar to the Bitcoin blockchain,

@@ -110,6 +109,7 @@ public class BlockchainImpl implements Blockchain {
    public BlockchainImpl() {
    }

    //todo: autowire over constructor
    @Inject
    public BlockchainImpl(BlockStore blockStore, Repository repository,

@@ -172,14 +172,13 @@ public class BlockchainImpl implements Blockchain {

    public ImportResult tryToConnect(Block block) {

        recordBlock(block);

        if (logger.isInfoEnabled())
            logger.info("Try connect block hash: {}, number: {}",
                    Hex.toHexString(block.getHash()).substring(0, 6),
                    block.getNumber());

        if (blockStore.getBlockByHash(block.getHash()) != null) {
        if (blockStore.getBestBlock().getNumber() >= block.getNumber() &&
                blockStore.getBlockByHash(block.getHash()) != null) {

            if (logger.isDebugEnabled())
                logger.debug("Block already exist hash: {}, number: {}",

@@ -194,6 +193,7 @@ public class BlockchainImpl implements Blockchain {
        // to connect to the main chain
        if (bestBlock.isParentOf(block)) {
            add(block);
            recordBlock(block);
            return SUCCESS;
        } else {
            if (1 == 1) // FIXME: WORKARROUND

@@ -292,11 +292,14 @@ public class BlockchainImpl implements Blockchain {
        //System.out.println(" Receipts listroot is: " + receiptListHash + " logbloomlisthash is " + logBloomListHash);

        track.commit();
        repository.flush(); // saving to the disc

        storeBlock(block, receipts);

        if (block.getNumber() % 20_000 == 0) {
            repository.flush();
            blockStore.flush();
        }

        // Remove all wallet transactions as they already approved by the net
        wallet.removeTransactions(block.getTransactionsList());

@@ -304,13 +307,12 @@ public class BlockchainImpl implements Blockchain {
        clearPendingTransactions(block.getTransactionsList());

        listener.trace(String.format("Block chain size: [ %d ]", this.getSize()));
        listener.onBlock(block);
        listener.onBlockReciepts(receipts);
        listener.onBlock(block, receipts);

        if (blockQueue != null &&
                blockQueue.size() == 0 &&
                !syncDoneCalled &&
                channelManager.isAllSync()) {
                blockQueue.size() == 0 &&
                !syncDoneCalled &&
                channelManager.isAllSync()) {

            logger.info("Sync done");
            syncDoneCalled = true;

@@ -489,7 +491,7 @@ public class BlockchainImpl implements Blockchain {
                programInvokeFactory, block, listener, totalGasUsed);

        executor.init();
        executor.execute2();
        executor.execute();
        executor.go();
        executor.finalization();

@@ -567,18 +569,20 @@ public class BlockchainImpl implements Blockchain {
        /* Debug check to see if the state is still as expected */
        String blockStateRootHash = Hex.toHexString(block.getStateRoot());
        String worldStateRootHash = Hex.toHexString(repository.getRoot());
        if (!blockStateRootHash.equals(worldStateRootHash)) {

            stateLogger.error("BLOCK: STATE CONFLICT! block: {} worldstate {} mismatch", block.getNumber(), worldStateRootHash);
            adminInfo.lostConsensus();
        if(!SystemProperties.CONFIG.blockChainOnly())
        if (!blockStateRootHash.equals(worldStateRootHash)) {

            System.out.println("CONFLICT: BLOCK #" + block.getNumber() );
            System.exit(1);
            // in case of rollback hard move the root
            // Block parentBlock = blockStore.getBlockByHash(block.getParentHash());
            // repository.syncToRoot(parentBlock.getStateRoot());
            // todo: after the rollback happens other block should be requested
        }
            stateLogger.error("BLOCK: STATE CONFLICT! block: {} worldstate {} mismatch", block.getNumber(), worldStateRootHash);
            adminInfo.lostConsensus();

            System.out.println("CONFLICT: BLOCK #" + block.getNumber() );
            // System.exit(1);
            // in case of rollback hard move the root
            // Block parentBlock = blockStore.getBlockByHash(block.getParentHash());
            // repository.syncToRoot(parentBlock.getStateRoot());
            // todo: after the rollback happens other block should be requested
        }

        blockStore.saveBlock(block, receipts);
        setBestBlock(block);

@@ -652,11 +656,10 @@ public class BlockchainImpl implements Blockchain {
        if (!CONFIG.recordBlocks()) return;

        if (block.getNumber() == 1) {
            //FileSystemUtils.deleteRecursively(new File(CONFIG.dumpDir()));
            try {
                FileUtils.forceDelete(new File(CONFIG.dumpDir()));
                FileUtils.forceDelete(new File(CONFIG.dumpDir()));
            } catch (IOException e) {
                e.printStackTrace();
                logger.error(e.getMessage(), e);
            }
        }

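The hunk above changes the flushing policy: instead of flushing the repository after every block, both the repository and the block store are flushed only every 20,000 blocks. A minimal sketch of that batching policy, where the interval constant and the Runnable parameters are illustrative and not the project's actual API:

    // Sketch only: mirrors the "block.getNumber() % 20_000 == 0" check added in this hunk.
    class FlushPolicy {
        static final long FLUSH_INTERVAL = 20_000;

        static void afterBlock(long blockNumber, Runnable repositoryFlush, Runnable blockStoreFlush) {
            if (blockNumber % FLUSH_INTERVAL == 0) {
                repositoryFlush.run();   // persist accumulated world-state changes
                blockStoreFlush.run();   // persist stored blocks and receipts
            }
        }
    }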
@@ -47,6 +47,7 @@ public class TransactionExecutor {
    private VM vm;
    private Program program;

    PrecompiledContracts.PrecompiledContract precompiledContract;

    long m_endGas = 0;
    long basicTxCost = 0;

@@ -138,7 +139,7 @@ public class TransactionExecutor {
        readyToExecute = true;
    }

    public void execute2() {
    public void execute() {

        if (!readyToExecute) return;

@@ -148,6 +149,7 @@ public class TransactionExecutor {
        BigInteger txGasCost = toBI(tx.getGasPrice()).multiply(toBI(txGasLimit));
        track.addBalance(tx.getSender(), txGasCost.negate());

        if (logger.isInfoEnabled())
            logger.info("Paying: txGasCost: [{}], gasPrice: [{}], gasLimit: [{}]", txGasCost, toBI(tx.getGasPrice()), txGasLimit);

@@ -162,17 +164,43 @@ public class TransactionExecutor {
        if (!readyToExecute) return;

        byte[] targetAddress = tx.getReceiveAddress();
        byte[] code = track.getCode(targetAddress);
        if (code.length > 0) {
            ProgramInvoke programInvoke =
                    programInvokeFactory.createProgramInvoke(tx, currentBlock, cacheTrack, blockStore);

            this.vm = new VM();
            this.program = new Program(code, programInvoke);
        precompiledContract =
                PrecompiledContracts.getContractForAddress(new DataWord(targetAddress));

        if (precompiledContract != null) {

            long requiredGas = precompiledContract.getGasForData(tx.getData());
            long txGasLimit = toBI(tx.getGasLimit()).longValue();

            if (requiredGas > txGasLimit) {
                // no refund
                // no endowment
                return;
            } else {

                m_endGas = txGasLimit - requiredGas - basicTxCost;
                // BigInteger refundCost = toBI(m_endGas * toBI( tx.getGasPrice() ).longValue() );
                // track.addBalance(tx.getSender(), refundCost);

                // FIXME: save return for vm trace
                byte[] out = precompiledContract.execute(tx.getData());
            }

        } else {

            m_endGas = toBI(tx.getGasLimit()).longValue() - basicTxCost;
            byte[] code = track.getCode(targetAddress);
            if (code.length > 0) {
                ProgramInvoke programInvoke =
                        programInvokeFactory.createProgramInvoke(tx, currentBlock, cacheTrack, blockStore);

                this.vm = new VM();
                this.program = new Program(code, programInvoke);

            } else {

                m_endGas = toBI(tx.getGasLimit()).longValue() - basicTxCost;
            }
        }

        BigInteger endowment = toBI(tx.getValue());

@@ -224,6 +252,8 @@ public class TransactionExecutor {
            m_endGas -= returnDataGasValue.longValue();
            initCode = result.getHReturn();
            cacheTrack.saveCode(tx.getContractAddress(), initCode);
        } else {
            result.setHReturn(initCode);
        }
    }

@@ -257,7 +287,7 @@ public class TransactionExecutor {

    public void finalization() {
        if (!readyToExecute) return;
        if (!readyToExecute ) return;

        cacheTrack.commit();

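The new call path above charges a precompiled contract before running it: if the gas the precompile requires exceeds the transaction gas limit, execution stops early with no refund and no endowment transfer; otherwise the leftover gas is the limit minus the precompile cost and the basic transaction cost. A self-contained sketch of that accounting (the method and the sentinel value are illustrative, not members of TransactionExecutor):

    // Illustrative sketch of the precompile gas check introduced in execute()/call().
    static long remainingGasAfterPrecompile(long txGasLimit, long requiredGas, long basicTxCost) {
        if (requiredGas > txGasLimit) {
            // mirrors the early return in the diff: no refund, no endowment
            return -1; // sentinel meaning "out of gas"
        }
        // e.g. limit 100_000, precompile needs 3_000, basic tx cost 21_000 -> 76_000 left
        return txGasLimit - requiredGas - basicTxCost;
    }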
@@ -1,5 +1,6 @@
package org.ethereum.crypto;

import org.ethereum.crypto.cryptohash.Keccak256;
import org.ethereum.util.RLP;
import org.ethereum.util.Utils;
import org.spongycastle.crypto.Digest;

@@ -35,7 +36,9 @@ public class HashUtil {
    }

    public static byte[] sha3(byte[] input) {
        return SHA3Helper.sha3(input);
        Keccak256 digest = new Keccak256();
        digest.update(input);
        return digest.digest();
    }

    public static byte[] sha3(byte[] input, int start, int length) {

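sha3() now delegates to the Keccak256 engine added later in this commit instead of SHA3Helper. A minimal sketch of the equivalent call sequence (the payload value is only an example):

    byte[] payload = "hello".getBytes();

    Keccak256 digest = new Keccak256();
    digest.update(payload);          // data may also be fed across several update() calls
    byte[] hash = digest.digest();   // 32-byte Keccak-256 output; the engine resets itself

    // HashUtil.sha3(payload) wraps exactly this sequence.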
@ -0,0 +1,165 @@
|
|||
// $Id: Digest.java 232 2010-06-17 14:19:24Z tp $
|
||||
|
||||
package org.ethereum.crypto.cryptohash;
|
||||
|
||||
/**
|
||||
* <p>This interface documents the API for a hash function. This
|
||||
* interface somewhat mimics the standard {@code
|
||||
* java.security.MessageDigest} class. We do not extend that class in
|
||||
* order to provide compatibility with reduced Java implementations such
|
||||
* as J2ME. Implementing a {@code java.security.Provider} compatible
|
||||
* with Sun's JCA ought to be easy.</p>
|
||||
*
|
||||
* <p>A {@code Digest} object maintains a running state for a hash
|
||||
* function computation. Data is inserted with {@code update()} calls;
|
||||
* the result is obtained from a {@code digest()} method (where some
|
||||
* final data can be inserted as well). When a digest output has been
|
||||
* produced, the objet is automatically resetted, and can be used
|
||||
* immediately for another digest operation. The state of a computation
|
||||
* can be cloned with the {@link #copy} method; this can be used to get
|
||||
* a partial hash result without interrupting the complete
|
||||
* computation.</p>
|
||||
*
|
||||
* <p>{@code Digest} objects are stateful and hence not thread-safe;
|
||||
* however, distinct {@code Digest} objects can be accessed concurrently
|
||||
* without any problem.</p>
|
||||
*
|
||||
* <pre>
|
||||
* ==========================(LICENSE BEGIN)============================
|
||||
*
|
||||
* Copyright (c) 2007-2010 Projet RNRT SAPHIR
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*
|
||||
* ===========================(LICENSE END)=============================
|
||||
* </pre>
|
||||
*
|
||||
* @version $Revision: 232 $
|
||||
* @author Thomas Pornin <thomas.pornin@cryptolog.com>
|
||||
*/
|
||||
|
||||
public interface Digest {
|
||||
|
||||
/**
|
||||
* Insert one more input data byte.
|
||||
*
|
||||
* @param in the input byte
|
||||
*/
|
||||
public void update(byte in);
|
||||
|
||||
/**
|
||||
* Insert some more bytes.
|
||||
*
|
||||
* @param inbuf the data bytes
|
||||
*/
|
||||
public void update(byte[] inbuf);
|
||||
|
||||
/**
|
||||
* Insert some more bytes.
|
||||
*
|
||||
* @param inbuf the data buffer
|
||||
* @param off the data offset in {@code inbuf}
|
||||
* @param len the data length (in bytes)
|
||||
*/
|
||||
public void update(byte[] inbuf, int off, int len);
|
||||
|
||||
/**
|
||||
* Finalize the current hash computation and return the hash value
|
||||
* in a newly-allocated array. The object is resetted.
|
||||
*
|
||||
* @return the hash output
|
||||
*/
|
||||
public byte[] digest();
|
||||
|
||||
/**
|
||||
* Input some bytes, then finalize the current hash computation
|
||||
* and return the hash value in a newly-allocated array. The object
|
||||
* is resetted.
|
||||
*
|
||||
* @param inbuf the input data
|
||||
* @return the hash output
|
||||
*/
|
||||
public byte[] digest(byte[] inbuf);
|
||||
|
||||
/**
|
||||
* Finalize the current hash computation and store the hash value
|
||||
* in the provided output buffer. The {@code len} parameter
|
||||
* contains the maximum number of bytes that should be written;
|
||||
* no more bytes than the natural hash function output length will
|
||||
* be produced. If {@code len} is smaller than the natural
|
||||
* hash output length, the hash output is truncated to its first
|
||||
* {@code len} bytes. The object is resetted.
|
||||
*
|
||||
* @param outbuf the output buffer
|
||||
* @param off the output offset within {@code outbuf}
|
||||
* @param len the requested hash output length (in bytes)
|
||||
* @return the number of bytes actually written in {@code outbuf}
|
||||
*/
|
||||
public int digest(byte[] outbuf, int off, int len);
|
||||
|
||||
/**
|
||||
* Get the natural hash function output length (in bytes).
|
||||
*
|
||||
* @return the digest output length (in bytes)
|
||||
*/
|
||||
public int getDigestLength();
|
||||
|
||||
/**
|
||||
* Reset the object: this makes it suitable for a new hash
|
||||
* computation. The current computation, if any, is discarded.
|
||||
*/
|
||||
public void reset();
|
||||
|
||||
/**
|
||||
* Clone the current state. The returned object evolves independantly
|
||||
* of this object.
|
||||
*
|
||||
* @return the clone
|
||||
*/
|
||||
public Digest copy();
|
||||
|
||||
/**
|
||||
* <p>Return the "block length" for the hash function. This
|
||||
* value is naturally defined for iterated hash functions
|
||||
* (Merkle-Damgard). It is used in HMAC (that's what the
|
||||
* <a href="http://tools.ietf.org/html/rfc2104">HMAC specification</a>
|
||||
* names the "{@code B}" parameter).</p>
|
||||
*
|
||||
* <p>If the function is "block-less" then this function may
|
||||
* return {@code -n} where {@code n} is an integer such that the
|
||||
* block length for HMAC ("{@code B}") will be inferred from the
|
||||
* key length, by selecting the smallest multiple of {@code n}
|
||||
* which is no smaller than the key length. For instance, for
|
||||
* the Fugue-xxx hash functions, this function returns -4: the
|
||||
* virtual block length B is the HMAC key length, rounded up to
|
||||
* the next multiple of 4.</p>
|
||||
*
|
||||
* @return the internal block length (in bytes), or {@code -n}
|
||||
*/
|
||||
public int getBlockLength();
|
||||
|
||||
/**
|
||||
* <p>Get the display name for this function (e.g. {@code "SHA-1"}
|
||||
* for SHA-1).</p>
|
||||
*
|
||||
* @see Object
|
||||
*/
|
||||
public String toString();
|
||||
}
|
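Per the Javadoc above, a Digest is a stateful streaming hasher: data goes in through update(), digest() produces the output and resets the object, and copy() clones the running state so a partial hash can be taken without disturbing the ongoing computation. A short usage sketch against this interface (input values are illustrative):

    Digest d = new Keccak256();          // Keccak256 implements Digest via DigestEngine
    d.update(new byte[]{1, 2, 3});

    Digest snapshot = d.copy();          // independent clone of the running state
    byte[] partial = snapshot.digest();  // hash of {1,2,3}; 'd' is untouched

    d.update(new byte[]{4, 5});
    byte[] full = d.digest();            // hash of {1,2,3,4,5}; 'd' resets afterwards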
|
@ -0,0 +1,266 @@
|
|||
// $Id: DigestEngine.java 229 2010-06-16 20:22:27Z tp $
|
||||
|
||||
package org.ethereum.crypto.cryptohash;
|
||||
|
||||
/**
|
||||
* <p>This class is a template which can be used to implement hash
|
||||
* functions. It takes care of some of the API, and also provides an
|
||||
* internal data buffer whose length is equal to the hash function
|
||||
* internal block length.</p>
|
||||
*
|
||||
* <p>Classes which use this template MUST provide a working {@link
|
||||
* #getBlockLength} method even before initialization (alternatively,
|
||||
* they may define a custom {@link #getInternalBlockLength} which does
|
||||
* not call {@link #getBlockLength}. The {@link #getDigestLength} should
|
||||
* also be operational from the beginning, but it is acceptable that it
|
||||
* returns 0 while the {@link #doInit} method has not been called
|
||||
* yet.</p>
|
||||
*
|
||||
* <pre>
|
||||
* ==========================(LICENSE BEGIN)============================
|
||||
*
|
||||
* Copyright (c) 2007-2010 Projet RNRT SAPHIR
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*
|
||||
* ===========================(LICENSE END)=============================
|
||||
* </pre>
|
||||
*
|
||||
* @version $Revision: 229 $
|
||||
* @author Thomas Pornin <thomas.pornin@cryptolog.com>
|
||||
*/
|
||||
|
||||
public abstract class DigestEngine implements Digest {
|
||||
|
||||
/**
|
||||
* Reset the hash algorithm state.
|
||||
*/
|
||||
protected abstract void engineReset();
|
||||
|
||||
/**
|
||||
* Process one block of data.
|
||||
*
|
||||
* @param data the data block
|
||||
*/
|
||||
protected abstract void processBlock(byte[] data);
|
||||
|
||||
/**
|
||||
* Perform the final padding and store the result in the
|
||||
* provided buffer. This method shall call {@link #flush}
|
||||
* and then {@link #update} with the appropriate padding
|
||||
* data in order to get the full input data.
|
||||
*
|
||||
* @param buf the output buffer
|
||||
* @param off the output offset
|
||||
*/
|
||||
protected abstract void doPadding(byte[] buf, int off);
|
||||
|
||||
/**
|
||||
* This function is called at object creation time; the
|
||||
* implementation should use it to perform initialization tasks.
|
||||
* After this method is called, the implementation should be ready
|
||||
* to process data or meaningfully honour calls such as
|
||||
* {@link #getDigestLength}
|
||||
*/
|
||||
protected abstract void doInit();
|
||||
|
||||
private int digestLen, blockLen, inputLen;
|
||||
private byte[] inputBuf, outputBuf;
|
||||
private long blockCount;
|
||||
|
||||
/**
|
||||
* Instantiate the engine.
|
||||
*/
|
||||
public DigestEngine()
|
||||
{
|
||||
doInit();
|
||||
digestLen = getDigestLength();
|
||||
blockLen = getInternalBlockLength();
|
||||
inputBuf = new byte[blockLen];
|
||||
outputBuf = new byte[digestLen];
|
||||
inputLen = 0;
|
||||
blockCount = 0;
|
||||
}
|
||||
|
||||
private void adjustDigestLen()
|
||||
{
|
||||
if (digestLen == 0) {
|
||||
digestLen = getDigestLength();
|
||||
outputBuf = new byte[digestLen];
|
||||
}
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public byte[] digest()
|
||||
{
|
||||
adjustDigestLen();
|
||||
byte[] result = new byte[digestLen];
|
||||
digest(result, 0, digestLen);
|
||||
return result;
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public byte[] digest(byte[] input)
|
||||
{
|
||||
update(input, 0, input.length);
|
||||
return digest();
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public int digest(byte[] buf, int offset, int len)
|
||||
{
|
||||
adjustDigestLen();
|
||||
if (len >= digestLen) {
|
||||
doPadding(buf, offset);
|
||||
reset();
|
||||
return digestLen;
|
||||
} else {
|
||||
doPadding(outputBuf, 0);
|
||||
System.arraycopy(outputBuf, 0, buf, offset, len);
|
||||
reset();
|
||||
return len;
|
||||
}
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public void reset()
|
||||
{
|
||||
engineReset();
|
||||
inputLen = 0;
|
||||
blockCount = 0;
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public void update(byte input)
|
||||
{
|
||||
inputBuf[inputLen ++] = (byte)input;
|
||||
if (inputLen == blockLen) {
|
||||
processBlock(inputBuf);
|
||||
blockCount ++;
|
||||
inputLen = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public void update(byte[] input)
|
||||
{
|
||||
update(input, 0, input.length);
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public void update(byte[] input, int offset, int len)
|
||||
{
|
||||
while (len > 0) {
|
||||
int copyLen = blockLen - inputLen;
|
||||
if (copyLen > len)
|
||||
copyLen = len;
|
||||
System.arraycopy(input, offset, inputBuf, inputLen,
|
||||
copyLen);
|
||||
offset += copyLen;
|
||||
inputLen += copyLen;
|
||||
len -= copyLen;
|
||||
if (inputLen == blockLen) {
|
||||
processBlock(inputBuf);
|
||||
blockCount ++;
|
||||
inputLen = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the internal block length. This is the length (in
|
||||
* bytes) of the array which will be passed as parameter to
|
||||
* {@link #processBlock}. The default implementation of this
|
||||
* method calls {@link #getBlockLength} and returns the same
|
||||
* value. Overriding this method is useful when the advertised
|
||||
* block length (which is used, for instance, by HMAC) is
|
||||
* suboptimal with regards to internal buffering needs.
|
||||
*
|
||||
* @return the internal block length (in bytes)
|
||||
*/
|
||||
protected int getInternalBlockLength()
|
||||
{
|
||||
return getBlockLength();
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush internal buffers, so that less than a block of data
|
||||
* may at most be upheld.
|
||||
*
|
||||
* @return the number of bytes still unprocessed after the flush
|
||||
*/
|
||||
protected final int flush()
|
||||
{
|
||||
return inputLen;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a reference to an internal buffer with the same size
|
||||
* than a block. The contents of that buffer are defined only
|
||||
* immediately after a call to {@link #flush()}: if
|
||||
* {@link #flush()} return the value {@code n}, then the
|
||||
* first {@code n} bytes of the array returned by this method
|
||||
* are the {@code n} bytes of input data which are still
|
||||
* unprocessed. The values of the remaining bytes are
|
||||
* undefined and may be altered at will.
|
||||
*
|
||||
* @return a block-sized internal buffer
|
||||
*/
|
||||
protected final byte[] getBlockBuffer()
|
||||
{
|
||||
return inputBuf;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the "block count": this is the number of times the
|
||||
* {@link #processBlock} method has been invoked for the
|
||||
* current hash operation. That counter is incremented
|
||||
* <em>after</em> the call to {@link #processBlock}.
|
||||
*
|
||||
* @return the block count
|
||||
*/
|
||||
protected long getBlockCount()
|
||||
{
|
||||
return blockCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* This function copies the internal buffering state to some
|
||||
* other instance of a class extending {@code DigestEngine}.
|
||||
* It returns a reference to the copy. This method is intended
|
||||
* to be called by the implementation of the {@link #copy}
|
||||
* method.
|
||||
*
|
||||
* @param dest the copy
|
||||
* @return the value {@code dest}
|
||||
*/
|
||||
protected Digest copyState(DigestEngine dest)
|
||||
{
|
||||
dest.inputLen = inputLen;
|
||||
dest.blockCount = blockCount;
|
||||
System.arraycopy(inputBuf, 0, dest.inputBuf, 0,
|
||||
inputBuf.length);
|
||||
adjustDigestLen();
|
||||
dest.adjustDigestLen();
|
||||
System.arraycopy(outputBuf, 0, dest.outputBuf, 0,
|
||||
outputBuf.length);
|
||||
return dest;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
// $Id: Keccak256.java 189 2010-05-14 21:21:46Z tp $
|
||||
|
||||
package org.ethereum.crypto.cryptohash;
|
||||
|
||||
/**
|
||||
* <p>This class implements the Keccak-256 digest algorithm under the
|
||||
* {@link org.ethereum.crypto.cryptohash.Digest} API.</p>
|
||||
*
|
||||
* <pre>
|
||||
* ==========================(LICENSE BEGIN)============================
|
||||
*
|
||||
* Copyright (c) 2007-2010 Projet RNRT SAPHIR
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*
|
||||
* ===========================(LICENSE END)=============================
|
||||
* </pre>
|
||||
*
|
||||
* @version $Revision: 189 $
|
||||
* @author Thomas Pornin <thomas.pornin@cryptolog.com>
|
||||
*/
|
||||
|
||||
public class Keccak256 extends KeccakCore {
|
||||
|
||||
/**
|
||||
* Create the engine.
|
||||
*/
|
||||
public Keccak256()
|
||||
{
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public Digest copy()
|
||||
{
|
||||
return copyState(new Keccak256());
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public int getDigestLength()
|
||||
{
|
||||
return 32;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,585 @@
|
|||
// $Id: KeccakCore.java 258 2011-07-15 22:16:50Z tp $
|
||||
|
||||
package org.ethereum.crypto.cryptohash;
|
||||
|
||||
/**
|
||||
* This class implements the core operations for the Keccak digest
|
||||
* algorithm.
|
||||
*
|
||||
* <pre>
|
||||
* ==========================(LICENSE BEGIN)============================
|
||||
*
|
||||
* Copyright (c) 2007-2010 Projet RNRT SAPHIR
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining
|
||||
* a copy of this software and associated documentation files (the
|
||||
* "Software"), to deal in the Software without restriction, including
|
||||
* without limitation the rights to use, copy, modify, merge, publish,
|
||||
* distribute, sublicense, and/or sell copies of the Software, and to
|
||||
* permit persons to whom the Software is furnished to do so, subject to
|
||||
* the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be
|
||||
* included in all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
|
||||
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
*
|
||||
* ===========================(LICENSE END)=============================
|
||||
* </pre>
|
||||
*
|
||||
* @version $Revision: 258 $
|
||||
* @author Thomas Pornin <thomas.pornin@cryptolog.com>
|
||||
*/
|
||||
|
||||
abstract class KeccakCore extends DigestEngine {
|
||||
|
||||
KeccakCore()
|
||||
{
|
||||
}
|
||||
|
||||
private long[] A;
|
||||
private byte[] tmpOut;
|
||||
|
||||
private static final long[] RC = {
|
||||
0x0000000000000001L, 0x0000000000008082L,
|
||||
0x800000000000808AL, 0x8000000080008000L,
|
||||
0x000000000000808BL, 0x0000000080000001L,
|
||||
0x8000000080008081L, 0x8000000000008009L,
|
||||
0x000000000000008AL, 0x0000000000000088L,
|
||||
0x0000000080008009L, 0x000000008000000AL,
|
||||
0x000000008000808BL, 0x800000000000008BL,
|
||||
0x8000000000008089L, 0x8000000000008003L,
|
||||
0x8000000000008002L, 0x8000000000000080L,
|
||||
0x000000000000800AL, 0x800000008000000AL,
|
||||
0x8000000080008081L, 0x8000000000008080L,
|
||||
0x0000000080000001L, 0x8000000080008008L
|
||||
};
|
||||
|
||||
/**
|
||||
* Encode the 64-bit word {@code val} into the array
|
||||
* {@code buf} at offset {@code off}, in little-endian
|
||||
* convention (least significant byte first).
|
||||
*
|
||||
* @param val the value to encode
|
||||
* @param buf the destination buffer
|
||||
* @param off the destination offset
|
||||
*/
|
||||
private static final void encodeLELong(long val, byte[] buf, int off)
|
||||
{
|
||||
buf[off + 0] = (byte)val;
|
||||
buf[off + 1] = (byte)(val >>> 8);
|
||||
buf[off + 2] = (byte)(val >>> 16);
|
||||
buf[off + 3] = (byte)(val >>> 24);
|
||||
buf[off + 4] = (byte)(val >>> 32);
|
||||
buf[off + 5] = (byte)(val >>> 40);
|
||||
buf[off + 6] = (byte)(val >>> 48);
|
||||
buf[off + 7] = (byte)(val >>> 56);
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode a 64-bit little-endian word from the array {@code buf}
|
||||
* at offset {@code off}.
|
||||
*
|
||||
* @param buf the source buffer
|
||||
* @param off the source offset
|
||||
* @return the decoded value
|
||||
*/
|
||||
private static final long decodeLELong(byte[] buf, int off)
|
||||
{
|
||||
return (buf[off + 0] & 0xFFL)
|
||||
| ((buf[off + 1] & 0xFFL) << 8)
|
||||
| ((buf[off + 2] & 0xFFL) << 16)
|
||||
| ((buf[off + 3] & 0xFFL) << 24)
|
||||
| ((buf[off + 4] & 0xFFL) << 32)
|
||||
| ((buf[off + 5] & 0xFFL) << 40)
|
||||
| ((buf[off + 6] & 0xFFL) << 48)
|
||||
| ((buf[off + 7] & 0xFFL) << 56);
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.DigestEngine */
|
||||
protected void engineReset()
|
||||
{
|
||||
doReset();
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.DigestEngine */
|
||||
protected void processBlock(byte[] data)
|
||||
{
|
||||
/* Input block */
|
||||
for (int i = 0; i < data.length; i += 8)
|
||||
A[i >>> 3] ^= decodeLELong(data, i);
|
||||
|
||||
long t0, t1, t2, t3, t4;
|
||||
long tt0, tt1, tt2, tt3, tt4;
|
||||
long t, kt;
|
||||
long c0, c1, c2, c3, c4, bnn;
|
||||
|
||||
/*
|
||||
* Unrolling four rounds kills performance big time
|
||||
* on Intel x86 Core2, in both 32-bit and 64-bit modes
|
||||
* (less than 1 MB/s instead of 55 MB/s on x86-64).
|
||||
* Unrolling two rounds appears to be fine.
|
||||
*/
|
||||
for (int j = 0; j < 24; j += 2) {
|
||||
|
||||
tt0 = A[ 1] ^ A[ 6];
|
||||
tt1 = A[11] ^ A[16];
|
||||
tt0 ^= A[21] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 4] ^ A[ 9];
|
||||
tt3 = A[14] ^ A[19];
|
||||
tt0 ^= A[24];
|
||||
tt2 ^= tt3;
|
||||
t0 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[ 2] ^ A[ 7];
|
||||
tt1 = A[12] ^ A[17];
|
||||
tt0 ^= A[22] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 0] ^ A[ 5];
|
||||
tt3 = A[10] ^ A[15];
|
||||
tt0 ^= A[20];
|
||||
tt2 ^= tt3;
|
||||
t1 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[ 3] ^ A[ 8];
|
||||
tt1 = A[13] ^ A[18];
|
||||
tt0 ^= A[23] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 1] ^ A[ 6];
|
||||
tt3 = A[11] ^ A[16];
|
||||
tt0 ^= A[21];
|
||||
tt2 ^= tt3;
|
||||
t2 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[ 4] ^ A[ 9];
|
||||
tt1 = A[14] ^ A[19];
|
||||
tt0 ^= A[24] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 2] ^ A[ 7];
|
||||
tt3 = A[12] ^ A[17];
|
||||
tt0 ^= A[22];
|
||||
tt2 ^= tt3;
|
||||
t3 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[ 0] ^ A[ 5];
|
||||
tt1 = A[10] ^ A[15];
|
||||
tt0 ^= A[20] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 3] ^ A[ 8];
|
||||
tt3 = A[13] ^ A[18];
|
||||
tt0 ^= A[23];
|
||||
tt2 ^= tt3;
|
||||
t4 = tt0 ^ tt2;
|
||||
|
||||
A[ 0] = A[ 0] ^ t0;
|
||||
A[ 5] = A[ 5] ^ t0;
|
||||
A[10] = A[10] ^ t0;
|
||||
A[15] = A[15] ^ t0;
|
||||
A[20] = A[20] ^ t0;
|
||||
A[ 1] = A[ 1] ^ t1;
|
||||
A[ 6] = A[ 6] ^ t1;
|
||||
A[11] = A[11] ^ t1;
|
||||
A[16] = A[16] ^ t1;
|
||||
A[21] = A[21] ^ t1;
|
||||
A[ 2] = A[ 2] ^ t2;
|
||||
A[ 7] = A[ 7] ^ t2;
|
||||
A[12] = A[12] ^ t2;
|
||||
A[17] = A[17] ^ t2;
|
||||
A[22] = A[22] ^ t2;
|
||||
A[ 3] = A[ 3] ^ t3;
|
||||
A[ 8] = A[ 8] ^ t3;
|
||||
A[13] = A[13] ^ t3;
|
||||
A[18] = A[18] ^ t3;
|
||||
A[23] = A[23] ^ t3;
|
||||
A[ 4] = A[ 4] ^ t4;
|
||||
A[ 9] = A[ 9] ^ t4;
|
||||
A[14] = A[14] ^ t4;
|
||||
A[19] = A[19] ^ t4;
|
||||
A[24] = A[24] ^ t4;
|
||||
A[ 5] = (A[ 5] << 36) | (A[ 5] >>> (64 - 36));
|
||||
A[10] = (A[10] << 3) | (A[10] >>> (64 - 3));
|
||||
A[15] = (A[15] << 41) | (A[15] >>> (64 - 41));
|
||||
A[20] = (A[20] << 18) | (A[20] >>> (64 - 18));
|
||||
A[ 1] = (A[ 1] << 1) | (A[ 1] >>> (64 - 1));
|
||||
A[ 6] = (A[ 6] << 44) | (A[ 6] >>> (64 - 44));
|
||||
A[11] = (A[11] << 10) | (A[11] >>> (64 - 10));
|
||||
A[16] = (A[16] << 45) | (A[16] >>> (64 - 45));
|
||||
A[21] = (A[21] << 2) | (A[21] >>> (64 - 2));
|
||||
A[ 2] = (A[ 2] << 62) | (A[ 2] >>> (64 - 62));
|
||||
A[ 7] = (A[ 7] << 6) | (A[ 7] >>> (64 - 6));
|
||||
A[12] = (A[12] << 43) | (A[12] >>> (64 - 43));
|
||||
A[17] = (A[17] << 15) | (A[17] >>> (64 - 15));
|
||||
A[22] = (A[22] << 61) | (A[22] >>> (64 - 61));
|
||||
A[ 3] = (A[ 3] << 28) | (A[ 3] >>> (64 - 28));
|
||||
A[ 8] = (A[ 8] << 55) | (A[ 8] >>> (64 - 55));
|
||||
A[13] = (A[13] << 25) | (A[13] >>> (64 - 25));
|
||||
A[18] = (A[18] << 21) | (A[18] >>> (64 - 21));
|
||||
A[23] = (A[23] << 56) | (A[23] >>> (64 - 56));
|
||||
A[ 4] = (A[ 4] << 27) | (A[ 4] >>> (64 - 27));
|
||||
A[ 9] = (A[ 9] << 20) | (A[ 9] >>> (64 - 20));
|
||||
A[14] = (A[14] << 39) | (A[14] >>> (64 - 39));
|
||||
A[19] = (A[19] << 8) | (A[19] >>> (64 - 8));
|
||||
A[24] = (A[24] << 14) | (A[24] >>> (64 - 14));
|
||||
bnn = ~A[12];
|
||||
kt = A[ 6] | A[12];
|
||||
c0 = A[ 0] ^ kt;
|
||||
kt = bnn | A[18];
|
||||
c1 = A[ 6] ^ kt;
|
||||
kt = A[18] & A[24];
|
||||
c2 = A[12] ^ kt;
|
||||
kt = A[24] | A[ 0];
|
||||
c3 = A[18] ^ kt;
|
||||
kt = A[ 0] & A[ 6];
|
||||
c4 = A[24] ^ kt;
|
||||
A[ 0] = c0;
|
||||
A[ 6] = c1;
|
||||
A[12] = c2;
|
||||
A[18] = c3;
|
||||
A[24] = c4;
|
||||
bnn = ~A[22];
|
||||
kt = A[ 9] | A[10];
|
||||
c0 = A[ 3] ^ kt;
|
||||
kt = A[10] & A[16];
|
||||
c1 = A[ 9] ^ kt;
|
||||
kt = A[16] | bnn;
|
||||
c2 = A[10] ^ kt;
|
||||
kt = A[22] | A[ 3];
|
||||
c3 = A[16] ^ kt;
|
||||
kt = A[ 3] & A[ 9];
|
||||
c4 = A[22] ^ kt;
|
||||
A[ 3] = c0;
|
||||
A[ 9] = c1;
|
||||
A[10] = c2;
|
||||
A[16] = c3;
|
||||
A[22] = c4;
|
||||
bnn = ~A[19];
|
||||
kt = A[ 7] | A[13];
|
||||
c0 = A[ 1] ^ kt;
|
||||
kt = A[13] & A[19];
|
||||
c1 = A[ 7] ^ kt;
|
||||
kt = bnn & A[20];
|
||||
c2 = A[13] ^ kt;
|
||||
kt = A[20] | A[ 1];
|
||||
c3 = bnn ^ kt;
|
||||
kt = A[ 1] & A[ 7];
|
||||
c4 = A[20] ^ kt;
|
||||
A[ 1] = c0;
|
||||
A[ 7] = c1;
|
||||
A[13] = c2;
|
||||
A[19] = c3;
|
||||
A[20] = c4;
|
||||
bnn = ~A[17];
|
||||
kt = A[ 5] & A[11];
|
||||
c0 = A[ 4] ^ kt;
|
||||
kt = A[11] | A[17];
|
||||
c1 = A[ 5] ^ kt;
|
||||
kt = bnn | A[23];
|
||||
c2 = A[11] ^ kt;
|
||||
kt = A[23] & A[ 4];
|
||||
c3 = bnn ^ kt;
|
||||
kt = A[ 4] | A[ 5];
|
||||
c4 = A[23] ^ kt;
|
||||
A[ 4] = c0;
|
||||
A[ 5] = c1;
|
||||
A[11] = c2;
|
||||
A[17] = c3;
|
||||
A[23] = c4;
|
||||
bnn = ~A[ 8];
|
||||
kt = bnn & A[14];
|
||||
c0 = A[ 2] ^ kt;
|
||||
kt = A[14] | A[15];
|
||||
c1 = bnn ^ kt;
|
||||
kt = A[15] & A[21];
|
||||
c2 = A[14] ^ kt;
|
||||
kt = A[21] | A[ 2];
|
||||
c3 = A[15] ^ kt;
|
||||
kt = A[ 2] & A[ 8];
|
||||
c4 = A[21] ^ kt;
|
||||
A[ 2] = c0;
|
||||
A[ 8] = c1;
|
||||
A[14] = c2;
|
||||
A[15] = c3;
|
||||
A[21] = c4;
|
||||
A[ 0] = A[ 0] ^ RC[j + 0];
|
||||
|
||||
tt0 = A[ 6] ^ A[ 9];
|
||||
tt1 = A[ 7] ^ A[ 5];
|
||||
tt0 ^= A[ 8] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[24] ^ A[22];
|
||||
tt3 = A[20] ^ A[23];
|
||||
tt0 ^= A[21];
|
||||
tt2 ^= tt3;
|
||||
t0 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[12] ^ A[10];
|
||||
tt1 = A[13] ^ A[11];
|
||||
tt0 ^= A[14] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 0] ^ A[ 3];
|
||||
tt3 = A[ 1] ^ A[ 4];
|
||||
tt0 ^= A[ 2];
|
||||
tt2 ^= tt3;
|
||||
t1 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[18] ^ A[16];
|
||||
tt1 = A[19] ^ A[17];
|
||||
tt0 ^= A[15] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[ 6] ^ A[ 9];
|
||||
tt3 = A[ 7] ^ A[ 5];
|
||||
tt0 ^= A[ 8];
|
||||
tt2 ^= tt3;
|
||||
t2 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[24] ^ A[22];
|
||||
tt1 = A[20] ^ A[23];
|
||||
tt0 ^= A[21] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[12] ^ A[10];
|
||||
tt3 = A[13] ^ A[11];
|
||||
tt0 ^= A[14];
|
||||
tt2 ^= tt3;
|
||||
t3 = tt0 ^ tt2;
|
||||
|
||||
tt0 = A[ 0] ^ A[ 3];
|
||||
tt1 = A[ 1] ^ A[ 4];
|
||||
tt0 ^= A[ 2] ^ tt1;
|
||||
tt0 = (tt0 << 1) | (tt0 >>> 63);
|
||||
tt2 = A[18] ^ A[16];
|
||||
tt3 = A[19] ^ A[17];
|
||||
tt0 ^= A[15];
|
||||
tt2 ^= tt3;
|
||||
t4 = tt0 ^ tt2;
|
||||
|
||||
A[ 0] = A[ 0] ^ t0;
|
||||
A[ 3] = A[ 3] ^ t0;
|
||||
A[ 1] = A[ 1] ^ t0;
|
||||
A[ 4] = A[ 4] ^ t0;
|
||||
A[ 2] = A[ 2] ^ t0;
|
||||
A[ 6] = A[ 6] ^ t1;
|
||||
A[ 9] = A[ 9] ^ t1;
|
||||
A[ 7] = A[ 7] ^ t1;
|
||||
A[ 5] = A[ 5] ^ t1;
|
||||
A[ 8] = A[ 8] ^ t1;
|
||||
A[12] = A[12] ^ t2;
|
||||
A[10] = A[10] ^ t2;
|
||||
A[13] = A[13] ^ t2;
|
||||
A[11] = A[11] ^ t2;
|
||||
A[14] = A[14] ^ t2;
|
||||
A[18] = A[18] ^ t3;
|
||||
A[16] = A[16] ^ t3;
|
||||
A[19] = A[19] ^ t3;
|
||||
A[17] = A[17] ^ t3;
|
||||
A[15] = A[15] ^ t3;
|
||||
A[24] = A[24] ^ t4;
|
||||
A[22] = A[22] ^ t4;
|
||||
A[20] = A[20] ^ t4;
|
||||
A[23] = A[23] ^ t4;
|
||||
A[21] = A[21] ^ t4;
|
||||
A[ 3] = (A[ 3] << 36) | (A[ 3] >>> (64 - 36));
|
||||
A[ 1] = (A[ 1] << 3) | (A[ 1] >>> (64 - 3));
|
||||
A[ 4] = (A[ 4] << 41) | (A[ 4] >>> (64 - 41));
|
||||
A[ 2] = (A[ 2] << 18) | (A[ 2] >>> (64 - 18));
|
||||
A[ 6] = (A[ 6] << 1) | (A[ 6] >>> (64 - 1));
|
||||
A[ 9] = (A[ 9] << 44) | (A[ 9] >>> (64 - 44));
|
||||
A[ 7] = (A[ 7] << 10) | (A[ 7] >>> (64 - 10));
|
||||
A[ 5] = (A[ 5] << 45) | (A[ 5] >>> (64 - 45));
|
||||
A[ 8] = (A[ 8] << 2) | (A[ 8] >>> (64 - 2));
|
||||
A[12] = (A[12] << 62) | (A[12] >>> (64 - 62));
|
||||
A[10] = (A[10] << 6) | (A[10] >>> (64 - 6));
|
||||
A[13] = (A[13] << 43) | (A[13] >>> (64 - 43));
|
||||
A[11] = (A[11] << 15) | (A[11] >>> (64 - 15));
|
||||
A[14] = (A[14] << 61) | (A[14] >>> (64 - 61));
|
||||
A[18] = (A[18] << 28) | (A[18] >>> (64 - 28));
|
||||
A[16] = (A[16] << 55) | (A[16] >>> (64 - 55));
|
||||
A[19] = (A[19] << 25) | (A[19] >>> (64 - 25));
|
||||
A[17] = (A[17] << 21) | (A[17] >>> (64 - 21));
|
||||
A[15] = (A[15] << 56) | (A[15] >>> (64 - 56));
|
||||
A[24] = (A[24] << 27) | (A[24] >>> (64 - 27));
|
||||
A[22] = (A[22] << 20) | (A[22] >>> (64 - 20));
|
||||
A[20] = (A[20] << 39) | (A[20] >>> (64 - 39));
|
||||
A[23] = (A[23] << 8) | (A[23] >>> (64 - 8));
|
||||
A[21] = (A[21] << 14) | (A[21] >>> (64 - 14));
|
||||
bnn = ~A[13];
|
||||
kt = A[ 9] | A[13];
|
||||
c0 = A[ 0] ^ kt;
|
||||
kt = bnn | A[17];
|
||||
c1 = A[ 9] ^ kt;
|
||||
kt = A[17] & A[21];
|
||||
c2 = A[13] ^ kt;
|
||||
kt = A[21] | A[ 0];
|
||||
c3 = A[17] ^ kt;
|
||||
kt = A[ 0] & A[ 9];
|
||||
c4 = A[21] ^ kt;
|
||||
A[ 0] = c0;
|
||||
A[ 9] = c1;
|
||||
A[13] = c2;
|
||||
A[17] = c3;
|
||||
A[21] = c4;
|
||||
bnn = ~A[14];
|
||||
kt = A[22] | A[ 1];
|
||||
c0 = A[18] ^ kt;
|
||||
kt = A[ 1] & A[ 5];
|
||||
c1 = A[22] ^ kt;
|
||||
kt = A[ 5] | bnn;
|
||||
c2 = A[ 1] ^ kt;
|
||||
kt = A[14] | A[18];
|
||||
c3 = A[ 5] ^ kt;
|
||||
kt = A[18] & A[22];
|
||||
c4 = A[14] ^ kt;
|
||||
A[18] = c0;
|
||||
A[22] = c1;
|
||||
A[ 1] = c2;
|
||||
A[ 5] = c3;
|
||||
A[14] = c4;
|
||||
bnn = ~A[23];
|
||||
kt = A[10] | A[19];
|
||||
c0 = A[ 6] ^ kt;
|
||||
kt = A[19] & A[23];
|
||||
c1 = A[10] ^ kt;
|
||||
kt = bnn & A[ 2];
|
||||
c2 = A[19] ^ kt;
|
||||
kt = A[ 2] | A[ 6];
|
||||
c3 = bnn ^ kt;
|
||||
kt = A[ 6] & A[10];
|
||||
c4 = A[ 2] ^ kt;
|
||||
A[ 6] = c0;
|
||||
A[10] = c1;
|
||||
A[19] = c2;
|
||||
A[23] = c3;
|
||||
A[ 2] = c4;
|
||||
bnn = ~A[11];
|
||||
kt = A[ 3] & A[ 7];
|
||||
c0 = A[24] ^ kt;
|
||||
kt = A[ 7] | A[11];
|
||||
c1 = A[ 3] ^ kt;
|
||||
kt = bnn | A[15];
|
||||
c2 = A[ 7] ^ kt;
|
||||
kt = A[15] & A[24];
|
||||
c3 = bnn ^ kt;
|
||||
kt = A[24] | A[ 3];
|
||||
c4 = A[15] ^ kt;
|
||||
A[24] = c0;
|
||||
A[ 3] = c1;
|
||||
A[ 7] = c2;
|
||||
A[11] = c3;
|
||||
A[15] = c4;
|
||||
bnn = ~A[16];
|
||||
kt = bnn & A[20];
|
||||
c0 = A[12] ^ kt;
|
||||
kt = A[20] | A[ 4];
|
||||
c1 = bnn ^ kt;
|
||||
kt = A[ 4] & A[ 8];
|
||||
c2 = A[20] ^ kt;
|
||||
kt = A[ 8] | A[12];
|
||||
c3 = A[ 4] ^ kt;
|
||||
kt = A[12] & A[16];
|
||||
c4 = A[ 8] ^ kt;
|
||||
A[12] = c0;
|
||||
A[16] = c1;
|
||||
A[20] = c2;
|
||||
A[ 4] = c3;
|
||||
A[ 8] = c4;
|
||||
A[ 0] = A[ 0] ^ RC[j + 1];
|
||||
t = A[ 5];
|
||||
A[ 5] = A[18];
|
||||
A[18] = A[11];
|
||||
A[11] = A[10];
|
||||
A[10] = A[ 6];
|
||||
A[ 6] = A[22];
|
||||
A[22] = A[20];
|
||||
A[20] = A[12];
|
||||
A[12] = A[19];
|
||||
A[19] = A[15];
|
||||
A[15] = A[24];
|
||||
A[24] = A[ 8];
|
||||
A[ 8] = t;
|
||||
t = A[ 1];
|
||||
A[ 1] = A[ 9];
|
||||
A[ 9] = A[14];
|
||||
A[14] = A[ 2];
|
||||
A[ 2] = A[13];
|
||||
A[13] = A[23];
|
||||
A[23] = A[ 4];
|
||||
A[ 4] = A[21];
|
||||
A[21] = A[16];
|
||||
A[16] = A[ 3];
|
||||
A[ 3] = A[17];
|
||||
A[17] = A[ 7];
|
||||
A[ 7] = t;
|
||||
}
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.DigestEngine */
|
||||
protected void doPadding(byte[] out, int off)
|
||||
{
|
||||
int ptr = flush();
|
||||
byte[] buf = getBlockBuffer();
|
||||
if ((ptr + 1) == buf.length) {
|
||||
buf[ptr] = (byte)0x81;
|
||||
} else {
|
||||
buf[ptr] = (byte)0x01;
|
||||
for (int i = ptr + 1; i < (buf.length - 1); i ++)
|
||||
buf[i] = 0;
|
||||
buf[buf.length - 1] = (byte)0x80;
|
||||
}
|
||||
processBlock(buf);
|
||||
A[ 1] = ~A[ 1];
|
||||
A[ 2] = ~A[ 2];
|
||||
A[ 8] = ~A[ 8];
|
||||
A[12] = ~A[12];
|
||||
A[17] = ~A[17];
|
||||
A[20] = ~A[20];
|
||||
int dlen = getDigestLength();
|
||||
for (int i = 0; i < dlen; i += 8)
|
||||
encodeLELong(A[i >>> 3], tmpOut, i);
|
||||
System.arraycopy(tmpOut, 0, out, off, dlen);
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.DigestEngine */
|
||||
protected void doInit()
|
||||
{
|
||||
A = new long[25];
|
||||
tmpOut = new byte[(getDigestLength() + 7) & ~7];
|
||||
doReset();
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public int getBlockLength()
|
||||
{
|
||||
return 200 - 2 * getDigestLength();
|
||||
}
|
||||
|
||||
private final void doReset()
|
||||
{
|
||||
for (int i = 0; i < 25; i ++)
|
||||
A[i] = 0;
|
||||
A[ 1] = 0xFFFFFFFFFFFFFFFFL;
|
||||
A[ 2] = 0xFFFFFFFFFFFFFFFFL;
|
||||
A[ 8] = 0xFFFFFFFFFFFFFFFFL;
|
||||
A[12] = 0xFFFFFFFFFFFFFFFFL;
|
||||
A[17] = 0xFFFFFFFFFFFFFFFFL;
|
||||
A[20] = 0xFFFFFFFFFFFFFFFFL;
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.DigestEngine */
|
||||
protected Digest copyState(KeccakCore dst)
|
||||
{
|
||||
System.arraycopy(A, 0, dst.A, 0, 25);
|
||||
return super.copyState(dst);
|
||||
}
|
||||
|
||||
/** @see org.ethereum.crypto.cryptohash.Digest */
|
||||
public String toString()
|
||||
{
|
||||
return "Keccak-" + (getDigestLength() << 3);
|
||||
}
|
||||
}
|
|
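KeccakCore.getBlockLength() returns 200 - 2 * digest length, i.e. the sponge rate in bytes: the Keccak state is 1600 bits (200 bytes) and the capacity is twice the output size. For the Keccak-256 engine added here the numbers work out as follows:

    int digestLen = 32;                   // Keccak256.getDigestLength()
    int blockLen  = 200 - 2 * digestLen;  // 136 bytes = 1088-bit rate
    int capacity  = 200 - blockLen;       // 64 bytes  = 512-bit capacity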
@@ -3,6 +3,7 @@ package org.ethereum.db;
import org.ethereum.core.Block;
import org.ethereum.core.TransactionReceipt;

import org.hibernate.SessionFactory;

import java.math.BigInteger;

@@ -39,4 +40,10 @@ public interface BlockStore {
    void reset();

    TransactionReceipt getTransactionReceiptByHash(byte[] hash);
}

    public void flush();
    public void load();
    //public void setSessionFactory(SessionFactory sessionFactory);

}

@@ -3,6 +3,7 @@ package org.ethereum.db;
import org.ethereum.core.Block;
import org.ethereum.core.TransactionReceipt;
import org.ethereum.crypto.HashUtil;
import org.hibernate.SessionFactory;

import java.math.BigInteger;

@@ -75,4 +76,17 @@ public class BlockStoreDummy implements BlockStore {
    public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
        return null;
    }

    @Override
    public void flush() {
    }

    @Override
    public void load() {
    }

    //@Override
    public void setSessionFactory(SessionFactory sessionFactory) {
    }

}

@@ -3,11 +3,9 @@ package org.ethereum.db;
import org.ethereum.core.Block;
import org.ethereum.core.TransactionReceipt;
import org.ethereum.util.ByteUtil;

import org.hibernate.SessionFactory;

import java.math.BigInteger;

import java.util.ArrayList;
import java.util.List;

@@ -173,4 +171,19 @@ public class BlockStoreImpl implements BlockStore {
            return new TransactionReceipt(vo.rlp);

        }
    }

    @Override
    public void flush() {
    }

    @Override
    public void load() {
    }

    /*
    @Override
    public void setSessionFactory(SessionFactory sessionFactory) {
    }
    */

}

@@ -2,10 +2,7 @@ package org.ethereum.db;

import java.math.BigInteger;

import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.Table;
import javax.persistence.*;

/**
 * @author Roman Mandeleil

@@ -21,6 +18,7 @@ public class BlockVO {
    Long number;

    @Lob
    @Column(length=102400)
    byte[] rlp;

    BigInteger cumulativeDifficulty;

@@ -1,12 +1,16 @@
package org.ethereum.db;

import org.ethereum.trie.SecureTrie;
import org.ethereum.trie.Trie;
import org.ethereum.util.*;
import org.ethereum.vm.DataWord;
import org.spongycastle.util.encoders.Hex;

import java.util.*;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;

/**
 * @author Roman Mandeleil

@@ -16,33 +20,23 @@ public class ContractDetailsCacheImpl implements ContractDetails {

    private Map<DataWord, DataWord> storage = new HashMap<>();

    ContractDetails origContract = new ContractDetailsImpl();

    private byte[] code = ByteUtil.EMPTY_BYTE_ARRAY;
    private byte[] code = EMPTY_BYTE_ARRAY;

    private boolean dirty = false;
    private boolean deleted = false;

    public ContractDetailsCacheImpl() {
    }

    public ContractDetailsCacheImpl(byte[] rlpCode) {
        decode(rlpCode);
    }

    public ContractDetailsCacheImpl(Map<DataWord, DataWord> storage, byte[] code) {
    public ContractDetailsCacheImpl(ContractDetails origContract) {
        this.origContract = origContract;
        this.code = origContract != null ? origContract.getCode() : EMPTY_BYTE_ARRAY;
    }

    @Override
    public void put(DataWord key, DataWord value) {

        if (value.equals(DataWord.ZERO)) {
            storage.remove(key);
        } else {

            storage.put(key, value);
        }

        storage.put(key, value);
        this.setDirty(true);
    }

@@ -50,8 +44,17 @@ public class ContractDetailsCacheImpl implements ContractDetails {
    public DataWord get(DataWord key) {

        DataWord value = storage.get(key);
        if (value != null) value = value.clone();
        return value;
        if (value != null)
            value = value.clone();
        else{
            if (origContract == null) return null;
            value = origContract.get(key);
        }

        if (value == null || value.isZero())
            return null;
        else
            return value;
    }

    @Override

@@ -99,7 +102,7 @@ public class ContractDetailsCacheImpl implements ContractDetails {
            storage.put(new DataWord(key.getRLPData()), new DataWord(value.getRLPData()));
        }

        this.code = (code.getRLPData() == null) ? ByteUtil.EMPTY_BYTE_ARRAY : code.getRLPData();
        this.code = (code.getRLPData() == null) ? EMPTY_BYTE_ARRAY : code.getRLPData();
    }

    @Override

@@ -158,9 +161,10 @@ public class ContractDetailsCacheImpl implements ContractDetails {
        for (int i = 0; i < storageKeys.size(); ++i){

            DataWord key = storageKeys.get(i);
            DataWord value = storageKeys.get(i);
            DataWord value = storageValues.get(i);

            storage.put(key, value);
            if (value.isZero())
                storage.put(key, null);
        }

    }

@@ -174,7 +178,7 @@ public class ContractDetailsCacheImpl implements ContractDetails {
    @Override
    public ContractDetails clone() {

        ContractDetailsCacheImpl contractDetails = new ContractDetailsCacheImpl();
        ContractDetailsCacheImpl contractDetails = new ContractDetailsCacheImpl(origContract);

        Object storageClone = ((HashMap<DataWord, DataWord>)storage).clone();

@@ -192,5 +196,16 @@ public class ContractDetailsCacheImpl implements ContractDetails {
        return ret;
    }

    public void commit(){

        if (origContract == null) return;

        for (DataWord key : storage.keySet()) {
            origContract.put(key, storage.get(key));
        }

        origContract.setCode(code);
    }

}

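ContractDetailsCacheImpl now acts as a write-back cache over an origContract: reads check the local map first, fall back to the wrapped contract details, and treat a zero word as absent, while commit() pushes local writes back. A compact sketch of that read path, using generic Map types in place of the real classes:

    // Sketch of the cache-with-fallback read added to get(): local layer first,
    // then the original contract details; a zero value means "not set".
    static <K, V> V cachedGet(Map<K, V> local, Map<K, V> original, K key, V zero) {
        V value = local.get(key);
        if (value == null && original != null) {
            value = original.get(key);
        }
        return (value == null || value.equals(zero)) ? null : value;
    }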
@@ -1,21 +1,19 @@
package org.ethereum.db;

import org.ethereum.datasource.HashMapDB;
import org.ethereum.trie.SecureTrie;
import org.ethereum.trie.Trie;
import org.ethereum.util.ByteUtil;
import org.ethereum.util.RLP;
import org.ethereum.util.RLPElement;
import org.ethereum.util.RLPItem;
import org.ethereum.util.RLPList;
import org.ethereum.vm.DataWord;

import org.spongycastle.util.Arrays;
import org.spongycastle.util.encoders.Hex;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.*;

import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
import static org.ethereum.util.ByteUtil.wrap;

/**
 * @author Roman Mandeleil

@@ -25,15 +23,13 @@ public class ContractDetailsImpl implements ContractDetails {

    private byte[] rlpEncoded;

    private List<DataWord> storageKeys = new ArrayList<>();
    private List<DataWord> storageValues = new ArrayList<>();

    private byte[] code = ByteUtil.EMPTY_BYTE_ARRAY;
    private byte[] code = EMPTY_BYTE_ARRAY;

    private boolean dirty = false;
    private boolean deleted = false;

    private Trie storageTrie = new SecureTrie(null);
    private SecureTrie storageTrie = new SecureTrie(new HashMapDB());
    private Set<ByteArrayWrapper> keys = new HashSet<>(); // FIXME: sync to the disk

    public ContractDetailsImpl() {
    }

@@ -42,30 +38,22 @@ public class ContractDetailsImpl implements ContractDetails {
        decode(rlpCode);
    }

    public ContractDetailsImpl(Map<DataWord, DataWord> storage, byte[] code) {
    public ContractDetailsImpl(SecureTrie storageTrie, byte[] code) {
        this.storageTrie = storageTrie;
        this.code = code;
    }

    @Override
    public void put(DataWord key, DataWord value) {

        if (value.equals(DataWord.ZERO)) {
        if (value.equals(DataWord.ZERO)){

            storageTrie.delete(key.getData());
            int index = storageKeys.indexOf(key);
            if (index != -1) {
                storageKeys.remove(index);
                storageValues.remove(index);
            }
        } else {
            keys.remove(wrap(key.getData()));
        } else{

            storageTrie.update(key.getData(), RLP.encodeElement(value.getNoLeadZeroesData()));
            int index = storageKeys.indexOf(key);
            if (index != -1) {
                storageKeys.remove(index);
                storageValues.remove(index);
            }
            storageKeys.add(key);
            storageValues.add(value);
            keys.add(wrap(key.getData()));
        }

        this.setDirty(true);

@@ -75,15 +63,14 @@ public class ContractDetailsImpl implements ContractDetails {
    @Override
    public DataWord get(DataWord key) {

        if (storageKeys.size() == 0)
            return null;
        byte[] data = storageTrie.get(key.getData());

        int foundIndex = storageKeys.indexOf(key);
        if (foundIndex != -1) {
            DataWord value = storageValues.get(foundIndex);
            return value.clone();
        } else
        if (data.length == 0)
            return null;
        else{
            byte[] dataDecoded = RLP.decode2(data).get(0).getRLPData();
            return new DataWord(dataDecoded);
        }
    }

    @Override

@@ -100,13 +87,6 @@ public class ContractDetailsImpl implements ContractDetails {

    @Override
    public byte[] getStorageHash() {

        storageTrie = new SecureTrie(null);
        // calc the trie for root hash
        for (int i = 0; i < storageKeys.size(); ++i) {
            storageTrie.update(storageKeys.get(i).getData(), RLP
                    .encodeElement(storageValues.get(i).getNoLeadZeroesData()));
        }
        return storageTrie.getRootHash();
    }

@@ -115,35 +95,36 @@ public class ContractDetailsImpl implements ContractDetails {
        RLPList data = RLP.decode2(rlpCode);
        RLPList rlpList = (RLPList) data.get(0);

        RLPList keys = (RLPList) rlpList.get(0);
        RLPList values = (RLPList) rlpList.get(1);
        RLPElement code = rlpList.get(2);
        RLPItem storage = (RLPItem) rlpList.get(0);
        RLPElement code = rlpList.get(1);
        RLPList keys = (RLPList) rlpList.get(2);

        if (keys.size() > 0) {
            storageKeys = new ArrayList<>();
            storageValues = new ArrayList<>();
        this.storageTrie.deserialize(storage.getRLPData());
        this.code = (code.getRLPData() == null) ? EMPTY_BYTE_ARRAY : code.getRLPData();

        for (int i = 0; i < keys.size(); ++i){
            byte[] key = keys.get(i).getRLPData();
            this.keys.add(wrap(key));
        }

        for (Object key : keys) {
            RLPItem rlpItem = (RLPItem) key;
            storageKeys.add(new DataWord(rlpItem.getRLPData()));
        }

        for (Object value : values) {
            RLPItem rlpItem = (RLPItem) value;
            storageValues.add(new DataWord(rlpItem.getRLPData()));
        }

        for (int i = 0; i < keys.size(); ++i) {
|
||||
DataWord key = storageKeys.get(i);
|
||||
DataWord value = storageValues.get(i);
|
||||
storageTrie.update(key.getData(), RLP.encodeElement(value.getNoLeadZeroesData()));
|
||||
}
|
||||
|
||||
this.code = (code.getRLPData() == null) ? ByteUtil.EMPTY_BYTE_ARRAY : code.getRLPData();
|
||||
this.rlpEncoded = rlpCode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] getEncoded() {
|
||||
|
||||
if (rlpEncoded == null) {
|
||||
|
||||
byte[] storage = RLP.encodeElement(storageTrie.serialize());
|
||||
byte[] rlpCode = RLP.encodeElement(code);
|
||||
byte[] rlpKeys = RLP.encodeSet(keys);
|
||||
|
||||
this.rlpEncoded = RLP.encodeList(storage, rlpCode, rlpKeys);
|
||||
}
|
||||
return rlpEncoded;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void setDirty(boolean dirty) {
|
||||
this.dirty = dirty;
|
||||
|
@ -165,76 +146,51 @@ public class ContractDetailsImpl implements ContractDetails {
|
|||
}
|
||||
|
||||
|
||||
@Override
|
||||
public byte[] getEncoded() {
|
||||
|
||||
if (rlpEncoded == null) {
|
||||
|
||||
int size = storageKeys == null ? 0 : storageKeys.size();
|
||||
|
||||
byte[][] keys = new byte[size][];
|
||||
byte[][] values = new byte[size][];
|
||||
|
||||
for (int i = 0; i < size; ++i) {
|
||||
DataWord key = storageKeys.get(i);
|
||||
keys[i] = RLP.encodeElement(key.getData());
|
||||
}
|
||||
for (int i = 0; i < size; ++i) {
|
||||
DataWord value = storageValues.get(i);
|
||||
values[i] = RLP.encodeElement(value.getNoLeadZeroesData());
|
||||
}
|
||||
|
||||
byte[] rlpKeysList = RLP.encodeList(keys);
|
||||
byte[] rlpValuesList = RLP.encodeList(values);
|
||||
byte[] rlpCode = RLP.encodeElement(code);
|
||||
|
||||
this.rlpEncoded = RLP.encodeList(rlpKeysList, rlpValuesList, rlpCode);
|
||||
}
|
||||
return rlpEncoded;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<DataWord, DataWord> getStorage() {
|
||||
|
||||
Map<DataWord, DataWord> storage = new HashMap<>();
|
||||
for (int i = 0; storageKeys != null && i < storageKeys.size(); ++i) {
|
||||
storage.put(storageKeys.get(i), storageValues.get(i));
|
||||
|
||||
for (ByteArrayWrapper keyBytes : keys){
|
||||
|
||||
DataWord key = new DataWord(keyBytes);
|
||||
DataWord value = get(key);
|
||||
storage.put(key, value);
|
||||
}
|
||||
return Collections.unmodifiableMap(storage);
|
||||
|
||||
return storage;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setStorage(List<DataWord> storageKeys, List<DataWord> storageValues) {
|
||||
this.storageKeys = storageKeys;
|
||||
this.storageValues = storageValues;
|
||||
|
||||
for (int i = 0; i < storageKeys.size(); ++i)
|
||||
put(storageKeys.get(i), storageValues.get(i));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setStorage(Map<DataWord, DataWord> storage) {
|
||||
|
||||
List<DataWord> keys = new ArrayList<>();
|
||||
keys.addAll(storage.keySet());
|
||||
|
||||
List<DataWord> values = new ArrayList<>();
|
||||
for (DataWord key : keys) {
|
||||
for (DataWord key : storage.keySet()) {
|
||||
|
||||
DataWord value = storage.get(key);
|
||||
values.add(value);
|
||||
put(key, value);
|
||||
}
|
||||
|
||||
this.storageKeys = keys;
|
||||
this.storageValues = values;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ContractDetails clone() {
|
||||
|
||||
ContractDetailsImpl contractDetails = new ContractDetailsImpl();
|
||||
// FIXME: clone is not working now !!!
|
||||
// FIXME: should be fixed
|
||||
|
||||
contractDetails.setCode(this.getCode());
|
||||
contractDetails.setStorage(new ArrayList<>(this.storageKeys),
|
||||
new ArrayList<>(this.storageValues));
|
||||
return contractDetails;
|
||||
byte[] cloneCode = Arrays.clone(this.getCode());
|
||||
|
||||
storageTrie.getRoot();
|
||||
|
||||
return new ContractDetailsImpl(null, cloneCode);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
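The reworked put()/get() above stores each 32-byte storage word in the trie with its leading zero bytes stripped (getNoLeadZeroesData) and pads it back out on read. A self-contained illustration of that round trip, without the project's RLP or trie classes:

import java.util.Arrays;

// Illustrative only: strip leading zero bytes before storing a 32-byte word, pad them back on read.
public class WordCodecSketch {

    static byte[] stripLeadingZeroes(byte[] word) {
        int i = 0;
        while (i < word.length - 1 && word[i] == 0) i++;
        return Arrays.copyOfRange(word, i, word.length);
    }

    static byte[] padTo32(byte[] data) {
        byte[] word = new byte[32];
        System.arraycopy(data, 0, word, 32 - data.length, data.length);
        return word;
    }

    public static void main(String[] args) {
        byte[] word = new byte[32];
        word[31] = 0x2a;                                   // the value 42 as a 32-byte word
        byte[] stored = stripLeadingZeroes(word);          // 1 byte kept instead of 32
        byte[] roundTrip = padTo32(stored);
        System.out.println(Arrays.equals(word, roundTrip)); // true
    }
}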
@ -0,0 +1,90 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.ethereum.util.ByteUtil.wrap;
|
||||
|
||||
public class DetailsDataStore {
|
||||
|
||||
private static final Logger gLogger = LoggerFactory.getLogger("general");
|
||||
|
||||
private DatabaseImpl db = null;
|
||||
private HashMap<ByteArrayWrapper, ContractDetails> cache = new HashMap<>();
|
||||
private Set<ByteArrayWrapper> removes = new HashSet<>();
|
||||
|
||||
public void setDB(DatabaseImpl db){
|
||||
this.db = db;
|
||||
}
|
||||
|
||||
public ContractDetails get(byte[] key){
|
||||
|
||||
ContractDetails details = cache.get(wrap(key));
|
||||
|
||||
if (details == null){
|
||||
|
||||
if ( removes.contains(wrap(key))) return null;
|
||||
|
||||
byte[] data = db.get(key);
|
||||
if (data == null) return null;
|
||||
|
||||
details = new ContractDetailsImpl(data);
|
||||
cache.put( wrap(key), details);
|
||||
}
|
||||
|
||||
return details;
|
||||
}
|
||||
|
||||
public void update(byte[] key, ContractDetails contractDetails){
|
||||
cache.put(wrap(key), contractDetails);
|
||||
|
||||
if (removes.contains(wrap(key)))
|
||||
removes.remove(wrap(key));
|
||||
}
|
||||
|
||||
public void remove(byte[] key){
|
||||
cache.remove(wrap(key));
|
||||
removes.add(wrap(key));
|
||||
}
|
||||
|
||||
public void flush(){
|
||||
|
||||
long t = System.nanoTime();
|
||||
|
||||
Map<byte[], byte[]> batch = new HashMap<>();
|
||||
for (ByteArrayWrapper key : cache.keySet()){
|
||||
ContractDetails contractDetails = cache.get(key);
|
||||
byte[] value = contractDetails.getEncoded();
|
||||
db.put(key.getData(), value);
|
||||
batch.put(key.getData(), value);
|
||||
}
|
||||
|
||||
db.getDb().updateBatch(batch);
|
||||
|
||||
for (ByteArrayWrapper key : removes){
|
||||
db.delete(key.getData());
|
||||
}
|
||||
|
||||
long keys = cache.size();
|
||||
|
||||
cache.clear();
|
||||
removes.clear();
|
||||
|
||||
long t_ = System.nanoTime();
|
||||
gLogger.info("Flush details in: {} ms, {} keys", ((float)(t_ - t) / 1_000_000), keys);
|
||||
}
|
||||
|
||||
|
||||
public Set<ByteArrayWrapper> keys(){
|
||||
|
||||
Set<ByteArrayWrapper> keys = new HashSet<>();
|
||||
keys.addAll(cache.keySet());
|
||||
keys.addAll(db.dumpKeys());
|
||||
return keys;
|
||||
}
|
||||
}
|
|
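The new DetailsDataStore combines a read cache, a tombstone set for removals, and one batched write on flush(). Simplified model of that flow using plain maps; the Map stands in for DatabaseImpl, and the real class stores ContractDetails rather than strings.

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

// Cache + tombstones + batched flush, reduced to JDK collections.
class WriteBackStore {
    private final Map<String, String> db;                  // backing store (stand-in for DatabaseImpl)
    private final Map<String, String> cache = new HashMap<>();
    private final Set<String> removes = new HashSet<>();

    WriteBackStore(Map<String, String> db) { this.db = db; }

    String get(String key) {
        String value = cache.get(key);
        if (value != null) return value;
        if (removes.contains(key)) return null;            // deleted but not yet flushed
        value = db.get(key);
        if (value != null) cache.put(key, value);
        return value;
    }

    void update(String key, String value) { cache.put(key, value); removes.remove(key); }

    void remove(String key) { cache.remove(key); removes.add(key); }

    void flush() {
        db.putAll(cache);                                   // one batched write
        for (String key : removes) db.remove(key);
        cache.clear();
        removes.clear();
    }
}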
@ -1,70 +1,95 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import org.ethereum.core.Block;
|
||||
import org.ethereum.core.Genesis;
|
||||
import org.ethereum.core.TransactionReceipt;
|
||||
import org.hibernate.Session;
|
||||
import org.hibernate.SessionFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.util.*;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.ethereum.util.ByteUtil.*;
|
||||
import static java.math.BigInteger.ZERO;
|
||||
import static org.ethereum.util.ByteUtil.wrap;
|
||||
|
||||
/**
|
||||
* @author: Roman Mandeleil
|
||||
* Created on: 29/01/2015 20:43
|
||||
*/
|
||||
|
||||
public class InMemoryBlockStore implements BlockStore {
|
||||
|
||||
final static public int MAX_BLOCKS = 1000;
|
||||
public class InMemoryBlockStore implements BlockStore{
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger("general");
|
||||
|
||||
Map<ByteArrayWrapper, Block> hashIndex = new HashMap<>();
|
||||
Map<Long, Block> numberIndex = new HashMap<>();
|
||||
List<Block> blocks = new ArrayList<>();
|
||||
|
||||
|
||||
SessionFactory sessionFactory;
|
||||
|
||||
BigInteger totalDifficulty = ZERO;
|
||||
|
||||
public InMemoryBlockStore(){
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] getBlockHashByNumber(long blockNumber) {
|
||||
|
||||
Block block = numberIndex.get(blockNumber);
|
||||
if (block == null) return null;
|
||||
return block.getHash();
|
||||
|
||||
if (block == null)
|
||||
return dbGetBlockHashByNumber(blockNumber);
|
||||
else
|
||||
return block.getHash();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Block getBlockByNumber(long blockNumber) {
|
||||
return numberIndex.get(blockNumber);
|
||||
|
||||
Block block = numberIndex.get(blockNumber);
|
||||
|
||||
if (block == null)
|
||||
return dbGetBlockByNumber(blockNumber);
|
||||
else
|
||||
return block;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Block getBlockByHash(byte[] hash) {
|
||||
return hashIndex.get(wrap(hash));
|
||||
|
||||
Block block = hashIndex.get(wrap(hash));
|
||||
|
||||
if (block == null)
|
||||
return dbGetBlockByHash(hash);
|
||||
else
|
||||
return block;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<byte[]> getListOfHashesStartFrom(byte[] hash, int qty) {
|
||||
|
||||
Block startBlock = hashIndex.get(wrap(hash));
|
||||
|
||||
|
||||
long endIndex = startBlock.getNumber() + qty;
|
||||
endIndex = getBestBlock().getNumber() < endIndex ? getBestBlock().getNumber() : endIndex;
|
||||
|
||||
List<byte[]> hashes = new ArrayList<>();
|
||||
|
||||
|
||||
for (long i = startBlock.getNumber(); i <= endIndex; ++i){
|
||||
Block block = getBlockByNumber(i);
|
||||
hashes.add(block.getHash() );
|
||||
}
|
||||
|
||||
|
||||
return hashes;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteBlocksSince(long number) {
|
||||
|
||||
|
||||
// todo: delete blocks since
// todo: delete blocks since
|
||||
}
|
||||
|
||||
|
@ -74,22 +99,20 @@ public class InMemoryBlockStore implements BlockStore {
|
|||
blocks.add(block);
|
||||
hashIndex.put(wHash, block);
|
||||
numberIndex.put(block.getNumber(), block);
|
||||
|
||||
if (blocks.size() > MAX_BLOCKS){
|
||||
Block rBlock = blocks.remove(0);
|
||||
hashIndex.remove(wrap(rBlock.getHash()));
|
||||
numberIndex.remove(rBlock.getNumber());
|
||||
}
|
||||
totalDifficulty = totalDifficulty.add(block.getCumulativeDifficulty());
|
||||
}
|
||||
|
||||
@Override
|
||||
public BigInteger getTotalDifficultySince(long number) {
|
||||
return BigInteger.ZERO;
|
||||
|
||||
// todo calculate from db + from cache
|
||||
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BigInteger getTotalDifficulty() {
|
||||
return BigInteger.ZERO;
|
||||
return totalDifficulty;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -114,4 +137,112 @@ public class InMemoryBlockStore implements BlockStore {
|
|||
public TransactionReceipt getTransactionReceiptByHash(byte[] hash) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// FIXME: wrap from here into a db class
// FIXME: wrap from here into a db class
|
||||
|
||||
public byte[] dbGetBlockHashByNumber(long blockNumber) {
|
||||
|
||||
Session s = sessionFactory.openSession();
|
||||
|
||||
List result = s.createQuery("from BlockVO where number = :number").
|
||||
setParameter("number", blockNumber).list();
|
||||
|
||||
if (result.size() == 0) return null;
|
||||
BlockVO vo = (BlockVO) result.get(0);
|
||||
|
||||
return vo.getHash();
|
||||
}
|
||||
|
||||
public Block dbGetBlockByNumber(long blockNumber) {
|
||||
|
||||
Session s = sessionFactory.openSession();
|
||||
|
||||
List result = s.createQuery("from BlockVO where number = :number").
|
||||
setParameter("number", blockNumber).list();
|
||||
|
||||
if (result.size() == 0) return null;
|
||||
BlockVO vo = (BlockVO) result.get(0);
|
||||
|
||||
s.close();
|
||||
|
||||
byte[] rlp = vo.getRlp();
|
||||
return new Block(rlp);
|
||||
}
|
||||
|
||||
public Block dbGetBlockByHash(byte[] hash) {
|
||||
|
||||
Session s = sessionFactory.openSession();
|
||||
|
||||
List result = s.createQuery("from BlockVO where hash = :hash").
|
||||
setParameter("hash", hash).list();
|
||||
|
||||
if (result.size() == 0) return null;
|
||||
BlockVO vo = (BlockVO) result.get(0);
|
||||
|
||||
s.close();
|
||||
return new Block(vo.rlp);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void flush(){
|
||||
|
||||
long t_ = System.nanoTime();
|
||||
|
||||
Session s = sessionFactory.openSession();
|
||||
s.beginTransaction();
|
||||
for (Block block : blocks){
|
||||
BlockVO blockVO = new BlockVO(block.getNumber(), block.getHash(), block.getEncoded(), block.getCumulativeDifficulty());
|
||||
s.saveOrUpdate(blockVO);
|
||||
}
|
||||
|
||||
s.getTransaction().commit();
|
||||
|
||||
Block block = getBestBlock();
|
||||
|
||||
blocks.clear();
|
||||
hashIndex.clear();
|
||||
numberIndex.clear();
|
||||
|
||||
saveBlock(block, null);
|
||||
|
||||
long t__ = System.nanoTime();
|
||||
logger.info("Flush block store in: {} ms", ((float)(t__ - t_) / 1_000_000));
|
||||
|
||||
totalDifficulty = (BigInteger) s.createQuery("select sum(cumulativeDifficulty) from BlockVO").uniqueResult();
|
||||
|
||||
s.close();
|
||||
}
|
||||
|
||||
public void load(){
|
||||
|
||||
logger.info("loading db");
|
||||
|
||||
long t = System.nanoTime();
|
||||
Session s = sessionFactory.openSession();
|
||||
|
||||
Long bestNumber = (Long)
|
||||
s.createQuery("select max(number) from BlockVO").uniqueResult();
|
||||
|
||||
List result =
|
||||
s.createQuery("from BlockVO where number = :number").setParameter("number", bestNumber).list();
|
||||
|
||||
if (result.isEmpty()) return ;
|
||||
BlockVO vo = (BlockVO) result.get(0);
|
||||
|
||||
Block bestBlock = new Block(vo.rlp);
|
||||
saveBlock(bestBlock, null);
|
||||
|
||||
totalDifficulty = (BigInteger) s.createQuery("select sum(cumulativeDifficulty) from BlockVO").uniqueResult();
|
||||
|
||||
long t_ = System.nanoTime();
|
||||
|
||||
logger.info("Loaded db in: {} ms", ((float)(t_ - t) / 1_000_000));
|
||||
}
|
||||
|
||||
//@Override
|
||||
public void setSessionFactory(SessionFactory sessionFactory) {
|
||||
this.sessionFactory = sessionFactory;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
|
|
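InMemoryBlockStore now keeps only the newest MAX_BLOCKS blocks indexed in memory and falls back to the Hibernate-backed tables for anything older. The eviction part in isolation, with toy types and persistence left out:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;

// Bounded window: once more than MAX_BLOCKS entries are held, the oldest one is dropped from the index.
class BoundedBlockWindow {
    static final int MAX_BLOCKS = 1000;

    private final Deque<Long> order = new ArrayDeque<>();
    private final Map<Long, String> byNumber = new HashMap<>();

    void save(long number, String blockHash) {
        order.addLast(number);
        byNumber.put(number, blockHash);
        if (order.size() > MAX_BLOCKS) {
            long evicted = order.removeFirst();
            byNumber.remove(evicted);                       // older blocks must be read from the DB
        }
    }

    String hashByNumber(long number) { return byNumber.get(number); } // null -> fall back to the DB
}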
@ -1,33 +1,26 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import org.ethereum.core.AccountState;
|
||||
import org.ethereum.core.Block;
|
||||
import org.ethereum.datasource.KeyValueDataSource;
|
||||
import org.ethereum.facade.Repository;
|
||||
import org.ethereum.json.EtherObjectMapper;
|
||||
import org.ethereum.json.JSONHelper;
|
||||
|
||||
import org.ethereum.trie.SecureTrie;
|
||||
import org.ethereum.trie.Trie;
|
||||
import org.ethereum.vm.DataWord;
|
||||
|
||||
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
|
||||
import java.math.BigInteger;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
@ -47,10 +40,13 @@ public class RepositoryImpl implements Repository {
|
|||
public final static String STATE_DB = "state";
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger("repository");
|
||||
private static final Logger gLogger = LoggerFactory.getLogger("general");
|
||||
|
||||
private Trie worldState;
|
||||
|
||||
private DatabaseImpl detailsDB = null;
|
||||
private DetailsDataStore dds = new DetailsDataStore();
|
||||
|
||||
private DatabaseImpl stateDB = null;
|
||||
|
||||
KeyValueDataSource detailsDS = null;
|
||||
|
@ -74,12 +70,16 @@ public class RepositoryImpl implements Repository {
|
|||
this.stateDS = stateDS;
|
||||
|
||||
detailsDB = new DatabaseImpl(detailsDS);
|
||||
dds.setDB(detailsDB);
|
||||
|
||||
stateDB = new DatabaseImpl(stateDS);
|
||||
worldState = new SecureTrie(stateDB.getDb());
|
||||
}
|
||||
|
||||
public RepositoryImpl(String detailsDbName, String stateDbName) {
|
||||
detailsDB = new DatabaseImpl(detailsDbName);
|
||||
dds.setDB(detailsDB);
|
||||
|
||||
stateDB = new DatabaseImpl(stateDbName);
|
||||
worldState = new SecureTrie(stateDB.getDb());
|
||||
}
|
||||
|
@ -127,19 +127,27 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
if (accountState.isDeleted()) {
|
||||
worldState.delete(hash.getData());
|
||||
detailsDB.delete(hash.getData());
|
||||
|
||||
dds.remove(hash.getData());
|
||||
logger.debug("delete: [{}]",
|
||||
Hex.toHexString(hash.getData()));
|
||||
|
||||
} else {
|
||||
|
||||
if (accountState.isDirty() || contractDetails.isDirty()) {
|
||||
detailsDB.put(hash.getData(), contractDetails.getEncoded());
|
||||
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl)contractDetails;
|
||||
if (contractDetailsCache.origContract == null){
|
||||
contractDetailsCache.origContract = new ContractDetailsImpl();
|
||||
contractDetailsCache.commit();
|
||||
}
|
||||
|
||||
contractDetails = contractDetailsCache.origContract;
|
||||
|
||||
dds.update(hash.getData(), contractDetails);
|
||||
|
||||
accountState.setStateRoot(contractDetails.getStorageHash());
|
||||
accountState.setCodeHash(sha3(contractDetails.getCode()));
|
||||
worldState.update(hash.getData(), accountState.getEncoded());
|
||||
if (logger.isDebugEnabled()) {
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("update: [{}],nonce: [{}] balance: [{}] \n [{}]",
|
||||
Hex.toHexString(hash.getData()),
|
||||
accountState.getNonce(),
|
||||
|
@ -147,7 +155,6 @@ public class RepositoryImpl implements Repository {
|
|||
contractDetails.getStorage());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -161,8 +168,15 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
@Override
|
||||
public void flush() {
|
||||
logger.info("flush to disk");
|
||||
gLogger.info("flushing to disk");
|
||||
|
||||
dds.flush();
|
||||
|
||||
long t = System.nanoTime();
|
||||
worldState.sync();
|
||||
long t__ = System.nanoTime();
|
||||
|
||||
gLogger.info("Flush state in: {} ms", ((float)(t__ - t) / 1_000_000));
|
||||
}
|
||||
|
||||
|
||||
|
@ -291,7 +305,13 @@ public class RepositoryImpl implements Repository {
|
|||
|
||||
@Override
|
||||
public Set<byte[]> getAccountsKeys() {
|
||||
return detailsDB.getDb().keys();
|
||||
|
||||
Set<byte[]> result = new HashSet<>();
|
||||
for (ByteArrayWrapper key : dds.keys() ){
|
||||
result.add(key.getData());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -341,7 +361,8 @@ public class RepositoryImpl implements Repository {
|
|||
}
|
||||
|
||||
details.put(key, value);
|
||||
detailsDB.put(addr, details.getEncoded());
|
||||
|
||||
dds.update(addr, details);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -365,7 +386,8 @@ public class RepositoryImpl implements Repository {
|
|||
}
|
||||
|
||||
details.setCode(code);
|
||||
detailsDB.put(addr, details.getEncoded());
|
||||
|
||||
dds.update(addr, details);
|
||||
}
|
||||
|
||||
|
||||
|
@ -411,19 +433,12 @@ public class RepositoryImpl implements Repository {
|
|||
@Override
|
||||
public void delete(byte[] addr) {
|
||||
worldState.delete(addr);
|
||||
detailsDB.delete(addr);
|
||||
dds.remove(addr);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContractDetails getContractDetails(byte[] addr) {
|
||||
|
||||
ContractDetails result = null;
|
||||
byte[] detailsData = detailsDB.get(addr);
|
||||
|
||||
if (detailsData != null)
|
||||
result = new ContractDetailsImpl(detailsData);
|
||||
|
||||
return result;
|
||||
return dds.get(addr);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -445,7 +460,8 @@ public class RepositoryImpl implements Repository {
|
|||
worldState.update(addr, accountState.getEncoded());
|
||||
|
||||
ContractDetails contractDetails = new ContractDetailsImpl();
|
||||
detailsDB.put(addr, contractDetails.getEncoded());
|
||||
|
||||
dds.update(addr, contractDetails);
|
||||
|
||||
return accountState;
|
||||
}
|
||||
|
@ -468,21 +484,16 @@ public class RepositoryImpl implements Repository {
|
|||
else
|
||||
account = account.clone();
|
||||
|
||||
if (details == null)
|
||||
details = new ContractDetailsCacheImpl();
|
||||
if (details == null) {
|
||||
details = new ContractDetailsCacheImpl(null);
|
||||
}
|
||||
else
|
||||
details = new ContractDetailsCacheImpl(details.getEncoded());
|
||||
details = new ContractDetailsCacheImpl(details);
|
||||
|
||||
cacheAccounts.put(wrap(addr), account);
|
||||
cacheDetails.put(wrap(addr), details);
|
||||
}
|
||||
|
||||
public Set<ByteArrayWrapper> getFullAddressSet() {
|
||||
Set<ByteArrayWrapper> setKeys = new HashSet<>(detailsDB.dumpKeys());
|
||||
return setKeys;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public byte[] getRoot() {
|
||||
return worldState.getRootHash();
|
||||
|
|
|
@ -47,7 +47,7 @@ public class RepositoryTrack implements Repository {
|
|||
AccountState accountState = new AccountState();
|
||||
cacheAccounts.put(wrap(addr), accountState);
|
||||
|
||||
ContractDetails contractDetails = new ContractDetailsCacheImpl();
|
||||
ContractDetails contractDetails = new ContractDetailsCacheImpl(null);
|
||||
cacheDetails.put(wrap(addr), contractDetails);
|
||||
|
||||
return accountState;
|
||||
|
@ -99,7 +99,9 @@ public class RepositoryTrack implements Repository {
|
|||
repository.loadAccount(addr, cacheAccounts, cacheDetails);
|
||||
} else {
|
||||
cacheAccounts.put(wrap(addr), accountState.clone());
|
||||
cacheDetails.put(wrap(addr), contractDetails.clone());
|
||||
|
||||
ContractDetails contractDetailsLvl2 = new ContractDetailsCacheImpl(contractDetails);
|
||||
cacheDetails.put(wrap(addr), contractDetailsLvl2);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -221,7 +223,10 @@ public class RepositoryTrack implements Repository {
|
|||
@Override
|
||||
public Repository startTracking() {
|
||||
logger.debug("start tracking");
|
||||
return new RepositoryTrack(this);
|
||||
|
||||
Repository repository = new RepositoryTrack(this);
|
||||
|
||||
return repository;
|
||||
}
|
||||
|
||||
|
||||
|
@ -233,6 +238,13 @@ public class RepositoryTrack implements Repository {
|
|||
|
||||
@Override
|
||||
public void commit() {
|
||||
|
||||
for (ContractDetails contractDetails : cacheDetails.values()) {
|
||||
|
||||
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl)contractDetails;
|
||||
contractDetailsCache.commit();
|
||||
}
|
||||
|
||||
repository.updateBatch(cacheAccounts, cacheDetails);
|
||||
cacheAccounts.clear();
|
||||
cacheDetails.clear();
|
||||
|
@ -262,7 +274,12 @@ public class RepositoryTrack implements Repository {
|
|||
}
|
||||
|
||||
for (ByteArrayWrapper hash : contractDetailes.keySet()) {
|
||||
cacheDetails.put(hash, contractDetailes.get(hash));
|
||||
|
||||
ContractDetailsCacheImpl contractDetailsCache = (ContractDetailsCacheImpl)contractDetailes.get(hash);
|
||||
if (contractDetailsCache.origContract != null && !(contractDetailsCache.origContract instanceof ContractDetailsImpl))
|
||||
cacheDetails.put(hash, contractDetailsCache.origContract);
|
||||
else
|
||||
cacheDetails.put(hash, contractDetailsCache);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
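RepositoryTrack.commit() above first commits each ContractDetailsCacheImpl and then pushes the whole batch one level down. The general shape of that nested-tracking idea, reduced to plain maps; names are illustrative and not the project's Repository interface.

import java.util.HashMap;
import java.util.Map;

// Each tracking level buffers writes; commit() pushes them one level down, rollback() drops them.
class TrackingMap {
    private final TrackingMap parent;                       // null for the root store
    private final Map<String, String> writes = new HashMap<>();

    TrackingMap(TrackingMap parent) { this.parent = parent; }

    TrackingMap startTracking() { return new TrackingMap(this); }

    void put(String key, String value) { writes.put(key, value); }

    String get(String key) {
        if (writes.containsKey(key)) return writes.get(key);
        return parent == null ? null : parent.get(key);     // read through to outer levels
    }

    void commit() {
        if (parent != null) parent.writes.putAll(writes);
        writes.clear();
    }

    void rollback() { writes.clear(); }
}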
@ -71,13 +71,22 @@ public class CommonConfig {
|
|||
|
||||
Properties prop = new Properties();
|
||||
|
||||
prop.put("hibernate.hbm2ddl.auto", "update");
|
||||
if (CONFIG.databaseReset())
|
||||
prop.put("hibernate.hbm2ddl.auto", "create-drop");
|
||||
else
|
||||
prop.put("hibernate.hbm2ddl.auto", "update");
|
||||
|
||||
prop.put("hibernate.format_sql", "true");
|
||||
prop.put("hibernate.connection.autocommit", "false");
|
||||
prop.put("hibernate.connection.release_mode", "after_transaction");
|
||||
prop.put("hibernate.jdbc.batch_size", "1000");
|
||||
prop.put("hibernate.order_inserts", "true");
|
||||
prop.put("hibernate.order_updates", "true");
|
||||
|
||||
// todo: useful but noisy; consider making it configurable via system.properties
// todo: useful but noisy; consider making it configurable via system.properties
|
||||
// prop.put("hibernate.show_sql", "true");
|
||||
prop.put("hibernate.dialect",
|
||||
"org.hibernate.dialect.HSQLDialect");
|
||||
"org.hibernate.dialect.H2Dialect");
|
||||
return prop;
|
||||
}
|
||||
/*
|
||||
|
@ -85,6 +94,7 @@ public class CommonConfig {
|
|||
return new HibernateTransactionManager(sessionFactory());
|
||||
}
|
||||
|
||||
|
||||
public DriverManagerDataSource dataSource() {
|
||||
|
||||
logger.info("Connecting to the block store");
|
||||
|
@ -92,18 +102,16 @@ public class CommonConfig {
|
|||
System.setProperty("hsqldb.reconfig_logging", "false");
|
||||
|
||||
String url =
|
||||
String.format("jdbc:hsqldb:file:./%s/blockchain/blockchain.db;" +
|
||||
"create=true;hsqldb.default_table_type=cached",
|
||||
|
||||
String.format("jdbc:h2:./%s/blockchain/blockchain.db;CACHE_SIZE=200000",
|
||||
SystemProperties.CONFIG.databaseDir());
|
||||
|
||||
DriverManagerDataSource ds = new DriverManagerDataSource();
|
||||
ds.setDriverClassName("org.hsqldb.jdbcDriver");
|
||||
ds.setDriverClassName("org.h2.Driver");
|
||||
ds.setUrl(url);
|
||||
ds.setUsername("sa");
|
||||
|
||||
|
||||
return ds;
|
||||
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
|
|
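The datasource change above swaps HSQLDB for embedded H2 (org.h2.Driver, a file URL with a CACHE_SIZE option). A quick stand-alone smoke test of that style of URL, assuming the H2 jar from the build is on the classpath; the path below is only an example value.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Opens an embedded H2 database with the same URL shape used by the new dataSource() config.
public class H2Smoke {
    public static void main(String[] args) throws SQLException {
        String url = "jdbc:h2:./database/blockchain/blockchain.db;CACHE_SIZE=200000"; // example path
        try (Connection conn = DriverManager.getConnection(url, "sa", "");
             Statement st = conn.createStatement()) {
            st.execute("CREATE TABLE IF NOT EXISTS smoke (id INT PRIMARY KEY)");
            System.out.println("connected: " + !conn.isClosed());
        }
    }
}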
@ -13,6 +13,9 @@ public class RemoteConfig {
|
|||
|
||||
|
||||
public BlockStore blockStore(SessionFactory sessionFactory){
|
||||
return new InMemoryBlockStore();
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
return blockStore;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -62,8 +62,8 @@ public class JSONReader {
|
|||
URL url;
|
||||
HttpURLConnection conn;
|
||||
BufferedReader rd;
|
||||
StringBuilder result = new StringBuilder();
|
||||
String line;
|
||||
String result = "";
|
||||
try {
|
||||
url = new URL(urlToRead);
|
||||
conn = (HttpURLConnection) url.openConnection();
|
||||
|
@ -71,16 +71,17 @@ public class JSONReader {
|
|||
conn.setDoOutput(true);
|
||||
conn.connect();
|
||||
InputStream in = conn.getInputStream();
|
||||
rd = new BufferedReader(new InputStreamReader(in));
|
||||
rd = new BufferedReader(new InputStreamReader(in), 819200);
|
||||
|
||||
logger.info("Loading remote file: " + urlToRead);
|
||||
while ((line = rd.readLine()) != null) {
|
||||
result += line;
|
||||
result.append(line);
|
||||
}
|
||||
rd.close();
|
||||
} catch (Throwable e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return result;
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
public static String getTestBlobForTreeSha(String shacommit, String testcase){
|
||||
|
|
|
@ -7,6 +7,7 @@ import org.spongycastle.util.encoders.Hex;
|
|||
import java.math.BigInteger;
|
||||
|
||||
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
|
||||
import static org.ethereum.util.Utils.unifiedNumericToBigInteger;
|
||||
|
||||
/**
|
||||
* @author Roman Mandeleil
|
||||
|
@ -36,7 +37,7 @@ public class Utils {
|
|||
public static byte[] parseNumericData(String data){
|
||||
|
||||
if (data == null || data.equals("")) return EMPTY_BYTE_ARRAY;
|
||||
byte[] dataB = new BigInteger(data, 10).toByteArray();
|
||||
byte[] dataB = unifiedNumericToBigInteger(data).toByteArray();
|
||||
return ByteUtil.stripLeadingZeroes(dataB);
|
||||
}
|
||||
|
||||
|
@ -45,7 +46,11 @@ public class Utils {
|
|||
}
|
||||
|
||||
public static byte parseByte(String data) {
|
||||
return data.equals("") ? 0 : Byte.parseByte(data);
|
||||
if (data.startsWith("0x")) {
|
||||
data = data.substring(2);
|
||||
return data.equals("") ? 0 : Byte.parseByte(data, 16);
|
||||
} else
|
||||
return data.equals("") ? 0 : Byte.parseByte(data);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@ package org.ethereum.jsontestsuite.builder;
|
|||
import org.ethereum.core.AccountState;
|
||||
import org.ethereum.db.ContractDetailsImpl;
|
||||
import org.ethereum.jsontestsuite.model.AccountTck;
|
||||
import org.ethereum.util.Utils;
|
||||
import org.ethereum.vm.DataWord;
|
||||
|
||||
import java.math.BigInteger;
|
||||
|
@ -11,6 +12,7 @@ import java.util.Map;
|
|||
|
||||
import static org.ethereum.crypto.HashUtil.sha3;
|
||||
import static org.ethereum.jsontestsuite.Utils.parseData;
|
||||
import static org.ethereum.util.Utils.unifiedNumericToBigInteger;
|
||||
|
||||
public class AccountBuilder {
|
||||
|
||||
|
@ -21,8 +23,9 @@ public class AccountBuilder {
|
|||
details.setStorage(convertStorage(account.getStorage()));
|
||||
|
||||
AccountState state = new AccountState();
|
||||
state.addToBalance(new BigInteger(account.getBalance()));
|
||||
state.setNonce(new BigInteger(account.getNonce()));
|
||||
|
||||
state.addToBalance(unifiedNumericToBigInteger(account.getBalance()));
|
||||
state.setNonce(unifiedNumericToBigInteger(account.getNonce()));
|
||||
state.setStateRoot(details.getStorageHash());
|
||||
state.setCodeHash(sha3(details.getCode()));
|
||||
|
||||
|
|
|
@ -2,9 +2,7 @@ package org.ethereum.jsontestsuite.builder;
|
|||
|
||||
import org.ethereum.core.AccountState;
|
||||
import org.ethereum.datasource.HashMapDB;
|
||||
import org.ethereum.db.ByteArrayWrapper;
|
||||
import org.ethereum.db.ContractDetails;
|
||||
import org.ethereum.db.RepositoryImpl;
|
||||
import org.ethereum.db.*;
|
||||
import org.ethereum.facade.Repository;
|
||||
import org.ethereum.jsontestsuite.model.AccountTck;
|
||||
|
||||
|
@ -29,11 +27,12 @@ public class RepositoryBuilder {
|
|||
ContractDetails details = stateWrap.getContractDetails();
|
||||
|
||||
stateBatch.put(wrap(parseData(address)), state);
|
||||
detailsBatch.put(wrap(parseData(address)), details);
|
||||
detailsBatch.put(wrap(parseData(address)), new ContractDetailsCacheImpl(details));
|
||||
}
|
||||
|
||||
RepositoryImpl repositoryDummy = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
repositoryDummy.updateBatch(stateBatch, detailsBatch);
|
||||
repositoryDummy.flush();
|
||||
|
||||
return repositoryDummy;
|
||||
}
|
||||
|
|
|
@ -13,7 +13,6 @@ import org.ethereum.jsontestsuite.builder.*;
|
|||
import org.ethereum.jsontestsuite.validators.LogsValidator;
|
||||
import org.ethereum.jsontestsuite.validators.OutputValidator;
|
||||
import org.ethereum.jsontestsuite.validators.RepositoryValidator;
|
||||
import org.ethereum.listener.EthereumListenerAdapter;
|
||||
import org.ethereum.vm.LogInfo;
|
||||
import org.ethereum.vm.ProgramInvokeFactory;
|
||||
import org.slf4j.Logger;
|
||||
|
@ -55,7 +54,7 @@ public class StateTestRunner {
|
|||
|
||||
try{
|
||||
executor.init();
|
||||
executor.execute2();
|
||||
executor.execute();
|
||||
executor.go();
|
||||
executor.finalization();
|
||||
} catch (StackOverflowError soe){
|
||||
|
@ -63,7 +62,6 @@ public class StateTestRunner {
|
|||
System.exit(-1);
|
||||
}
|
||||
|
||||
|
||||
track.commit();
|
||||
repository.flush();
|
||||
|
||||
|
|
|
@ -46,7 +46,7 @@ public class AccountValidator {
|
|||
}
|
||||
|
||||
if (!Arrays.equals(expectedDetails.getCode(), currentDetails.getCode())) {
|
||||
String formattedString = String.format("Account: %s: has unexpected nonce, expected nonce: %s found nonce: %s",
|
||||
String formattedString = String.format("Account: %s: has unexpected code, expected code: %s found code: %s",
|
||||
address, Hex.toHexString(expectedDetails.getCode()), Hex.toHexString(currentDetails.getCode()));
|
||||
results.add(formattedString);
|
||||
}
|
||||
|
|
|
@ -29,15 +29,9 @@ public class CompositeEthereumListener implements EthereumListener {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onBlock(Block block) {
|
||||
public void onBlock(Block block, List<TransactionReceipt> receipts) {
|
||||
for (EthereumListener listener : listeners)
|
||||
listener.onBlock(block);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBlockReciepts(List<TransactionReceipt> receipts) {
|
||||
for (EthereumListener listener : listeners)
|
||||
listener.onBlockReciepts(receipts);
|
||||
listener.onBlock(block, receipts);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -94,4 +88,4 @@ public class CompositeEthereumListener implements EthereumListener {
|
|||
listeners.add(listener);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,9 +17,7 @@ public interface EthereumListener {
|
|||
|
||||
void trace(String output);
|
||||
|
||||
void onBlock(Block block);
|
||||
|
||||
void onBlockReciepts(List<TransactionReceipt> receipts);
|
||||
void onBlock(Block block, List<TransactionReceipt> receipts);
|
||||
|
||||
void onRecvMessage(Message message);
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ public class EthereumListenerAdapter implements EthereumListener {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onBlock(Block block) {
|
||||
public void onBlock(Block block, List<TransactionReceipt> receipts) {
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -54,10 +54,6 @@ public class EthereumListenerAdapter implements EthereumListener {
|
|||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onBlockReciepts(List<TransactionReceipt> receipts) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onVMTraceCreated(String transactionHash, String trace) {
|
||||
|
|
|
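With onBlockReciepts() folded into onBlock(block, receipts), a listener now receives the block and its receipts in a single callback. A minimal override of the adapter, using only types that appear elsewhere in this diff; treat the exact packages as assumptions.

import java.util.List;

import org.ethereum.core.Block;
import org.ethereum.core.TransactionReceipt;
import org.ethereum.listener.EthereumListenerAdapter;

// Sketch of a listener written against the merged callback signature.
public class BlockLoggingListener extends EthereumListenerAdapter {

    @Override
    public void onBlock(Block block, List<TransactionReceipt> receipts) {
        System.out.println("block #" + block.getNumber() + " with " + receipts.size() + " receipts");
    }
}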
@ -50,8 +50,11 @@ public class BlockLoader {
|
|||
block.getNumber(), ((float)(t1_ - t1) / 1_000_000));
|
||||
|
||||
System.out.println(result);
|
||||
} else
|
||||
System.out.println("Skipping block #" + block.getNumber());
|
||||
} else {
|
||||
|
||||
if (block.getNumber() % 10000 == 0)
|
||||
System.out.println("Skipping block #" + block.getNumber());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -20,6 +20,10 @@ import org.spongycastle.util.encoders.Hex;
|
|||
import java.math.BigInteger;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.annotation.PreDestroy;
|
||||
import javax.inject.Inject;
|
||||
|
@ -140,6 +144,9 @@ public class WorldManager {
|
|||
|
||||
public void loadBlockchain() {
|
||||
|
||||
if (!CONFIG.databaseReset())
|
||||
blockStore.load();
|
||||
|
||||
Block bestBlock = blockStore.getBestBlock();
|
||||
if (bestBlock == null) {
|
||||
logger.info("DB is empty - adding Genesis");
|
||||
|
@ -155,7 +162,7 @@ public class WorldManager {
|
|||
blockchain.setBestBlock(Genesis.getInstance());
|
||||
blockchain.setTotalDifficulty(Genesis.getInstance().getCumulativeDifficulty());
|
||||
|
||||
listener.onBlock(Genesis.getInstance());
|
||||
listener.onBlock(Genesis.getInstance(), new ArrayList<TransactionReceipt>() );
|
||||
repository.dumpState(Genesis.getInstance(), 0, 0, null);
|
||||
|
||||
logger.info("Genesis block loaded");
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
package org.ethereum.trie;
|
||||
|
||||
import org.ethereum.crypto.SHA3Helper;
|
||||
import org.ethereum.datasource.KeyValueDataSource;
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import static org.ethereum.crypto.SHA3Helper.sha3;
|
||||
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
|
||||
|
@ -63,4 +61,25 @@ public class SecureTrie extends TrieImpl implements Trie{
|
|||
public boolean validate() {
|
||||
return super.validate();
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] serialize() {
|
||||
return super.serialize();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deserialize(byte[] data) {
|
||||
super.deserialize(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SecureTrie clone(){
|
||||
|
||||
this.getCache();
|
||||
|
||||
this.getRoot();
|
||||
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -3,23 +3,20 @@ package org.ethereum.trie;
|
|||
import org.ethereum.crypto.HashUtil;
|
||||
import org.ethereum.datasource.KeyValueDataSource;
|
||||
import org.ethereum.db.ByteArrayWrapper;
|
||||
import org.ethereum.util.RLP;
|
||||
import org.ethereum.util.RLPItem;
|
||||
import org.ethereum.util.RLPList;
|
||||
import org.ethereum.util.Value;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
|
||||
import static java.util.Arrays.copyOfRange;
|
||||
import static org.ethereum.crypto.HashUtil.EMPTY_TRIE_HASH;
|
||||
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
|
||||
import static org.ethereum.util.ByteUtil.matchingNibbleLength;
|
||||
import static org.ethereum.util.ByteUtil.*;
|
||||
import static org.ethereum.util.CompactEncoder.*;
|
||||
import static org.spongycastle.util.Arrays.concatenate;
|
||||
|
||||
|
@ -89,6 +86,16 @@ public class TrieImpl implements Trie {
|
|||
this.root = root;
|
||||
}
|
||||
|
||||
public void deserializeRoot(byte[] data){
|
||||
try {
|
||||
ByteArrayInputStream b = new ByteArrayInputStream(data);
|
||||
ObjectInputStream o = new ObjectInputStream(b);
|
||||
root = o.readObject();
|
||||
} catch (IOException | ClassNotFoundException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/**************************************
|
||||
* Public (query) interface functions *
|
||||
**************************************/
|
||||
|
@ -480,6 +487,100 @@ public class TrieImpl implements Trie {
|
|||
}
|
||||
}
|
||||
|
||||
public void deserialize(byte[] data){
|
||||
RLPList rlpList = (RLPList) RLP.decode2(data).get(0);
|
||||
|
||||
RLPItem keysElement = (RLPItem)rlpList.get(0);
|
||||
RLPList valsList = (RLPList)rlpList.get(1);
|
||||
RLPItem root = (RLPItem)rlpList.get(2);
|
||||
|
||||
for (int i = 0; i < valsList.size(); ++i){
|
||||
|
||||
byte[] val = valsList.get(i).getRLPData();
|
||||
byte[] key = new byte[32];
|
||||
|
||||
Value value = Value.fromRlpEncoded(val);
|
||||
System.arraycopy(keysElement.getRLPData(), i * 32, key, 0, 32);
|
||||
cache.getNodes().put(wrap(key), new Node(value));
|
||||
}
|
||||
|
||||
this.deserializeRoot(root.getRLPData());
|
||||
}
|
||||
|
||||
public byte[] serialize(){
|
||||
|
||||
Map<ByteArrayWrapper, Node> map = getCache().getNodes();
|
||||
|
||||
int keysTotalSize = 0;
|
||||
int valsTotalSize = 0;
|
||||
|
||||
Set<ByteArrayWrapper> keys = map.keySet();
|
||||
for (ByteArrayWrapper key : keys){
|
||||
|
||||
byte[] keyBytes = key.getData();
|
||||
keysTotalSize += keyBytes.length;
|
||||
|
||||
byte[] valBytes = map.get(key).getValue().getData();
|
||||
valsTotalSize += valBytes.length;
|
||||
}
|
||||
|
||||
byte[] root = null;
|
||||
try {
|
||||
ByteArrayOutputStream b = new ByteArrayOutputStream();
|
||||
ObjectOutputStream o = new ObjectOutputStream(b);
|
||||
o.writeObject(this.getRoot());
|
||||
root = b.toByteArray();
|
||||
root = RLP.encodeElement(root);
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
byte[] keysHeader = RLP.encodeLongElementHeader(keysTotalSize);
|
||||
byte[] valsHeader = RLP.encodeListHeader(valsTotalSize);
|
||||
byte[] listHeader = RLP.encodeListHeader(keysTotalSize + keysHeader.length +
|
||||
valsTotalSize + valsHeader.length + root.length);
|
||||
|
||||
byte[] rlpData = new byte[keysTotalSize + keysHeader.length +
|
||||
valsTotalSize + valsHeader.length + listHeader.length + root.length];
|
||||
|
||||
// copy headers:
|
||||
// [ rlp_list_header, rlp_keys_header, rlp_keys, rlp_vals_header, rlp_val]
|
||||
|
||||
System.arraycopy(listHeader, 0, rlpData, 0, listHeader.length);
|
||||
System.arraycopy(keysHeader, 0, rlpData, listHeader.length, keysHeader.length);
|
||||
System.arraycopy(valsHeader,
|
||||
0,
|
||||
rlpData,
|
||||
(listHeader.length + keysHeader.length + keysTotalSize),
|
||||
valsHeader.length);
|
||||
System.arraycopy(root,
|
||||
0,
|
||||
rlpData,
|
||||
(listHeader.length + keysHeader.length + keysTotalSize + valsTotalSize+ valsHeader.length),
|
||||
root.length);
|
||||
|
||||
|
||||
int k_1 = 0;
|
||||
int k_2 = 0;
|
||||
for (ByteArrayWrapper key : keys){
|
||||
|
||||
System.arraycopy(key.getData(), 0, rlpData,
|
||||
(listHeader.length + keysHeader.length + k_1),
|
||||
key.getData().length);
|
||||
|
||||
k_1 += key.getData().length;
|
||||
|
||||
byte[] valBytes = map.get(key).getValue().getData();
|
||||
|
||||
System.arraycopy(valBytes, 0, rlpData,
|
||||
listHeader.length + keysHeader.length + keysTotalSize + valsHeader.length + k_2,
|
||||
valBytes.length);
|
||||
k_2 += valBytes.length;
|
||||
}
|
||||
|
||||
return rlpData;
|
||||
}
|
||||
|
||||
public String getTrieDump() {
|
||||
|
||||
TraceAllNodes traceAction = new TraceAllNodes();
|
||||
|
@ -501,4 +602,6 @@ public class TrieImpl implements Trie {
|
|||
public boolean validate() {
|
||||
return cache.get(getRootHash()) != null;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
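serialize() and deserializeRoot() above push the trie root through plain Java object serialization before wrapping the node table in RLP. The object-serialization leg on its own, with a String standing in for the root node structure:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

// Round trip an object through the same mechanism used for the trie root.
public class RootRoundTrip {
    public static void main(String[] args) throws IOException, ClassNotFoundException {
        Object root = "root-node-placeholder";

        ByteArrayOutputStream b = new ByteArrayOutputStream();
        try (ObjectOutputStream o = new ObjectOutputStream(b)) {
            o.writeObject(root);                            // as in serialize()
        }
        byte[] data = b.toByteArray();

        try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(data))) {
            Object restored = in.readObject();              // as in deserializeRoot()
            System.out.println(root.equals(restored));      // true
        }
    }
}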
@ -1,5 +1,7 @@
|
|||
package org.ethereum.util;
|
||||
|
||||
import org.ethereum.db.ByteArrayWrapper;
|
||||
|
||||
import java.math.BigInteger;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.*;
|
||||
|
@ -784,6 +786,105 @@ public class RLP {
|
|||
}
|
||||
}
|
||||
|
||||
public static byte[] encodeListHeader(int size) {
|
||||
|
||||
if (size == 0) {
|
||||
return new byte[]{(byte) OFFSET_SHORT_LIST};
|
||||
}
|
||||
|
||||
int totalLength = size;
|
||||
|
||||
byte[] header;
|
||||
if (totalLength < SIZE_THRESHOLD) {
|
||||
|
||||
header = new byte[1];
|
||||
header[0] = (byte) (OFFSET_SHORT_LIST + totalLength);
|
||||
} else {
|
||||
// length of length = BX
|
||||
// prefix = [BX, [length]]
|
||||
int tmpLength = totalLength;
|
||||
byte byteNum = 0;
|
||||
while (tmpLength != 0) {
|
||||
++byteNum;
|
||||
tmpLength = tmpLength >> 8;
|
||||
}
|
||||
tmpLength = totalLength;
|
||||
|
||||
byte[] lenBytes = new byte[byteNum];
|
||||
for (int i = 0; i < byteNum; ++i) {
|
||||
lenBytes[byteNum - 1 - i] = (byte) ((tmpLength >> (8 * i)) & 0xFF);
|
||||
}
|
||||
// first byte = F7 + bytes.length
|
||||
header = new byte[1 + lenBytes.length];
|
||||
header[0] = (byte) (OFFSET_LONG_LIST + byteNum);
|
||||
System.arraycopy(lenBytes, 0, header, 1, lenBytes.length);
|
||||
|
||||
}
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
|
||||
public static byte[] encodeLongElementHeader(int length) {
|
||||
|
||||
if (length < SIZE_THRESHOLD) {
|
||||
|
||||
if (length == 0)
|
||||
return new byte[] {(byte)0x80};
|
||||
else
|
||||
return new byte[] {(byte)(0x80 + length)};
|
||||
|
||||
} else {
|
||||
|
||||
// length of length = BX
|
||||
// prefix = [BX, [length]]
|
||||
int tmpLength = length;
|
||||
byte byteNum = 0;
|
||||
while (tmpLength != 0) {
|
||||
++byteNum;
|
||||
tmpLength = tmpLength >> 8;
|
||||
}
|
||||
|
||||
byte[] lenBytes = new byte[byteNum];
|
||||
for (int i = 0; i < byteNum; ++i) {
|
||||
lenBytes[byteNum - 1 - i] = (byte) ((length >> (8 * i)) & 0xFF);
|
||||
}
|
||||
|
||||
// first byte = F7 + bytes.length
|
||||
byte[] header = new byte[1 + lenBytes.length];
|
||||
header[0] = (byte) (OFFSET_LONG_ITEM + byteNum);
|
||||
System.arraycopy(lenBytes, 0, header, 1, lenBytes.length);
|
||||
|
||||
return header;
|
||||
}
|
||||
}
|
||||
|
||||
public static byte[] encodeSet(Set<ByteArrayWrapper> data){
|
||||
|
||||
int dataLength = 0;
|
||||
Set<byte[]> encodedElements = new HashSet<>();
|
||||
for (ByteArrayWrapper element : data){
|
||||
|
||||
byte[] encodedElement = RLP.encodeElement(element.getData());
|
||||
dataLength += encodedElement.length;
|
||||
encodedElements.add(encodedElement);
|
||||
}
|
||||
|
||||
byte[] listHeader = encodeListHeader(dataLength);
|
||||
|
||||
byte[] output = new byte[listHeader.length + dataLength];
|
||||
|
||||
System.arraycopy(listHeader, 0, output, 0, listHeader.length);
|
||||
|
||||
int cummStart = listHeader.length;
|
||||
for (byte[] element : encodedElements){
|
||||
System.arraycopy(element, 0, output, cummStart, element.length);
|
||||
cummStart += element.length;
|
||||
}
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
public static byte[] encodeList(byte[]... elements) {
|
||||
|
||||
if (elements == null) {
|
||||
|
@ -872,4 +973,7 @@ public class RLP {
|
|||
throw new RuntimeException("wrong decode attempt");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
|
|
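encodeListHeader() above follows the standard RLP rule: a payload shorter than 56 bytes gets a single 0xc0+length byte, a longer payload gets 0xf7+lengthOfLength followed by the big-endian payload length. A compact worked example of the same rule:

// Stand-alone copy of the RLP list-header rule, for illustration.
public class RlpHeaderExample {

    static byte[] listHeader(int payloadSize) {
        if (payloadSize < 56) {
            return new byte[]{(byte) (0xc0 + payloadSize)};
        }
        int tmp = payloadSize, lenOfLen = 0;
        while (tmp != 0) { lenOfLen++; tmp >>= 8; }
        byte[] header = new byte[1 + lenOfLen];
        header[0] = (byte) (0xf7 + lenOfLen);
        for (int i = 0; i < lenOfLen; i++) {
            header[header.length - 1 - i] = (byte) ((payloadSize >> (8 * i)) & 0xff);
        }
        return header;
    }

    public static void main(String[] args) {
        System.out.printf("3 bytes   -> %02x%n", listHeader(3)[0]);            // c3
        byte[] h = listHeader(300);
        System.out.printf("300 bytes -> %02x %02x %02x%n", h[0], h[1], h[2]);  // f9 01 2c
    }
}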
@ -16,23 +16,26 @@ import java.util.Date;
|
|||
import java.util.List;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
//import javax.swing.*;
|
||||
import javax.swing.*;
|
||||
|
||||
public class Utils {
|
||||
|
||||
private static SecureRandom random = new SecureRandom();
|
||||
|
||||
/**
|
||||
* @param hexNum should be in form '0x34fabd34....'
|
||||
* @param number should be in form '0x34fabd34....'
|
||||
* @return String
|
||||
*/
|
||||
public static String hexStringToDecimalString(String hexNum) {
|
||||
public static BigInteger unifiedNumericToBigInteger(String number) {
|
||||
|
||||
boolean match = Pattern.matches("0[xX][0-9a-fA-F]+", hexNum);
|
||||
if (!match) throw new Error("The string doesn't contains hex num in form 0x.. : [" + hexNum + "]");
|
||||
boolean match = Pattern.matches("0[xX][0-9a-fA-F]+", number);
|
||||
if (!match)
|
||||
return (new BigInteger(number));
|
||||
else{
|
||||
|
||||
byte[] numberBytes = Hex.decode(hexNum.substring(2));
|
||||
return (new BigInteger(1, numberBytes)).toString();
|
||||
byte[] numberBytes = Hex.decode(number.substring(2));
|
||||
return (new BigInteger(1, numberBytes));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -48,11 +51,11 @@ public class Utils {
|
|||
return formatter.format(date);
|
||||
}
|
||||
|
||||
/* public static ImageIcon getImageIcon(String resource) {
|
||||
public static ImageIcon getImageIcon(String resource) {
|
||||
URL imageURL = ClassLoader.getSystemResource(resource);
|
||||
ImageIcon image = new ImageIcon(imageURL);
|
||||
return image;
|
||||
}*/
|
||||
}
|
||||
|
||||
static BigInteger _1000_ = new BigInteger("1000");
|
||||
|
||||
|
|
|
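unifiedNumericToBigInteger() above accepts either a decimal string or a 0x-prefixed hex string. A stand-alone copy of that logic showing both inputs producing the same value; the hex decoding is inlined here instead of using the Hex utility.

import java.math.BigInteger;
import java.util.regex.Pattern;

// Decimal and 0x-hex inputs converge on the same BigInteger.
public class UnifiedNumericDemo {

    static BigInteger unified(String number) {
        boolean isHex = Pattern.matches("0[xX][0-9a-fA-F]+", number);
        if (!isHex) return new BigInteger(number);
        return new BigInteger(1, hexToBytes(number.substring(2)));
    }

    static byte[] hexToBytes(String hex) {
        if (hex.length() % 2 != 0) hex = "0" + hex;         // tolerate odd-length input
        byte[] out = new byte[hex.length() / 2];
        for (int i = 0; i < out.length; i++) {
            out[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
        }
        return out;
    }

    public static void main(String[] args) {
        System.out.println(unified("255"));   // 255
        System.out.println(unified("0xff"));  // 255
    }
}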
@ -87,6 +87,15 @@ public class Value {
|
|||
return ByteUtil.EMPTY_BYTE_ARRAY;
|
||||
}
|
||||
|
||||
public String getHex(){
|
||||
return Hex.toHexString(this.encode());
|
||||
}
|
||||
|
||||
public byte[] getData(){
|
||||
return this.encode();
|
||||
}
|
||||
|
||||
|
||||
public int[] asSlice() {
|
||||
return (int[]) value;
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package org.ethereum.vm;
|
||||
|
||||
import org.ethereum.db.ByteArrayWrapper;
|
||||
import org.ethereum.util.ByteUtil;
|
||||
import org.ethereum.util.FastByteComparisons;
|
||||
|
||||
|
@ -50,6 +51,10 @@ public class DataWord implements Comparable<DataWord> {
|
|||
this(Hex.decode(data));
|
||||
}
|
||||
|
||||
public DataWord(ByteArrayWrapper wrappedData){
|
||||
this(wrappedData.getData());
|
||||
}
|
||||
|
||||
public DataWord(byte[] data) {
|
||||
if (data == null)
|
||||
this.data = ByteUtil.EMPTY_BYTE_ARRAY;
|
||||
|
@ -344,4 +349,8 @@ public class DataWord implements Comparable<DataWord> {
|
|||
public boolean isHex(String hex) {
|
||||
return Hex.toHexString(data).equals(hex);
|
||||
}
|
||||
|
||||
public String asString(){
|
||||
return new String(getNoLeadZeroesData());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -12,13 +12,10 @@ import org.slf4j.Logger;
|
|||
import org.slf4j.LoggerFactory;
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import java.io.*;
|
||||
import java.math.BigInteger;
|
||||
import java.util.*;
|
||||
|
||||
import static java.lang.String.format;
|
||||
import static java.lang.System.getProperty;
|
||||
import static org.ethereum.config.SystemProperties.CONFIG;
|
||||
import static org.ethereum.util.BIUtil.*;
|
||||
import static org.ethereum.util.ByteUtil.EMPTY_BYTE_ARRAY;
|
||||
|
||||
|
@ -331,7 +328,7 @@ public class Program {
|
|||
Repository track = result.getRepository().startTracking();
|
||||
|
||||
//In case of hashing collisions, check for any balance before createAccount()
|
||||
BigInteger oldBalance = result.getRepository().getBalance(newAddress);
|
||||
BigInteger oldBalance = track.getBalance(newAddress);
|
||||
track.createAccount(newAddress);
|
||||
track.addBalance(newAddress, oldBalance);
|
||||
|
||||
|
@ -377,8 +374,8 @@ public class Program {
|
|||
// 4. CREATE THE CONTRACT OUT OF RETURN
|
||||
byte[] code = result.getHReturn();
|
||||
|
||||
long storageCost = code.length * GasCost.CREATE_DATA_BYTE;
|
||||
long afterSpend = invokeData.getGas().longValue() - storageCost - result.getGasUsed();
|
||||
long storageCost = code.length * GasCost.CREATE_DATA;
|
||||
long afterSpend = programInvoke.getGas().longValue() - storageCost - result.getGasUsed();
|
||||
if (afterSpend < 0) {
|
||||
track.saveCode(newAddress, EMPTY_BYTE_ARRAY);
|
||||
} else {
|
||||
|
@ -417,6 +414,7 @@ public class Program {
|
|||
|
||||
if (invokeData.getCallDeep() == MAX_DEPTH) {
|
||||
stackPushZero();
|
||||
this.refundGas(msg.getGas().longValue(), " call deep limit reach");
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -427,9 +425,6 @@ public class Program {
|
|||
byte[] senderAddress = this.getOwnerAddress().getLast20Bytes();
|
||||
byte[] contextAddress = msg.getType() == MsgType.STATELESS ? senderAddress : codeAddress;
|
||||
|
||||
// FETCH THE CODE
|
||||
byte[] programCode = this.result.getRepository().getCode(codeAddress);
|
||||
|
||||
if (logger.isInfoEnabled())
|
||||
logger.info(msg.getType().name() + " for existing contract: address: [{}], outDataOffs: [{}], outDataSize: [{}] ",
|
||||
Hex.toHexString(contextAddress), msg.getOutDataOffs().longValue(), msg.getOutDataSize().longValue());
|
||||
|
@ -437,7 +432,7 @@ public class Program {
|
|||
Repository trackRepository = result.getRepository().startTracking();
|
||||
|
||||
// 2.1 PERFORM THE VALUE (endowment) PART
|
||||
BigInteger endowment = msg.getEndowment().value(); //TODO #POC9 add 1024 stack check <=
|
||||
BigInteger endowment = msg.getEndowment().value();
|
||||
BigInteger senderBalance = trackRepository.getBalance(senderAddress);
|
||||
if (isNotCovers(senderBalance, endowment)) {
|
||||
stackPushZero();
|
||||
|
@ -445,6 +440,13 @@ public class Program {
|
|||
return;
|
||||
}
|
||||
|
||||
|
||||
// FETCH THE CODE
|
||||
byte[] programCode = EMPTY_BYTE_ARRAY;
|
||||
if (this.result.getRepository().isExist(codeAddress)){
|
||||
programCode = this.result.getRepository().getCode(codeAddress);
|
||||
}
|
||||
|
||||
trackRepository.addBalance(senderAddress, endowment.negate());
|
||||
|
||||
BigInteger contextBalance = BigInteger.ZERO;
|
||||
|
@ -477,7 +479,7 @@ public class Program {
|
|||
}
|
||||
|
||||
if (result != null &&
|
||||
result.getException() != null) {
|
||||
result.getException() != null) {
|
||||
gasLogger.debug("contract run halted by Exception: contract: [{}], exception: [{}]",
|
||||
Hex.toHexString(contextAddress),
|
||||
result.getException());
|
||||
|
@ -587,6 +589,11 @@ public class Program {
|
|||
|
||||
public DataWord getBalance(DataWord address) {
|
||||
if (invokeData == null) return DataWord.ZERO_EMPTY_ARRAY;
|
||||
byte[] addressBytes = address.getLast20Bytes();
|
||||
|
||||
if (!result.getRepository().isExist(addressBytes)){
|
||||
return DataWord.ZERO.clone();
|
||||
}
|
||||
|
||||
BigInteger balance = result.getRepository().getBalance(address.getLast20Bytes());
|
||||
|
||||
|
@ -701,7 +708,7 @@ public class Program {
|
|||
|
||||
StringBuilder memoryData = new StringBuilder();
|
||||
StringBuilder oneLine = new StringBuilder();
|
||||
if (memory.size() > 32)
|
||||
if (memory.size() > 320)
|
||||
memoryData.append("... Memory Folded.... ")
|
||||
.append("(")
|
||||
.append(memory.size())
|
||||
|
@ -875,6 +882,12 @@ public class Program {
|
|||
|
||||
public void callToPrecompiledAddress(MessageCall msg, PrecompiledContract contract) {
|
||||
|
||||
if (invokeData.getCallDeep() == MAX_DEPTH) {
|
||||
stackPushZero();
|
||||
this.refundGas(msg.getGas().longValue(), " call deep limit reach");
|
||||
return;
|
||||
}
|
||||
|
||||
Repository track = this.getResult().getRepository().startTracking();
|
||||
|
||||
byte[] senderAddress = this.getOwnerAddress().getLast20Bytes();
|
||||
|
@ -1015,4 +1028,4 @@ public class Program {
|
|||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
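The createContract() change above charges code.length * GasCost.CREATE_DATA out of the remaining gas and refuses to store the code if that cannot be paid. Back-of-the-envelope numbers, assuming 200 gas per byte as the code-deposit cost (an assumption; check GasCost for the constant this build actually uses):

// Illustrative arithmetic for the code-deposit check; the constant is assumed, not read from GasCost.
public class CodeDepositCheck {
    public static void main(String[] args) {
        long createDataGas = 200;            // assumed per-byte deposit cost
        long codeLength = 1_024;             // bytes returned by the init code
        long gasProvided = 300_000;
        long gasUsedByInit = 80_000;

        long storageCost = codeLength * createDataGas;                 // 204_800
        long afterSpend = gasProvided - storageCost - gasUsedByInit;   // 15_200
        System.out.println(afterSpend >= 0 ? "code stored" : "code dropped (EMPTY_BYTE_ARRAY saved)");
    }
}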
@ -2,6 +2,7 @@ package org.ethereum.vm;
|
|||
|
||||
import org.ethereum.crypto.ECKey;
|
||||
import org.ethereum.crypto.HashUtil;
|
||||
import org.ethereum.datasource.HashMapDB;
|
||||
import org.ethereum.datasource.LevelDbDataSource;
|
||||
import org.ethereum.db.BlockStore;
|
||||
import org.ethereum.db.BlockStoreDummy;
|
||||
|
@ -31,9 +32,9 @@ public class ProgramInvokeMockImpl implements ProgramInvoke {
|
|||
}
|
||||
|
||||
public ProgramInvokeMockImpl() {
|
||||
|
||||
|
||||
this.repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
|
||||
|
||||
this.repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
this.repository.createAccount(ownerAddress);
|
||||
|
||||
this.repository.createAccount(contractAddress);
|
||||
|
|
|
@ -68,8 +68,9 @@ public class VM {
|
|||
|
||||
public void step(Program program) {
|
||||
|
||||
if (CONFIG.vmTrace())
|
||||
if (CONFIG.vmTrace()) {
|
||||
program.saveOpTrace();
|
||||
}
|
||||
|
||||
try {
|
||||
OpCode op = OpCode.code(program.getCurrentOp());
|
||||
|
@@ -168,12 +169,15 @@ public class VM {
break;
case CALL:
case CALLCODE:

gasCost = GasCost.CALL;
DataWord callGasWord = stack.get(stack.size() - 1);
if (callGasWord.compareTo(program.getGas()) == 1) {
throw Program.Exception.notEnoughOpGas(op, callGasWord, program.getGas());
}

gasCost += callGasWord.longValue();

DataWord callAddressWord = stack.get(stack.size() - 2);

//check to see if account does not exist and is not a precompiled contract

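Read together, the new lines amount to the following pre-check before CALL/CALLCODE is priced (a condensed sketch using the names from the hunk above, not the complete costing logic):

    // the requested call gas sits on the stack and may not exceed what the program has left
    DataWord callGasWord = stack.get(stack.size() - 1);
    if (callGasWord.compareTo(program.getGas()) == 1) {               // requested > available
        throw Program.Exception.notEnoughOpGas(op, callGasWord, program.getGas());
    }
    gasCost = GasCost.CALL + callGasWord.longValue();                 // base CALL cost plus the forwarded gas
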
@@ -184,8 +188,6 @@ public class VM {
if (!stack.get(stack.size() - 3).isZero() )
gasCost += GasCost.VT_CALL;

callGas = callGasWord.longValue();
gasCost += callGas;
BigInteger in = memNeeded(stack.get(stack.size() - 4), stack.get(stack.size() - 5)); // in offset+size
BigInteger out = memNeeded(stack.get(stack.size() - 6), stack.get(stack.size() - 7)); // out offset+size
newMemSize = in.max(out);

@@ -1051,8 +1053,9 @@ public class VM {
DataWord codeAddress = program.stackPop();
DataWord value = program.stackPop();

if( !value.isZero())
gas = new DataWord(gas.intValue() + GasCost.STIPEND_CALL);
if( !value.isZero()) {
gas = new DataWord(gas.intValue() + GasCost.STIPEND_CALL);
}

DataWord inDataOffs = program.stackPop();
DataWord inDataSize = program.stackPop();

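The change above only adds braces; the stipend rule itself is unchanged and, spelled out, reads as:

    // a non-zero value transfer grants the callee an extra stipend on top of the forwarded gas
    if (!value.isZero()) {
        gas = new DataWord(gas.intValue() + GasCost.STIPEND_CALL);
    }
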
@@ -1157,11 +1160,15 @@ public class VM {

if (program.invokeData.byTestingSuite()) return;

while (!program.isStopped())
while (!program.isStopped()) {
this.step(program);
}

} catch (RuntimeException e) {
program.setRuntimeFailure(e);
} catch (StackOverflowError soe){
logger.error("\n !!! StackOverflowError: update your java run command with -Xss32M !!!\n");
System.exit(-1);
}
}

@ -19,7 +19,7 @@ public final class VMUtils {
|
|||
|
||||
private static final Logger LOGGER = LoggerFactory.getLogger("VM");
|
||||
private static final SystemProperties CONFIG = new SystemProperties();
|
||||
|
||||
|
||||
private VMUtils() {
|
||||
}
|
||||
|
||||
|
@ -32,12 +32,12 @@ public final class VMUtils {
|
|||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private static File createProgramTraceFile(String txHash) {
|
||||
File result = null;
|
||||
|
||||
|
||||
if (CONFIG.vmTrace() && !CONFIG.vmTraceDir().isEmpty()) {
|
||||
|
||||
|
||||
String pathname = format("%s/%s/%s/%s.json", getProperty("user.dir"), CONFIG.databaseDir(), CONFIG.vmTraceDir(), txHash);
|
||||
File file = new File(pathname);
|
||||
|
||||
|
@ -55,7 +55,7 @@ public final class VMUtils {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -72,7 +72,7 @@ public final class VMUtils {
|
|||
closeQuietly(out);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void saveProgramTraceFile(String txHash, String content) {
|
||||
File file = createProgramTraceFile(txHash);
|
||||
if (file != null) {
|
||||
|
|
|
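A hedged usage sketch of the two helpers above (the transaction hash and trace content are placeholders; the file path is built from user.dir, databaseDir and vmTraceDir as shown, and nothing is written when structured tracing is off):

    String txHash = "00c0ffee";       // hypothetical transaction hash
    String traceJson = "{}";          // placeholder trace content
    VMUtils.saveProgramTraceFile(txHash, traceJson);   // no-op if createProgramTraceFile(txHash) returns null
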
@ -6,6 +6,7 @@ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
|||
log4j.appender.stdout.Target=System.out
|
||||
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.stdout.layout.ConversionPattern= %d{HH:mm:ss} [%c{1}] %m%n
|
||||
log4j.appender.stdout.Threshold=ERROR
|
||||
|
||||
log4j.appender.file=org.apache.log4j.rolling.RollingFileAppender
|
||||
log4j.appender.file.layout=org.apache.log4j.PatternLayout
|
||||
|
|
|
@ -7,7 +7,7 @@ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
|
|||
log4j.appender.stdout.Target=System.out
|
||||
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
|
||||
log4j.appender.stdout.layout.ConversionPattern=%d{HH:mm:ss.SSS} [%c{1}] %m%n
|
||||
log4j.appender.stdout.Threshold=TRACE
|
||||
log4j.appender.stdout.Threshold=INFO
|
||||
|
||||
# Direct log messages to stdout
|
||||
log4j.appender.DUMP=org.apache.log4j.ConsoleAppender
|
||||
|
@ -22,26 +22,28 @@ log4j.appender.file.RollingPolicy.FileNamePattern=./logs/ethereum_%d{yyyy-MM-dd}
|
|||
|
||||
# filter noisy classes
|
||||
log4j.logger.block = ERROR
|
||||
log4j.logger.blockqueue = INFO
|
||||
log4j.logger.blockqueue = ERROR
|
||||
log4j.logger.wallet = ERROR
|
||||
log4j.logger.general = DEBUG
|
||||
log4j.logger.net = TRACE
|
||||
log4j.logger.general = INFO
|
||||
log4j.logger.net = ERROR
|
||||
log4j.logger.db = ERROR
|
||||
log4j.logger.peerdiscovery = TRACE
|
||||
log4j.logger.peermonitor = TRACE
|
||||
log4j.logger.peerdiscovery = ERROR
|
||||
log4j.logger.peermonitor = ERROR
|
||||
log4j.logger.java.nio = ERROR
|
||||
log4j.logger.io.netty = ERROR
|
||||
log4j.logger.wire = DEBUG
|
||||
log4j.logger.wire = ERROR
|
||||
log4j.logger.VM = ERROR
|
||||
log4j.logger.main = ERROR
|
||||
log4j.logger.trie = ERROR
|
||||
log4j.logger.state = INFO
|
||||
log4j.logger.repository = DEBUG
|
||||
log4j.logger.blockchain = DEBUG
|
||||
log4j.logger.state = ERROR
|
||||
log4j.logger.repository = ERROR
|
||||
log4j.logger.blockchain = ERROR
|
||||
log4j.logger.txs = ERROR
|
||||
log4j.logger.ui = ERROR
|
||||
log4j.logger.gas = ERROR
|
||||
log4j.logger.cli = INFO
|
||||
log4j.logger.cli = ERROR
|
||||
log4j.logger.TCK-Test = ERROR
|
||||
log4j.logger.execute = ERROR
|
||||
|
||||
log4j.logger.org.springframework = ERROR
|
||||
log4j.logger.org.hibernate = ERROR
|
||||
|
|
|
@ -11,8 +11,8 @@ peer.discovery.ip.list = poc-7.ethdev.com:30303,\
|
|||
#peer.active.port = 30103
|
||||
#peer.active.nodeid = d348964fbb47d0cb9d206b926d416b2be8c1c0c68679f07d7611ef04ee797857d0ec8e7490cc3cc64094de9e7659be42baaf1fd24ca822c1bffc58ca9cf479dd
|
||||
|
||||
# my poc-9-test peer-1
|
||||
#peer.active.ip = 162.243.46.9
|
||||
# my poc-9-peer-1
|
||||
#peer.active.ip = 45.55.204.106
|
||||
#peer.active.port = 30303
|
||||
#peer.active.nodeid = e437a4836b77ad9d9ffe73ee782ef2614e6d8370fcf62191a6e488276e23717147073a7ce0b444d485fff5a0c34c4577251a7a990cf80d8542e21b95aa8c5e6c
|
||||
|
||||
|
@ -122,7 +122,7 @@ dump.clean.on.restart = true
|
|||
# exposed to the user
|
||||
# in json or any other
|
||||
# convenient form.
|
||||
vm.structured.trace = true
|
||||
vm.structured.trace = false
|
||||
vm.structured.dir = vmtrace
|
||||
vm.structured.compressed = true
|
||||
|
||||
|
@ -160,10 +160,10 @@ max.blocks.ask = 500
|
|||
# how much block we will keep in buffer
|
||||
# until the execution is set by this param
|
||||
# recommended value: [100.300]
|
||||
max.blocks.queued = 3000
|
||||
max.blocks.queued = 300000
|
||||
|
||||
# project version auto copied during build phase
|
||||
project.version = 0.9.0
|
||||
project.version = 0.9.4
|
||||
|
||||
# hello phrase will be included in
|
||||
# the hello message of the peer
|
||||
|
@ -192,9 +192,11 @@ keyvalue.datasource = leveldb
|
|||
redis.enabled=false
|
||||
|
||||
record.blocks=false
|
||||
blockchain.only=false
|
||||
|
||||
# Load the blocks
|
||||
# from a rlp lines
|
||||
# file and not for
|
||||
# the net
|
||||
blocks.loader=
|
||||
blocks.loader=
|
||||
#E:\\temp\\_poc-9-blocks\\poc-9-492k_.dmp
|
||||
|
|
|
@ -10,6 +10,8 @@ import org.spongycastle.util.encoders.Hex;
|
|||
*/
|
||||
public class RedisDataSourceTest extends AbstractRedisTest {
|
||||
|
||||
|
||||
|
||||
@Test
|
||||
public void testSet1() {
|
||||
if (!isConnected()) return;
|
||||
|
|
|
@ -0,0 +1,65 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import org.ethereum.config.SystemProperties;
|
||||
import org.hibernate.SessionFactory;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.jdbc.datasource.DriverManagerDataSource;
|
||||
import org.springframework.orm.hibernate4.LocalSessionFactoryBuilder;
|
||||
|
||||
import java.util.Properties;
|
||||
|
||||
public class AbstractInMemoryBlockStoreTest {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger("test");
|
||||
|
||||
|
||||
public SessionFactory sessionFactory() {
|
||||
LocalSessionFactoryBuilder builder =
|
||||
new LocalSessionFactoryBuilder(dataSource());
|
||||
builder.scanPackages("org.ethereum.db")
|
||||
.addProperties(getHibernateProperties());
|
||||
|
||||
return builder.buildSessionFactory();
|
||||
}
|
||||
|
||||
|
||||
private Properties getHibernateProperties() {
|
||||
|
||||
Properties prop = new Properties();
|
||||
|
||||
prop.put("hibernate.hbm2ddl.auto", "create-drop");
|
||||
prop.put("hibernate.format_sql", "true");
|
||||
prop.put("hibernate.connection.autocommit", "false");
|
||||
prop.put("hibernate.connection.release_mode", "after_transaction");
|
||||
prop.put("hibernate.jdbc.batch_size", "1000");
|
||||
prop.put("hibernate.order_inserts", "true");
|
||||
prop.put("hibernate.order_updates", "true");
|
||||
|
||||
prop.put("hibernate.dialect",
|
||||
"org.hibernate.dialect.H2Dialect");
|
||||
|
||||
return prop;
|
||||
}
|
||||
|
||||
|
||||
public DriverManagerDataSource dataSource() {
|
||||
|
||||
logger.info("Connecting to the block store");
|
||||
|
||||
System.setProperty("hsqldb.reconfig_logging", "false");
|
||||
|
||||
String url =
|
||||
String.format("jdbc:h2:./%s/blockchain/blockchain.db;CACHE_SIZE=10240;PAGE_SIZE=1024;LOCK_MODE=0;UNDO_LOG=0",
|
||||
"test_mem_store_db");
|
||||
|
||||
DriverManagerDataSource ds = new DriverManagerDataSource();
|
||||
ds.setDriverClassName("org.h2.Driver");
|
||||
ds.setUrl(url);
|
||||
ds.setUsername("sa");
|
||||
|
||||
return ds;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
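A sketch of how a concrete test might build on this base class (it mirrors the InMemoryBlockStoreTest added below; loadFixtureBlock is a hypothetical helper that decodes an RLP-encoded block, and imports follow the test files in this commit):

    public class ExampleBlockStoreTest extends AbstractInMemoryBlockStoreTest {   // hypothetical subclass

        @Test
        public void savesAndFlushesOneBlock() {
            BlockStore blockStore = new InMemoryBlockStore();
            blockStore.setSessionFactory(sessionFactory());    // H2-backed session factory from the base class

            Block block = loadFixtureBlock();                  // assumption: test fixture providing a decoded block
            blockStore.saveBlock(block, null);
            blockStore.flush();                                // pushes the in-memory window into the H2 store
        }
    }
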
@ -0,0 +1,154 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import org.ethereum.vm.DataWord;
|
||||
import org.junit.Test;
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
public class ContractDetailsTest {
|
||||
|
||||
@Test
|
||||
public void test_1(){
|
||||
|
||||
byte[] code = Hex.decode("60016002");
|
||||
|
||||
byte[] key_1 = Hex.decode("111111");
|
||||
byte[] val_1 = Hex.decode("aaaaaa");
|
||||
|
||||
byte[] key_2 = Hex.decode("222222");
|
||||
byte[] val_2 = Hex.decode("bbbbbb");
|
||||
|
||||
ContractDetailsImpl contractDetails = new ContractDetailsImpl();
|
||||
contractDetails.setCode(code);
|
||||
contractDetails.put(new DataWord(key_1), new DataWord(val_1));
|
||||
contractDetails.put(new DataWord(key_2), new DataWord(val_2));
|
||||
|
||||
byte[] data = contractDetails.getEncoded();
|
||||
|
||||
ContractDetailsImpl contractDetails_ = new ContractDetailsImpl(data);
|
||||
|
||||
assertEquals(Hex.toHexString(code),
|
||||
Hex.toHexString(contractDetails_.getCode()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_1),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_1)).getNoLeadZeroesData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_2),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_2)).getNoLeadZeroesData()));
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test_2(){
|
||||
|
||||
byte[] code = Hex.decode("7c0100000000000000000000000000000000000000000000000000000000600035046333d546748114610065578063430fe5f01461007c5780634d432c1d1461008d578063501385b2146100b857806357eb3b30146100e9578063dbc7df61146100fb57005b6100766004356024356044356102f0565b60006000f35b61008760043561039e565b60006000f35b610098600435610178565b8073ffffffffffffffffffffffffffffffffffffffff1660005260206000f35b6100c96004356024356044356101a0565b8073ffffffffffffffffffffffffffffffffffffffff1660005260206000f35b6100f1610171565b8060005260206000f35b610106600435610133565b8360005282602052816040528073ffffffffffffffffffffffffffffffffffffffff1660605260806000f35b5b60006020819052908152604090208054600182015460028301546003909301549192909173ffffffffffffffffffffffffffffffffffffffff1684565b5b60015481565b5b60026020526000908152604090205473ffffffffffffffffffffffffffffffffffffffff1681565b73ffffffffffffffffffffffffffffffffffffffff831660009081526020819052604081206002015481908302341080156101fe575073ffffffffffffffffffffffffffffffffffffffff8516600090815260208190526040812054145b8015610232575073ffffffffffffffffffffffffffffffffffffffff85166000908152602081905260409020600101548390105b61023b57610243565b3391506102e8565b6101966103ca60003973ffffffffffffffffffffffffffffffffffffffff3381166101965285166101b68190526000908152602081905260408120600201546101d6526101f68490526102169080f073ffffffffffffffffffffffffffffffffffffffff8616600090815260208190526040902060030180547fffffffffffffffffffffffff0000000000000000000000000000000000000000168217905591508190505b509392505050565b73ffffffffffffffffffffffffffffffffffffffff33166000908152602081905260408120548190821461032357610364565b60018054808201909155600090815260026020526040902080547fffffffffffffffffffffffff000000000000000000000000000000000000000016331790555b50503373ffffffffffffffffffffffffffffffffffffffff1660009081526020819052604090209081556001810192909255600290910155565b3373ffffffffffffffffffffffffffffffffffffffff166000908152602081905260409020600201555600608061019660043960048051602451604451606451600080547fffffffffffffffffffffffff0000000000000000000000000000000000000000908116909517815560018054909516909317909355600355915561013390819061006390396000f3007c0100000000000000000000000000000000000000000000000000000000600035046347810fe381146100445780637e4a1aa81461005557806383d2421b1461006957005b61004f6004356100ab565b60006000f35b6100636004356024356100fc565b60006000f35b61007460043561007a565b60006000f35b6001543373ffffffffffffffffffffffffffffffffffffffff9081169116146100a2576100a8565b60078190555b50565b73ffffffffffffffffffffffffffffffffffffffff8116600090815260026020526040902080547fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0016600117905550565b6001543373ffffffffffffffffffffffffffffffffffffffff9081169116146101245761012f565b600582905560068190555b505056");
|
||||
|
||||
byte[] key_0 = Hex.decode("39a2338cbc13ff8523a9b1c9bc421b7518d63b70aa690ad37cb50908746c9a55");
|
||||
byte[] val_0 = Hex.decode("0000000000000000000000000000000000000000000000000000000000000064");
|
||||
|
||||
byte[] key_1 = Hex.decode("39a2338cbc13ff8523a9b1c9bc421b7518d63b70aa690ad37cb50908746c9a56");
|
||||
byte[] val_1 = Hex.decode("000000000000000000000000000000000000000000000000000000000000000c");
|
||||
|
||||
byte[] key_2 = Hex.decode("4effac3ed62305246f40d058e1a9a8925a448d1967513482947d1d3f6104316f");
|
||||
byte[] val_2 = Hex.decode("7a65703300000000000000000000000000000000000000000000000000000000");
|
||||
|
||||
byte[] key_3 = Hex.decode("4effac3ed62305246f40d058e1a9a8925a448d1967513482947d1d3f61043171");
|
||||
byte[] val_3 = Hex.decode("0000000000000000000000000000000000000000000000000000000000000014");
|
||||
|
||||
byte[] key_4 = Hex.decode("39a2338cbc13ff8523a9b1c9bc421b7518d63b70aa690ad37cb50908746c9a54");
|
||||
byte[] val_4 = Hex.decode("7a65703200000000000000000000000000000000000000000000000000000000");
|
||||
|
||||
byte[] key_5 = Hex.decode("4effac3ed62305246f40d058e1a9a8925a448d1967513482947d1d3f61043170");
|
||||
byte[] val_5 = Hex.decode("0000000000000000000000000000000000000000000000000000000000000078");
|
||||
|
||||
byte[] key_6 = Hex.decode("e90b7bceb6e7df5418fb78d8ee546e97c83a08bbccc01a0644d599ccd2a7c2e0");
|
||||
byte[] val_6 = Hex.decode("00000000000000000000000010b426278fbec874791c4e3f9f48a59a44686efe");
|
||||
|
||||
byte[] key_7 = Hex.decode("0df3cc3597c5ede0b1448e94daf1f1445aa541c6c03f602a426f04ae47508bb8");
|
||||
byte[] val_7 = Hex.decode("7a65703100000000000000000000000000000000000000000000000000000000");
|
||||
|
||||
byte[] key_8 = Hex.decode("0df3cc3597c5ede0b1448e94daf1f1445aa541c6c03f602a426f04ae47508bb9");
|
||||
byte[] val_8 = Hex.decode("00000000000000000000000000000000000000000000000000000000000000c8");
|
||||
|
||||
byte[] key_9 = Hex.decode("0df3cc3597c5ede0b1448e94daf1f1445aa541c6c03f602a426f04ae47508bba");
|
||||
byte[] val_9 = Hex.decode("000000000000000000000000000000000000000000000000000000000000000a");
|
||||
|
||||
byte[] key_10 = Hex.decode("0000000000000000000000000000000000000000000000000000000000000001");
|
||||
byte[] val_10 = Hex.decode("0000000000000000000000000000000000000000000000000000000000000003");
|
||||
|
||||
byte[] key_11 = Hex.decode("0df3cc3597c5ede0b1448e94daf1f1445aa541c6c03f602a426f04ae47508bbb");
|
||||
byte[] val_11 = Hex.decode("0000000000000000000000007cd917d6194bcfc3670d8a1613e5b0c790036a35");
|
||||
|
||||
byte[] key_12 = Hex.decode("679795a0195a1b76cdebb7c51d74e058aee92919b8c3389af86ef24535e8a28c");
|
||||
byte[] val_12 = Hex.decode("000000000000000000000000b0b0a72fcfe293a85bef5915e1a7acb37bf0c685");
|
||||
|
||||
byte[] key_13 = Hex.decode("ac33ff75c19e70fe83507db0d683fd3465c996598dc972688b7ace676c89077b");
|
||||
byte[] val_13 = Hex.decode("0000000000000000000000000c6686f3d6ee27e285f2de7b68e8db25cf1b1063");
|
||||
|
||||
|
||||
ContractDetailsImpl contractDetails = new ContractDetailsImpl();
|
||||
contractDetails.setCode(code);
|
||||
contractDetails.put(new DataWord(key_0), new DataWord(val_0));
|
||||
contractDetails.put(new DataWord(key_1), new DataWord(val_1));
|
||||
contractDetails.put(new DataWord(key_2), new DataWord(val_2));
|
||||
contractDetails.put(new DataWord(key_3), new DataWord(val_3));
|
||||
contractDetails.put(new DataWord(key_4), new DataWord(val_4));
|
||||
contractDetails.put(new DataWord(key_5), new DataWord(val_5));
|
||||
contractDetails.put(new DataWord(key_6), new DataWord(val_6));
|
||||
contractDetails.put(new DataWord(key_7), new DataWord(val_7));
|
||||
contractDetails.put(new DataWord(key_8), new DataWord(val_8));
|
||||
contractDetails.put(new DataWord(key_9), new DataWord(val_9));
|
||||
contractDetails.put(new DataWord(key_10), new DataWord(val_10));
|
||||
contractDetails.put(new DataWord(key_11), new DataWord(val_11));
|
||||
contractDetails.put(new DataWord(key_12), new DataWord(val_12));
|
||||
contractDetails.put(new DataWord(key_13), new DataWord(val_13));
|
||||
|
||||
byte[] data = contractDetails.getEncoded();
|
||||
|
||||
ContractDetailsImpl contractDetails_ = new ContractDetailsImpl(data);
|
||||
|
||||
assertEquals(Hex.toHexString(code),
|
||||
Hex.toHexString(contractDetails_.getCode()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_1),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_1)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_2),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_2)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_3),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_3)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_4),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_4)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_5),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_5)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_6),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_6)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_7),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_7)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_8),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_8)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_9),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_9)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_10),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_10)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_11),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_11)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_12),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_12)).getData()));
|
||||
|
||||
assertEquals(Hex.toHexString(val_13),
|
||||
Hex.toHexString(contractDetails_.get(new DataWord(key_13)).getData()));
|
||||
}
|
||||
|
||||
}
|
|
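The round trip these tests exercise boils down to the following sketch (keys and values are arbitrary sample bytes):

    ContractDetailsImpl details = new ContractDetailsImpl();
    details.setCode(Hex.decode("60016002"));
    details.put(new DataWord(Hex.decode("11")), new DataWord(Hex.decode("aa")));

    byte[] encoded = details.getEncoded();                     // code + storage, RLP-encoded
    ContractDetailsImpl restored = new ContractDetailsImpl(encoded);
    // restored.getCode() and restored.get(...) should match what was stored before encoding
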
@ -0,0 +1,135 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import org.ethereum.datasource.HashMapDB;
|
||||
import org.ethereum.vm.DataWord;
|
||||
import org.junit.Test;
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
public class DetailsDataStoreTest {
|
||||
|
||||
|
||||
@Test
|
||||
public void test1(){
|
||||
|
||||
DatabaseImpl db = new DatabaseImpl(new HashMapDB());
|
||||
DetailsDataStore dds = new DetailsDataStore();
|
||||
dds.setDB(db);
|
||||
|
||||
byte[] c_key = Hex.decode("1a2b");
|
||||
byte[] code = Hex.decode("60606060");
|
||||
byte[] key = Hex.decode("11");
|
||||
byte[] value = Hex.decode("aa");
|
||||
|
||||
ContractDetails contractDetails = new ContractDetailsImpl();
|
||||
contractDetails.setCode(code);
|
||||
contractDetails.put(new DataWord(key), new DataWord(value));
|
||||
|
||||
dds.update(c_key, contractDetails);
|
||||
|
||||
ContractDetails contractDetails_ = dds.get(c_key);
|
||||
|
||||
String encoded1 = Hex.toHexString(contractDetails.getEncoded());
|
||||
String encoded2 = Hex.toHexString(contractDetails_.getEncoded());
|
||||
|
||||
assertEquals(encoded1, encoded2);
|
||||
|
||||
dds.flush();
|
||||
|
||||
contractDetails_ = dds.get(c_key);
|
||||
encoded2 = Hex.toHexString(contractDetails_.getEncoded());
|
||||
assertEquals(encoded1, encoded2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test2(){
|
||||
|
||||
DatabaseImpl db = new DatabaseImpl(new HashMapDB());
|
||||
DetailsDataStore dds = new DetailsDataStore();
|
||||
dds.setDB(db);
|
||||
|
||||
byte[] c_key = Hex.decode("1a2b");
|
||||
byte[] code = Hex.decode("60606060");
|
||||
byte[] key = Hex.decode("11");
|
||||
byte[] value = Hex.decode("aa");
|
||||
|
||||
ContractDetails contractDetails = new ContractDetailsImpl();
|
||||
contractDetails.setCode(code);
|
||||
contractDetails.put(new DataWord(key), new DataWord(value));
|
||||
|
||||
dds.update(c_key, contractDetails);
|
||||
|
||||
ContractDetails contractDetails_ = dds.get(c_key);
|
||||
|
||||
String encoded1 = Hex.toHexString(contractDetails.getEncoded());
|
||||
String encoded2 = Hex.toHexString(contractDetails_.getEncoded());
|
||||
|
||||
assertEquals(encoded1, encoded2);
|
||||
|
||||
dds.remove(c_key);
|
||||
|
||||
contractDetails_ = dds.get(c_key);
|
||||
assertNull(contractDetails_);
|
||||
|
||||
dds.flush();
|
||||
|
||||
contractDetails_ = dds.get(c_key);
|
||||
assertNull(contractDetails_);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test3(){
|
||||
|
||||
DatabaseImpl db = new DatabaseImpl(new HashMapDB());
|
||||
DetailsDataStore dds = new DetailsDataStore();
|
||||
dds.setDB(db);
|
||||
|
||||
byte[] c_key = Hex.decode("1a2b");
|
||||
byte[] code = Hex.decode("60606060");
|
||||
byte[] key = Hex.decode("11");
|
||||
byte[] value = Hex.decode("aa");
|
||||
|
||||
ContractDetails contractDetails = new ContractDetailsImpl();
|
||||
contractDetails.setCode(code);
|
||||
contractDetails.put(new DataWord(key), new DataWord(value));
|
||||
|
||||
dds.update(c_key, contractDetails);
|
||||
|
||||
ContractDetails contractDetails_ = dds.get(c_key);
|
||||
|
||||
String encoded1 = Hex.toHexString(contractDetails.getEncoded());
|
||||
String encoded2 = Hex.toHexString(contractDetails_.getEncoded());
|
||||
|
||||
assertEquals(encoded1, encoded2);
|
||||
|
||||
dds.remove(c_key);
|
||||
dds.update(c_key, contractDetails);
|
||||
|
||||
contractDetails_ = dds.get(c_key);
|
||||
encoded2 = Hex.toHexString(contractDetails_.getEncoded());
|
||||
assertEquals(encoded1, encoded2);
|
||||
|
||||
dds.flush();
|
||||
|
||||
contractDetails_ = dds.get(c_key);
|
||||
encoded2 = Hex.toHexString(contractDetails_.getEncoded());
|
||||
assertEquals(encoded1, encoded2);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test4() {
|
||||
|
||||
DatabaseImpl db = new DatabaseImpl(new HashMapDB());
|
||||
DetailsDataStore dds = new DetailsDataStore();
|
||||
dds.setDB(db);
|
||||
|
||||
byte[] c_key = Hex.decode("1a2b");
|
||||
|
||||
ContractDetails contractDetails = dds.get(c_key);
|
||||
assertNull(contractDetails);
|
||||
}
|
||||
|
||||
|
||||
}
|
|
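As the tests above suggest, the store acts as a write-back cache in front of DatabaseImpl; a hedged sketch of the cycle they cover:

    DetailsDataStore dds = new DetailsDataStore();
    dds.setDB(new DatabaseImpl(new HashMapDB()));              // in-memory backing database

    byte[] contractKey = Hex.decode("1a2b");
    ContractDetails details = new ContractDetailsImpl();
    details.setCode(Hex.decode("60606060"));

    dds.update(contractKey, details);                          // held in memory
    dds.flush();                                               // persisted to the backing database
    ContractDetails reloaded = dds.get(contractKey);           // same encoding before and after the flush
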
@ -0,0 +1,262 @@
|
|||
package org.ethereum.db;
|
||||
|
||||
import org.ethereum.core.Block;
|
||||
import org.hibernate.SessionFactory;
|
||||
import org.junit.Before;
|
||||
import org.junit.Ignore;
|
||||
import org.junit.Test;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.spongycastle.util.encoders.Hex;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.math.BigInteger;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Scanner;
|
||||
|
||||
import static java.math.BigInteger.ZERO;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
* @author: Roman Mandeleil
|
||||
* Created on: 30/01/2015 11:04
|
||||
*/
|
||||
|
||||
public class InMemoryBlockStoreTest extends AbstractInMemoryBlockStoreTest {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger("test");
|
||||
|
||||
private List<Block> blocks = new ArrayList<>();
|
||||
|
||||
@Before
|
||||
public void setup() throws URISyntaxException, IOException {
|
||||
|
||||
URL scenario1 = ClassLoader
|
||||
.getSystemResource("blockstore/load.dmp");
|
||||
|
||||
File file = new File(scenario1.toURI());
|
||||
List<String> strData = Files.readAllLines(file.toPath(), StandardCharsets.UTF_8);
|
||||
|
||||
BigInteger cumDifficulty = ZERO;
|
||||
|
||||
for (String blockRLP : strData) {
|
||||
|
||||
Block block = new Block(
|
||||
Hex.decode(blockRLP));
|
||||
|
||||
if (block.getNumber() % 1000 == 0)
|
||||
logger.info("adding block.hash: [{}] block.number: [{}]",
|
||||
block.getShortHash(),
|
||||
block.getNumber());
|
||||
|
||||
blocks.add(block);
|
||||
cumDifficulty = cumDifficulty.add(block.getCumulativeDifficulty());
|
||||
}
|
||||
|
||||
logger.info("total difficulty: {}", cumDifficulty);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEmpty(){
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
blockStore.setSessionFactory(sessionFactory());
|
||||
assertNull(blockStore.getBestBlock());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFlush(){
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
blockStore.setSessionFactory(sessionFactory());
|
||||
|
||||
for( Block block : blocks ){
|
||||
blockStore.saveBlock(block, null);
|
||||
}
|
||||
|
||||
blockStore.flush();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSimpleLoad(){
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
SessionFactory sessionFactory = sessionFactory();
|
||||
|
||||
for( Block block : blocks ){
|
||||
blockStore.saveBlock(block, null);
|
||||
}
|
||||
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
blockStore.flush();
|
||||
|
||||
blockStore = new InMemoryBlockStore();
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
|
||||
blockStore.load();
|
||||
|
||||
assertTrue(blockStore.getBestBlock().getNumber() == 8003);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFlushEach1000(){
|
||||
|
||||
InMemoryBlockStore blockStore = new InMemoryBlockStore();
|
||||
SessionFactory sessionFactory = sessionFactory();
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
|
||||
for( int i = 0; i < blocks.size(); ++i ){
|
||||
|
||||
blockStore.saveBlock(blocks.get(i), null);
|
||||
if ( i % 1000 == 0){
|
||||
blockStore.flush();
|
||||
assertTrue(blockStore.blocks.size() == 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testBlockHashByNumber(){
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
SessionFactory sessionFactory = sessionFactory();
|
||||
|
||||
for( Block block : blocks ){
|
||||
blockStore.saveBlock(block, null);
|
||||
}
|
||||
|
||||
String hash = Hex.toHexString(blockStore.getBlockHashByNumber(7000));
|
||||
assertTrue(hash.startsWith("459a8f"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockHashByNumber(6000));
|
||||
assertTrue(hash.startsWith("7a577a"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockHashByNumber(5000));
|
||||
assertTrue(hash.startsWith("820aa7"));
|
||||
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
blockStore.flush();
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockHashByNumber(7000));
|
||||
assertTrue(hash.startsWith("459a8f"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockHashByNumber(6000));
|
||||
assertTrue(hash.startsWith("7a577a"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockHashByNumber(5000));
|
||||
assertTrue(hash.startsWith("820aa7"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBlockByNumber(){
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
SessionFactory sessionFactory = sessionFactory();
|
||||
|
||||
for( Block block : blocks ){
|
||||
blockStore.saveBlock(block, null);
|
||||
}
|
||||
|
||||
String hash = Hex.toHexString(blockStore.getBlockByNumber(7000).getHash());
|
||||
assertTrue(hash.startsWith("459a8f"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockByNumber(6000).getHash());
|
||||
assertTrue(hash.startsWith("7a577a"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockByNumber(5000).getHash());
|
||||
assertTrue(hash.startsWith("820aa7"));
|
||||
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
blockStore.flush();
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockByNumber(7000).getHash());
|
||||
assertTrue(hash.startsWith("459a8f"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockByNumber(6000).getHash());
|
||||
assertTrue(hash.startsWith("7a577a"));
|
||||
|
||||
hash = Hex.toHexString(blockStore.getBlockByNumber(5000).getHash());
|
||||
assertTrue(hash.startsWith("820aa7"));
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testGetBlockByNumber() {
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
SessionFactory sessionFactory = sessionFactory();
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
|
||||
for( Block block : blocks ){
|
||||
blockStore.saveBlock(block, null);
|
||||
}
|
||||
|
||||
assertEquals("4312750101", blockStore.getTotalDifficulty().toString());
|
||||
|
||||
blockStore.flush();
|
||||
assertEquals("4312750101", blockStore.getTotalDifficulty().toString());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testDbGetBlockByHash(){
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
SessionFactory sessionFactory = sessionFactory();
|
||||
blockStore.setSessionFactory(sessionFactory);
|
||||
|
||||
for( Block block : blocks ){
|
||||
blockStore.saveBlock(block, null);
|
||||
}
|
||||
|
||||
byte[] hash7000 = Hex.decode("459a8f0ee5d4b0c9ea047797606c94f0c1158ed0f30120490b96f7df9893e1fa");
|
||||
byte[] hash6000 = Hex.decode("7a577a6b0b7e72e51a646c4cec82cf684c977bca6307e2a49a4116af49316159");
|
||||
byte[] hash5000 = Hex.decode("820aa786619e1a2ae139877ba342078c83e5bd65c559069336c13321441e03dc");
|
||||
|
||||
Long number = blockStore.getBlockByHash(hash7000).getNumber();
|
||||
assertTrue(number == 7000);
|
||||
|
||||
number = blockStore.getBlockByHash(hash6000).getNumber();
|
||||
assertTrue(number == 6000);
|
||||
|
||||
number = blockStore.getBlockByHash(hash5000).getNumber();
|
||||
assertTrue(number == 5000);
|
||||
|
||||
}
|
||||
|
||||
@Ignore // TO much time to run it on general basis
|
||||
@Test
|
||||
public void save100KBlocks() throws FileNotFoundException {
|
||||
|
||||
String blocksFile = "E:\\temp\\_poc-9-blocks\\poc-9-492k.dmp";
|
||||
|
||||
FileInputStream inputStream = new FileInputStream(blocksFile);
|
||||
Scanner scanner = new Scanner(inputStream, "UTF-8");
|
||||
|
||||
BlockStore blockStore = new InMemoryBlockStore();
|
||||
blockStore.setSessionFactory(sessionFactory());
|
||||
|
||||
|
||||
while (scanner.hasNextLine()) {
|
||||
|
||||
byte[] blockRLPBytes = Hex.decode( scanner.nextLine());
|
||||
Block block = new Block(blockRLPBytes);
|
||||
|
||||
System.out.println(block.getNumber());
|
||||
|
||||
blockStore.saveBlock(block, null);
|
||||
|
||||
if (block.getNumber() > 100_000) break;
|
||||
}
|
||||
|
||||
blockStore.flush();
|
||||
}
|
||||
|
||||
}
|
|
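The flush-every-N pattern from testFlushEach1000 is the usage these tests seem to aim for; a sketch with an illustrative batch size (importedBlocks is a hypothetical list of decoded blocks):

    InMemoryBlockStore blockStore = new InMemoryBlockStore();
    blockStore.setSessionFactory(sessionFactory());            // assumption: an H2 session factory as in the test base class

    for (int i = 0; i < importedBlocks.size(); ++i) {
        blockStore.saveBlock(importedBlocks.get(i), null);
        if (i % 1000 == 0) {
            blockStore.flush();                                // keeps the in-memory window small
        }
    }
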
@ -4,6 +4,7 @@ import org.ethereum.config.SystemProperties;
|
|||
import org.ethereum.core.Genesis;
|
||||
import org.ethereum.crypto.HashUtil;
|
||||
|
||||
import org.ethereum.datasource.HashMapDB;
|
||||
import org.ethereum.datasource.LevelDbDataSource;
|
||||
import org.ethereum.facade.Repository;
|
||||
import org.ethereum.vm.DataWord;
|
||||
|
@ -30,8 +31,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test1() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -49,8 +49,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test2() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -68,8 +67,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test3() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -89,8 +87,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test4() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -114,8 +111,8 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test5() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -145,8 +142,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test6() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -179,8 +175,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test7() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -204,8 +199,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test8() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -225,11 +219,79 @@ public class RepositoryTest {
|
|||
repository.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test7_1() {
|
||||
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track1 = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
||||
track1.addBalance(cow, BigInteger.TEN);
|
||||
track1.addBalance(horse, BigInteger.ONE);
|
||||
|
||||
assertEquals(BigInteger.TEN, track1.getBalance(cow));
|
||||
assertEquals(BigInteger.ONE, track1.getBalance(horse));
|
||||
|
||||
Repository track2 = track1.startTracking();
|
||||
|
||||
assertEquals(BigInteger.TEN, track2.getBalance(cow));
|
||||
assertEquals(BigInteger.ONE, track2.getBalance(horse));
|
||||
|
||||
track2.addBalance(cow, BigInteger.TEN);
|
||||
track2.addBalance(cow, BigInteger.TEN);
|
||||
track2.addBalance(cow, BigInteger.TEN);
|
||||
|
||||
track2.commit();
|
||||
|
||||
track1.commit();
|
||||
|
||||
assertEquals(new BigInteger("40"), repository.getBalance(cow));
|
||||
assertEquals(BigInteger.ONE, repository.getBalance(horse));
|
||||
|
||||
repository.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test7_2() {
|
||||
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track1 = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
||||
track1.addBalance(cow, BigInteger.TEN);
|
||||
track1.addBalance(horse, BigInteger.ONE);
|
||||
|
||||
assertEquals(BigInteger.TEN, track1.getBalance(cow));
|
||||
assertEquals(BigInteger.ONE, track1.getBalance(horse));
|
||||
|
||||
Repository track2 = track1.startTracking();
|
||||
|
||||
assertEquals(BigInteger.TEN, track2.getBalance(cow));
|
||||
assertEquals(BigInteger.ONE, track2.getBalance(horse));
|
||||
|
||||
track2.addBalance(cow, BigInteger.TEN);
|
||||
track2.addBalance(cow, BigInteger.TEN);
|
||||
track2.addBalance(cow, BigInteger.TEN);
|
||||
|
||||
track2.commit();
|
||||
|
||||
track1.rollback();
|
||||
|
||||
assertEquals(BigInteger.ZERO, repository.getBalance(cow));
|
||||
assertEquals(BigInteger.ZERO, repository.getBalance(horse));
|
||||
|
||||
repository.close();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void test9() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
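test7_1 and test7_2 above pin down the nested-tracking semantics: a child commit only becomes durable once every ancestor commits as well. A condensed sketch:

    Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
    Repository track1 = repository.startTracking();
    Repository track2 = track1.startTracking();

    byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
    track2.addBalance(cow, BigInteger.TEN);
    track2.commit();        // folded into track1, not yet into the repository

    track1.rollback();      // discards track2's committed change as well
    // repository.getBalance(cow) is still BigInteger.ZERO at this point
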
@ -258,8 +320,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test10() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -289,8 +350,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test11() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -317,8 +377,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test12() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
@ -344,8 +403,7 @@ public class RepositoryTest {
|
|||
@Test // Let's upload genesis pre-mine just like in the real world
|
||||
public void test13() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository track = repository.startTracking();
|
||||
|
||||
Genesis genesis = (Genesis)Genesis.getInstance();
|
||||
|
@ -365,8 +423,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test14() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -403,8 +460,7 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test15() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -436,12 +492,10 @@ public class RepositoryTest {
|
|||
repository.close();
|
||||
}
@Test
|
||||
public void test16() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -486,7 +540,6 @@ public class RepositoryTest {
|
|||
assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertEquals(new DataWord(horseValue2), track1.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
|
||||
track1.commit();
|
||||
// leaving level_1
|
||||
|
||||
|
@ -499,12 +552,207 @@ public class RepositoryTest {
|
|||
repository.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test16_2() {
|
||||
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
||||
byte[] cowKey1 = "key-c-1".getBytes();
|
||||
byte[] cowValue1 = "val-c-1".getBytes();
|
||||
|
||||
byte[] horseKey1 = "key-h-1".getBytes();
|
||||
byte[] horseValue1 = "val-h-1".getBytes();
|
||||
|
||||
byte[] cowKey2 = "key-c-2".getBytes();
|
||||
byte[] cowValue2 = "val-c-2".getBytes();
|
||||
|
||||
byte[] horseKey2 = "key-h-2".getBytes();
|
||||
byte[] horseValue2 = "val-h-2".getBytes();
|
||||
|
||||
// changes level_1
|
||||
Repository track1 = repository.startTracking();
|
||||
|
||||
// changes level_2
|
||||
Repository track2 = track1.startTracking();
|
||||
track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
|
||||
track2.addStorageRow(horse, new DataWord(horseKey2), new DataWord(horseValue2));
|
||||
|
||||
assertNull(track2.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertNull(track2.getStorageValue(horse, new DataWord(horseKey1)));
|
||||
|
||||
assertEquals(new DataWord(cowValue2), track2.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertEquals(new DataWord(horseValue2), track2.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
track2.commit();
|
||||
// leaving level_2
|
||||
|
||||
assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertNull(track1.getStorageValue(horse, new DataWord(horseKey1)));
|
||||
|
||||
assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertEquals(new DataWord(horseValue2), track1.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
track1.commit();
|
||||
// leaving level_1
|
||||
|
||||
assertEquals(null, repository.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertEquals(null, repository.getStorageValue(horse, new DataWord(horseKey1)));
|
||||
|
||||
assertEquals(new DataWord(cowValue2), repository.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertEquals(new DataWord(horseValue2), repository.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
repository.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test16_3() {
|
||||
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
||||
byte[] cowKey1 = "key-c-1".getBytes();
|
||||
byte[] cowValue1 = "val-c-1".getBytes();
|
||||
|
||||
byte[] horseKey1 = "key-h-1".getBytes();
|
||||
byte[] horseValue1 = "val-h-1".getBytes();
|
||||
|
||||
byte[] cowKey2 = "key-c-2".getBytes();
|
||||
byte[] cowValue2 = "val-c-2".getBytes();
|
||||
|
||||
byte[] horseKey2 = "key-h-2".getBytes();
|
||||
byte[] horseValue2 = "val-h-2".getBytes();
|
||||
|
||||
// changes level_1
|
||||
Repository track1 = repository.startTracking();
|
||||
|
||||
// changes level_2
|
||||
Repository track2 = track1.startTracking();
|
||||
track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
|
||||
track2.addStorageRow(horse, new DataWord(horseKey2), new DataWord(horseValue2));
|
||||
|
||||
assertNull(track2.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertNull(track2.getStorageValue(horse, new DataWord(horseKey1)));
|
||||
|
||||
assertEquals(new DataWord(cowValue2), track2.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertEquals(new DataWord(horseValue2), track2.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
track2.commit();
|
||||
// leaving level_2
|
||||
|
||||
assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertNull(track1.getStorageValue(horse, new DataWord(horseKey1)));
|
||||
|
||||
assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertEquals(new DataWord(horseValue2), track1.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
track1.rollback();
|
||||
// leaving level_1
|
||||
|
||||
assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertNull(track1.getStorageValue(horse, new DataWord(horseKey1)));
|
||||
|
||||
assertNull(track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertNull(track1.getStorageValue(horse, new DataWord(horseKey2)));
|
||||
|
||||
repository.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void test16_4() {
|
||||
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
||||
byte[] cowKey1 = "key-c-1".getBytes();
|
||||
byte[] cowValue1 = "val-c-1".getBytes();
|
||||
|
||||
byte[] horseKey1 = "key-h-1".getBytes();
|
||||
byte[] horseValue1 = "val-h-1".getBytes();
|
||||
|
||||
byte[] cowKey2 = "key-c-2".getBytes();
|
||||
byte[] cowValue2 = "val-c-2".getBytes();
|
||||
|
||||
byte[] horseKey2 = "key-h-2".getBytes();
|
||||
byte[] horseValue2 = "val-h-2".getBytes();
|
||||
|
||||
repository.addStorageRow(cow, new DataWord(cowKey1), new DataWord(cowValue1));
|
||||
|
||||
// changes level_1
|
||||
Repository track1 = repository.startTracking();
|
||||
|
||||
// changes level_2
|
||||
Repository track2 = track1.startTracking();
|
||||
track2.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
|
||||
|
||||
track2.commit();
|
||||
// leaving level_2
|
||||
|
||||
track1.commit();
|
||||
// leaving level_1
|
||||
|
||||
assertEquals(new DataWord(cowValue1), track1.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
|
||||
|
||||
repository.close();
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void test16_5() {
|
||||
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
||||
byte[] cowKey1 = "key-c-1".getBytes();
|
||||
byte[] cowValue1 = "val-c-1".getBytes();
|
||||
|
||||
byte[] horseKey1 = "key-h-1".getBytes();
|
||||
byte[] horseValue1 = "val-h-1".getBytes();
|
||||
|
||||
byte[] cowKey2 = "key-c-2".getBytes();
|
||||
byte[] cowValue2 = "val-c-2".getBytes();
|
||||
|
||||
byte[] horseKey2 = "key-h-2".getBytes();
|
||||
byte[] horseValue2 = "val-h-2".getBytes();
|
||||
|
||||
// changes level_1
|
||||
Repository track1 = repository.startTracking();
|
||||
track1.addStorageRow(cow, new DataWord(cowKey2), new DataWord(cowValue2));
|
||||
|
||||
// changes level_2
|
||||
Repository track2 = track1.startTracking();
|
||||
assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
|
||||
track2.commit();
|
||||
// leaving level_2
|
||||
|
||||
track1.commit();
|
||||
// leaving level_1
|
||||
|
||||
assertEquals(new DataWord(cowValue2), track1.getStorageValue(cow, new DataWord(cowKey2)));
|
||||
assertNull(track1.getStorageValue(cow, new DataWord(cowKey1)));
|
||||
|
||||
repository.close();
|
||||
}
@Test
|
||||
public void test17() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repository = new RepositoryImpl(new LevelDbDataSource(), new LevelDbDataSource());
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
|
||||
|
@ -531,9 +779,8 @@ public class RepositoryTest {
|
|||
@Test
|
||||
public void test18() {
|
||||
|
||||
SystemProperties.CONFIG.setDataBaseDir("test_db/" + RepositoryTest.class);
|
||||
Repository repoTrack = new RepositoryTrack(); // dummy
|
||||
Repository repoTrack2 = repoTrack.startTracking(); //track
|
||||
Repository repository = new RepositoryImpl(new HashMapDB(), new HashMapDB());
|
||||
Repository repoTrack2 = repository.startTracking(); //track
|
||||
|
||||
byte[] cow = Hex.decode("CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826");
|
||||
byte[] horse = Hex.decode("13978AEE95F38490E9769C39B2773ED763D9CD5F");
|
||||
|
@ -543,12 +790,12 @@ public class RepositoryTest {
|
|||
byte[] cowCode = Hex.decode("A1A2A3");
|
||||
byte[] horseCode = Hex.decode("B1B2B3");
|
||||
|
||||
repoTrack.saveCode(cow, cowCode);
|
||||
repoTrack.saveCode(horse, horseCode);
|
||||
repository.saveCode(cow, cowCode);
|
||||
repository.saveCode(horse, horseCode);
|
||||
|
||||
repoTrack.delete(horse);
|
||||
repository.delete(horse);
|
||||
|
||||
assertEquals(true, repoTrack2.isExist(cow));
|
||||
assertEquals(true, repoTrack2.isExist(cow));
|
||||
assertEquals(false, repoTrack2.isExist(horse));
|
||||
assertEquals(false, repoTrack2.isExist(pig));
|
||||
assertEquals(false, repoTrack2.isExist(precompiled));
|
||||
|
|
|
@ -19,14 +19,15 @@ import static org.ethereum.jsontestsuite.JSONReader.getFileNamesForTreeSha;
|
|||
public class GitHubStateTest {
|
||||
|
||||
//SHACOMMIT of tested commit, ethereum/tests.git
|
||||
public String shacommit = "d2ba02fe0507da205e3d17d79612ae15282b35a2";
|
||||
public String shacommit = "baf4b8479c0b524560137d27e61d7e573dc4ab17";
|
||||
|
||||
|
||||
@Ignore
|
||||
@Test // this method is mostly for hands-on convenient testing
|
||||
public void stSingleTest() throws ParseException, IOException {
|
||||
String json = JSONReader.loadJSONFromCommit("StateTests/stMemoryTest.json", shacommit);
|
||||
GitHubJSONTestSuite.runStateTest(json, "stackLimitPush32_1025");
|
||||
|
||||
String json = JSONReader.loadJSONFromCommit("StateTests/stCallCreateCallCodeTest.json", shacommit);
|
||||
GitHubJSONTestSuite.runStateTest(json, "createNameRegistratorPerTxsNotEnoughGas");
|
||||
}
|
||||
|
||||
@Test
|
||||
|
@ -44,6 +45,8 @@ public class GitHubStateTest {
|
|||
excluded.add("Call1024OOG");
|
||||
excluded.add("callcodeWithHighValue");
|
||||
excluded.add("callWithHighValue");
|
||||
excluded.add("Call1024PreCalls");
|
||||
excluded.add("CallRecursiveBombPreCall");
|
||||
String json = JSONReader.loadJSONFromCommit("StateTests/stCallCreateCallCodeTest.json", shacommit);
|
||||
GitHubJSONTestSuite.runStateTest(json, excluded);
|
||||
}
|
||||
|
@ -64,6 +67,7 @@ public class GitHubStateTest {
|
|||
|
||||
@Test
|
||||
public void stPreCompiledContracts() throws ParseException, IOException {
|
||||
|
||||
Set<String> excluded = new HashSet<>();
|
||||
String json = JSONReader.loadJSONFromCommit("StateTests/stPreCompiledContracts.json", shacommit);
|
||||
GitHubJSONTestSuite.runStateTest(json, excluded);
|
||||
|
@ -83,6 +87,8 @@ public class GitHubStateTest {
|
|||
public void stMemoryTest() throws ParseException, IOException {
|
||||
String json = JSONReader.loadJSONFromCommit("StateTests/stMemoryTest.json", shacommit);
|
||||
Set<String> excluded = new HashSet<>();
|
||||
excluded.add("codecopy_dejavu2"); // FIXME: codeOffset has to be bigint inorder for CODECOPY to work correct in that test
|
||||
|
||||
GitHubJSONTestSuite.runStateTest(json, excluded);
|
||||
}
|
||||
|
||||
|
@ -120,6 +126,11 @@ public class GitHubStateTest {
|
|||
@Test
|
||||
public void stSpecialTest() throws ParseException, IOException {
|
||||
Set<String> excluded = new HashSet<>();
|
||||
excluded.add("txfrom0_deja"); // (!!!) FIXME fix them as soon as possible
|
||||
excluded.add("JUMPDEST_AttackwithJump"); // (!!!) FIXME fix them as soon as possible
|
||||
excluded.add("JUMPDEST_Attack"); // (!!!) FIXME fix them as soon as possible
|
||||
|
||||
|
||||
String json = JSONReader.loadJSONFromCommit("StateTests/stSpecialTest.json", shacommit);
|
||||
GitHubJSONTestSuite.runStateTest(json, excluded);
|
||||
}
@@ -133,6 +144,7 @@ public class GitHubStateTest {
    //@Ignore
    @Test
    public void stSystemOperationsTest() throws IOException {

        Set<String> excluded = new HashSet<>();
        excluded.add("CallRecursiveBomb0_OOG_atMaxCallDepth"); // FIXME hitting VM limits
        excluded.add("Call10");                                // FIXME gaslimit as biginteger


@@ -129,10 +129,11 @@ public class GitHubVMTest {
        GitHubJSONTestSuite.runGitHubJsonVMTest(json, excluded);
    }

    //@Ignore
    @Ignore // FIXME: as soon as possible
    @Test // testing full suite
    public void testvmSystemOperationsTestGitHub() throws ParseException {
        Set<String> excluded = new HashSet<>();

        String json = JSONReader.loadJSONFromCommit("VMTests/vmSystemOperationsTest.json", shacommit);
        GitHubJSONTestSuite.runGitHubJsonVMTest(json, excluded);
    }


@@ -4,7 +4,9 @@ import org.ethereum.core.AccountState;
import org.ethereum.datasource.KeyValueDataSource;
import org.ethereum.datasource.LevelDbDataSource;
import org.ethereum.datasource.HashMapDB;
import org.ethereum.db.ByteArrayWrapper;
import org.ethereum.db.DatabaseImpl;
import org.ethereum.util.*;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;

@@ -30,6 +32,7 @@ import java.util.*;

import static org.ethereum.crypto.HashUtil.EMPTY_TRIE_HASH;
import static org.ethereum.crypto.SHA3Helper.sha3;
import static org.ethereum.util.ByteUtil.wrap;
import static org.junit.Assert.*;

public class TrieTest {

@@ -1029,4 +1032,61 @@ public class TrieTest {
    }


    @Test
    public void testSerialize_1() {

        long t = System.nanoTime();

        TrieImpl trie = new SecureTrie(new HashMapDB());

        byte[] k1 = "do".getBytes();
        byte[] v1 = "verb".getBytes();

        byte[] k2 = "ether".getBytes();
        byte[] v2 = "wookiedoo".getBytes();

        byte[] k3 = "horse".getBytes();
        byte[] v3 = "stallion".getBytes();

        byte[] k4 = "shaman".getBytes();
        byte[] v4 = "horse".getBytes();

        byte[] k5 = "doge".getBytes();
        byte[] v5 = "coin".getBytes();

        byte[] k6 = "ether".getBytes();
        byte[] v6 = "".getBytes();

        byte[] k7 = "dog".getBytes();
        byte[] v7 = "puppy".getBytes();

        byte[] k8 = "shaman".getBytes();
        byte[] v8 = "".getBytes();

        trie.update(k1, v1);
        trie.update(k2, v2);
        trie.update(k3, v3);
        trie.update(k4, v4);
        trie.update(k5, v5);
        trie.update(k6, v6);
        trie.update(k7, v7);
        trie.update(k8, v8);

        byte[] data = trie.serialize();
        String original = trie.getTrieDump();

        TrieImpl trie2 = new SecureTrie(new HashMapDB());

        long t_ = System.nanoTime();

        trie2.deserialize(data);

        String expected = trie2.getTrieDump();
        assertEquals(original, expected);

        System.out.println("took: " + ((float) (t_ - t) / 1_000_000) + "ms");
        System.out.println("size: " + ((float) data.length / 1_000) + "KB");
    }

}
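
Note: testSerialize_1 above round-trips the trie through serialize()/deserialize() and compares the structural dumps; updating "ether" and "shaman" a second time with empty values acts as a deletion under Merkle-Patricia trie semantics, so only the surviving entries should remain after the round trip. Reduced to a minimal sketch against the same SecureTrie/HashMapDB API used in the test:

    TrieImpl source = new SecureTrie(new HashMapDB());
    source.update("dog".getBytes(), "puppy".getBytes());
    source.update("doge".getBytes(), "coin".getBytes());

    byte[] snapshot = source.serialize();            // flatten the trie to a byte[]

    TrieImpl restored = new SecureTrie(new HashMapDB());
    restored.deserialize(snapshot);                  // rebuild the trie from the snapshot

    // lossless serialization implies identical structural dumps
    assertEquals(source.getTrieDump(), restored.getTrieDump());
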
@@ -4,6 +4,7 @@ import org.ethereum.crypto.HashUtil;

import com.cedarsoftware.util.DeepEquals;

import org.ethereum.db.ByteArrayWrapper;
import org.junit.Ignore;
import org.junit.Test;

@@ -19,11 +20,10 @@ import java.math.BigInteger;
import java.net.InetAddress;
import java.net.UnknownHostException;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;
import java.util.*;

import static org.ethereum.util.ByteUtil.byteArrayToInt;
import static org.ethereum.util.ByteUtil.wrap;
import static org.junit.Assert.*;
import static org.ethereum.util.RlpTestData.*;

@@ -979,5 +979,56 @@ public class RLPTest {
        System.out.println(Hex.toHexString(encodedData));
    }

    @Test
    public void testEncodeListHeader() {

        byte[] header = RLP.encodeListHeader(10);
        String expected_1 = "ca";
        assertEquals(expected_1, Hex.toHexString(header));

        header = RLP.encodeListHeader(1000);
        String expected_2 = "f903e8";
        assertEquals(expected_2, Hex.toHexString(header));

        header = RLP.encodeListHeader(1000000000);
        String expected_3 = "fb3b9aca00";
        assertEquals(expected_3, Hex.toHexString(header));
    }
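
Note: the expected values in testEncodeListHeader follow directly from the RLP rules: a list whose payload is at most 55 bytes gets the single header byte 0xc0 + length (0xc0 + 10 = 0xca), while longer payloads get 0xf7 + byte-length-of-length followed by the big-endian length (1000 = 0x03e8 gives f903e8, 1000000000 = 0x3b9aca00 gives fb3b9aca00). A standalone sketch of that rule (not the library's RLP class):

    import java.math.BigInteger;

    class RlpListHeaderSketch {
        static byte[] listHeader(long payloadLength) {
            if (payloadLength <= 55)
                return new byte[]{(byte) (0xc0 + payloadLength)};   // short list: one header byte
            byte[] len = BigInteger.valueOf(payloadLength).toByteArray();
            if (len[0] == 0) {                                      // drop BigInteger's sign byte
                byte[] trimmed = new byte[len.length - 1];
                System.arraycopy(len, 1, trimmed, 0, trimmed.length);
                len = trimmed;
            }
            byte[] header = new byte[1 + len.length];
            header[0] = (byte) (0xf7 + len.length);                 // long list: 0xf7 + size of the length
            System.arraycopy(len, 0, header, 1, len.length);
            return header;
        }

        public static void main(String[] args) {
            for (long n : new long[]{10, 1000, 1000000000L}) {
                StringBuilder hex = new StringBuilder();
                for (byte b : listHeader(n)) hex.append(String.format("%02x", b & 0xff));
                System.out.println(n + " -> " + hex);               // ca, f903e8, fb3b9aca00
            }
        }
    }

The same rule covers testEncodeSet_2 below: an empty set has a zero-length payload, so its encoding is the single byte c0.
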

    @Test
    public void testEncodeSet_1() {

        Set<ByteArrayWrapper> data = new HashSet<>();

        ByteArrayWrapper element1 =
                new ByteArrayWrapper(Hex.decode("1111111111111111111111111111111111111111111111111111111111111111"));

        ByteArrayWrapper element2 =
                new ByteArrayWrapper(Hex.decode("2222222222222222222222222222222222222222222222222222222222222222"));

        data.add(element1);
        data.add(element2);

        byte[] setEncoded = RLP.encodeSet(data);

        RLPList list = (RLPList) RLP.decode2(setEncoded).get(0);

        byte[] element1_ = list.get(0).getRLPData();
        byte[] element2_ = list.get(1).getRLPData();

        assertTrue(data.contains(wrap(element1_)));
        assertTrue(data.contains(wrap(element2_)));
    }

    @Test
    public void testEncodeSet_2() {

        Set<ByteArrayWrapper> data = new HashSet<>();
        byte[] setEncoded = RLP.encodeSet(data);
        assertEquals("c0", Hex.toHexString(setEncoded));
    }

}

@@ -5,6 +5,7 @@ import org.ethereum.crypto.ECKey;
import com.fasterxml.jackson.databind.ObjectMapper;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

import org.spongycastle.crypto.InvalidCipherTextException;

@@ -42,6 +43,7 @@ public class EtherSaleWalletDecoderTest {
    }

    @Test
    @Ignore
    public void shouldGeneratePasswordHashWithUmlauts() throws InvalidKeySpecException, NoSuchAlgorithmException {

        byte[] result = walletDecoder.generatePasswordHash("öäüß");

@@ -51,6 +53,7 @@ public class EtherSaleWalletDecoderTest {
    }

    @Test
    @Ignore
    public void shouldGeneratePasswordHashWithUnicode() throws InvalidKeySpecException, NoSuchAlgorithmException {

        byte[] result = walletDecoder.generatePasswordHash("☯");

@@ -6,14 +6,14 @@ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern= %d{HH:mm:ss.SSS} [%c{1}] %m%n
log4j.appender.stdout.Threshold=ERROR
log4j.appender.stdout.Threshold=INFO

# filter noisy classes
log4j.logger.block = ERROR
log4j.logger.blockqueue = TRACE
log4j.logger.blockqueue = ERROR
log4j.logger.wallet = ERROR
log4j.logger.general = DEBUG
log4j.logger.net = TRACE
log4j.logger.general = ERROR
log4j.logger.net = ERROR
log4j.logger.db = ERROR
log4j.logger.peerdiscovery = ERROR
log4j.logger.peermonitor = ERROR

@@ -23,13 +23,14 @@ log4j.logger.wire = ERROR
log4j.logger.VM = ERROR
log4j.logger.main = ERROR
log4j.logger.trie = ERROR
log4j.logger.state = INFO
log4j.logger.repository = INFO
log4j.logger.blockchain = TRACE
log4j.logger.state = ERROR
log4j.logger.repository = ERROR
log4j.logger.blockchain = ERROR
log4j.logger.txs = ERROR
log4j.logger.ui = ERROR
log4j.logger.gas = ERROR
log4j.logger.TCK-Test = INFO
log4j.logger.TCK-Test = ERROR
log4j.logger.test = ERROR

log4j.logger.org.springframework = ERROR