Merge pull request #1329 from Gustav-Simonsson/ethash_input_validations

Update Ethash Godeps
Jeffrey Wilcke 2015-06-25 02:06:06 -07:00
commit 8774fdcd64
5 changed files with 54 additions and 19 deletions

Godeps/Godeps.json (generated)

@@ -21,8 +21,8 @@
 		},
 		{
 			"ImportPath": "github.com/ethereum/ethash",
-			"Comment": "v23.1-222-g173b8ff",
-			"Rev": "173b8ff953610c13710061e83b95b50c73d7ea50"
+			"Comment": "v23.1-227-g8f6ccaa",
+			"Rev": "8f6ccaaef9b418553807a73a95cb5f49cd3ea39f"
 		},
 		{
 			"ImportPath": "github.com/howeyc/fsnotify",


@@ -100,19 +100,29 @@ type Light struct {
 func (l *Light) Verify(block pow.Block) bool {
 	// TODO: do ethash_quick_verify before getCache in order
 	// to prevent DOS attacks.
-	var (
-		blockNum   = block.NumberU64()
-		difficulty = block.Difficulty()
-		cache      = l.getCache(blockNum)
-		dagSize    = C.ethash_get_datasize(C.uint64_t(blockNum))
-	)
-	if l.test {
-		dagSize = dagSizeForTesting
-	}
+	blockNum := block.NumberU64()
 	if blockNum >= epochLength*2048 {
 		glog.V(logger.Debug).Infof("block number %d too high, limit is %d", epochLength*2048)
 		return false
 	}
+	difficulty := block.Difficulty()
+	/* Cannot happen if block header diff is validated prior to PoW, but can
+	   happen if PoW is checked first due to parallel PoW checking.
+	   We could check the minimum valid difficulty but for SoC we avoid (duplicating)
+	   Ethereum protocol consensus rules here which are not in scope of Ethash
+	*/
+	if difficulty.Cmp(common.Big0) == 0 {
+		glog.V(logger.Debug).Infof("invalid block difficulty")
+		return false
+	}
+	cache := l.getCache(blockNum)
+	dagSize := C.ethash_get_datasize(C.uint64_t(blockNum))
+	if l.test {
+		dagSize = dagSizeForTesting
+	}
 	// Recompute the hash using the cache.
 	hash := hashToH256(block.HashNoNonce())
 	ret := C.ethash_light_compute_internal(cache.ptr, dagSize, hash, C.uint64_t(block.Nonce()))
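Why the zero-difficulty guard matters (a sketch, not part of the diff): full verification ultimately compares the computed result hash against a target of 2^256/difficulty, so a zero difficulty would mean dividing by zero before any comparison can happen. A minimal Go illustration under that assumption; verifyAgainstTarget is a hypothetical helper, not the package's API:

package main

import (
	"fmt"
	"math/big"
)

// maxUint256 is 2^256; the PoW target (boundary) is maxUint256 / difficulty.
var maxUint256 = new(big.Int).Lsh(big.NewInt(1), 256)

// verifyAgainstTarget is a hypothetical helper: a result hash passes when
// hash <= 2^256/difficulty. With difficulty == 0 the division is undefined
// (big.Int.Div panics on a zero divisor), which is why Verify now rejects
// zero difficulty before doing any PoW work.
func verifyAgainstTarget(resultHash, difficulty *big.Int) bool {
	if difficulty.Sign() == 0 {
		return false // guard, mirroring the difficulty.Cmp(common.Big0) check above
	}
	target := new(big.Int).Div(maxUint256, difficulty)
	return resultHash.Cmp(target) <= 0
}

func main() {
	hash := new(big.Int).Lsh(big.NewInt(1), 200)               // a 201-bit candidate result hash
	fmt.Println(verifyAgainstTarget(hash, big.NewInt(131072))) // true: 2^200 <= 2^256/2^17
	fmt.Println(verifyAgainstTarget(hash, big.NewInt(0)))      // false: rejected, no panic
}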


@@ -11,6 +11,7 @@ import (
 	"testing"
 
 	"github.com/ethereum/go-ethereum/common"
+	"github.com/ethereum/go-ethereum/crypto"
 )
 
 func init() {
@@ -59,6 +60,14 @@ var validBlocks = []*testBlock{
 	},
 }
 
+var invalidZeroDiffBlock = testBlock{
+	number:      61440000,
+	hashNoNonce: crypto.Sha3Hash([]byte("foo")),
+	difficulty:  big.NewInt(0),
+	nonce:       0xcafebabec00000fe,
+	mixDigest:   crypto.Sha3Hash([]byte("bar")),
+}
+
 func TestEthashVerifyValid(t *testing.T) {
 	eth := New()
 	for i, block := range validBlocks {
@@ -68,6 +77,13 @@ func TestEthashVerifyValid(t *testing.T) {
 	}
 }
 
+func TestEthashVerifyInvalid(t *testing.T) {
+	eth := New()
+	if eth.Verify(&invalidZeroDiffBlock) {
+		t.Errorf("should not validate - we just ensure it does not panic on this block")
+	}
+}
+
 func TestEthashConcurrentVerify(t *testing.T) {
 	eth, err := NewForTesting()
 	if err != nil {


@@ -284,13 +284,13 @@ bool ethash_quick_check_difficulty(
 	ethash_h256_t const* header_hash,
 	uint64_t const nonce,
 	ethash_h256_t const* mix_hash,
-	ethash_h256_t const* difficulty
+	ethash_h256_t const* boundary
 )
 {
 	ethash_h256_t return_hash;
 	ethash_quick_hash(&return_hash, header_hash, nonce, mix_hash);
-	return ethash_check_difficulty(&return_hash, difficulty);
+	return ethash_check_difficulty(&return_hash, boundary);
 }
 
 ethash_light_t ethash_light_new_internal(uint64_t cache_size, ethash_h256_t const* seed)


@@ -46,27 +46,36 @@ static inline void ethash_h256_reset(ethash_h256_t* hash)
 	memset(hash, 0, 32);
 }
 
-// Returns if hash is less than or equal to difficulty
+// Returns if hash is less than or equal to boundary (2^256/difficulty)
 static inline bool ethash_check_difficulty(
 	ethash_h256_t const* hash,
-	ethash_h256_t const* difficulty
+	ethash_h256_t const* boundary
 )
 {
-	// Difficulty is big endian
+	// Boundary is big endian
 	for (int i = 0; i < 32; i++) {
-		if (ethash_h256_get(hash, i) == ethash_h256_get(difficulty, i)) {
+		if (ethash_h256_get(hash, i) == ethash_h256_get(boundary, i)) {
 			continue;
 		}
-		return ethash_h256_get(hash, i) < ethash_h256_get(difficulty, i);
+		return ethash_h256_get(hash, i) < ethash_h256_get(boundary, i);
 	}
 	return true;
 }
 
+/**
+ * Difficulty quick check for POW preverification
+ *
+ * @param header_hash  The hash of the header
+ * @param nonce        The block's nonce
+ * @param mix_hash     The mix digest hash
+ * @param boundary     The boundary is defined as (2^256 / difficulty)
+ * @return             true for succesful pre-verification and false otherwise
+ */
 bool ethash_quick_check_difficulty(
 	ethash_h256_t const* header_hash,
 	uint64_t const nonce,
 	ethash_h256_t const* mix_hash,
-	ethash_h256_t const* difficulty
+	ethash_h256_t const* boundary
 );
 
 struct ethash_light {
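For reference (not part of the change): since both the hash and the boundary are 32-byte big-endian values, the byte loop in ethash_check_difficulty is simply an unsigned 256-bit "hash <= boundary" comparison, where boundary = 2^256/difficulty. A small Go sketch of that equivalence; checkBoundary is an illustrative stand-in, not an existing binding:

package main

import (
	"bytes"
	"fmt"
	"math/big"
)

// checkBoundary mirrors ethash_check_difficulty: for 32-byte big-endian
// values, a lexicographic byte comparison gives the same answer as comparing
// the numbers as 256-bit unsigned integers.
func checkBoundary(hash, boundary [32]byte) bool {
	return bytes.Compare(hash[:], boundary[:]) <= 0
}

func main() {
	// boundary = 2^256 / difficulty, serialized big endian into 32 bytes.
	difficulty := big.NewInt(131072)
	boundaryInt := new(big.Int).Div(new(big.Int).Lsh(big.NewInt(1), 256), difficulty)

	var hash, boundary [32]byte
	b := boundaryInt.Bytes()
	copy(boundary[32-len(b):], b) // right-align the big-endian bytes
	hash[10] = 0x01               // a candidate hash well below the boundary

	fmt.Println(checkBoundary(hash, boundary))                        // true
	fmt.Println(new(big.Int).SetBytes(hash[:]).Cmp(boundaryInt) <= 0) // true: same result via big.Int
}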