diff --git a/pow/ethash.go b/pow/ethash.go
index 1e577a587..9adc38540 100644
--- a/pow/ethash.go
+++ b/pow/ethash.go
@@ -428,7 +428,7 @@ func (ethash *Ethash) cache(block uint64) []uint32 {
 	current, future := ethash.caches[epoch], (*cache)(nil)
 	if current == nil {
 		// No in-memory cache, evict the oldest if the cache limit was reached
-		for len(ethash.caches) >= ethash.cachesinmem {
+		for len(ethash.caches) > 0 && len(ethash.caches) >= ethash.cachesinmem {
 			var evict *cache
 			for _, cache := range ethash.caches {
 				if evict == nil || evict.used.After(cache.used) {
@@ -480,22 +480,16 @@ func (ethash *Ethash) cache(block uint64) []uint32 {
 // Search implements PoW, attempting to find a nonce that satisfies the block's
 // difficulty requirements.
 func (ethash *Ethash) Search(block Block, stop <-chan struct{}) (uint64, []byte) {
-	// Extract some data from the block
-	var (
-		hash   = block.HashNoNonce().Bytes()
-		diff   = block.Difficulty()
-		target = new(big.Int).Div(maxUint256, diff)
-	)
-	// Retrieve the mining dataset
-	dataset, size := ethash.dataset(block.NumberU64()), datasetSize(block.NumberU64())
-
-	// Start generating random nonces until we abort or find a good one
 	var (
+		hash    = block.HashNoNonce().Bytes()
+		diff    = block.Difficulty()
+		target  = new(big.Int).Div(maxUint256, diff)
+		dataset = ethash.dataset(block.NumberU64())
+		rand    = rand.New(rand.NewSource(time.Now().UnixNano()))
+		nonce   = uint64(rand.Int63())
 		attempts int64
-
-		rand  = rand.New(rand.NewSource(time.Now().UnixNano()))
-		nonce = uint64(rand.Int63())
 	)
+	// Start generating random nonces until we abort or find a good one
 	for {
 		select {
 		case <-stop:
@@ -511,7 +505,7 @@ func (ethash *Ethash) Search(block Block, stop <-chan struct{}) (uint64, []byte)
 			attempts = 0
 		}
 		// Compute the PoW value of this nonce
-		digest, result := hashimotoFull(size, dataset, hash, nonce)
+		digest, result := hashimotoFull(dataset, hash, nonce)
 		if new(big.Int).SetBytes(result).Cmp(target) <= 0 {
 			return nonce, digest
 		}
@@ -532,7 +526,7 @@ func (ethash *Ethash) dataset(block uint64) []uint32 {
 	current, future := ethash.datasets[epoch], (*dataset)(nil)
 	if current == nil {
 		// No in-memory dataset, evict the oldest if the dataset limit was reached
-		for len(ethash.datasets) >= ethash.dagsinmem {
+		for len(ethash.datasets) > 0 && len(ethash.datasets) >= ethash.dagsinmem {
 			var evict *dataset
 			for _, dataset := range ethash.datasets {
 				if evict == nil || evict.used.After(dataset.used) {
diff --git a/pow/ethash_algo.go b/pow/ethash_algo.go
index 3737cc5d7..1e996785f 100644
--- a/pow/ethash_algo.go
+++ b/pow/ethash_algo.go
@@ -349,12 +349,12 @@ func hashimotoLight(size uint64, cache []uint32, hash []byte, nonce uint64) ([]b
 // hashimotoFull aggregates data from the full dataset (using the full in-memory
 // dataset) in order to produce our final value for a particular header hash and
 // nonce.
-func hashimotoFull(size uint64, dataset []uint32, hash []byte, nonce uint64) ([]byte, []byte) {
+func hashimotoFull(dataset []uint32, hash []byte, nonce uint64) ([]byte, []byte) {
 	lookup := func(index uint32) []uint32 {
 		offset := index * hashWords
 		return dataset[offset : offset+hashWords]
 	}
-	return hashimoto(hash, nonce, size, lookup)
+	return hashimoto(hash, nonce, uint64(len(dataset))*4, lookup)
 }
 
 // datasetSizes is a lookup table for the ethash dataset size for the first 2048
diff --git a/pow/ethash_algo_test.go b/pow/ethash_algo_test.go
index c881874ff..0605d70ad 100644
--- a/pow/ethash_algo_test.go
+++ b/pow/ethash_algo_test.go
@@ -660,7 +660,7 @@ func TestHashimoto(t *testing.T) {
 	if !bytes.Equal(result, wantResult) {
 		t.Errorf("light hashimoto result mismatch: have %x, want %x", result, wantResult)
 	}
-	digest, result = hashimotoFull(32*1024, dataset, hash, nonce)
+	digest, result = hashimotoFull(dataset, hash, nonce)
 	if !bytes.Equal(digest, wantDigest) {
 		t.Errorf("full hashimoto digest mismatch: have %x, want %x", digest, wantDigest)
 	}
@@ -713,6 +713,17 @@ func TestConcurrentDiskCacheGeneration(t *testing.T) {
 	pend.Wait()
 }
 
+func TestTestMode(t *testing.T) {
+	head := &types.Header{Difficulty: big.NewInt(100)}
+	ethash := NewTestEthash()
+	nonce, mix := ethash.Search(types.NewBlockWithHeader(head), nil)
+	head.Nonce = types.EncodeNonce(nonce)
+	copy(head.MixDigest[:], mix)
+	if err := ethash.Verify(types.NewBlockWithHeader(head)); err != nil {
+		t.Error("unexpected Verify error:", err)
+	}
+}
+
 // Benchmarks the cache generation performance.
 func BenchmarkCacheGeneration(b *testing.B) {
 	for i := 0; i < b.N; i++ {
@@ -758,6 +769,6 @@ func BenchmarkHashimotoFullSmall(b *testing.B) {
 
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		hashimotoFull(32*65536, dataset, hash, 0)
+		hashimotoFull(dataset, hash, 0)
 	}
 }