Godeps, eth, tests: update ethash, used shared for testing

Péter Szilágyi 2016-02-24 13:29:47 +02:00
parent 1e62cd6c79
commit 0a5ee08e2b
27 changed files with 191 additions and 3401 deletions

Godeps/Godeps.json generated

@@ -20,8 +20,8 @@
},
{
"ImportPath": "github.com/ethereum/ethash",
"Comment": "v23.1-238-g9401881",
"Rev": "9401881ab040d1a3b0ae9e4780a115bc284a8a1a"
"Comment": "v23.1-240-ga524c9f",
"Rev": "a524c9f7d55cb8925567dc201b44ba555862056d"
},
{
"ImportPath": "github.com/fatih/color",
@@ -40,10 +40,38 @@
"ImportPath": "github.com/hashicorp/golang-lru",
"Rev": "a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4"
},
{
"ImportPath": "github.com/hashicorp/golang-lru/simplelru",
"Rev": "a0d98a5f288019575c6d1f4bb1573fef2d1fcdc4"
},
{
"ImportPath": "github.com/huin/goupnp",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/huin/goupnp/dcps/internetgateway1",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/huin/goupnp/dcps/internetgateway2",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/huin/goupnp/httpu",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/huin/goupnp/scpd",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/huin/goupnp/soap",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/huin/goupnp/ssdp",
"Rev": "46bde78b11f3f021f2a511df138be9e2fc7506e8"
},
{
"ImportPath": "github.com/jackpal/gateway",
"Rev": "192609c58b8985e645cbe82ddcb28a4362ca0fdc"
@@ -86,10 +114,78 @@
"ImportPath": "github.com/robertkrimen/otto",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/robertkrimen/otto/ast",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/robertkrimen/otto/dbg",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/robertkrimen/otto/file",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/robertkrimen/otto/parser",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/robertkrimen/otto/registry",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/robertkrimen/otto/token",
"Rev": "53221230c215611a90762720c9042ac782ef74ee"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/cache",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/comparer",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/errors",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/filter",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/iterator",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/journal",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/memdb",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/opt",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/storage",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/table",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "github.com/syndtr/goleveldb/leveldb/util",
"Rev": "e7e6f5b5ef25adb580feac515f9ccec514d0bda8"
},
{
"ImportPath": "golang.org/x/crypto/pbkdf2",
"Rev": "1f22c0103821b9390939b6776727195525381532"
@@ -110,6 +206,14 @@
"ImportPath": "golang.org/x/net/html",
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
},
{
"ImportPath": "golang.org/x/net/html/atom",
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
},
{
"ImportPath": "golang.org/x/net/html/charset",
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
},
{
"ImportPath": "golang.org/x/net/websocket",
"Rev": "8968c61983e8f51a91b8c0ef25bf739278c89634"
@@ -122,6 +226,42 @@
"ImportPath": "golang.org/x/text/encoding",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/charmap",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/htmlindex",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/internal",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/internal/identifier",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/japanese",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/korean",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/simplifiedchinese",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/traditionalchinese",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/encoding/unicode",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"
},
{
"ImportPath": "golang.org/x/text/internal/tag",
"Rev": "09761194ac5034a97b2bfad4f5b896b0ac350b3e"


@@ -1,161 +0,0 @@
#.rst:
# CMakeParseArguments
# -------------------
#
#
#
# CMAKE_PARSE_ARGUMENTS(<prefix> <options> <one_value_keywords>
# <multi_value_keywords> args...)
#
# CMAKE_PARSE_ARGUMENTS() is intended to be used in macros or functions
# for parsing the arguments given to that macro or function. It
# processes the arguments and defines a set of variables which hold the
# values of the respective options.
#
# The <options> argument contains all options for the respective macro,
# i.e. keywords which can be used when calling the macro without any
# value following, like e.g. the OPTIONAL keyword of the install()
# command.
#
# The <one_value_keywords> argument contains all keywords for this macro
# which are followed by one value, like e.g. DESTINATION keyword of the
# install() command.
#
# The <multi_value_keywords> argument contains all keywords for this
# macro which can be followed by more than one value, like e.g. the
# TARGETS or FILES keywords of the install() command.
#
# When done, CMAKE_PARSE_ARGUMENTS() will have defined for each of the
# keywords listed in <options>, <one_value_keywords> and
# <multi_value_keywords> a variable composed of the given <prefix>
# followed by "_" and the name of the respective keyword. These
# variables will then hold the respective value from the argument list.
# For the <options> keywords this will be TRUE or FALSE.
#
# All remaining arguments are collected in a variable
# <prefix>_UNPARSED_ARGUMENTS, this can be checked afterwards to see
# whether your macro was called with unrecognized parameters.
#
# As an example here a my_install() macro, which takes similar arguments
# as the real install() command:
#
# ::
#
# function(MY_INSTALL)
# set(options OPTIONAL FAST)
# set(oneValueArgs DESTINATION RENAME)
# set(multiValueArgs TARGETS CONFIGURATIONS)
# cmake_parse_arguments(MY_INSTALL "${options}" "${oneValueArgs}"
# "${multiValueArgs}" ${ARGN} )
# ...
#
#
#
# Assume my_install() has been called like this:
#
# ::
#
# my_install(TARGETS foo bar DESTINATION bin OPTIONAL blub)
#
#
#
# After the cmake_parse_arguments() call the macro will have set the
# following variables:
#
# ::
#
# MY_INSTALL_OPTIONAL = TRUE
# MY_INSTALL_FAST = FALSE (this option was not used when calling my_install()
# MY_INSTALL_DESTINATION = "bin"
# MY_INSTALL_RENAME = "" (was not used)
# MY_INSTALL_TARGETS = "foo;bar"
# MY_INSTALL_CONFIGURATIONS = "" (was not used)
# MY_INSTALL_UNPARSED_ARGUMENTS = "blub" (no value expected after "OPTIONAL"
#
#
#
# You can then continue and process these variables.
#
# Keywords terminate lists of values, e.g. if directly after a
# one_value_keyword another recognized keyword follows, this is
# interpreted as the beginning of the new option. E.g.
# my_install(TARGETS foo DESTINATION OPTIONAL) would result in
# MY_INSTALL_DESTINATION set to "OPTIONAL", but MY_INSTALL_DESTINATION
# would be empty and MY_INSTALL_OPTIONAL would be set to TRUE therefor.
#=============================================================================
# Copyright 2010 Alexander Neundorf <neundorf@kde.org>
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
if(__CMAKE_PARSE_ARGUMENTS_INCLUDED)
return()
endif()
set(__CMAKE_PARSE_ARGUMENTS_INCLUDED TRUE)
function(CMAKE_PARSE_ARGUMENTS prefix _optionNames _singleArgNames _multiArgNames)
# first set all result variables to empty/FALSE
foreach(arg_name ${_singleArgNames} ${_multiArgNames})
set(${prefix}_${arg_name})
endforeach()
foreach(option ${_optionNames})
set(${prefix}_${option} FALSE)
endforeach()
set(${prefix}_UNPARSED_ARGUMENTS)
set(insideValues FALSE)
set(currentArgName)
# now iterate over all arguments and fill the result variables
foreach(currentArg ${ARGN})
list(FIND _optionNames "${currentArg}" optionIndex) # ... then this marks the end of the arguments belonging to this keyword
list(FIND _singleArgNames "${currentArg}" singleArgIndex) # ... then this marks the end of the arguments belonging to this keyword
list(FIND _multiArgNames "${currentArg}" multiArgIndex) # ... then this marks the end of the arguments belonging to this keyword
if(${optionIndex} EQUAL -1 AND ${singleArgIndex} EQUAL -1 AND ${multiArgIndex} EQUAL -1)
if(insideValues)
if("${insideValues}" STREQUAL "SINGLE")
set(${prefix}_${currentArgName} ${currentArg})
set(insideValues FALSE)
elseif("${insideValues}" STREQUAL "MULTI")
list(APPEND ${prefix}_${currentArgName} ${currentArg})
endif()
else()
list(APPEND ${prefix}_UNPARSED_ARGUMENTS ${currentArg})
endif()
else()
if(NOT ${optionIndex} EQUAL -1)
set(${prefix}_${currentArg} TRUE)
set(insideValues FALSE)
elseif(NOT ${singleArgIndex} EQUAL -1)
set(currentArgName ${currentArg})
set(${prefix}_${currentArgName})
set(insideValues "SINGLE")
elseif(NOT ${multiArgIndex} EQUAL -1)
set(currentArgName ${currentArg})
set(${prefix}_${currentArgName})
set(insideValues "MULTI")
endif()
endif()
endforeach()
# propagate the result variables to the caller:
foreach(arg_name ${_singleArgNames} ${_multiArgNames} ${_optionNames})
set(${prefix}_${arg_name} ${${prefix}_${arg_name}} PARENT_SCOPE)
endforeach()
set(${prefix}_UNPARSED_ARGUMENTS ${${prefix}_UNPARSED_ARGUMENTS} PARENT_SCOPE)
endfunction()


@@ -1,108 +0,0 @@
# Module for locating the Crypto++ encryption library.
#
# Customizable variables:
# CRYPTOPP_ROOT_DIR
# This variable points to the CryptoPP root directory. On Windows the
# library location typically will have to be provided explicitly using the
# -D command-line option. The directory should include the include/cryptopp,
# lib and/or bin sub-directories.
#
# Read-only variables:
# CRYPTOPP_FOUND
# Indicates whether the library has been found.
#
# CRYPTOPP_INCLUDE_DIRS
# Points to the CryptoPP include directory.
#
# CRYPTOPP_LIBRARIES
# Points to the CryptoPP libraries that should be passed to
# target_link_libararies.
#
#
# Copyright (c) 2012 Sergiu Dotenco
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
INCLUDE (FindPackageHandleStandardArgs)
FIND_PATH (CRYPTOPP_ROOT_DIR
NAMES cryptopp/cryptlib.h include/cryptopp/cryptlib.h
PATHS ENV CRYPTOPPROOT
DOC "CryptoPP root directory")
# Re-use the previous path:
FIND_PATH (CRYPTOPP_INCLUDE_DIR
NAMES cryptopp/cryptlib.h
HINTS ${CRYPTOPP_ROOT_DIR}
PATH_SUFFIXES include
DOC "CryptoPP include directory")
FIND_LIBRARY (CRYPTOPP_LIBRARY_DEBUG
NAMES cryptlibd cryptoppd
HINTS ${CRYPTOPP_ROOT_DIR}
PATH_SUFFIXES lib
DOC "CryptoPP debug library")
FIND_LIBRARY (CRYPTOPP_LIBRARY_RELEASE
NAMES cryptlib cryptopp
HINTS ${CRYPTOPP_ROOT_DIR}
PATH_SUFFIXES lib
DOC "CryptoPP release library")
IF (CRYPTOPP_LIBRARY_DEBUG AND CRYPTOPP_LIBRARY_RELEASE)
SET (CRYPTOPP_LIBRARY
optimized ${CRYPTOPP_LIBRARY_RELEASE}
debug ${CRYPTOPP_LIBRARY_DEBUG} CACHE DOC "CryptoPP library")
ELSEIF (CRYPTOPP_LIBRARY_RELEASE)
SET (CRYPTOPP_LIBRARY ${CRYPTOPP_LIBRARY_RELEASE} CACHE DOC
"CryptoPP library")
ENDIF (CRYPTOPP_LIBRARY_DEBUG AND CRYPTOPP_LIBRARY_RELEASE)
IF (CRYPTOPP_INCLUDE_DIR)
SET (_CRYPTOPP_VERSION_HEADER ${CRYPTOPP_INCLUDE_DIR}/cryptopp/config.h)
IF (EXISTS ${_CRYPTOPP_VERSION_HEADER})
FILE (STRINGS ${_CRYPTOPP_VERSION_HEADER} _CRYPTOPP_VERSION_TMP REGEX
"^#define CRYPTOPP_VERSION[ \t]+[0-9]+$")
STRING (REGEX REPLACE
"^#define CRYPTOPP_VERSION[ \t]+([0-9]+)" "\\1" _CRYPTOPP_VERSION_TMP
${_CRYPTOPP_VERSION_TMP})
STRING (REGEX REPLACE "([0-9]+)[0-9][0-9]" "\\1" CRYPTOPP_VERSION_MAJOR
${_CRYPTOPP_VERSION_TMP})
STRING (REGEX REPLACE "[0-9]([0-9])[0-9]" "\\1" CRYPTOPP_VERSION_MINOR
${_CRYPTOPP_VERSION_TMP})
STRING (REGEX REPLACE "[0-9][0-9]([0-9])" "\\1" CRYPTOPP_VERSION_PATCH
${_CRYPTOPP_VERSION_TMP})
SET (CRYPTOPP_VERSION_COUNT 3)
SET (CRYPTOPP_VERSION
${CRYPTOPP_VERSION_MAJOR}.${CRYPTOPP_VERSION_MINOR}.${CRYPTOPP_VERSION_PATCH})
ENDIF (EXISTS ${_CRYPTOPP_VERSION_HEADER})
ENDIF (CRYPTOPP_INCLUDE_DIR)
SET (CRYPTOPP_INCLUDE_DIRS ${CRYPTOPP_INCLUDE_DIR})
SET (CRYPTOPP_LIBRARIES ${CRYPTOPP_LIBRARY})
MARK_AS_ADVANCED (CRYPTOPP_INCLUDE_DIR CRYPTOPP_LIBRARY CRYPTOPP_LIBRARY_DEBUG
CRYPTOPP_LIBRARY_RELEASE)
FIND_PACKAGE_HANDLE_STANDARD_ARGS (CryptoPP REQUIRED_VARS CRYPTOPP_ROOT_DIR
CRYPTOPP_INCLUDE_DIR CRYPTOPP_LIBRARY VERSION_VAR CRYPTOPP_VERSION)


@@ -1,148 +0,0 @@
#.rst:
# FindOpenCL
# ----------
#
# Try to find OpenCL
#
# Once done this will define::
#
# OpenCL_FOUND - True if OpenCL was found
# OpenCL_INCLUDE_DIRS - include directories for OpenCL
# OpenCL_LIBRARIES - link against this library to use OpenCL
# OpenCL_VERSION_STRING - Highest supported OpenCL version (eg. 1.2)
# OpenCL_VERSION_MAJOR - The major version of the OpenCL implementation
# OpenCL_VERSION_MINOR - The minor version of the OpenCL implementation
#
# The module will also define two cache variables::
#
# OpenCL_INCLUDE_DIR - the OpenCL include directory
# OpenCL_LIBRARY - the path to the OpenCL library
#
#=============================================================================
# Copyright 2014 Matthaeus G. Chajdas
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
function(_FIND_OPENCL_VERSION)
include(CheckSymbolExists)
include(CMakePushCheckState)
set(CMAKE_REQUIRED_QUIET ${OpenCL_FIND_QUIETLY})
CMAKE_PUSH_CHECK_STATE()
foreach(VERSION "2_0" "1_2" "1_1" "1_0")
set(CMAKE_REQUIRED_INCLUDES "${OpenCL_INCLUDE_DIR}")
if(APPLE)
CHECK_SYMBOL_EXISTS(
CL_VERSION_${VERSION}
"${OpenCL_INCLUDE_DIR}/OpenCL/cl.h"
OPENCL_VERSION_${VERSION})
else()
CHECK_SYMBOL_EXISTS(
CL_VERSION_${VERSION}
"${OpenCL_INCLUDE_DIR}/CL/cl.h"
OPENCL_VERSION_${VERSION})
endif()
if(OPENCL_VERSION_${VERSION})
string(REPLACE "_" "." VERSION "${VERSION}")
set(OpenCL_VERSION_STRING ${VERSION} PARENT_SCOPE)
string(REGEX MATCHALL "[0-9]+" version_components "${VERSION}")
list(GET version_components 0 major_version)
list(GET version_components 1 minor_version)
set(OpenCL_VERSION_MAJOR ${major_version} PARENT_SCOPE)
set(OpenCL_VERSION_MINOR ${minor_version} PARENT_SCOPE)
break()
endif()
endforeach()
CMAKE_POP_CHECK_STATE()
endfunction()
find_path(OpenCL_INCLUDE_DIR
NAMES
CL/cl.h OpenCL/cl.h
PATHS
ENV "PROGRAMFILES(X86)"
ENV AMDAPPSDKROOT
ENV INTELOCLSDKROOT
ENV NVSDKCOMPUTE_ROOT
ENV CUDA_PATH
ENV ATISTREAMSDKROOT
PATH_SUFFIXES
include
OpenCL/common/inc
"AMD APP/include")
_FIND_OPENCL_VERSION()
if(WIN32)
if(CMAKE_SIZEOF_VOID_P EQUAL 4)
find_library(OpenCL_LIBRARY
NAMES OpenCL
PATHS
ENV "PROGRAMFILES(X86)"
ENV AMDAPPSDKROOT
ENV INTELOCLSDKROOT
ENV CUDA_PATH
ENV NVSDKCOMPUTE_ROOT
ENV ATISTREAMSDKROOT
PATH_SUFFIXES
"AMD APP/lib/x86"
lib/x86
lib/Win32
OpenCL/common/lib/Win32)
elseif(CMAKE_SIZEOF_VOID_P EQUAL 8)
find_library(OpenCL_LIBRARY
NAMES OpenCL
PATHS
ENV "PROGRAMFILES(X86)"
ENV AMDAPPSDKROOT
ENV INTELOCLSDKROOT
ENV CUDA_PATH
ENV NVSDKCOMPUTE_ROOT
ENV ATISTREAMSDKROOT
PATH_SUFFIXES
"AMD APP/lib/x86_64"
lib/x86_64
lib/x64
OpenCL/common/lib/x64)
endif()
else()
find_library(OpenCL_LIBRARY
NAMES OpenCL
PATHS
ENV "PROGRAMFILES(X86)"
ENV AMDAPPSDKROOT
ENV INTELOCLSDKROOT
ENV CUDA_PATH
ENV NVSDKCOMPUTE_ROOT
ENV ATISTREAMSDKROOT
PATH_SUFFIXES
"AMD APP/lib/x86_64"
lib/x86_64
lib/x64
OpenCL/common/lib/x64)
endif()
set(OpenCL_LIBRARIES ${OpenCL_LIBRARY})
set(OpenCL_INCLUDE_DIRS ${OpenCL_INCLUDE_DIR})
include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake)
find_package_handle_standard_args(
OpenCL
FOUND_VAR OpenCL_FOUND
REQUIRED_VARS OpenCL_LIBRARY OpenCL_INCLUDE_DIR
VERSION_VAR OpenCL_VERSION_STRING)
mark_as_advanced(
OpenCL_INCLUDE_DIR
OpenCL_LIBRARY)


@@ -1,382 +0,0 @@
#.rst:
# FindPackageHandleStandardArgs
# -----------------------------
#
#
#
# FIND_PACKAGE_HANDLE_STANDARD_ARGS(<name> ... )
#
# This function is intended to be used in FindXXX.cmake modules files.
# It handles the REQUIRED, QUIET and version-related arguments to
# find_package(). It also sets the <packagename>_FOUND variable. The
# package is considered found if all variables <var1>... listed contain
# valid results, e.g. valid filepaths.
#
# There are two modes of this function. The first argument in both
# modes is the name of the Find-module where it is called (in original
# casing).
#
# The first simple mode looks like this:
#
# ::
#
# FIND_PACKAGE_HANDLE_STANDARD_ARGS(<name>
# (DEFAULT_MSG|"Custom failure message") <var1>...<varN> )
#
# If the variables <var1> to <varN> are all valid, then
# <UPPERCASED_NAME>_FOUND will be set to TRUE. If DEFAULT_MSG is given
# as second argument, then the function will generate itself useful
# success and error messages. You can also supply a custom error
# message for the failure case. This is not recommended.
#
# The second mode is more powerful and also supports version checking:
#
# ::
#
# FIND_PACKAGE_HANDLE_STANDARD_ARGS(NAME
# [FOUND_VAR <resultVar>]
# [REQUIRED_VARS <var1>...<varN>]
# [VERSION_VAR <versionvar>]
# [HANDLE_COMPONENTS]
# [CONFIG_MODE]
# [FAIL_MESSAGE "Custom failure message"] )
#
# In this mode, the name of the result-variable can be set either to
# either <UPPERCASED_NAME>_FOUND or <OriginalCase_Name>_FOUND using the
# FOUND_VAR option. Other names for the result-variable are not
# allowed. So for a Find-module named FindFooBar.cmake, the two
# possible names are FooBar_FOUND and FOOBAR_FOUND. It is recommended
# to use the original case version. If the FOUND_VAR option is not
# used, the default is <UPPERCASED_NAME>_FOUND.
#
# As in the simple mode, if <var1> through <varN> are all valid,
# <packagename>_FOUND will be set to TRUE. After REQUIRED_VARS the
# variables which are required for this package are listed. Following
# VERSION_VAR the name of the variable can be specified which holds the
# version of the package which has been found. If this is done, this
# version will be checked against the (potentially) specified required
# version used in the find_package() call. The EXACT keyword is also
# handled. The default messages include information about the required
# version and the version which has been actually found, both if the
# version is ok or not. If the package supports components, use the
# HANDLE_COMPONENTS option to enable handling them. In this case,
# find_package_handle_standard_args() will report which components have
# been found and which are missing, and the <packagename>_FOUND variable
# will be set to FALSE if any of the required components (i.e. not the
# ones listed after OPTIONAL_COMPONENTS) are missing. Use the option
# CONFIG_MODE if your FindXXX.cmake module is a wrapper for a
# find_package(... NO_MODULE) call. In this case VERSION_VAR will be
# set to <NAME>_VERSION and the macro will automatically check whether
# the Config module was found. Via FAIL_MESSAGE a custom failure
# message can be specified, if this is not used, the default message
# will be displayed.
#
# Example for mode 1:
#
# ::
#
# find_package_handle_standard_args(LibXml2 DEFAULT_MSG
# LIBXML2_LIBRARY LIBXML2_INCLUDE_DIR)
#
#
#
# LibXml2 is considered to be found, if both LIBXML2_LIBRARY and
# LIBXML2_INCLUDE_DIR are valid. Then also LIBXML2_FOUND is set to
# TRUE. If it is not found and REQUIRED was used, it fails with
# FATAL_ERROR, independent whether QUIET was used or not. If it is
# found, success will be reported, including the content of <var1>. On
# repeated Cmake runs, the same message won't be printed again.
#
# Example for mode 2:
#
# ::
#
# find_package_handle_standard_args(LibXslt
# FOUND_VAR LibXslt_FOUND
# REQUIRED_VARS LibXslt_LIBRARIES LibXslt_INCLUDE_DIRS
# VERSION_VAR LibXslt_VERSION_STRING)
#
# In this case, LibXslt is considered to be found if the variable(s)
# listed after REQUIRED_VAR are all valid, i.e. LibXslt_LIBRARIES and
# LibXslt_INCLUDE_DIRS in this case. The result will then be stored in
# LibXslt_FOUND . Also the version of LibXslt will be checked by using
# the version contained in LibXslt_VERSION_STRING. Since no
# FAIL_MESSAGE is given, the default messages will be printed.
#
# Another example for mode 2:
#
# ::
#
# find_package(Automoc4 QUIET NO_MODULE HINTS /opt/automoc4)
# find_package_handle_standard_args(Automoc4 CONFIG_MODE)
#
# In this case, FindAutmoc4.cmake wraps a call to find_package(Automoc4
# NO_MODULE) and adds an additional search directory for automoc4. Here
# the result will be stored in AUTOMOC4_FOUND. The following
# FIND_PACKAGE_HANDLE_STANDARD_ARGS() call produces a proper
# success/error message.
#=============================================================================
# Copyright 2007-2009 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
include(${CMAKE_CURRENT_LIST_DIR}/FindPackageMessage.cmake)
include(${CMAKE_CURRENT_LIST_DIR}/CMakeParseArguments.cmake)
# internal helper macro
macro(_FPHSA_FAILURE_MESSAGE _msg)
if (${_NAME}_FIND_REQUIRED)
message(FATAL_ERROR "${_msg}")
else ()
if (NOT ${_NAME}_FIND_QUIETLY)
message(STATUS "${_msg}")
endif ()
endif ()
endmacro()
# internal helper macro to generate the failure message when used in CONFIG_MODE:
macro(_FPHSA_HANDLE_FAILURE_CONFIG_MODE)
# <name>_CONFIG is set, but FOUND is false, this means that some other of the REQUIRED_VARS was not found:
if(${_NAME}_CONFIG)
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: missing: ${MISSING_VARS} (found ${${_NAME}_CONFIG} ${VERSION_MSG})")
else()
# If _CONSIDERED_CONFIGS is set, the config-file has been found, but no suitable version.
# List them all in the error message:
if(${_NAME}_CONSIDERED_CONFIGS)
set(configsText "")
list(LENGTH ${_NAME}_CONSIDERED_CONFIGS configsCount)
math(EXPR configsCount "${configsCount} - 1")
foreach(currentConfigIndex RANGE ${configsCount})
list(GET ${_NAME}_CONSIDERED_CONFIGS ${currentConfigIndex} filename)
list(GET ${_NAME}_CONSIDERED_VERSIONS ${currentConfigIndex} version)
set(configsText "${configsText} ${filename} (version ${version})\n")
endforeach()
if (${_NAME}_NOT_FOUND_MESSAGE)
set(configsText "${configsText} Reason given by package: ${${_NAME}_NOT_FOUND_MESSAGE}\n")
endif()
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} ${VERSION_MSG}, checked the following files:\n${configsText}")
else()
# Simple case: No Config-file was found at all:
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: found neither ${_NAME}Config.cmake nor ${_NAME_LOWER}-config.cmake ${VERSION_MSG}")
endif()
endif()
endmacro()
function(FIND_PACKAGE_HANDLE_STANDARD_ARGS _NAME _FIRST_ARG)
# set up the arguments for CMAKE_PARSE_ARGUMENTS and check whether we are in
# new extended or in the "old" mode:
set(options CONFIG_MODE HANDLE_COMPONENTS)
set(oneValueArgs FAIL_MESSAGE VERSION_VAR FOUND_VAR)
set(multiValueArgs REQUIRED_VARS)
set(_KEYWORDS_FOR_EXTENDED_MODE ${options} ${oneValueArgs} ${multiValueArgs} )
list(FIND _KEYWORDS_FOR_EXTENDED_MODE "${_FIRST_ARG}" INDEX)
if(${INDEX} EQUAL -1)
set(FPHSA_FAIL_MESSAGE ${_FIRST_ARG})
set(FPHSA_REQUIRED_VARS ${ARGN})
set(FPHSA_VERSION_VAR)
else()
CMAKE_PARSE_ARGUMENTS(FPHSA "${options}" "${oneValueArgs}" "${multiValueArgs}" ${_FIRST_ARG} ${ARGN})
if(FPHSA_UNPARSED_ARGUMENTS)
message(FATAL_ERROR "Unknown keywords given to FIND_PACKAGE_HANDLE_STANDARD_ARGS(): \"${FPHSA_UNPARSED_ARGUMENTS}\"")
endif()
if(NOT FPHSA_FAIL_MESSAGE)
set(FPHSA_FAIL_MESSAGE "DEFAULT_MSG")
endif()
endif()
# now that we collected all arguments, process them
if("x${FPHSA_FAIL_MESSAGE}" STREQUAL "xDEFAULT_MSG")
set(FPHSA_FAIL_MESSAGE "Could NOT find ${_NAME}")
endif()
# In config-mode, we rely on the variable <package>_CONFIG, which is set by find_package()
# when it successfully found the config-file, including version checking:
if(FPHSA_CONFIG_MODE)
list(INSERT FPHSA_REQUIRED_VARS 0 ${_NAME}_CONFIG)
list(REMOVE_DUPLICATES FPHSA_REQUIRED_VARS)
set(FPHSA_VERSION_VAR ${_NAME}_VERSION)
endif()
if(NOT FPHSA_REQUIRED_VARS)
message(FATAL_ERROR "No REQUIRED_VARS specified for FIND_PACKAGE_HANDLE_STANDARD_ARGS()")
endif()
list(GET FPHSA_REQUIRED_VARS 0 _FIRST_REQUIRED_VAR)
string(TOUPPER ${_NAME} _NAME_UPPER)
string(TOLOWER ${_NAME} _NAME_LOWER)
if(FPHSA_FOUND_VAR)
if(FPHSA_FOUND_VAR MATCHES "^${_NAME}_FOUND$" OR FPHSA_FOUND_VAR MATCHES "^${_NAME_UPPER}_FOUND$")
set(_FOUND_VAR ${FPHSA_FOUND_VAR})
else()
message(FATAL_ERROR "The argument for FOUND_VAR is \"${FPHSA_FOUND_VAR}\", but only \"${_NAME}_FOUND\" and \"${_NAME_UPPER}_FOUND\" are valid names.")
endif()
else()
set(_FOUND_VAR ${_NAME_UPPER}_FOUND)
endif()
# collect all variables which were not found, so they can be printed, so the
# user knows better what went wrong (#6375)
set(MISSING_VARS "")
set(DETAILS "")
# check if all passed variables are valid
unset(${_FOUND_VAR})
foreach(_CURRENT_VAR ${FPHSA_REQUIRED_VARS})
if(NOT ${_CURRENT_VAR})
set(${_FOUND_VAR} FALSE)
set(MISSING_VARS "${MISSING_VARS} ${_CURRENT_VAR}")
else()
set(DETAILS "${DETAILS}[${${_CURRENT_VAR}}]")
endif()
endforeach()
if(NOT "${${_FOUND_VAR}}" STREQUAL "FALSE")
set(${_FOUND_VAR} TRUE)
endif()
# component handling
unset(FOUND_COMPONENTS_MSG)
unset(MISSING_COMPONENTS_MSG)
if(FPHSA_HANDLE_COMPONENTS)
foreach(comp ${${_NAME}_FIND_COMPONENTS})
if(${_NAME}_${comp}_FOUND)
if(NOT DEFINED FOUND_COMPONENTS_MSG)
set(FOUND_COMPONENTS_MSG "found components: ")
endif()
set(FOUND_COMPONENTS_MSG "${FOUND_COMPONENTS_MSG} ${comp}")
else()
if(NOT DEFINED MISSING_COMPONENTS_MSG)
set(MISSING_COMPONENTS_MSG "missing components: ")
endif()
set(MISSING_COMPONENTS_MSG "${MISSING_COMPONENTS_MSG} ${comp}")
if(${_NAME}_FIND_REQUIRED_${comp})
set(${_FOUND_VAR} FALSE)
set(MISSING_VARS "${MISSING_VARS} ${comp}")
endif()
endif()
endforeach()
set(COMPONENT_MSG "${FOUND_COMPONENTS_MSG} ${MISSING_COMPONENTS_MSG}")
set(DETAILS "${DETAILS}[c${COMPONENT_MSG}]")
endif()
# version handling:
set(VERSION_MSG "")
set(VERSION_OK TRUE)
set(VERSION ${${FPHSA_VERSION_VAR}})
# check with DEFINED here as the requested or found version may be "0"
if (DEFINED ${_NAME}_FIND_VERSION)
if(DEFINED ${FPHSA_VERSION_VAR})
if(${_NAME}_FIND_VERSION_EXACT) # exact version required
# count the dots in the version string
string(REGEX REPLACE "[^.]" "" _VERSION_DOTS "${VERSION}")
# add one dot because there is one dot more than there are components
string(LENGTH "${_VERSION_DOTS}." _VERSION_DOTS)
if (_VERSION_DOTS GREATER ${_NAME}_FIND_VERSION_COUNT)
# Because of the C++ implementation of find_package() ${_NAME}_FIND_VERSION_COUNT
# is at most 4 here. Therefore a simple lookup table is used.
if (${_NAME}_FIND_VERSION_COUNT EQUAL 1)
set(_VERSION_REGEX "[^.]*")
elseif (${_NAME}_FIND_VERSION_COUNT EQUAL 2)
set(_VERSION_REGEX "[^.]*\\.[^.]*")
elseif (${_NAME}_FIND_VERSION_COUNT EQUAL 3)
set(_VERSION_REGEX "[^.]*\\.[^.]*\\.[^.]*")
else ()
set(_VERSION_REGEX "[^.]*\\.[^.]*\\.[^.]*\\.[^.]*")
endif ()
string(REGEX REPLACE "^(${_VERSION_REGEX})\\..*" "\\1" _VERSION_HEAD "${VERSION}")
unset(_VERSION_REGEX)
if (NOT ${_NAME}_FIND_VERSION VERSION_EQUAL _VERSION_HEAD)
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is exact version \"${${_NAME}_FIND_VERSION}\"")
set(VERSION_OK FALSE)
else ()
set(VERSION_MSG "(found suitable exact version \"${VERSION}\")")
endif ()
unset(_VERSION_HEAD)
else ()
if (NOT "${${_NAME}_FIND_VERSION}" VERSION_EQUAL "${VERSION}")
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is exact version \"${${_NAME}_FIND_VERSION}\"")
set(VERSION_OK FALSE)
else ()
set(VERSION_MSG "(found suitable exact version \"${VERSION}\")")
endif ()
endif ()
unset(_VERSION_DOTS)
else() # minimum version specified:
if ("${${_NAME}_FIND_VERSION}" VERSION_GREATER "${VERSION}")
set(VERSION_MSG "Found unsuitable version \"${VERSION}\", but required is at least \"${${_NAME}_FIND_VERSION}\"")
set(VERSION_OK FALSE)
else ()
set(VERSION_MSG "(found suitable version \"${VERSION}\", minimum required is \"${${_NAME}_FIND_VERSION}\")")
endif ()
endif()
else()
# if the package was not found, but a version was given, add that to the output:
if(${_NAME}_FIND_VERSION_EXACT)
set(VERSION_MSG "(Required is exact version \"${${_NAME}_FIND_VERSION}\")")
else()
set(VERSION_MSG "(Required is at least version \"${${_NAME}_FIND_VERSION}\")")
endif()
endif()
else ()
if(VERSION)
set(VERSION_MSG "(found version \"${VERSION}\")")
endif()
endif ()
if(VERSION_OK)
set(DETAILS "${DETAILS}[v${VERSION}(${${_NAME}_FIND_VERSION})]")
else()
set(${_FOUND_VAR} FALSE)
endif()
# print the result:
if (${_FOUND_VAR})
FIND_PACKAGE_MESSAGE(${_NAME} "Found ${_NAME}: ${${_FIRST_REQUIRED_VAR}} ${VERSION_MSG} ${COMPONENT_MSG}" "${DETAILS}")
else ()
if(FPHSA_CONFIG_MODE)
_FPHSA_HANDLE_FAILURE_CONFIG_MODE()
else()
if(NOT VERSION_OK)
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE}: ${VERSION_MSG} (found ${${_FIRST_REQUIRED_VAR}})")
else()
_FPHSA_FAILURE_MESSAGE("${FPHSA_FAIL_MESSAGE} (missing: ${MISSING_VARS}) ${VERSION_MSG}")
endif()
endif()
endif ()
set(${_FOUND_VAR} ${${_FOUND_VAR}} PARENT_SCOPE)
endfunction()


@@ -1,57 +0,0 @@
#.rst:
# FindPackageMessage
# ------------------
#
#
#
# FIND_PACKAGE_MESSAGE(<name> "message for user" "find result details")
#
# This macro is intended to be used in FindXXX.cmake modules files. It
# will print a message once for each unique find result. This is useful
# for telling the user where a package was found. The first argument
# specifies the name (XXX) of the package. The second argument
# specifies the message to display. The third argument lists details
# about the find result so that if they change the message will be
# displayed again. The macro also obeys the QUIET argument to the
# find_package command.
#
# Example:
#
# ::
#
# if(X11_FOUND)
# FIND_PACKAGE_MESSAGE(X11 "Found X11: ${X11_X11_LIB}"
# "[${X11_X11_LIB}][${X11_INCLUDE_DIR}]")
# else()
# ...
# endif()
#=============================================================================
# Copyright 2008-2009 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
function(FIND_PACKAGE_MESSAGE pkg msg details)
# Avoid printing a message repeatedly for the same find result.
if(NOT ${pkg}_FIND_QUIETLY)
string(REPLACE "\n" "" details "${details}")
set(DETAILS_VAR FIND_PACKAGE_MESSAGE_DETAILS_${pkg})
if(NOT "${details}" STREQUAL "${${DETAILS_VAR}}")
# The message has not yet been printed.
message(STATUS "${msg}")
# Save the find details in the cache to avoid printing the same
# message again.
set("${DETAILS_VAR}" "${details}"
CACHE INTERNAL "Details about finding ${pkg}")
endif()
endif()
endfunction()


@@ -1,13 +0,0 @@
set(LIBRARY cryptopp)
include_directories(../../cryptopp)
# todo, subset
file(GLOB HEADERS "../../cryptopp/*.h")
file(GLOB SOURCE "../../cryptopp/*.cpp")
add_library(${LIBRARY} ${HEADERS} ${SOURCE})
set(CRYPTOPP_INCLUDE_DIRS "../.." "../../../" PARENT_SCOPE)
set(CRYPTOPP_LIBRARIES ${LIBRARY} PARENT_SCOPE)
set(CRYPTOPP_FOUND TRUE PARENT_SCOPE)


@@ -75,6 +75,7 @@ func defaultDir() string {
// and automatic memory management.
type cache struct {
epoch uint64
used time.Time
test bool
gen sync.Once // ensures cache is only generated once.
@@ -104,14 +105,13 @@ func freeCache(cache *cache) {
cache.ptr = nil
}
// Light implements the Verify half of the proof of work.
// It uses a small in-memory cache to verify the nonces
// found by Full.
// Light implements the Verify half of the proof of work. It uses a few small
// in-memory caches to verify the nonces found by Full.
type Light struct {
test bool // if set use a smaller cache size
mu sync.Mutex // protects current
current *cache // last cache which was generated.
// TODO: keep multiple caches.
test bool // if set use a smaller cache size
mu sync.Mutex // protects the per-epoch map of DAGs
caches map[uint64]*cache // currently cached verification DAGs
NumCaches int // Maximum number of DAGs to cache before eviction (only init, don't modify)
}
// Verify checks whether the block's nonce is valid.
@@ -173,16 +173,36 @@ func hashToH256(in common.Hash) C.ethash_h256_t {
func (l *Light) getCache(blockNum uint64) *cache {
var c *cache
epoch := blockNum / epochLength
// Update or reuse the last cache.
// If we have a PoW for that epoch, use that
l.mu.Lock()
if l.current != nil && l.current.epoch == epoch {
c = l.current
} else {
c = &cache{epoch: epoch, test: l.test}
l.current = c
if l.caches == nil {
l.caches = make(map[uint64]*cache)
}
if l.NumCaches == 0 {
l.NumCaches = 3
}
c = l.caches[epoch]
if c == nil {
// No cached DAG, evict the oldest if the cache limit was reached
if len(l.caches) >= l.NumCaches {
var evict *cache
for _, cache := range l.caches {
if evict == nil || evict.used.After(cache.used) {
evict = cache
}
}
glog.V(logger.Info).Infof("Evicting DAG for epoch %d in favour of epoch %d", evict.epoch, epoch)
delete(l.caches, evict.epoch)
}
// Create and return a new DAG for the epoch
c = &cache{epoch: epoch, test: l.test}
l.caches[epoch] = c
}
c.used = time.Now()
l.mu.Unlock()
// Wait for the cache to finish generating.
// Wait for generation to finish and return the cache
c.generate()
return c
}
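The getCache logic above caps the number of retained verification caches at NumCaches (defaulting to 3) and evicts the entry with the oldest "used" timestamp. A minimal, self-contained sketch of that eviction policy, using a hypothetical entry type rather than the package's internal cache struct:

package main

import (
	"fmt"
	"time"
)

// entry stands in for the package's cache struct: one verification
// cache per epoch, stamped with its last use.
type entry struct {
	epoch uint64
	used  time.Time
}

// evictOldest drops the least-recently-used entry once the map is full,
// mirroring the timestamp comparison in getCache above.
func evictOldest(caches map[uint64]*entry, max int) {
	if len(caches) < max {
		return
	}
	var oldest *entry
	for _, e := range caches {
		if oldest == nil || oldest.used.After(e.used) {
			oldest = e
		}
	}
	delete(caches, oldest.epoch)
}

func main() {
	caches := make(map[uint64]*entry)
	for epoch := uint64(0); epoch < 5; epoch++ {
		evictOldest(caches, 3)
		caches[epoch] = &entry{epoch: epoch, used: time.Unix(int64(epoch), 0)}
	}
	fmt.Println(len(caches)) // 3 entries survive: epochs 2, 3 and 4
}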
@@ -362,9 +382,13 @@ type Ethash struct {
}
// New creates an instance of the proof of work.
// A single instance of Light is shared across all instances
// created with New.
func New() *Ethash {
return &Ethash{new(Light), &Full{turbo: true}}
}
// NewShared creates an instance of the proof of work, where a single instance
// of the Light cache is shared across all instances created with NewShared.
func NewShared() *Ethash {
return &Ethash{sharedLight, &Full{turbo: true}}
}
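The commit title mentions switching the tests to the shared variant. A minimal usage sketch, assuming only the exported New and NewShared constructors shown above (the import path matches the Godeps entry at the top of this diff): every value returned by NewShared wraps the same package-level Light, so per-epoch verification caches are built once and reused, whereas New keeps a private set per instance.

package main

import (
	"fmt"

	"github.com/ethereum/ethash"
)

func main() {
	// Private verification caches: this engine generates its own.
	private := ethash.New()

	// Shared verification caches: both engines reuse the package-level
	// Light, so cache generation happens at most once per epoch overall.
	shared1 := ethash.NewShared()
	shared2 := ethash.NewShared()

	fmt.Println(private != nil, shared1 != nil, shared2 != nil)
}

In a test suite this keeps repeated PoW verification cheap, since only the first test pays the cache generation cost for a given epoch.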


@@ -1,22 +0,0 @@
The MIT License (MIT)
Copyright (c) 2015 Tim Hughes
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,190 +0,0 @@
// ethash.js
// Tim Hughes <tim@twistedfury.com>
// Revision 19
/*jslint node: true, shadow:true */
"use strict";
var Keccak = require('./keccak');
var util = require('./util');
// 32-bit unsigned modulo
function mod32(x, n)
{
return (x>>>0) % (n>>>0);
}
function fnv(x, y)
{
// js integer multiply by 0x01000193 will lose precision
return ((x*0x01000000 | 0) + (x*0x193 | 0)) ^ y;
}
function computeCache(params, seedWords)
{
var cache = new Uint32Array(params.cacheSize >> 2);
var cacheNodeCount = params.cacheSize >> 6;
// Initialize cache
var keccak = new Keccak();
keccak.digestWords(cache, 0, 16, seedWords, 0, seedWords.length);
for (var n = 1; n < cacheNodeCount; ++n)
{
keccak.digestWords(cache, n<<4, 16, cache, (n-1)<<4, 16);
}
var tmp = new Uint32Array(16);
// Do randmemohash passes
for (var r = 0; r < params.cacheRounds; ++r)
{
for (var n = 0; n < cacheNodeCount; ++n)
{
var p0 = mod32(n + cacheNodeCount - 1, cacheNodeCount) << 4;
var p1 = mod32(cache[n<<4|0], cacheNodeCount) << 4;
for (var w = 0; w < 16; w=(w+1)|0)
{
tmp[w] = cache[p0 | w] ^ cache[p1 | w];
}
keccak.digestWords(cache, n<<4, 16, tmp, 0, tmp.length);
}
}
return cache;
}
function computeDagNode(o_node, params, cache, keccak, nodeIndex)
{
var cacheNodeCount = params.cacheSize >> 6;
var dagParents = params.dagParents;
var c = (nodeIndex % cacheNodeCount) << 4;
var mix = o_node;
for (var w = 0; w < 16; ++w)
{
mix[w] = cache[c|w];
}
mix[0] ^= nodeIndex;
keccak.digestWords(mix, 0, 16, mix, 0, 16);
for (var p = 0; p < dagParents; ++p)
{
// compute cache node (word) index
c = mod32(fnv(nodeIndex ^ p, mix[p&15]), cacheNodeCount) << 4;
for (var w = 0; w < 16; ++w)
{
mix[w] = fnv(mix[w], cache[c|w]);
}
}
keccak.digestWords(mix, 0, 16, mix, 0, 16);
}
function computeHashInner(mix, params, cache, keccak, tempNode)
{
var mixParents = params.mixParents|0;
var mixWordCount = params.mixSize >> 2;
var mixNodeCount = mixWordCount >> 4;
var dagPageCount = (params.dagSize / params.mixSize) >> 0;
// grab initial first word
var s0 = mix[0];
// initialise mix from initial 64 bytes
for (var w = 16; w < mixWordCount; ++w)
{
mix[w] = mix[w & 15];
}
for (var a = 0; a < mixParents; ++a)
{
var p = mod32(fnv(s0 ^ a, mix[a & (mixWordCount-1)]), dagPageCount);
var d = (p * mixNodeCount)|0;
for (var n = 0, w = 0; n < mixNodeCount; ++n, w += 16)
{
computeDagNode(tempNode, params, cache, keccak, (d + n)|0);
for (var v = 0; v < 16; ++v)
{
mix[w|v] = fnv(mix[w|v], tempNode[v]);
}
}
}
}
function convertSeed(seed)
{
// todo, reconcile with spec, byte ordering?
// todo, big-endian conversion
var newSeed = util.toWords(seed);
if (newSeed === null)
throw Error("Invalid seed '" + seed + "'");
return newSeed;
}
exports.defaultParams = function()
{
return {
cacheSize: 1048384,
cacheRounds: 3,
dagSize: 1073739904,
dagParents: 256,
mixSize: 128,
mixParents: 64,
};
};
exports.Ethash = function(params, seed)
{
// precompute cache and related values
seed = convertSeed(seed);
var cache = computeCache(params, seed);
// preallocate buffers/etc
var initBuf = new ArrayBuffer(96);
var initBytes = new Uint8Array(initBuf);
var initWords = new Uint32Array(initBuf);
var mixWords = new Uint32Array(params.mixSize / 4);
var tempNode = new Uint32Array(16);
var keccak = new Keccak();
var retWords = new Uint32Array(8);
var retBytes = new Uint8Array(retWords.buffer); // supposedly read-only
this.hash = function(header, nonce)
{
// compute initial hash
initBytes.set(header, 0);
initBytes.set(nonce, 32);
keccak.digestWords(initWords, 0, 16, initWords, 0, 8 + nonce.length/4);
// compute mix
for (var i = 0; i != 16; ++i)
{
mixWords[i] = initWords[i];
}
computeHashInner(mixWords, params, cache, keccak, tempNode);
// compress mix and append to initWords
for (var i = 0; i != mixWords.length; i += 4)
{
initWords[16 + i/4] = fnv(fnv(fnv(mixWords[i], mixWords[i+1]), mixWords[i+2]), mixWords[i+3]);
}
// final Keccak hashes
keccak.digestWords(retWords, 0, 8, initWords, 0, 24); // Keccak-256(s + cmix)
return retBytes;
};
this.cacheDigest = function()
{
return keccak.digest(32, new Uint8Array(cache.buffer));
};
};


@@ -1,404 +0,0 @@
// keccak.js
// Tim Hughes <tim@twistedfury.com>
// derived from Markku-Juhani O. Saarinen's C code (http://keccak.noekeon.org/readable_code.html)
/*jslint node: true, shadow:true */
"use strict";
var Keccak_f1600_RC = new Uint32Array([
0x00000001, 0x00000000,
0x00008082, 0x00000000,
0x0000808a, 0x80000000,
0x80008000, 0x80000000,
0x0000808b, 0x00000000,
0x80000001, 0x00000000,
0x80008081, 0x80000000,
0x00008009, 0x80000000,
0x0000008a, 0x00000000,
0x00000088, 0x00000000,
0x80008009, 0x00000000,
0x8000000a, 0x00000000,
0x8000808b, 0x00000000,
0x0000008b, 0x80000000,
0x00008089, 0x80000000,
0x00008003, 0x80000000,
0x00008002, 0x80000000,
0x00000080, 0x80000000,
0x0000800a, 0x00000000,
0x8000000a, 0x80000000,
0x80008081, 0x80000000,
0x00008080, 0x80000000,
0x80000001, 0x00000000,
0x80008008, 0x80000000
]);
function keccak_f1600(outState, outOffset, outSize, inState)
{
// todo, handle big endian loads
var a00l = inState[0]|0;
var a00h = inState[1]|0;
var a01l = inState[2]|0;
var a01h = inState[3]|0;
var a02l = inState[4]|0;
var a02h = inState[5]|0;
var a03l = inState[6]|0;
var a03h = inState[7]|0;
var a04l = inState[8]|0;
var a04h = inState[9]|0;
var a05l = inState[10]|0;
var a05h = inState[11]|0;
var a06l = inState[12]|0;
var a06h = inState[13]|0;
var a07l = inState[14]|0;
var a07h = inState[15]|0;
var a08l = inState[16]|0;
var a08h = inState[17]|0;
var a09l = inState[18]|0;
var a09h = inState[19]|0;
var a10l = inState[20]|0;
var a10h = inState[21]|0;
var a11l = inState[22]|0;
var a11h = inState[23]|0;
var a12l = inState[24]|0;
var a12h = inState[25]|0;
var a13l = inState[26]|0;
var a13h = inState[27]|0;
var a14l = inState[28]|0;
var a14h = inState[29]|0;
var a15l = inState[30]|0;
var a15h = inState[31]|0;
var a16l = inState[32]|0;
var a16h = inState[33]|0;
var a17l = inState[34]|0;
var a17h = inState[35]|0;
var a18l = inState[36]|0;
var a18h = inState[37]|0;
var a19l = inState[38]|0;
var a19h = inState[39]|0;
var a20l = inState[40]|0;
var a20h = inState[41]|0;
var a21l = inState[42]|0;
var a21h = inState[43]|0;
var a22l = inState[44]|0;
var a22h = inState[45]|0;
var a23l = inState[46]|0;
var a23h = inState[47]|0;
var a24l = inState[48]|0;
var a24h = inState[49]|0;
var b00l, b00h, b01l, b01h, b02l, b02h, b03l, b03h, b04l, b04h;
var b05l, b05h, b06l, b06h, b07l, b07h, b08l, b08h, b09l, b09h;
var b10l, b10h, b11l, b11h, b12l, b12h, b13l, b13h, b14l, b14h;
var b15l, b15h, b16l, b16h, b17l, b17h, b18l, b18h, b19l, b19h;
var b20l, b20h, b21l, b21h, b22l, b22h, b23l, b23h, b24l, b24h;
var tl, nl;
var th, nh;
for (var r = 0; r < 48; r = (r+2)|0)
{
// Theta
b00l = a00l ^ a05l ^ a10l ^ a15l ^ a20l;
b00h = a00h ^ a05h ^ a10h ^ a15h ^ a20h;
b01l = a01l ^ a06l ^ a11l ^ a16l ^ a21l;
b01h = a01h ^ a06h ^ a11h ^ a16h ^ a21h;
b02l = a02l ^ a07l ^ a12l ^ a17l ^ a22l;
b02h = a02h ^ a07h ^ a12h ^ a17h ^ a22h;
b03l = a03l ^ a08l ^ a13l ^ a18l ^ a23l;
b03h = a03h ^ a08h ^ a13h ^ a18h ^ a23h;
b04l = a04l ^ a09l ^ a14l ^ a19l ^ a24l;
b04h = a04h ^ a09h ^ a14h ^ a19h ^ a24h;
tl = b04l ^ (b01l << 1 | b01h >>> 31);
th = b04h ^ (b01h << 1 | b01l >>> 31);
a00l ^= tl;
a00h ^= th;
a05l ^= tl;
a05h ^= th;
a10l ^= tl;
a10h ^= th;
a15l ^= tl;
a15h ^= th;
a20l ^= tl;
a20h ^= th;
tl = b00l ^ (b02l << 1 | b02h >>> 31);
th = b00h ^ (b02h << 1 | b02l >>> 31);
a01l ^= tl;
a01h ^= th;
a06l ^= tl;
a06h ^= th;
a11l ^= tl;
a11h ^= th;
a16l ^= tl;
a16h ^= th;
a21l ^= tl;
a21h ^= th;
tl = b01l ^ (b03l << 1 | b03h >>> 31);
th = b01h ^ (b03h << 1 | b03l >>> 31);
a02l ^= tl;
a02h ^= th;
a07l ^= tl;
a07h ^= th;
a12l ^= tl;
a12h ^= th;
a17l ^= tl;
a17h ^= th;
a22l ^= tl;
a22h ^= th;
tl = b02l ^ (b04l << 1 | b04h >>> 31);
th = b02h ^ (b04h << 1 | b04l >>> 31);
a03l ^= tl;
a03h ^= th;
a08l ^= tl;
a08h ^= th;
a13l ^= tl;
a13h ^= th;
a18l ^= tl;
a18h ^= th;
a23l ^= tl;
a23h ^= th;
tl = b03l ^ (b00l << 1 | b00h >>> 31);
th = b03h ^ (b00h << 1 | b00l >>> 31);
a04l ^= tl;
a04h ^= th;
a09l ^= tl;
a09h ^= th;
a14l ^= tl;
a14h ^= th;
a19l ^= tl;
a19h ^= th;
a24l ^= tl;
a24h ^= th;
// Rho Pi
b00l = a00l;
b00h = a00h;
b10l = a01l << 1 | a01h >>> 31;
b10h = a01h << 1 | a01l >>> 31;
b07l = a10l << 3 | a10h >>> 29;
b07h = a10h << 3 | a10l >>> 29;
b11l = a07l << 6 | a07h >>> 26;
b11h = a07h << 6 | a07l >>> 26;
b17l = a11l << 10 | a11h >>> 22;
b17h = a11h << 10 | a11l >>> 22;
b18l = a17l << 15 | a17h >>> 17;
b18h = a17h << 15 | a17l >>> 17;
b03l = a18l << 21 | a18h >>> 11;
b03h = a18h << 21 | a18l >>> 11;
b05l = a03l << 28 | a03h >>> 4;
b05h = a03h << 28 | a03l >>> 4;
b16l = a05h << 4 | a05l >>> 28;
b16h = a05l << 4 | a05h >>> 28;
b08l = a16h << 13 | a16l >>> 19;
b08h = a16l << 13 | a16h >>> 19;
b21l = a08h << 23 | a08l >>> 9;
b21h = a08l << 23 | a08h >>> 9;
b24l = a21l << 2 | a21h >>> 30;
b24h = a21h << 2 | a21l >>> 30;
b04l = a24l << 14 | a24h >>> 18;
b04h = a24h << 14 | a24l >>> 18;
b15l = a04l << 27 | a04h >>> 5;
b15h = a04h << 27 | a04l >>> 5;
b23l = a15h << 9 | a15l >>> 23;
b23h = a15l << 9 | a15h >>> 23;
b19l = a23h << 24 | a23l >>> 8;
b19h = a23l << 24 | a23h >>> 8;
b13l = a19l << 8 | a19h >>> 24;
b13h = a19h << 8 | a19l >>> 24;
b12l = a13l << 25 | a13h >>> 7;
b12h = a13h << 25 | a13l >>> 7;
b02l = a12h << 11 | a12l >>> 21;
b02h = a12l << 11 | a12h >>> 21;
b20l = a02h << 30 | a02l >>> 2;
b20h = a02l << 30 | a02h >>> 2;
b14l = a20l << 18 | a20h >>> 14;
b14h = a20h << 18 | a20l >>> 14;
b22l = a14h << 7 | a14l >>> 25;
b22h = a14l << 7 | a14h >>> 25;
b09l = a22h << 29 | a22l >>> 3;
b09h = a22l << 29 | a22h >>> 3;
b06l = a09l << 20 | a09h >>> 12;
b06h = a09h << 20 | a09l >>> 12;
b01l = a06h << 12 | a06l >>> 20;
b01h = a06l << 12 | a06h >>> 20;
// Chi
a00l = b00l ^ ~b01l & b02l;
a00h = b00h ^ ~b01h & b02h;
a01l = b01l ^ ~b02l & b03l;
a01h = b01h ^ ~b02h & b03h;
a02l = b02l ^ ~b03l & b04l;
a02h = b02h ^ ~b03h & b04h;
a03l = b03l ^ ~b04l & b00l;
a03h = b03h ^ ~b04h & b00h;
a04l = b04l ^ ~b00l & b01l;
a04h = b04h ^ ~b00h & b01h;
a05l = b05l ^ ~b06l & b07l;
a05h = b05h ^ ~b06h & b07h;
a06l = b06l ^ ~b07l & b08l;
a06h = b06h ^ ~b07h & b08h;
a07l = b07l ^ ~b08l & b09l;
a07h = b07h ^ ~b08h & b09h;
a08l = b08l ^ ~b09l & b05l;
a08h = b08h ^ ~b09h & b05h;
a09l = b09l ^ ~b05l & b06l;
a09h = b09h ^ ~b05h & b06h;
a10l = b10l ^ ~b11l & b12l;
a10h = b10h ^ ~b11h & b12h;
a11l = b11l ^ ~b12l & b13l;
a11h = b11h ^ ~b12h & b13h;
a12l = b12l ^ ~b13l & b14l;
a12h = b12h ^ ~b13h & b14h;
a13l = b13l ^ ~b14l & b10l;
a13h = b13h ^ ~b14h & b10h;
a14l = b14l ^ ~b10l & b11l;
a14h = b14h ^ ~b10h & b11h;
a15l = b15l ^ ~b16l & b17l;
a15h = b15h ^ ~b16h & b17h;
a16l = b16l ^ ~b17l & b18l;
a16h = b16h ^ ~b17h & b18h;
a17l = b17l ^ ~b18l & b19l;
a17h = b17h ^ ~b18h & b19h;
a18l = b18l ^ ~b19l & b15l;
a18h = b18h ^ ~b19h & b15h;
a19l = b19l ^ ~b15l & b16l;
a19h = b19h ^ ~b15h & b16h;
a20l = b20l ^ ~b21l & b22l;
a20h = b20h ^ ~b21h & b22h;
a21l = b21l ^ ~b22l & b23l;
a21h = b21h ^ ~b22h & b23h;
a22l = b22l ^ ~b23l & b24l;
a22h = b22h ^ ~b23h & b24h;
a23l = b23l ^ ~b24l & b20l;
a23h = b23h ^ ~b24h & b20h;
a24l = b24l ^ ~b20l & b21l;
a24h = b24h ^ ~b20h & b21h;
// Iota
a00l ^= Keccak_f1600_RC[r|0];
a00h ^= Keccak_f1600_RC[r|1];
}
// todo, handle big-endian stores
outState[outOffset|0] = a00l;
outState[outOffset|1] = a00h;
outState[outOffset|2] = a01l;
outState[outOffset|3] = a01h;
outState[outOffset|4] = a02l;
outState[outOffset|5] = a02h;
outState[outOffset|6] = a03l;
outState[outOffset|7] = a03h;
if (outSize == 8)
return;
outState[outOffset|8] = a04l;
outState[outOffset|9] = a04h;
outState[outOffset|10] = a05l;
outState[outOffset|11] = a05h;
outState[outOffset|12] = a06l;
outState[outOffset|13] = a06h;
outState[outOffset|14] = a07l;
outState[outOffset|15] = a07h;
if (outSize == 16)
return;
outState[outOffset|16] = a08l;
outState[outOffset|17] = a08h;
outState[outOffset|18] = a09l;
outState[outOffset|19] = a09h;
outState[outOffset|20] = a10l;
outState[outOffset|21] = a10h;
outState[outOffset|22] = a11l;
outState[outOffset|23] = a11h;
outState[outOffset|24] = a12l;
outState[outOffset|25] = a12h;
outState[outOffset|26] = a13l;
outState[outOffset|27] = a13h;
outState[outOffset|28] = a14l;
outState[outOffset|29] = a14h;
outState[outOffset|30] = a15l;
outState[outOffset|31] = a15h;
outState[outOffset|32] = a16l;
outState[outOffset|33] = a16h;
outState[outOffset|34] = a17l;
outState[outOffset|35] = a17h;
outState[outOffset|36] = a18l;
outState[outOffset|37] = a18h;
outState[outOffset|38] = a19l;
outState[outOffset|39] = a19h;
outState[outOffset|40] = a20l;
outState[outOffset|41] = a20h;
outState[outOffset|42] = a21l;
outState[outOffset|43] = a21h;
outState[outOffset|44] = a22l;
outState[outOffset|45] = a22h;
outState[outOffset|46] = a23l;
outState[outOffset|47] = a23h;
outState[outOffset|48] = a24l;
outState[outOffset|49] = a24h;
}
var Keccak = function()
{
var stateBuf = new ArrayBuffer(200);
var stateBytes = new Uint8Array(stateBuf);
var stateWords = new Uint32Array(stateBuf);
this.digest = function(oSize, iBytes)
{
for (var i = 0; i < 50; ++i)
{
stateWords[i] = 0;
}
var r = 200 - oSize*2;
var iLength = iBytes.length;
var iOffset = 0;
for ( ; ;)
{
var len = iLength < r ? iLength : r;
for (i = 0; i < len; ++i, ++iOffset)
{
stateBytes[i] ^= iBytes[iOffset];
}
if (iLength < r)
break;
iLength -= len;
keccak_f1600(stateWords, 0, 50, stateWords);
}
stateBytes[iLength] ^= 1;
stateBytes[r-1] ^= 0x80;
keccak_f1600(stateWords, 0, 50, stateWords);
return stateBytes.subarray(0, oSize);
};
this.digestWords = function(oWords, oOffset, oLength, iWords, iOffset, iLength)
{
for (var i = 0; i < 50; ++i)
{
stateWords[i] = 0;
}
var r = 50 - oLength*2;
for (; ; )
{
var len = iLength < r ? iLength : r;
for (i = 0; i < len; ++i, ++iOffset)
{
stateWords[i] ^= iWords[iOffset];
}
if (iLength < r)
break;
iLength -= len;
keccak_f1600(stateWords, 0, 50, stateWords);
}
stateBytes[iLength<<2] ^= 1;
stateBytes[(r<<2) - 1] ^= 0x80;
keccak_f1600(oWords, oOffset, oLength, stateWords);
};
};
module.exports = Keccak;


@@ -1,201 +0,0 @@
#!/usr/bin/env node
// makekeccak.js
// Tim Hughes <tim@twistedfury.com>
/*jslint node: true, shadow:true */
"use strict";
var Keccak_f1600_Rho = [
1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14,
27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44
];
var Keccak_f1600_Pi= [
10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4,
15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1
];
var Keccak_f1600_RC = [
0x00000001, 0x00000000,
0x00008082, 0x00000000,
0x0000808a, 0x80000000,
0x80008000, 0x80000000,
0x0000808b, 0x00000000,
0x80000001, 0x00000000,
0x80008081, 0x80000000,
0x00008009, 0x80000000,
0x0000008a, 0x00000000,
0x00000088, 0x00000000,
0x80008009, 0x00000000,
0x8000000a, 0x00000000,
0x8000808b, 0x00000000,
0x0000008b, 0x80000000,
0x00008089, 0x80000000,
0x00008003, 0x80000000,
0x00008002, 0x80000000,
0x00000080, 0x80000000,
0x0000800a, 0x00000000,
0x8000000a, 0x80000000,
0x80008081, 0x80000000,
0x00008080, 0x80000000,
0x80000001, 0x00000000,
0x80008008, 0x80000000,
];
function makeRotLow(lo, hi, n)
{
if (n === 0 || n === 32) throw Error("unsupported");
if ((n & 0x20) !== 0)
{
n &= ~0x20;
var t = hi;
hi = lo;
lo = t;
}
var hir = hi + " >>> " + (32 - n);
var los = lo + " << " + n;
return los + " | " + hir;
}
function makeRotHigh(lo, hi, n)
{
if (n === 0 || n === 32) throw Error("unsupported");
if ((n & 0x20) !== 0)
{
n &= ~0x20;
var t = hi;
hi = lo;
lo = t;
}
var his = hi + " << " + n;
var lor = lo + " >>> " + (32 - n);
return his + " | " + lor;
}
function makeKeccak_f1600()
{
var format = function(n)
{
return n < 10 ? "0"+n : ""+n;
};
var a = function(n, w)
{
return "a" + format(n) + (w !== 0?'h':'l');
};
var b = function(n, w)
{
return "b" + format(n) + (w !== 0?'h':'l');
};
var str = "";
str += "function keccak_f1600(outState, outOffset, outSize, inState)\n";
str += "{\n";
for (var i = 0; i < 25; ++i)
{
for (var w = 0; w <= 1; ++w)
{
str += "\tvar " + a(i,w) + " = inState["+(i<<1|w)+"]|0;\n";
}
}
for (var j = 0; j < 5; ++j)
{
str += "\tvar ";
for (var i = 0; i < 5; ++i)
{
if (i !== 0)
str += ", ";
str += b(j*5+i,0) + ", " + b(j*5+i,1);
}
str += ";\n";
}
str += "\tvar tl, th;\n";
str += "\n";
str += "\tfor (var r = 0; r < 48; r = (r+2)|0)\n";
str += "\t{\n";
// Theta
str += "\t\t// Theta\n";
for (var i = 0; i < 5; ++i)
{
for (var w = 0; w <= 1; ++w)
{
str += "\t\t" + b(i,w) + " = " + a(i,w) + " ^ " + a(i+5,w) + " ^ " + a(i+10,w) + " ^ " + a(i+15,w) + " ^ " + a(i+20,w) + ";\n";
}
}
for (var i = 0; i < 5; ++i)
{
var i4 = (i + 4) % 5;
var i1 = (i + 1) % 5;
str += "\t\ttl = " + b(i4,0) + " ^ (" + b(i1,0) + " << 1 | " + b(i1,1) + " >>> 31);\n";
str += "\t\tth = " + b(i4,1) + " ^ (" + b(i1,1) + " << 1 | " + b(i1,0) + " >>> 31);\n";
for (var j = 0; j < 25; j = (j+5)|0)
{
str += "\t\t" + a((j+i),0) + " ^= tl;\n";
str += "\t\t" + a((j+i),1) + " ^= th;\n";
}
}
// Rho Pi
str += "\n\t\t// Rho Pi\n";
for (var w = 0; w <= 1; ++w)
{
str += "\t\t" + b(0,w) + " = " + a(0,w) + ";\n";
}
var opi = 1;
for (var i = 0; i < 24; ++i)
{
var pi = Keccak_f1600_Pi[i];
str += "\t\t" + b(pi,0) + " = " + makeRotLow(a(opi,0), a(opi,1), Keccak_f1600_Rho[i]) + ";\n";
str += "\t\t" + b(pi,1) + " = " + makeRotHigh(a(opi,0), a(opi,1), Keccak_f1600_Rho[i]) + ";\n";
opi = pi;
}
// Chi
str += "\n\t\t// Chi\n";
for (var j = 0; j < 25; j += 5)
{
for (var i = 0; i < 5; ++i)
{
for (var w = 0; w <= 1; ++w)
{
str += "\t\t" + a(j+i,w) + " = " + b(j+i,w) + " ^ ~" + b(j+(i+1)%5,w) + " & " + b(j+(i+2)%5,w) + ";\n";
}
}
}
// Iota
str += "\n\t\t// Iota\n";
for (var w = 0; w <= 1; ++w)
{
str += "\t\t" + a(0,w) + " ^= Keccak_f1600_RC[r|" + w + "];\n";
}
str += "\t}\n";
for (var i = 0; i < 25; ++i)
{
if (i == 4 || i == 8)
{
str += "\tif (outSize == " + i*2 + ")\n\t\treturn;\n";
}
for (var w = 0; w <= 1; ++w)
{
str += "\toutState[outOffset|"+(i<<1|w)+"] = " + a(i,w) + ";\n";
}
}
str += "}\n";
return str;
}
console.log(makeKeccak_f1600());


@@ -1,53 +0,0 @@
// test.js
// Tim Hughes <tim@twistedfury.com>
/*jslint node: true, shadow:true */
"use strict";
var ethash = require('./ethash');
var util = require('./util');
var Keccak = require('./keccak');
// sanity check hash functions
var src = util.stringToBytes("");
if (util.bytesToHexString(new Keccak().digest(32, src)) != "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") throw Error("Keccak-256 failed");
if (util.bytesToHexString(new Keccak().digest(64, src)) != "0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e") throw Error("Keccak-512 failed");
src = new Uint32Array(src.buffer);
var dst = new Uint32Array(8);
new Keccak().digestWords(dst, 0, dst.length, src, 0, src.length);
if (util.wordsToHexString(dst) != "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470") throw Error("Keccak-256 Fast failed");
var dst = new Uint32Array(16);
new Keccak().digestWords(dst, 0, dst.length, src, 0, src.length);
if (util.wordsToHexString(dst) != "0eab42de4c3ceb9235fc91acffe746b29c29a8c366b7c60e4e67c466f36a4304c00fa9caf9d87976ba469bcbe06713b435f091ef2769fb160cdab33d3670680e") throw Error("Keccak-512 Fast failed");
// init params
var ethashParams = ethash.defaultParams();
//ethashParams.cacheRounds = 0;
// create hasher
var seed = util.hexStringToBytes("9410b944535a83d9adf6bbdcc80e051f30676173c16ca0d32d6f1263fc246466")
var startTime = new Date().getTime();
var hasher = new ethash.Ethash(ethashParams, seed);
console.log('Ethash startup took: '+(new Date().getTime() - startTime) + "ms");
console.log('Ethash cache hash: ' + util.bytesToHexString(hasher.cacheDigest()));
var testHexString = "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470";
if (testHexString != util.bytesToHexString(util.hexStringToBytes(testHexString)))
throw Error("bytesToHexString or hexStringToBytes broken");
var header = util.hexStringToBytes("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
var nonce = util.hexStringToBytes("0000000000000000");
var hash;
startTime = new Date().getTime();
var trials = 10;
for (var i = 0; i < trials; ++i)
{
hash = hasher.hash(header, nonce);
}
console.log("Light client hashes averaged: " + (new Date().getTime() - startTime)/trials + "ms");
console.log("Hash = " + util.bytesToHexString(hash));

View File

@@ -1,100 +0,0 @@
// util.js
// Tim Hughes <tim@twistedfury.com>
/*jslint node: true, shadow:true */
"use strict";
function nibbleToChar(nibble)
{
return String.fromCharCode((nibble < 10 ? 48 : 87) + nibble);
}
function charToNibble(chr)
{
if (chr >= 48 && chr <= 57)
{
return chr - 48;
}
if (chr >= 65 && chr <= 70)
{
return chr - 65 + 10;
}
if (chr >= 97 && chr <= 102)
{
return chr - 97 + 10;
}
return 0;
}
function stringToBytes(str)
{
var bytes = new Uint8Array(str.length);
for (var i = 0; i != str.length; ++i)
{
bytes[i] = str.charCodeAt(i);
}
return bytes;
}
function hexStringToBytes(str)
{
var bytes = new Uint8Array(str.length>>>1);
for (var i = 0; i != bytes.length; ++i)
{
bytes[i] = charToNibble(str.charCodeAt(i<<1 | 0)) << 4;
bytes[i] |= charToNibble(str.charCodeAt(i<<1 | 1));
}
return bytes;
}
function bytesToHexString(bytes)
{
var str = "";
for (var i = 0; i != bytes.length; ++i)
{
str += nibbleToChar(bytes[i] >>> 4);
str += nibbleToChar(bytes[i] & 0xf);
}
return str;
}
function wordsToHexString(words)
{
return bytesToHexString(new Uint8Array(words.buffer));
}
function uint32ToHexString(num)
{
var buf = new Uint8Array(4);
buf[0] = (num >> 24) & 0xff;
buf[1] = (num >> 16) & 0xff;
buf[2] = (num >> 8) & 0xff;
buf[3] = (num >> 0) & 0xff;
return bytesToHexString(buf);
}
function toWords(input)
{
if (input instanceof Uint32Array)
{
return input;
}
else if (input instanceof Uint8Array)
{
var tmp = new Uint8Array((input.length + 3) & ~3);
tmp.set(input);
return new Uint32Array(tmp.buffer);
}
else if (typeof input === typeof "")
{
return toWords(stringToBytes(input));
}
return null;
}
exports.stringToBytes = stringToBytes;
exports.hexStringToBytes = hexStringToBytes;
exports.bytesToHexString = bytesToHexString;
exports.wordsToHexString = wordsToHexString;
exports.uint32ToHexString = uint32ToHexString;
exports.toWords = toWords;
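
For reference, the hand-rolled hex helpers above correspond one-to-one to Go's standard encoding/hex; a short hedged equivalent:

package main

import (
    "encoding/hex"
    "fmt"
)

func main() {
    // hexStringToBytes / bytesToHexString from util.js
    b, err := hex.DecodeString("c5d24601")
    if err != nil {
        panic(err)
    }
    fmt.Println(hex.EncodeToString(b)) // c5d24601

    // stringToBytes is simply a []byte conversion in Go
    fmt.Println([]byte("abc")) // [97 98 99]
}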

View File

@@ -1,58 +0,0 @@
include_directories(..)
set(CMAKE_BUILD_TYPE Release)
if (MSVC)
add_definitions("/openmp")
endif()
# enable C++11, should probably be a bit more specific about compiler
if (NOT MSVC)
SET(CMAKE_CXX_FLAGS "-std=c++11")
endif()
if (NOT MPI_FOUND)
find_package(MPI)
endif()
if (NOT CRYPTOPP_FOUND)
find_package(CryptoPP 5.6.2)
endif()
if (CRYPTOPP_FOUND)
add_definitions(-DWITH_CRYPTOPP)
find_package (Threads REQUIRED)
endif()
if (NOT OpenCL_FOUND)
find_package(OpenCL)
endif()
if (OpenCL_FOUND)
add_definitions(-DWITH_OPENCL)
include_directories(${OpenCL_INCLUDE_DIRS})
list(APPEND FILES ethash_cl_miner.cpp ethash_cl_miner.h)
endif()
if (MPI_FOUND)
include_directories(${MPI_INCLUDE_PATH})
add_executable (Benchmark_MPI_FULL benchmark.cpp)
target_link_libraries (Benchmark_MPI_FULL ${ETHHASH_LIBS} ${MPI_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
SET_TARGET_PROPERTIES(Benchmark_MPI_FULL PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} ${MPI_COMPILE_FLAGS} -DFULL -DMPI")
add_executable (Benchmark_MPI_LIGHT benchmark.cpp)
target_link_libraries (Benchmark_MPI_LIGHT ${ETHHASH_LIBS} ${MPI_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
SET_TARGET_PROPERTIES(Benchmark_MPI_LIGHT PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} ${MPI_COMPILE_FLAGS} -DMPI")
endif()
add_executable (Benchmark_FULL benchmark.cpp)
target_link_libraries (Benchmark_FULL ${ETHHASH_LIBS} ${CMAKE_THREAD_LIBS_INIT})
SET_TARGET_PROPERTIES(Benchmark_FULL PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} -DFULL")
add_executable (Benchmark_LIGHT benchmark.cpp)
target_link_libraries (Benchmark_LIGHT ${ETHHASH_LIBS} ${CMAKE_THREAD_LIBS_INIT})
if (OpenCL_FOUND)
add_executable (Benchmark_CL benchmark.cpp)
target_link_libraries (Benchmark_CL ${ETHHASH_LIBS} ethash-cl ${CMAKE_THREAD_LIBS_INIT})
SET_TARGET_PROPERTIES(Benchmark_CL PROPERTIES COMPILE_FLAGS "${COMPILE_FLAGS} -DOPENCL")
endif()

View File

@@ -1,278 +0,0 @@
/*
This file is part of cpp-ethereum.
cpp-ethereum is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
cpp-ethereum is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
*/
/** @file benchmark.cpp
* @author Tim Hughes <tim@twistedfury.com>
* @date 2015
*/
#include <stdio.h>
#include <stdlib.h>
#include <chrono>
#include <libethash/ethash.h>
#include <libethash/util.h>
#ifdef OPENCL
#include <libethash-cl/ethash_cl_miner.h>
#endif
#include <vector>
#include <algorithm>
#ifdef WITH_CRYPTOPP
#include <libethash/sha3_cryptopp.h>
#include <string>
#else
#include "libethash/sha3.h"
#endif // WITH_CRYPTOPP
#undef min
#undef max
using std::chrono::high_resolution_clock;
#if defined(OPENCL)
const unsigned trials = 1024*1024*32;
#elif defined(FULL)
const unsigned trials = 1024*1024/8;
#else
const unsigned trials = 1024*1024/1024;
#endif
uint8_t g_hashes[1024*32];
static char nibbleToChar(unsigned nibble)
{
return (char) ((nibble >= 10 ? 'a'-10 : '0') + nibble);
}
static uint8_t charToNibble(char chr)
{
if (chr >= '0' && chr <= '9')
{
return (uint8_t) (chr - '0');
}
if (chr >= 'a' && chr <= 'z')
{
return (uint8_t) (chr - 'a' + 10);
}
if (chr >= 'A' && chr <= 'Z')
{
return (uint8_t) (chr - 'A' + 10);
}
return 0;
}
static std::vector<uint8_t> hexStringToBytes(char const* str)
{
std::vector<uint8_t> bytes(strlen(str) >> 1);
for (unsigned i = 0; i != bytes.size(); ++i)
{
bytes[i] = charToNibble(str[i*2 | 0]) << 4;
bytes[i] |= charToNibble(str[i*2 | 1]);
}
return bytes;
}
static std::string bytesToHexString(uint8_t const* bytes, unsigned size)
{
std::string str;
for (unsigned i = 0; i != size; ++i)
{
str += nibbleToChar(bytes[i] >> 4);
str += nibbleToChar(bytes[i] & 0xf);
}
return str;
}
static std::string bytesToHexString(ethash_h256_t const *hash, unsigned size)
{
return bytesToHexString((uint8_t*)hash, size);
}
extern "C" int main(void)
{
// params for ethash
ethash_params params;
ethash_params_init(&params, 0);
//params.full_size = 262147 * 4096; // 1GBish;
//params.full_size = 32771 * 4096; // 128MBish;
//params.full_size = 8209 * 4096; // 8MBish;
//params.cache_size = 8209*4096;
//params.cache_size = 2053*4096;
ethash_h256_t seed;
ethash_h256_t previous_hash;
memcpy(&seed, hexStringToBytes("9410b944535a83d9adf6bbdcc80e051f30676173c16ca0d32d6f1263fc246466").data(), 32);
memcpy(&previous_hash, hexStringToBytes("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").data(), 32);
// allocate page aligned buffer for dataset
#ifdef FULL
void* full_mem_buf = malloc(params.full_size + 4095);
void* full_mem = (void*)((uintptr_t(full_mem_buf) + 4095) & ~4095);
#endif
void* cache_mem_buf = malloc(params.cache_size + 63);
void* cache_mem = (void*)((uintptr_t(cache_mem_buf) + 63) & ~63);
ethash_cache cache;
cache.mem = cache_mem;
// compute cache or full data
{
auto startTime = high_resolution_clock::now();
ethash_mkcache(&cache, &params, &seed);
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
ethash_h256_t cache_hash;
SHA3_256(&cache_hash, (uint8_t const*)cache_mem, params.cache_size);
debugf("ethash_mkcache: %ums, sha3: %s\n", (unsigned)time, bytesToHexString(&cache_hash, sizeof(cache_hash)).data());
// print a couple of test hashes
{
auto startTime = high_resolution_clock::now();
ethash_return_value hash;
ethash_light(&hash, &cache, &params, &previous_hash, 0);
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
debugf("ethash_light test: %ums, %s\n", (unsigned)time, bytesToHexString(&hash.result, 32).data());
}
#ifdef FULL
startTime = high_resolution_clock::now();
ethash_compute_full_data(full_mem, &params, &cache);
time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
debugf("ethash_compute_full_data: %ums\n", (unsigned)time);
#endif // FULL
}
#ifdef OPENCL
ethash_cl_miner miner;
{
auto startTime = high_resolution_clock::now();
if (!miner.init(params, &seed))
exit(-1);
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
debugf("ethash_cl_miner init: %ums\n", (unsigned)time);
}
#endif
#ifdef FULL
{
auto startTime = high_resolution_clock::now();
ethash_return_value hash;
ethash_full(&hash, full_mem, &params, &previous_hash, 0);
auto time = std::chrono::duration_cast<std::chrono::milliseconds>(high_resolution_clock::now() - startTime).count();
debugf("ethash_full test: %ums\n", (unsigned)time);
}
#endif
#ifdef OPENCL
// validate 1024 hashes against CPU
miner.hash(g_hashes, (uint8_t*)&previous_hash, 0, 1024);
for (unsigned i = 0; i != 1024; ++i)
{
ethash_return_value hash;
ethash_light(&hash, &cache, &params, &previous_hash, i);
if (memcmp(&hash.result, g_hashes + 32*i, 32) != 0)
{
debugf("nonce %u failed: %s %s\n", i, bytesToHexString(g_hashes + 32*i, 32).c_str(), bytesToHexString(&hash.result, 32).c_str());
static unsigned c = 0;
if (++c == 16)
{
exit(-1);
}
}
}
// ensure nothing else is going on
miner.finish();
#endif
auto startTime = high_resolution_clock::now();
unsigned hash_count = trials;
#ifdef OPENCL
{
struct search_hook : ethash_cl_miner::search_hook
{
unsigned hash_count;
std::vector<uint64_t> nonce_vec;
virtual bool found(uint64_t const* nonces, uint32_t count)
{
nonce_vec.insert(nonce_vec.end(), nonces, nonces + count);
return false;
}
virtual bool searched(uint64_t start_nonce, uint32_t count)
{
// tally searched nonces and stop once enough trials are done
hash_count += count;
return hash_count >= trials;
}
};
search_hook hook;
hook.hash_count = 0;
miner.search((uint8_t*)&previous_hash, 0x000000ffffffffff, hook);
for (unsigned i = 0; i != hook.nonce_vec.size(); ++i)
{
uint64_t nonce = hook.nonce_vec[i];
ethash_return_value hash;
ethash_light(&hash, &cache, &params, &previous_hash, nonce);
debugf("found: %.8x%.8x -> %s\n", unsigned(nonce>>32), unsigned(nonce), bytesToHexString(&hash.result, 32).c_str());
}
hash_count = hook.hash_count;
}
#else
{
//#pragma omp parallel for
for (int nonce = 0; nonce < trials; ++nonce)
{
ethash_return_value hash;
#ifdef FULL
ethash_full(&hash, full_mem, &params, &previous_hash, nonce);
#else
ethash_light(&hash, &cache, &params, &previous_hash, nonce);
#endif // FULL
}
}
#endif
auto time = std::chrono::duration_cast<std::chrono::microseconds>(high_resolution_clock::now() - startTime).count();
debugf("Search took: %ums\n", (unsigned)time/1000);
unsigned read_size = ETHASH_ACCESSES * ETHASH_MIX_BYTES;
#if defined(OPENCL) || defined(FULL)
debugf(
"hashrate: %8.2f Mh/s, bw: %8.2f GB/s\n",
(double)hash_count * (1000*1000)/time / (1000*1000),
(double)hash_count*read_size * (1000*1000)/time / (1024*1024*1024)
);
#else
debugf(
"hashrate: %8.2f Kh/s, bw: %8.2f MB/s\n",
(double)hash_count * (1000*1000)/time / (1000),
(double)hash_count*read_size * (1000*1000)/time / (1024*1024)
);
#endif
free(cache_mem_buf);
#ifdef FULL
free(full_mem_buf);
#endif
return 0;
}
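
The closing debugf calls convert the microsecond timing into a hash rate and an implied memory bandwidth (each hash touches ETHASH_ACCESSES * ETHASH_MIX_BYTES bytes of the dataset or cache). A small sketch of that arithmetic in Go, with made-up sample numbers:

package main

import "fmt"

func main() {
    hashCount := 1024.0      // hashes performed
    timeUS := 2.5e6          // elapsed time in microseconds (sample value)
    readSize := 64.0 * 128.0 // ETHASH_ACCESSES * ETHASH_MIX_BYTES bytes per hash

    hashrate := hashCount * 1e6 / timeUS                         // hashes per second
    bandwidth := hashCount * readSize * 1e6 / timeUS / (1 << 20) // MiB/s
    fmt.Printf("hashrate: %8.2f h/s, bw: %8.2f MiB/s\n", hashrate, bandwidth)
}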

View File

@@ -1,267 +0,0 @@
#include <Python.h>
#include <alloca.h>
#include <stdint.h>
#include <stdlib.h>
#include <time.h>
#include "../libethash/ethash.h"
#include "../libethash/internal.h"
#if PY_MAJOR_VERSION >= 3
#define PY_STRING_FORMAT "y#"
#define PY_CONST_STRING_FORMAT "y"
#else
#define PY_STRING_FORMAT "s#"
#define PY_CONST_STRING_FORMAT "s"
#endif
#define MIX_WORDS (ETHASH_MIX_BYTES/4)
static PyObject *
mkcache_bytes(PyObject *self, PyObject *args) {
unsigned long block_number;
unsigned long cache_size;
if (!PyArg_ParseTuple(args, "k", &block_number))
return 0;
ethash_light_t L = ethash_light_new(block_number);
PyObject * val = Py_BuildValue(PY_STRING_FORMAT, L->cache, L->cache_size);
free(L->cache);
return val;
}
/*
static PyObject *
calc_dataset_bytes(PyObject *self, PyObject *args) {
char *cache_bytes;
unsigned long full_size;
int cache_size;
if (!PyArg_ParseTuple(args, "k" PY_STRING_FORMAT, &full_size, &cache_bytes, &cache_size))
return 0;
if (full_size % MIX_WORDS != 0) {
char error_message[1024];
sprintf(error_message, "The size of data set must be a multiple of %i bytes (was %lu)", MIX_WORDS, full_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
if (cache_size % ETHASH_HASH_BYTES != 0) {
char error_message[1024];
sprintf(error_message, "The size of the cache must be a multiple of %i bytes (was %i)", ETHASH_HASH_BYTES, cache_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
ethash_params params;
params.cache_size = (size_t) cache_size;
params.full_size = (size_t) full_size;
ethash_cache cache;
cache.mem = (void *) cache_bytes;
void *mem = malloc(params.full_size);
ethash_compute_full_data(mem, &params, &cache);
PyObject * val = Py_BuildValue(PY_STRING_FORMAT, (char *) mem, full_size);
free(mem);
return val;
}*/
// hashimoto_light(full_size, cache, header, nonce)
static PyObject *
hashimoto_light(PyObject *self, PyObject *args) {
char *cache_bytes;
char *header;
unsigned long block_number;
unsigned long long nonce;
int cache_size, header_size;
if (!PyArg_ParseTuple(args, "k" PY_STRING_FORMAT PY_STRING_FORMAT "K", &block_number, &cache_bytes, &cache_size, &header, &header_size, &nonce))
return 0;
if (header_size != 32) {
char error_message[1024];
sprintf(error_message, "Header must be 32 bytes long (was %i)", header_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
struct ethash_light *s;
s = calloc(sizeof(*s), 1);
s->cache = cache_bytes;
s->cache_size = cache_size;
s->block_number = block_number;
struct ethash_h256 *h;
h = calloc(sizeof(*h), 1);
for (int i = 0; i < 32; i++) h->b[i] = header[i];
struct ethash_return_value out = ethash_light_compute(s, *h, nonce);
return Py_BuildValue("{" PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT "," PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT "}",
"mix digest", &out.mix_hash, 32,
"result", &out.result, 32);
}
/*
// hashimoto_full(dataset, header, nonce)
static PyObject *
hashimoto_full(PyObject *self, PyObject *args) {
char *full_bytes;
char *header;
unsigned long long nonce;
int full_size, header_size;
if (!PyArg_ParseTuple(args, PY_STRING_FORMAT PY_STRING_FORMAT "K", &full_bytes, &full_size, &header, &header_size, &nonce))
return 0;
if (full_size % MIX_WORDS != 0) {
char error_message[1024];
sprintf(error_message, "The size of data set must be a multiple of %i bytes (was %i)", MIX_WORDS, full_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
if (header_size != 32) {
char error_message[1024];
sprintf(error_message, "Header must be 32 bytes long (was %i)", header_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
ethash_return_value out;
ethash_params params;
params.full_size = (size_t) full_size;
ethash_full(&out, (void *) full_bytes, &params, (ethash_h256_t *) header, nonce);
return Py_BuildValue("{" PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT ", " PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT "}",
"mix digest", &out.mix_hash, 32,
"result", &out.result, 32);
}
// mine(dataset_bytes, header, difficulty_bytes)
static PyObject *
mine(PyObject *self, PyObject *args) {
char *full_bytes;
char *header;
char *difficulty;
srand(time(0));
uint64_t nonce = ((uint64_t) rand()) << 32 | rand();
int full_size, header_size, difficulty_size;
if (!PyArg_ParseTuple(args, PY_STRING_FORMAT PY_STRING_FORMAT PY_STRING_FORMAT, &full_bytes, &full_size, &header, &header_size, &difficulty, &difficulty_size))
return 0;
if (full_size % MIX_WORDS != 0) {
char error_message[1024];
sprintf(error_message, "The size of data set must be a multiple of %i bytes (was %i)", MIX_WORDS, full_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
if (header_size != 32) {
char error_message[1024];
sprintf(error_message, "Header must be 32 bytes long (was %i)", header_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
if (difficulty_size != 32) {
char error_message[1024];
sprintf(error_message, "Difficulty must be an array of 32 bytes (only had %i)", difficulty_size);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
ethash_return_value out;
ethash_params params;
params.full_size = (size_t) full_size;
// TODO: Multi threading?
do {
ethash_full(&out, (void *) full_bytes, &params, (const ethash_h256_t *) header, nonce++);
// TODO: disagrees with the spec https://github.com/ethereum/wiki/wiki/Ethash#mining
} while (!ethash_check_difficulty(&out.result, (const ethash_h256_t *) difficulty));
return Py_BuildValue("{" PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT ", " PY_CONST_STRING_FORMAT ":" PY_STRING_FORMAT ", " PY_CONST_STRING_FORMAT ":K}",
"mix digest", &out.mix_hash, 32,
"result", &out.result, 32,
"nonce", nonce);
}
*/
//get_seedhash(block_number)
static PyObject *
get_seedhash(PyObject *self, PyObject *args) {
unsigned long block_number;
if (!PyArg_ParseTuple(args, "k", &block_number))
return 0;
if (block_number >= ETHASH_EPOCH_LENGTH * 2048) {
char error_message[1024];
sprintf(error_message, "Block number must be less than %i (was %lu)", ETHASH_EPOCH_LENGTH * 2048, block_number);
PyErr_SetString(PyExc_ValueError, error_message);
return 0;
}
ethash_h256_t seedhash = ethash_get_seedhash(block_number);
return Py_BuildValue(PY_STRING_FORMAT, (char *) &seedhash, 32);
}
static PyMethodDef PyethashMethods[] =
{
{"get_seedhash", get_seedhash, METH_VARARGS,
"get_seedhash(block_number)\n\n"
"Gets the seedhash for a block."},
{"mkcache_bytes", mkcache_bytes, METH_VARARGS,
"mkcache_bytes(block_number)\n\n"
"Makes a byte array for the cache for given block number\n"},
/*{"calc_dataset_bytes", calc_dataset_bytes, METH_VARARGS,
"calc_dataset_bytes(full_size, cache_bytes)\n\n"
"Makes a byte array for the dataset for a given size given cache bytes"},*/
{"hashimoto_light", hashimoto_light, METH_VARARGS,
"hashimoto_light(block_number, cache_bytes, header, nonce)\n\n"
"Runs the hashimoto hashing function just using cache bytes. Takes an int (block_number), byte array (cache_bytes), another byte array (header), and an int (nonce). Returns an object containing the mix digest, and hash result."},
/*{"hashimoto_full", hashimoto_full, METH_VARARGS,
"hashimoto_full(dataset_bytes, header, nonce)\n\n"
"Runs the hashimoto hashing function using the dataset bytes. Useful for testing. Returns an object containing the mix digest (byte array), and hash result (another byte array)."},
{"mine", mine, METH_VARARGS,
"mine(dataset_bytes, header, difficulty_bytes)\n\n"
"Mine for an adequate header. Returns an object containing the mix digest (byte array), hash result (another byte array) and nonce (an int)."},*/
{NULL, NULL, 0, NULL}
};
#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef PyethashModule = {
PyModuleDef_HEAD_INIT,
"pyethash",
"...",
-1,
PyethashMethods
};
PyMODINIT_FUNC PyInit_pyethash(void) {
PyObject *module = PyModule_Create(&PyethashModule);
// Following Spec: https://github.com/ethereum/wiki/wiki/Ethash#definitions
PyModule_AddIntConstant(module, "REVISION", (long) ETHASH_REVISION);
PyModule_AddIntConstant(module, "DATASET_BYTES_INIT", (long) ETHASH_DATASET_BYTES_INIT);
PyModule_AddIntConstant(module, "DATASET_BYTES_GROWTH", (long) ETHASH_DATASET_BYTES_GROWTH);
PyModule_AddIntConstant(module, "CACHE_BYTES_INIT", (long) ETHASH_CACHE_BYTES_INIT);
PyModule_AddIntConstant(module, "CACHE_BYTES_GROWTH", (long) ETHASH_CACHE_BYTES_GROWTH);
PyModule_AddIntConstant(module, "EPOCH_LENGTH", (long) ETHASH_EPOCH_LENGTH);
PyModule_AddIntConstant(module, "MIX_BYTES", (long) ETHASH_MIX_BYTES);
PyModule_AddIntConstant(module, "HASH_BYTES", (long) ETHASH_HASH_BYTES);
PyModule_AddIntConstant(module, "DATASET_PARENTS", (long) ETHASH_DATASET_PARENTS);
PyModule_AddIntConstant(module, "CACHE_ROUNDS", (long) ETHASH_CACHE_ROUNDS);
PyModule_AddIntConstant(module, "ACCESSES", (long) ETHASH_ACCESSES);
return module;
}
#else
PyMODINIT_FUNC
initpyethash(void) {
PyObject *module = Py_InitModule("pyethash", PyethashMethods);
// Following Spec: https://github.com/ethereum/wiki/wiki/Ethash#definitions
PyModule_AddIntConstant(module, "REVISION", (long) ETHASH_REVISION);
PyModule_AddIntConstant(module, "DATASET_BYTES_INIT", (long) ETHASH_DATASET_BYTES_INIT);
PyModule_AddIntConstant(module, "DATASET_BYTES_GROWTH", (long) ETHASH_DATASET_BYTES_GROWTH);
PyModule_AddIntConstant(module, "CACHE_BYTES_INIT", (long) ETHASH_CACHE_BYTES_INIT);
PyModule_AddIntConstant(module, "CACHE_BYTES_GROWTH", (long) ETHASH_CACHE_BYTES_GROWTH);
PyModule_AddIntConstant(module, "EPOCH_LENGTH", (long) ETHASH_EPOCH_LENGTH);
PyModule_AddIntConstant(module, "MIX_BYTES", (long) ETHASH_MIX_BYTES);
PyModule_AddIntConstant(module, "HASH_BYTES", (long) ETHASH_HASH_BYTES);
PyModule_AddIntConstant(module, "DATASET_PARENTS", (long) ETHASH_DATASET_PARENTS);
PyModule_AddIntConstant(module, "CACHE_ROUNDS", (long) ETHASH_CACHE_ROUNDS);
PyModule_AddIntConstant(module, "ACCESSES", (long) ETHASH_ACCESSES);
}
#endif
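
The binding above rejects block numbers past ETHASH_EPOCH_LENGTH * 2048 and otherwise works per 30000-block epoch. A hedged Go sketch of the epoch bookkeeping (the helper is illustrative; the constants are the standard ethash ones):

package main

import "fmt"

const epochLength = 30000 // ETHASH_EPOCH_LENGTH

// epochOf returns the ethash epoch a block belongs to, with the same upper
// bound the Python get_seedhash above enforces.
func epochOf(blockNumber uint64) (uint64, error) {
    if blockNumber >= epochLength*2048 {
        return 0, fmt.Errorf("block number %d out of range", blockNumber)
    }
    return blockNumber / epochLength, nil
}

func main() {
    e, _ := epochOf(30001)
    fmt.Println(e) // 1 — matches the "epoch 1" block 30001 used in test.cpp below
}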

View File

@@ -1,66 +0,0 @@
if (MSVC)
if (NOT BOOST_ROOT)
set (BOOST_ROOT "$ENV{BOOST_ROOT}")
endif()
set (CMAKE_PREFIX_PATH BOOST_ROOT)
endif()
IF( NOT Boost_FOUND )
# use multithreaded boost libraries, with -mt suffix
set(Boost_USE_MULTITHREADED ON)
if (MSVC)
# TODO handle other msvc versions or it will fail to find them
set(Boost_COMPILER -vc120)
# use static boost libraries *.lib
set(Boost_USE_STATIC_LIBS ON)
elseif (APPLE)
# use static boost libraries *.a
set(Boost_USE_STATIC_LIBS ON)
elseif (UNIX)
# use dynamic boost libraries *.so
set(Boost_USE_STATIC_LIBS OFF)
endif()
find_package(Boost 1.48.0 COMPONENTS unit_test_framework system filesystem)
ENDIF()
IF (Boost_FOUND)
message(STATUS "boost header: ${Boost_INCLUDE_DIRS}")
message(STATUS "boost libs : ${Boost_LIBRARIES}")
include_directories( ${Boost_INCLUDE_DIR} )
include_directories(../../src)
link_directories(${Boost_LIBRARY_DIRS})
file(GLOB HEADERS "*.h")
if ((NOT MSVC) AND (NOT APPLE))
ADD_DEFINITIONS(-DBOOST_TEST_DYN_LINK)
endif()
if (NOT CRYPTOPP_FOUND)
find_package (CryptoPP)
endif()
if (CRYPTOPP_FOUND)
add_definitions(-DWITH_CRYPTOPP)
endif()
if (NOT MSVC)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 ")
endif()
add_executable (Test "./test.cpp" ${HEADERS})
target_link_libraries(Test ${ETHHASH_LIBS})
target_link_libraries(Test ${Boost_FILESYSTEM_LIBRARIES})
target_link_libraries(Test ${Boost_SYSTEM_LIBRARIES})
target_link_libraries(Test ${Boost_UNIT_TEST_FRAMEWORK_LIBRARIES})
if (CRYPTOPP_FOUND)
TARGET_LINK_LIBRARIES(Test ${CRYPTOPP_LIBRARIES})
endif()
enable_testing ()
add_test(NAME ethash COMMAND Test)
ENDIF()

View File

@@ -1,669 +0,0 @@
#include <iomanip>
#include <libethash/fnv.h>
#include <libethash/ethash.h>
#include <libethash/internal.h>
#include <libethash/io.h>
#ifdef WITH_CRYPTOPP
#include <libethash/sha3_cryptopp.h>
#else
#include <libethash/sha3.h>
#endif // WITH_CRYPTOPP
#ifdef _WIN32
#include <windows.h>
#include <Shlobj.h>
#endif
#define BOOST_TEST_MODULE Daggerhashimoto
#define BOOST_TEST_MAIN
#include <iostream>
#include <fstream>
#include <vector>
#include <boost/filesystem.hpp>
#include <boost/test/unit_test.hpp>
using namespace std;
using byte = uint8_t;
using bytes = std::vector<byte>;
namespace fs = boost::filesystem;
// Just an alloca "wrapper" to silence uint64_t to size_t conversion warnings in windows
// consider replacing alloca calls with something better though!
#define our_alloca(param__) alloca((size_t)(param__))
// some functions taken from eth::dev for convenience.
std::string bytesToHexString(const uint8_t *str, const uint64_t s)
{
std::ostringstream ret;
for (size_t i = 0; i < s; ++i)
ret << std::hex << std::setfill('0') << std::setw(2) << std::nouppercase << (int) str[i];
return ret.str();
}
std::string blockhashToHexString(ethash_h256_t* _hash)
{
return bytesToHexString((uint8_t*)_hash, 32);
}
int fromHex(char _i)
{
if (_i >= '0' && _i <= '9')
return _i - '0';
if (_i >= 'a' && _i <= 'f')
return _i - 'a' + 10;
if (_i >= 'A' && _i <= 'F')
return _i - 'A' + 10;
BOOST_REQUIRE_MESSAGE(false, "should never get here");
return -1;
}
bytes hexStringToBytes(std::string const& _s)
{
unsigned s = (_s[0] == '0' && _s[1] == 'x') ? 2 : 0;
std::vector<uint8_t> ret;
ret.reserve((_s.size() - s + 1) / 2);
if (_s.size() % 2)
try
{
ret.push_back(fromHex(_s[s++]));
}
catch (...)
{
ret.push_back(0);
}
for (unsigned i = s; i < _s.size(); i += 2)
try
{
ret.push_back((byte)(fromHex(_s[i]) * 16 + fromHex(_s[i + 1])));
}
catch (...){
ret.push_back(0);
}
return ret;
}
ethash_h256_t stringToBlockhash(std::string const& _s)
{
ethash_h256_t ret;
bytes b = hexStringToBytes(_s);
memcpy(&ret, b.data(), b.size());
return ret;
}
BOOST_AUTO_TEST_CASE(fnv_hash_check) {
uint32_t x = 1235U;
const uint32_t
y = 9999999U,
expected = (FNV_PRIME * x) ^y;
x = fnv_hash(x, y);
BOOST_REQUIRE_MESSAGE(x == expected,
"\nexpected: " << expected << "\n"
<< "actual: " << x << "\n");
}
BOOST_AUTO_TEST_CASE(SHA256_check) {
ethash_h256_t input;
ethash_h256_t out;
memcpy(&input, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
SHA3_256(&out, (uint8_t*)&input, 32);
const std::string
expected = "2b5ddf6f4d21c23de216f44d5e4bdc68e044b71897837ea74c83908be7037cd7",
actual = bytesToHexString((uint8_t*)&out, 32);
BOOST_REQUIRE_MESSAGE(expected == actual,
"\nexpected: " << expected.c_str() << "\n"
<< "actual: " << actual.c_str() << "\n");
}
BOOST_AUTO_TEST_CASE(SHA512_check) {
uint8_t input[64], out[64];
memcpy(input, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 64);
SHA3_512(out, input, 64);
const std::string
expected = "0be8a1d334b4655fe58c6b38789f984bb13225684e86b20517a55ab2386c7b61c306f25e0627c60064cecd6d80cd67a82b3890bd1289b7ceb473aad56a359405",
actual = bytesToHexString(out, 64);
BOOST_REQUIRE_MESSAGE(expected == actual,
"\nexpected: " << expected.c_str() << "\n"
<< "actual: " << actual.c_str() << "\n");
}
BOOST_AUTO_TEST_CASE(test_swap_endian32) {
uint32_t v32 = (uint32_t)0xBAADF00D;
v32 = ethash_swap_u32(v32);
BOOST_REQUIRE_EQUAL(v32, (uint32_t)0x0DF0ADBA);
}
BOOST_AUTO_TEST_CASE(test_swap_endian64) {
uint64_t v64 = (uint64_t)0xFEE1DEADDEADBEEF;
v64 = ethash_swap_u64(v64);
BOOST_REQUIRE_EQUAL(v64, (uint64_t)0xEFBEADDEADDEE1FE);
}
BOOST_AUTO_TEST_CASE(ethash_params_init_genesis_check) {
uint64_t full_size = ethash_get_datasize(0);
uint64_t cache_size = ethash_get_cachesize(0);
BOOST_REQUIRE_MESSAGE(full_size < ETHASH_DATASET_BYTES_INIT,
"\nfull size: " << full_size << "\n"
<< "should be less than or equal to: " << ETHASH_DATASET_BYTES_INIT << "\n");
BOOST_REQUIRE_MESSAGE(full_size + 20 * ETHASH_MIX_BYTES >= ETHASH_DATASET_BYTES_INIT,
"\nfull size + 20*MIX_BYTES: " << full_size + 20 * ETHASH_MIX_BYTES << "\n"
<< "should be greater than or equal to: " << ETHASH_DATASET_BYTES_INIT << "\n");
BOOST_REQUIRE_MESSAGE(cache_size < ETHASH_DATASET_BYTES_INIT / 32,
"\ncache size: " << cache_size << "\n"
<< "should be less than or equal to: " << ETHASH_DATASET_BYTES_INIT / 32 << "\n");
}
BOOST_AUTO_TEST_CASE(ethash_params_init_genesis_calcifide_check) {
uint64_t full_size = ethash_get_datasize(0);
uint64_t cache_size = ethash_get_cachesize(0);
const uint32_t expected_full_size = 1073739904;
const uint32_t expected_cache_size = 16776896;
BOOST_REQUIRE_MESSAGE(full_size == expected_full_size,
"\nexpected: " << expected_full_size << "\n"
<< "actual: " << full_size << "\n");
BOOST_REQUIRE_MESSAGE(cache_size == expected_cache_size,
"\nexpected: " << expected_cache_size << "\n"
<< "actual: " << cache_size << "\n");
}
BOOST_AUTO_TEST_CASE(ethash_check_difficulty_check) {
ethash_h256_t hash;
ethash_h256_t target;
memcpy(&hash, "11111111111111111111111111111111", 32);
memcpy(&target, "22222222222222222222222222222222", 32);
BOOST_REQUIRE_MESSAGE(
ethash_check_difficulty(&hash, &target),
"\nexpected \"" << std::string((char *) &hash, 32).c_str() << "\" to have the same or less difficulty than \"" << std::string((char *) &target, 32).c_str() << "\"\n");
BOOST_REQUIRE_MESSAGE(
ethash_check_difficulty(&hash, &hash), "");
// "\nexpected \"" << hash << "\" to have the same or less difficulty than \"" << hash << "\"\n");
memcpy(&target, "11111111111111111111111111111112", 32);
BOOST_REQUIRE_MESSAGE(
ethash_check_difficulty(&hash, &target), "");
// "\nexpected \"" << hash << "\" to have the same or less difficulty than \"" << target << "\"\n");
memcpy(&target, "11111111111111111111111111111110", 32);
BOOST_REQUIRE_MESSAGE(
!ethash_check_difficulty(&hash, &target), "");
// "\nexpected \"" << hash << "\" to have more difficulty than \"" << target << "\"\n");
}
BOOST_AUTO_TEST_CASE(test_ethash_io_mutable_name) {
char mutable_name[DAG_MUTABLE_NAME_MAX_SIZE];
// should have at least 8 bytes provided since this is what we test :)
ethash_h256_t seed1 = ethash_h256_static_init(0, 10, 65, 255, 34, 55, 22, 8);
ethash_io_mutable_name(1, &seed1, mutable_name);
BOOST_REQUIRE_EQUAL(0, strcmp(mutable_name, "full-R1-000a41ff22371608"));
ethash_h256_t seed2 = ethash_h256_static_init(0, 0, 0, 0, 0, 0, 0, 0);
ethash_io_mutable_name(44, &seed2, mutable_name);
BOOST_REQUIRE_EQUAL(0, strcmp(mutable_name, "full-R44-0000000000000000"));
}
BOOST_AUTO_TEST_CASE(test_ethash_dir_creation) {
ethash_h256_t seedhash;
FILE *f = NULL;
memset(&seedhash, 0, 32);
BOOST_REQUIRE_EQUAL(
ETHASH_IO_MEMO_MISMATCH,
ethash_io_prepare("./test_ethash_directory/", seedhash, &f, 64, false)
);
BOOST_REQUIRE(f);
// let's make sure that the directory was created
BOOST_REQUIRE(fs::is_directory(fs::path("./test_ethash_directory/")));
// cleanup
fclose(f);
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(test_ethash_io_memo_file_match) {
uint64_t full_size;
uint64_t cache_size;
ethash_h256_t seed;
ethash_h256_t hash;
FILE* f;
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
cache_size = 1024;
full_size = 1024 * 32;
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
ethash_full_t full = ethash_full_new_internal(
"./test_ethash_directory/",
seed,
full_size,
light,
NULL
);
BOOST_ASSERT(full);
// let's make sure that the directory was created
BOOST_REQUIRE(fs::is_directory(fs::path("./test_ethash_directory/")));
// delete the full here so that memory is properly unmapped and FILE handler freed
ethash_full_delete(full);
// and check that we have a match when checking again
BOOST_REQUIRE_EQUAL(
ETHASH_IO_MEMO_MATCH,
ethash_io_prepare("./test_ethash_directory/", seed, &f, full_size, false)
);
BOOST_REQUIRE(f);
// cleanup
fclose(f);
ethash_light_delete(light);
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(test_ethash_io_memo_file_size_mismatch) {
static const int blockn = 0;
ethash_h256_t seedhash = ethash_get_seedhash(blockn);
FILE *f = NULL;
BOOST_REQUIRE_EQUAL(
ETHASH_IO_MEMO_MISMATCH,
ethash_io_prepare("./test_ethash_directory/", seedhash, &f, 64, false)
);
BOOST_REQUIRE(f);
fclose(f);
// let's make sure that the directory was created
BOOST_REQUIRE(fs::is_directory(fs::path("./test_ethash_directory/")));
// and check that we get the size mismatch detected if we request a different size
BOOST_REQUIRE_EQUAL(
ETHASH_IO_MEMO_SIZE_MISMATCH,
ethash_io_prepare("./test_ethash_directory/", seedhash, &f, 65, false)
);
// cleanup
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(test_ethash_get_default_dirname) {
char result[256];
// this is really not an easy thing to test for in a unit test
// TODO: Improve this test ...
#ifdef _WIN32
char homedir[256];
BOOST_REQUIRE(SUCCEEDED(SHGetFolderPathA(NULL, CSIDL_PROFILE, NULL, 0, (CHAR*)homedir)));
BOOST_REQUIRE(ethash_get_default_dirname(result, 256));
std::string res = std::string(homedir) + std::string("\\AppData\\Local\\Ethash\\");
#else
char* homedir = getenv("HOME");
BOOST_REQUIRE(ethash_get_default_dirname(result, 256));
std::string res = std::string(homedir) + std::string("/.ethash/");
#endif
BOOST_CHECK_MESSAGE(strcmp(res.c_str(), result) == 0,
"Expected \"" + res + "\" but got \"" + std::string(result) + "\""
);
}
BOOST_AUTO_TEST_CASE(light_and_full_client_checks) {
uint64_t full_size;
uint64_t cache_size;
ethash_h256_t seed;
ethash_h256_t hash;
ethash_h256_t difficulty;
ethash_return_value_t light_out;
ethash_return_value_t full_out;
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
// Set the difficulty
ethash_h256_set(&difficulty, 0, 197);
ethash_h256_set(&difficulty, 1, 90);
for (int i = 2; i < 32; i++)
ethash_h256_set(&difficulty, i, 255);
cache_size = 1024;
full_size = 1024 * 32;
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
ethash_full_t full = ethash_full_new_internal(
"./test_ethash_directory/",
seed,
full_size,
light,
NULL
);
BOOST_ASSERT(full);
{
const std::string
expected = "2da2b506f21070e1143d908e867962486d6b0a02e31d468fd5e3a7143aafa76a14201f63374314e2a6aaf84ad2eb57105dea3378378965a1b3873453bb2b78f9a8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995c259440b89fa3481c2c33171477c305c8e1e421f8d8f6d59585449d0034f3e421808d8da6bbd0b6378f567647cc6c4ba6c434592b198ad444e7284905b7c6adaf70bf43ec2daa7bd5e8951aa609ab472c124cf9eba3d38cff5091dc3f58409edcc386c743c3bd66f92408796ee1e82dd149eaefbf52b00ce33014a6eb3e50625413b072a58bc01da28262f42cbe4f87d4abc2bf287d15618405a1fe4e386fcdafbb171064bd99901d8f81dd6789396ce5e364ac944bbbd75a7827291c70b42d26385910cd53ca535ab29433dd5c5714d26e0dce95514c5ef866329c12e958097e84462197c2b32087849dab33e88b11da61d52f9dbc0b92cc61f742c07dbbf751c49d7678624ee60dfbe62e5e8c47a03d8247643f3d16ad8c8e663953bcda1f59d7e2d4a9bf0768e789432212621967a8f41121ad1df6ae1fa78782530695414c6213942865b2730375019105cae91a4c17a558d4b63059661d9f108362143107babe0b848de412e4da59168cce82bfbff3c99e022dd6ac1e559db991f2e3f7bb910cefd173e65ed00a8d5d416534e2c8416ff23977dbf3eb7180b75c71580d08ce95efeb9b0afe904ea12285a392aff0c8561ff79fca67f694a62b9e52377485c57cc3598d84cac0a9d27960de0cc31ff9bbfe455acaa62c8aa5d2cce96f345da9afe843d258a99c4eaf3650fc62efd81c7b81cd0d534d2d71eeda7a6e315d540b4473c80f8730037dc2ae3e47b986240cfc65ccc565f0d8cde0bc68a57e39a271dda57440b3598bee19f799611d25731a96b5dbbbefdff6f4f656161462633030d62560ea4e9c161cf78fc96a2ca5aaa32453a6c5dea206f766244e8c9d9a8dc61185ce37f1fc804459c5f07434f8ecb34141b8dcae7eae704c950b55556c5f40140c3714b45eddb02637513268778cbf937a33e4e33183685f9deb31ef54e90161e76d969587dd782eaa94e289420e7c2ee908517f5893a26fdb5873d68f92d118d4bcf98d7a4916794d6ab290045e30f9ea00ca547c584b8482b0331ba1539a0f2714fddc3a0b06b0cfbb6a607b8339c39bcfd6640b1f653e9d70ef6c985b",
actual = bytesToHexString((uint8_t const *) light->cache, cache_size);
BOOST_REQUIRE_MESSAGE(expected == actual,
"\nexpected: " << expected.c_str() << "\n"
<< "actual: " << actual.c_str() << "\n");
}
{
node node;
ethash_calculate_dag_item(&node, 0, light);
const std::string
actual = bytesToHexString((uint8_t const *) &node, sizeof(node)),
expected = "b1698f829f90b35455804e5185d78f549fcb1bdce2bee006d4d7e68eb154b596be1427769eb1c3c3e93180c760af75f81d1023da6a0ffbe321c153a7c0103597";
BOOST_REQUIRE_MESSAGE(actual == expected,
"\n" << "expected: " << expected.c_str() << "\n"
<< "actual: " << actual.c_str() << "\n");
}
{
for (int i = 0; i < full_size / sizeof(node); ++i) {
for (uint32_t j = 0; j < 32; ++j) {
node expected_node;
ethash_calculate_dag_item(&expected_node, j, light);
const std::string
actual = bytesToHexString((uint8_t const *) &(full->data[j]), sizeof(node)),
expected = bytesToHexString((uint8_t const *) &expected_node, sizeof(node));
BOOST_REQUIRE_MESSAGE(actual == expected,
"\ni: " << j << "\n"
<< "expected: " << expected.c_str() << "\n"
<< "actual: " << actual.c_str() << "\n");
}
}
}
{
uint64_t nonce = 0x7c7c597c;
full_out = ethash_full_compute(full, hash, nonce);
BOOST_REQUIRE(full_out.success);
light_out = ethash_light_compute_internal(light, full_size, hash, nonce);
BOOST_REQUIRE(light_out.success);
const std::string
light_result_string = blockhashToHexString(&light_out.result),
full_result_string = blockhashToHexString(&full_out.result);
BOOST_REQUIRE_MESSAGE(light_result_string == full_result_string,
"\nlight result: " << light_result_string.c_str() << "\n"
<< "full result: " << full_result_string.c_str() << "\n");
const std::string
light_mix_hash_string = blockhashToHexString(&light_out.mix_hash),
full_mix_hash_string = blockhashToHexString(&full_out.mix_hash);
BOOST_REQUIRE_MESSAGE(full_mix_hash_string == light_mix_hash_string,
"\nlight mix hash: " << light_mix_hash_string.c_str() << "\n"
<< "full mix hash: " << full_mix_hash_string.c_str() << "\n");
ethash_h256_t check_hash;
ethash_quick_hash(&check_hash, &hash, nonce, &full_out.mix_hash);
const std::string check_hash_string = blockhashToHexString(&check_hash);
BOOST_REQUIRE_MESSAGE(check_hash_string == full_result_string,
"\ncheck hash string: " << check_hash_string.c_str() << "\n"
<< "full result: " << full_result_string.c_str() << "\n");
}
{
full_out = ethash_full_compute(full, hash, 5);
BOOST_REQUIRE(full_out.success);
std::string
light_result_string = blockhashToHexString(&light_out.result),
full_result_string = blockhashToHexString(&full_out.result);
BOOST_REQUIRE_MESSAGE(light_result_string != full_result_string,
"\nlight result and full result should differ: " << light_result_string.c_str() << "\n");
light_out = ethash_light_compute_internal(light, full_size, hash, 5);
BOOST_REQUIRE(light_out.success);
light_result_string = blockhashToHexString(&light_out.result);
BOOST_REQUIRE_MESSAGE(light_result_string == full_result_string,
"\nlight result and full result should be the same\n"
<< "light result: " << light_result_string.c_str() << "\n"
<< "full result: " << full_result_string.c_str() << "\n");
std::string
light_mix_hash_string = blockhashToHexString(&light_out.mix_hash),
full_mix_hash_string = blockhashToHexString(&full_out.mix_hash);
BOOST_REQUIRE_MESSAGE(full_mix_hash_string == light_mix_hash_string,
"\nlight mix hash: " << light_mix_hash_string.c_str() << "\n"
<< "full mix hash: " << full_mix_hash_string.c_str() << "\n");
BOOST_REQUIRE_MESSAGE(ethash_check_difficulty(&full_out.result, &difficulty),
"ethash_check_difficulty failed"
);
BOOST_REQUIRE_MESSAGE(ethash_quick_check_difficulty(&hash, 5U, &full_out.mix_hash, &difficulty),
"ethash_quick_check_difficulty failed"
);
}
ethash_light_delete(light);
ethash_full_delete(full);
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(ethash_full_new_when_dag_exists_with_wrong_size) {
uint64_t full_size;
uint64_t cache_size;
ethash_h256_t seed;
ethash_h256_t hash;
ethash_return_value_t full_out;
ethash_return_value_t light_out;
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
cache_size = 1024;
full_size = 1024 * 32;
// first make a DAG file of "wrong size"
FILE *f;
BOOST_REQUIRE_EQUAL(
ETHASH_IO_MEMO_MISMATCH,
ethash_io_prepare("./test_ethash_directory/", seed, &f, 64, false)
);
fclose(f);
// then create new DAG, which should detect the wrong size and force create a new file
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
BOOST_ASSERT(light);
ethash_full_t full = ethash_full_new_internal(
"./test_ethash_directory/",
seed,
full_size,
light,
NULL
);
BOOST_ASSERT(full);
{
uint64_t nonce = 0x7c7c597c;
full_out = ethash_full_compute(full, hash, nonce);
BOOST_REQUIRE(full_out.success);
light_out = ethash_light_compute_internal(light, full_size, hash, nonce);
BOOST_REQUIRE(light_out.success);
const std::string
light_result_string = blockhashToHexString(&light_out.result),
full_result_string = blockhashToHexString(&full_out.result);
BOOST_REQUIRE_MESSAGE(light_result_string == full_result_string,
"\nlight result: " << light_result_string.c_str() << "\n"
<< "full result: " << full_result_string.c_str() << "\n");
const std::string
light_mix_hash_string = blockhashToHexString(&light_out.mix_hash),
full_mix_hash_string = blockhashToHexString(&full_out.mix_hash);
BOOST_REQUIRE_MESSAGE(full_mix_hash_string == light_mix_hash_string,
"\nlight mix hash: " << light_mix_hash_string.c_str() << "\n"
<< "full mix hash: " << full_mix_hash_string.c_str() << "\n");
ethash_h256_t check_hash;
ethash_quick_hash(&check_hash, &hash, nonce, &full_out.mix_hash);
const std::string check_hash_string = blockhashToHexString(&check_hash);
BOOST_REQUIRE_MESSAGE(check_hash_string == full_result_string,
"\ncheck hash string: " << check_hash_string.c_str() << "\n"
<< "full result: " << full_result_string.c_str() << "\n");
}
ethash_light_delete(light);
ethash_full_delete(full);
fs::remove_all("./test_ethash_directory/");
}
static bool g_executed = false;
static unsigned g_prev_progress = 0;
static int test_full_callback(unsigned _progress)
{
g_executed = true;
BOOST_CHECK(_progress >= g_prev_progress);
g_prev_progress = _progress;
return 0;
}
static int test_full_callback_that_fails(unsigned _progress)
{
return 1;
}
static int test_full_callback_create_incomplete_dag(unsigned _progress)
{
if (_progress >= 30) {
return 1;
}
return 0;
}
BOOST_AUTO_TEST_CASE(full_client_callback) {
uint64_t full_size;
uint64_t cache_size;
ethash_h256_t seed;
ethash_h256_t hash;
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
cache_size = 1024;
full_size = 1024 * 32;
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
ethash_full_t full = ethash_full_new_internal(
"./test_ethash_directory/",
seed,
full_size,
light,
test_full_callback
);
BOOST_ASSERT(full);
BOOST_CHECK(g_executed);
BOOST_REQUIRE_EQUAL(g_prev_progress, 100);
ethash_full_delete(full);
ethash_light_delete(light);
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(failing_full_client_callback) {
uint64_t full_size;
uint64_t cache_size;
ethash_h256_t seed;
ethash_h256_t hash;
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
cache_size = 1024;
full_size = 1024 * 32;
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
ethash_full_t full = ethash_full_new_internal(
"./test_ethash_directory/",
seed,
full_size,
light,
test_full_callback_that_fails
);
BOOST_ASSERT(!full);
ethash_light_delete(light);
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(test_incomplete_dag_file) {
uint64_t full_size;
uint64_t cache_size;
ethash_h256_t seed;
ethash_h256_t hash;
memcpy(&seed, "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
memcpy(&hash, "~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~~~", 32);
cache_size = 1024;
full_size = 1024 * 32;
ethash_light_t light = ethash_light_new_internal(cache_size, &seed);
// create a full but stop at 30%, so no magic number is written
ethash_full_t full = ethash_full_new_internal(
"./test_ethash_directory/",
seed,
full_size,
light,
test_full_callback_create_incomplete_dag
);
BOOST_ASSERT(!full);
FILE *f = NULL;
// confirm that we get a size_mismatch because the magic number is missing
BOOST_REQUIRE_EQUAL(
ETHASH_IO_MEMO_SIZE_MISMATCH,
ethash_io_prepare("./test_ethash_directory/", seed, &f, full_size, false)
);
ethash_light_delete(light);
fs::remove_all("./test_ethash_directory/");
}
BOOST_AUTO_TEST_CASE(test_block22_verification) {
// from POC-9 testnet, epoch 0
ethash_light_t light = ethash_light_new(22);
ethash_h256_t seedhash = stringToBlockhash("372eca2454ead349c3df0ab5d00b0b706b23e49d469387db91811cee0358fc6d");
BOOST_ASSERT(light);
ethash_return_value_t ret = ethash_light_compute(
light,
seedhash,
0x495732e0ed7a801cU
);
BOOST_REQUIRE_EQUAL(blockhashToHexString(&ret.result), "00000b184f1fdd88bfd94c86c39e65db0c36144d5e43f745f722196e730cb614");
ethash_h256_t difficulty = ethash_h256_static_init(0x2, 0x5, 0x40);
BOOST_REQUIRE(ethash_check_difficulty(&ret.result, &difficulty));
ethash_light_delete(light);
}
BOOST_AUTO_TEST_CASE(test_block30001_verification) {
// from POC-9 testnet, epoch 1
ethash_light_t light = ethash_light_new(30001);
ethash_h256_t seedhash = stringToBlockhash("7e44356ee3441623bc72a683fd3708fdf75e971bbe294f33e539eedad4b92b34");
BOOST_ASSERT(light);
ethash_return_value_t ret = ethash_light_compute(
light,
seedhash,
0x318df1c8adef7e5eU
);
ethash_h256_t difficulty = ethash_h256_static_init(0x17, 0x62, 0xff);
BOOST_REQUIRE(ethash_check_difficulty(&ret.result, &difficulty));
ethash_light_delete(light);
}
BOOST_AUTO_TEST_CASE(test_block60000_verification) {
// from POC-9 testnet, epoch 2
ethash_light_t light = ethash_light_new(60000);
ethash_h256_t seedhash = stringToBlockhash("5fc898f16035bf5ac9c6d9077ae1e3d5fc1ecc3c9fd5bee8bb00e810fdacbaa0");
BOOST_ASSERT(light);
ethash_return_value_t ret = ethash_light_compute(
light,
seedhash,
0x50377003e5d830caU
);
ethash_h256_t difficulty = ethash_h256_static_init(0x25, 0xa6, 0x1e);
BOOST_REQUIRE(ethash_check_difficulty(&ret.result, &difficulty));
ethash_light_delete(light);
}
// Test of Full DAG creation with the minimal ethash.h API.
// Commented out since travis tests would take too much time.
// Uncomment and run on your own machine if you want to confirm
// it works fine.
#if 0
static int progress_cb(unsigned _progress)
{
printf("CREATING DAG. PROGRESS: %u\n", _progress);
fflush(stdout);
return 0;
}
BOOST_AUTO_TEST_CASE(full_dag_test) {
ethash_light_t light = ethash_light_new(55);
BOOST_ASSERT(light);
ethash_full_t full = ethash_full_new(light, progress_cb);
BOOST_ASSERT(full);
ethash_light_delete(light);
ethash_full_delete(full);
}
#endif
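
The fnv_hash_check case at the top of this file pins down ethash's inner mixing primitive: fnv(x, y) = (x * FNV_PRIME) ^ y with the 32-bit FNV prime 0x01000193. For reference, a one-function Go sketch:

package main

import "fmt"

const fnvPrime uint32 = 0x01000193

// fnvHash mirrors the fnv_hash checked in fnv_hash_check above:
// multiply by the 32-bit FNV prime, then XOR in the new word.
func fnvHash(x, y uint32) uint32 {
    return x*fnvPrime ^ y
}

func main() {
    fmt.Printf("%#x\n", fnvHash(1235, 9999999))
}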

View File

@@ -1,32 +0,0 @@
#!/bin/bash
# Strict mode
set -e
VALGRIND_ARGS="--tool=memcheck"
VALGRIND_ARGS+=" --leak-check=yes"
VALGRIND_ARGS+=" --track-origins=yes"
VALGRIND_ARGS+=" --show-reachable=yes"
VALGRIND_ARGS+=" --num-callers=20"
VALGRIND_ARGS+=" --track-fds=yes"
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
TEST_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
rm -rf $TEST_DIR/build
mkdir -p $TEST_DIR/build
cd $TEST_DIR/build ;
cmake ../../.. > /dev/null
make Test
./test/c/Test
# If we have valgrind also run memory check tests
if hash valgrind 2>/dev/null; then
echo "======== Running tests under valgrind ========";
cd $TEST_DIR/build/ && valgrind $VALGRIND_ARGS ./test/c/Test
fi

View File

@@ -1 +0,0 @@
python-virtual-env/

View File

@@ -1,3 +0,0 @@
pyethereum==0.7.522
nose==1.3.4
pysha3==0.3

View File

@@ -1,30 +0,0 @@
#!/bin/bash
# Strict mode
set -e
if [ -x "$(which virtualenv2)" ] ; then
VIRTUALENV_EXEC=virtualenv2
elif [ -x "$(which virtualenv)" ] ; then
VIRTUALENV_EXEC=virtualenv
else
echo "Could not find a suitable version of virtualenv"
false
fi
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
TEST_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
[ -d $TEST_DIR/python-virtual-env ] || $VIRTUALENV_EXEC --system-site-packages $TEST_DIR/python-virtual-env
source $TEST_DIR/python-virtual-env/bin/activate
pip install -r $TEST_DIR/requirements.txt > /dev/null
# force installation of nose in the virtualenv even if it exists in the user's system
pip install nose -I
pip install --upgrade --no-deps --force-reinstall -e $TEST_DIR/../..
cd $TEST_DIR
nosetests --with-doctest -v --nocapture

View File

@@ -1,105 +0,0 @@
import pyethash
from random import randint
def test_get_cache_size_not_None():
for _ in range(100):
block_num = randint(0,12456789)
out = pyethash.get_cache_size(block_num)
assert out != None
def test_get_full_size_not_None():
for _ in range(100):
block_num = randint(0,12456789)
out = pyethash.get_full_size(block_num)
assert out != None
def test_get_cache_size_based_on_EPOCH():
for _ in range(100):
block_num = randint(0,12456789)
out1 = pyethash.get_cache_size(block_num)
out2 = pyethash.get_cache_size((block_num // pyethash.EPOCH_LENGTH) * pyethash.EPOCH_LENGTH)
assert out1 == out2
def test_get_full_size_based_on_EPOCH():
for _ in range(100):
block_num = randint(0,12456789)
out1 = pyethash.get_full_size(block_num)
out2 = pyethash.get_full_size((block_num // pyethash.EPOCH_LENGTH) * pyethash.EPOCH_LENGTH)
assert out1 == out2
# See light_and_full_client_checks in test.cpp
def test_mkcache_is_as_expected():
actual = pyethash.mkcache_bytes(
1024,
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~").encode('hex')
expected = "2da2b506f21070e1143d908e867962486d6b0a02e31d468fd5e3a7143aafa76a14201f63374314e2a6aaf84ad2eb57105dea3378378965a1b3873453bb2b78f9a8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995ca8620b2ebeca41fbc773bb837b5e724d6eb2de570d99858df0d7d97067fb8103b21757873b735097b35d3bea8fd1c359a9e8a63c1540c76c9784cf8d975e995c259440b89fa3481c2c33171477c305c8e1e421f8d8f6d59585449d0034f3e421808d8da6bbd0b6378f567647cc6c4ba6c434592b198ad444e7284905b7c6adaf70bf43ec2daa7bd5e8951aa609ab472c124cf9eba3d38cff5091dc3f58409edcc386c743c3bd66f92408796ee1e82dd149eaefbf52b00ce33014a6eb3e50625413b072a58bc01da28262f42cbe4f87d4abc2bf287d15618405a1fe4e386fcdafbb171064bd99901d8f81dd6789396ce5e364ac944bbbd75a7827291c70b42d26385910cd53ca535ab29433dd5c5714d26e0dce95514c5ef866329c12e958097e84462197c2b32087849dab33e88b11da61d52f9dbc0b92cc61f742c07dbbf751c49d7678624ee60dfbe62e5e8c47a03d8247643f3d16ad8c8e663953bcda1f59d7e2d4a9bf0768e789432212621967a8f41121ad1df6ae1fa78782530695414c6213942865b2730375019105cae91a4c17a558d4b63059661d9f108362143107babe0b848de412e4da59168cce82bfbff3c99e022dd6ac1e559db991f2e3f7bb910cefd173e65ed00a8d5d416534e2c8416ff23977dbf3eb7180b75c71580d08ce95efeb9b0afe904ea12285a392aff0c8561ff79fca67f694a62b9e52377485c57cc3598d84cac0a9d27960de0cc31ff9bbfe455acaa62c8aa5d2cce96f345da9afe843d258a99c4eaf3650fc62efd81c7b81cd0d534d2d71eeda7a6e315d540b4473c80f8730037dc2ae3e47b986240cfc65ccc565f0d8cde0bc68a57e39a271dda57440b3598bee19f799611d25731a96b5dbbbefdff6f4f656161462633030d62560ea4e9c161cf78fc96a2ca5aaa32453a6c5dea206f766244e8c9d9a8dc61185ce37f1fc804459c5f07434f8ecb34141b8dcae7eae704c950b55556c5f40140c3714b45eddb02637513268778cbf937a33e4e33183685f9deb31ef54e90161e76d969587dd782eaa94e289420e7c2ee908517f5893a26fdb5873d68f92d118d4bcf98d7a4916794d6ab290045e30f9ea00ca547c584b8482b0331ba1539a0f2714fddc3a0b06b0cfbb6a607b8339c39bcfd6640b1f653e9d70ef6c985b"
assert actual == expected
def test_calc_dataset_is_not_None():
cache = pyethash.mkcache_bytes(
1024,
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
assert pyethash.calc_dataset_bytes(1024 * 32, cache) != None
def test_light_and_full_agree():
cache = pyethash.mkcache_bytes(
1024,
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
full_size = 1024 * 32
header = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
light_result = pyethash.hashimoto_light(full_size, cache, header, 0)
dataset = pyethash.calc_dataset_bytes(full_size, cache)
full_result = pyethash.hashimoto_full(dataset, header, 0)
assert light_result["mix digest"] != None
assert len(light_result["mix digest"]) == 32
assert light_result["mix digest"] == full_result["mix digest"]
assert light_result["result"] != None
assert len(light_result["result"]) == 32
assert light_result["result"] == full_result["result"]
def int_to_bytes(i):
b = []
for _ in range(32):
b.append(chr(i & 0xff))
i >>= 8
b.reverse()
return "".join(b)
def test_mining_basic():
easy_difficulty = int_to_bytes(2**256 - 1)
assert easy_difficulty.encode('hex') == 'f' * 64
cache = pyethash.mkcache_bytes(
1024,
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
full_size = 1024 * 32
header = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
dataset = pyethash.calc_dataset_bytes(full_size, cache)
# Check type of outputs
assert type(pyethash.mine(dataset,header,easy_difficulty)) == dict
assert type(pyethash.mine(dataset,header,easy_difficulty)["nonce"]) == long
assert type(pyethash.mine(dataset,header,easy_difficulty)["mix digest"]) == str
assert type(pyethash.mine(dataset,header,easy_difficulty)["result"]) == str
def test_mining_doesnt_always_return_the_same_value():
easy_difficulty1 = int_to_bytes(int(2**256 * 0.999))
# 1 in 1000 difficulty
easy_difficulty2 = int_to_bytes(int(2**256 * 0.001))
assert easy_difficulty1 != easy_difficulty2
cache = pyethash.mkcache_bytes(
1024,
"~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
full_size = 1024 * 32
header = "~~~~~X~~~~~~~~~~~~~~~~~~~~~~~~~~"
dataset = pyethash.calc_dataset_bytes(full_size, cache)
# Mining against different difficulties should find different nonces
assert pyethash.mine(dataset, header, easy_difficulty1)['nonce'] != pyethash.mine(dataset, header, easy_difficulty2)['nonce']
def test_get_seedhash():
assert pyethash.get_seedhash(0).encode('hex') == '0' * 64
import hashlib, sha3
expected = pyethash.get_seedhash(0)
#print "checking seed hashes:",
for i in range(0, 30000*2048, 30000):
#print i // 30000,
assert pyethash.get_seedhash(i) == expected
expected = hashlib.sha3_256(expected).digest()
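
The seed hash walked by test_get_seedhash is just Keccak-256 applied once per epoch to an initial 32 zero bytes. A hedged Go sketch of that chain, assuming golang.org/x/crypto/sha3 for the legacy Keccak (not NIST SHA-3):

package main

import (
    "encoding/hex"
    "fmt"

    "golang.org/x/crypto/sha3"
)

const epochLength = 30000

// seedHash returns the ethash seed for the epoch containing blockNumber:
// Keccak-256 applied epoch times to 32 zero bytes.
func seedHash(blockNumber uint64) []byte {
    seed := make([]byte, 32)
    for i := uint64(0); i < blockNumber/epochLength; i++ {
        h := sha3.NewLegacyKeccak256()
        h.Write(seed)
        seed = h.Sum(nil)
    }
    return seed
}

func main() {
    fmt.Println(hex.EncodeToString(seedHash(0))) // 64 zeros, as asserted above for block 0
}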

View File

@@ -1,32 +0,0 @@
#!/bin/bash
# Strict mode
set -e
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE"
done
TEST_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
echo -e "\n################# Testing JS ##################"
# TODO: Use mocha and real testing tools instead of rolling our own
cd $TEST_DIR/../js
if [ -x "$(which nodejs)" ] ; then
nodejs test.js
fi
if [ -x "$(which node)" ] ; then
node test.js
fi
echo -e "\n################# Testing C ##################"
$TEST_DIR/c/test.sh
# Temporarily commenting out python tests until they conform to the API
#echo -e "\n################# Testing Python ##################"
#$TEST_DIR/python/test.sh
echo "################# Testing Go ##################"
cd $TEST_DIR/.. && go test -timeout 9999s

View File

@@ -74,6 +74,7 @@ type Config struct {
DocRoot string
AutoDAG bool
PowTest bool
PowShared bool
ExtraData []byte
AccountManager *accounts.Manager
@@ -211,14 +212,18 @@ func New(ctx *node.ServiceContext, config *Config) (*Ethereum, error) {
GpobaseCorrectionFactor: config.GpobaseCorrectionFactor,
httpclient: httpclient.New(config.DocRoot),
}
if config.PowTest {
switch {
case config.PowTest:
glog.V(logger.Info).Infof("ethash used in test mode")
eth.pow, err = ethash.NewForTesting()
if err != nil {
return nil, err
}
} else {
case config.PowShared:
glog.V(logger.Info).Infof("ethash used in shared mode")
eth.pow = ethash.NewShared()
default:
eth.pow = ethash.New()
}
//genesis := core.GenesisBlock(uint64(config.GenesisNonce), stateDb)
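
The switch introduced above selects between the three ethash constructors exposed by github.com/ethereum/ethash. A hedged, standalone sketch of the same selection (the helper and its exact signature are illustrative; only the three constructor calls come from the diff):

package powmode

import "github.com/ethereum/ethash"

// choosePow mirrors the new switch in eth.New: the full ethash engine by
// default, a tiny test DAG when PowTest is set, or a single process-wide
// shared instance when PowShared is set.
func choosePow(powTest, powShared bool) (*ethash.Ethash, error) {
    switch {
    case powTest:
        return ethash.NewForTesting()
    case powShared:
        return ethash.NewShared(), nil
    default:
        return ethash.New(), nil
    }
}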

View File

@@ -178,6 +178,7 @@ func runBlockTest(test *BlockTest) error {
TestGenesisBlock: test.Genesis,
Etherbase: common.Address{},
AccountManager: am,
PowShared: true,
}
ethereum, err := eth.New(&node.ServiceContext{EventMux: new(event.TypeMux)}, cfg)
if err != nil {