diff --git a/ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasher.java b/ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasher.java
index 9b92227985..d1c1dd1fee 100644
--- a/ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasher.java
+++ b/ethereum/core/src/main/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasher.java
@@ -12,16 +12,6 @@
  */
 package tech.pegasys.pantheon.ethereum.mainnet;
 
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.RandomAccessFile;
-import java.nio.file.Path;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-import com.google.common.primitives.Ints;
-
 public interface EthHasher {
 
   /**
@@ -45,135 +35,4 @@ public void hash(
       System.arraycopy(hash, 0, buffer, 0, hash.length);
     }
   }
-
-  final class Full implements EthHasher, Closeable {
-
-    private static final int HASHERS = Runtime.getRuntime().availableProcessors();
-
-    private long epoch = -1L;
-
-    private long datasetSize;
-
-    private final RandomAccessFile cacheFile;
-
-    private final ExecutorService hashers = Executors.newFixedThreadPool(HASHERS);
-
-    public Full(final Path cacheFile) throws IOException {
-      this.cacheFile = new RandomAccessFile(cacheFile.toFile(), "rw");
-      datasetSize = this.cacheFile.length();
-    }
-
-    @Override
-    public void hash(
-        final byte[] buffer, final long nonce, final long number, final byte[] headerHash) {
-      final long newEpoch = EthHash.epoch(number);
-      if (epoch != newEpoch) {
-        updateCache(number, newEpoch);
-      }
-      final byte[] hash =
-          EthHash.hashimoto(
-              headerHash,
-              datasetSize,
-              nonce,
-              (bytes, offset) -> {
-                try {
-                  cacheFile.seek(((long) offset) * EthHash.HASH_BYTES);
-                  cacheFile.readFully(bytes);
-                } catch (final IOException ex) {
-                  throw new IllegalStateException(ex);
-                }
-              });
-      System.arraycopy(hash, 0, buffer, 0, hash.length);
-    }
-
-    private void updateCache(final long number, final long newEpoch) {
-      final int[] cache = EthHash.mkCache(Ints.checkedCast(EthHash.cacheSize(newEpoch)), number);
-      epoch = newEpoch;
-      final long newDatasetSize = EthHash.datasetSize(epoch);
-      if (newDatasetSize != datasetSize) {
-        datasetSize = newDatasetSize;
-        final CountDownLatch doneLatch = new CountDownLatch(HASHERS);
-        final int upperBound = Ints.checkedCast(datasetSize / EthHash.HASH_BYTES);
-        final int partitionSize = upperBound / HASHERS;
-        for (int partition = 0; partition < HASHERS; ++partition) {
-          hashers.execute(
-              new EthHasher.Full.HasherTask(
-                  partition * partitionSize,
-                  partition == HASHERS - 1 ? upperBound : (partition + 1) * partitionSize,
-                  cache,
-                  doneLatch,
-                  cacheFile));
-        }
-        try {
-          doneLatch.await();
-        } catch (final InterruptedException ex) {
-          throw new IllegalStateException(ex);
-        }
-      }
-    }
-
-    @Override
-    public void close() throws IOException {
-      cacheFile.close();
-      hashers.shutdownNow();
-    }
-
-    private static final class HasherTask implements Runnable {
-
-      private static final int DISK_BATCH_SIZE = 256;
-
-      private final int start;
-      private final int end;
-      private final int[] cache;
-      private final CountDownLatch doneLatch;
-      private final RandomAccessFile cacheFile;
-
-      HasherTask(
-          final int start,
-          final int upperBound,
-          final int[] cache,
-          final CountDownLatch doneLatch,
-          final RandomAccessFile cacheFile) {
-        this.end = upperBound;
-        this.cache = cache;
-        this.start = start;
-        this.doneLatch = doneLatch;
-        this.cacheFile = cacheFile;
-      }
-
-      @Override
-      public void run() {
-        try {
-          final byte[] itemBuffer = new byte[EthHash.HASH_BYTES];
-          final byte[] writeBuffer = new byte[EthHash.HASH_BYTES * DISK_BATCH_SIZE];
-          int buffered = 0;
-          long lastOffset = 0;
-          for (int i = start; i < end; ++i) {
-            EthHash.calcDatasetItem(itemBuffer, cache, i);
-            System.arraycopy(
-                itemBuffer, 0, writeBuffer, buffered * EthHash.HASH_BYTES, EthHash.HASH_BYTES);
-            ++buffered;
-            if (buffered == DISK_BATCH_SIZE) {
-              synchronized (cacheFile) {
-                lastOffset =
-                    (long) ((i - DISK_BATCH_SIZE + 1) * EthHash.HASH_BYTES) + writeBuffer.length;
-                cacheFile.seek(lastOffset - writeBuffer.length);
-                cacheFile.write(writeBuffer);
-              }
-              buffered = 0;
-            }
-          }
-          if (buffered > 0) {
-            synchronized (cacheFile) {
-              cacheFile.seek(lastOffset);
-              cacheFile.write(writeBuffer, 0, buffered * EthHash.HASH_BYTES);
-            }
-          }
-          doneLatch.countDown();
-        } catch (final IOException ex) {
-          throw new IllegalStateException(ex);
-        }
-      }
-    }
-  }
 }
diff --git a/ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasherTest.java b/ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasherTest.java
deleted file mode 100644
index 906afe0c85..0000000000
--- a/ethereum/core/src/test/java/tech/pegasys/pantheon/ethereum/mainnet/EthHasherTest.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2018 ConsenSys AG.
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
- * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations under the License.
- */
-package tech.pegasys.pantheon.ethereum.mainnet;
-
-import tech.pegasys.pantheon.ethereum.core.BlockHeader;
-import tech.pegasys.pantheon.ethereum.rlp.BytesValueRLPInput;
-import tech.pegasys.pantheon.ethereum.rlp.RLPInput;
-import tech.pegasys.pantheon.ethereum.util.ByteArrayUtil;
-import tech.pegasys.pantheon.util.bytes.BytesValue;
-
-import com.google.common.io.Resources;
-import org.assertj.core.api.Assertions;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-
-/** Tests for {@link EthHasher}. */
-public final class EthHasherTest {
-
-  @Rule public final TemporaryFolder folder = new TemporaryFolder();
-
-  // TODO: Find a faster way to test HashimotoFull, this test takes almost 2 minutes.
-  @Test
-  @Ignore
-  public void hashimotoFull() throws Exception {
-    try (final EthHasher.Full hasher = new EthHasher.Full(folder.newFile().toPath())) {
-      final RLPInput input =
-          new BytesValueRLPInput(
-              BytesValue.wrap(
-                  Resources.toByteArray(EthHashTest.class.getResource("block_300005.blocks"))),
-              false);
-      input.enterList();
-      final BlockHeader header = BlockHeader.readFrom(input, MainnetBlockHashFunction::createHash);
-      final byte[] buffer = new byte[64];
-      hasher.hash(buffer, header.getNonce(), header.getNumber(), EthHash.hashHeader(header));
-      Assertions.assertThat(
-              ByteArrayUtil.compare(buffer, 0, 32, header.getMixHash().extractArray(), 0, 32))
-          .isEqualTo(0);
-    }
-  }
-}
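Note: with `EthHasher.Full` gone, Ethash verification falls to the light, cache-based evaluation path that remains in `EthHasher`. The sketch below is hypothetical and not code from this change: it only illustrates how a light evaluator could drive the same `EthHash` helpers the removed class used (`epoch`, `cacheSize`, `mkCache`, `datasetSize`, `hashimoto`, `calcDatasetItem`), computing each dataset item on demand from the epoch cache instead of seeking into a pre-generated DAG file. The class name `LightHasherSketch`, the lambda parameter name, and the single-threaded usage assumption are illustrative only.

```java
package tech.pegasys.pantheon.ethereum.mainnet;

import com.google.common.primitives.Ints;

/** Hypothetical sketch only; not the project's actual light hasher. */
final class LightHasherSketch implements EthHasher {

  private long epoch = -1L;
  private int[] cache;

  @Override
  public void hash(
      final byte[] buffer, final long nonce, final long number, final byte[] headerHash) {
    final long newEpoch = EthHash.epoch(number);
    if (epoch != newEpoch) {
      // Rebuild the per-epoch cache; it is small compared to the full DAG that Full generated.
      cache = EthHash.mkCache(Ints.checkedCast(EthHash.cacheSize(newEpoch)), number);
      epoch = newEpoch;
    }
    final byte[] hash =
        EthHash.hashimoto(
            headerHash,
            EthHash.datasetSize(epoch),
            nonce,
            // Derive each requested dataset item from the cache on the fly instead of
            // reading it from the RandomAccessFile-backed DAG the removed Full hasher kept.
            (bytes, itemIndex) -> EthHash.calcDatasetItem(bytes, cache, itemIndex));
    System.arraycopy(hash, 0, buffer, 0, hash.length);
  }
}
```

The trade-off this illustrates is the usual Ethash one: light evaluation recomputes each dataset item per lookup, which is slower per hash but avoids generating and storing the multi-gigabyte DAG, which is why the ignored, roughly two-minute `hashimotoFull` test could be deleted along with the full hasher.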