Repository: cassandra
Updated Branches:
  refs/heads/trunk ec7d5f9da -> 30ed83d92


Replace usages of MessageDigest with Guava's Hasher

patch by mkjellman; reviewed by jasobrown and Sam Tunnicliffe for 
CASSANDRA-13291


Project: http://git-wip-us.apache.org/repos/asf/cassandra/repo
Commit: http://git-wip-us.apache.org/repos/asf/cassandra/commit/30ed83d9
Tree: http://git-wip-us.apache.org/repos/asf/cassandra/tree/30ed83d9
Diff: http://git-wip-us.apache.org/repos/asf/cassandra/diff/30ed83d9

Branch: refs/heads/trunk
Commit: 30ed83d9266a03debad98ffac5610dcb3ae30934
Parents: ec7d5f9
Author: Michael Kjellman <kjell...@apple.com>
Authored: Tue Sep 26 14:41:03 2017 -0700
Committer: Jason Brown <jasedbr...@gmail.com>
Committed: Tue Oct 24 11:35:29 2017 -0700

----------------------------------------------------------------------
 CHANGES.txt                                     |   1 +
 .../org/apache/cassandra/cql3/ResultSet.java    |  11 +-
 .../cassandra/db/AbstractClusteringPrefix.java  |  11 +-
 .../apache/cassandra/db/ClusteringPrefix.java   |   9 +-
 src/java/org/apache/cassandra/db/Columns.java   |   7 +-
 .../org/apache/cassandra/db/DeletionTime.java   |   8 +-
 .../org/apache/cassandra/db/LivenessInfo.java   |  19 ++--
 .../org/apache/cassandra/db/ReadResponse.java   |  10 +-
 .../cassandra/db/context/CounterContext.java    |   8 +-
 .../UnfilteredPartitionIterators.java           |   9 +-
 .../apache/cassandra/db/rows/AbstractCell.java  |  19 ++--
 .../apache/cassandra/db/rows/AbstractRow.java   |  16 +--
 .../org/apache/cassandra/db/rows/CellPath.java  |   8 +-
 .../apache/cassandra/db/rows/ColumnData.java    |   7 +-
 .../cassandra/db/rows/ComplexColumnData.java    |   8 +-
 .../db/rows/RangeTombstoneBoundMarker.java      |   9 +-
 .../db/rows/RangeTombstoneBoundaryMarker.java   |  11 +-
 src/java/org/apache/cassandra/db/rows/Row.java  |  10 +-
 .../apache/cassandra/db/rows/RowIterators.java  |  19 ++--
 .../apache/cassandra/db/rows/Unfiltered.java    |   8 +-
 .../db/rows/UnfilteredRowIterators.java         |  21 ++--
 .../apache/cassandra/dht/RandomPartitioner.java |  44 +++++++-
 .../apache/cassandra/net/MessagingService.java  |   1 +
 .../org/apache/cassandra/repair/Validator.java  | 102 +++++++++++++----
 .../cassandra/schema/SchemaConstants.java       |  18 ++-
 .../apache/cassandra/schema/SchemaKeyspace.java |  19 +---
 .../org/apache/cassandra/utils/FBUtilities.java |  86 ---------------
 .../apache/cassandra/utils/GuidGenerator.java   |   2 +-
 .../apache/cassandra/utils/HashingUtils.java    | 109 ++++++++++++++++++
 .../org/apache/cassandra/utils/MD5Digest.java   |  35 +++++-
 .../org/apache/cassandra/utils/UUIDGen.java     |  43 +++-----
 .../cassandra/test/microbench/HashingBench.java | 110 +++++++++++++++++++
 .../cassandra/cache/CacheProviderTest.java      |  22 ++--
 .../apache/cassandra/db/CounterCellTest.java    |  14 +--
 .../org/apache/cassandra/db/PartitionTest.java  |  35 +++---
 .../cassandra/utils/HashingUtilsTest.java       |  92 ++++++++++++++++
 36 files changed, 652 insertions(+), 309 deletions(-)
----------------------------------------------------------------------
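
A note on the shape of the change: throughout the patch a MessageDigest parameter becomes a Guava Hasher, the byte-level update helpers move from FBUtilities to the new HashingUtils class, and callers read the final bytes with hasher.hash().asBytes() instead of digest.digest(). A minimal, hypothetical call-site sketch of that pattern (illustrative only, not taken from the patch; MD5 is assumed here purely to mirror the old thread-local default):

    import java.nio.ByteBuffer;

    import com.google.common.hash.Hasher;
    import com.google.common.hash.Hashing;

    class DigestCallSiteSketch
    {
        // roughly the shape of ReadResponse.makeDigest after this patch:
        // build a Hasher, feed it the partition data, wrap the resulting bytes
        static ByteBuffer makeDigest(ByteBuffer partitionKey)
        {
            Hasher hasher = Hashing.md5().newHasher();      // the patch uses HashingUtils.CURRENT_HASH_FUNCTION
            byte[] keyBytes = new byte[partitionKey.remaining()];
            partitionKey.duplicate().get(keyBytes);
            hasher.putBytes(keyBytes);                      // the patch funnels ByteBuffers through HashingUtils.updateBytes
            return ByteBuffer.wrap(hasher.hash().asBytes());
        }
    }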


http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/CHANGES.txt
----------------------------------------------------------------------
diff --git a/CHANGES.txt b/CHANGES.txt
index 46c7614..157cbff 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,4 +1,5 @@
 4.0
+ * Replace usages of MessageDigest with Guava's Hasher (CASSANDRA-13291)
  * Add nodetool cmd to print hinted handoff window (CASSANDRA-13728)
  * Fix some alerts raised by static analysis (CASSANDRA-13799)
  * Checksum sstable metadata (CASSANDRA-13321, CASSANDRA-13593)

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/cql3/ResultSet.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/cql3/ResultSet.java 
b/src/java/org/apache/cassandra/cql3/ResultSet.java
index e4b03ca..7dd51fd 100644
--- a/src/java/org/apache/cassandra/cql3/ResultSet.java
+++ b/src/java/org/apache/cassandra/cql3/ResultSet.java
@@ -29,7 +29,6 @@ import java.util.List;
 import java.util.Objects;
 
 import io.netty.buffer.ByteBuf;
-import org.apache.cassandra.cql3.statements.ModificationStatement;
 import org.apache.cassandra.cql3.statements.ParsedStatement;
 import org.apache.cassandra.cql3.statements.SelectStatement;
 import org.apache.cassandra.db.marshal.AbstractType;
@@ -39,7 +38,6 @@ import org.apache.cassandra.transport.CBUtil;
 import org.apache.cassandra.transport.DataType;
 import org.apache.cassandra.transport.ProtocolVersion;
 import org.apache.cassandra.utils.ByteBufferUtil;
-import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.MD5Digest;
 
 public class ResultSet
@@ -697,9 +695,14 @@ public class ResultSet
         }
     }
 
-    public static MD5Digest computeResultMetadataId(List<ColumnSpecification> 
columnSpecifications)
+    static MD5Digest computeResultMetadataId(List<ColumnSpecification> 
columnSpecifications)
     {
-        MessageDigest md = FBUtilities.threadLocalMD5Digest();
+        // still using the MD5 MessageDigest thread local here instead of a HashingUtils/Guava
+        // Hasher, as ResultSet will need to be changed alongside other usages of MD5
+        // in the native transport/protocol, and it seems to make more sense to do that
+        // then, rather than as part of the Guava Hasher refactor, which is focused on
+        // non-client protocol digests
+        MessageDigest md = MD5Digest.threadLocalMD5Digest();
 
         if (columnSpecifications != null)
         {

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/AbstractClusteringPrefix.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/AbstractClusteringPrefix.java 
b/src/java/org/apache/cassandra/db/AbstractClusteringPrefix.java
index 0b1daf7..884a091 100644
--- a/src/java/org/apache/cassandra/db/AbstractClusteringPrefix.java
+++ b/src/java/org/apache/cassandra/db/AbstractClusteringPrefix.java
@@ -18,10 +18,11 @@
 package org.apache.cassandra.db;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.Objects;
 
-import org.apache.cassandra.utils.FBUtilities;
+import com.google.common.hash.Hasher;
+
+import org.apache.cassandra.utils.HashingUtils;
 
 public abstract class AbstractClusteringPrefix implements ClusteringPrefix
 {
@@ -41,15 +42,15 @@ public abstract class AbstractClusteringPrefix implements 
ClusteringPrefix
         return size;
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
         for (int i = 0; i < size(); i++)
         {
             ByteBuffer bb = get(i);
             if (bb != null)
-                digest.update(bb.duplicate());
+                HashingUtils.updateBytes(hasher, bb.duplicate());
         }
-        FBUtilities.updateWithByte(digest, kind().ordinal());
+        HashingUtils.updateWithByte(hasher, kind().ordinal());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/ClusteringPrefix.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/ClusteringPrefix.java 
b/src/java/org/apache/cassandra/db/ClusteringPrefix.java
index fd34f24..4355aa4 100644
--- a/src/java/org/apache/cassandra/db/ClusteringPrefix.java
+++ b/src/java/org/apache/cassandra/db/ClusteringPrefix.java
@@ -19,9 +19,10 @@ package org.apache.cassandra.db;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.*;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.cache.IMeasurableMemory;
 import org.apache.cassandra.config.*;
 import org.apache.cassandra.db.marshal.CompositeType;
@@ -217,11 +218,11 @@ public interface ClusteringPrefix extends 
IMeasurableMemory, Clusterable
     public ByteBuffer get(int i);
 
     /**
-     * Adds the data of this clustering prefix to the provided digest.
+     * Adds the data of this clustering prefix to the provided Hasher instance.
      *
-     * @param digest the digest to which to add this prefix.
+     * @param hasher the Hasher instance to which to add this prefix.
      */
-    public void digest(MessageDigest digest);
+    public void digest(Hasher hasher);
 
     /**
     * The size of the data held by this prefix.

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/Columns.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/Columns.java 
b/src/java/org/apache/cassandra/db/Columns.java
index f38856f..00cdf4a 100644
--- a/src/java/org/apache/cassandra/db/Columns.java
+++ b/src/java/org/apache/cassandra/db/Columns.java
@@ -22,10 +22,10 @@ import java.util.*;
 import java.util.function.Consumer;
 import java.util.function.Predicate;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Iterators;
+import com.google.common.hash.Hasher;
 
 import net.nicoulaj.compilecommand.annotations.DontInline;
 import org.apache.cassandra.schema.ColumnMetadata;
@@ -36,6 +36,7 @@ import org.apache.cassandra.db.marshal.UTF8Type;
 import org.apache.cassandra.io.util.DataInputPlus;
 import org.apache.cassandra.io.util.DataOutputPlus;
 import org.apache.cassandra.utils.ByteBufferUtil;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.SearchIterator;
 import org.apache.cassandra.utils.btree.BTree;
 import org.apache.cassandra.utils.btree.BTreeSearchIterator;
@@ -361,10 +362,10 @@ public class Columns extends 
AbstractCollection<ColumnMetadata> implements Colle
         return column -> iter.next(column) != null;
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
         for (ColumnMetadata c : this)
-            digest.update(c.name.bytes.duplicate());
+            HashingUtils.updateBytes(hasher, c.name.bytes.duplicate());
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/DeletionTime.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/DeletionTime.java 
b/src/java/org/apache/cassandra/db/DeletionTime.java
index 652689c..9dcbb07 100644
--- a/src/java/org/apache/cassandra/db/DeletionTime.java
+++ b/src/java/org/apache/cassandra/db/DeletionTime.java
@@ -18,16 +18,16 @@
 package org.apache.cassandra.db;
 
 import java.io.IOException;
-import java.security.MessageDigest;
 
 import com.google.common.base.Objects;
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.cache.IMeasurableMemory;
 import org.apache.cassandra.db.rows.Cell;
 import org.apache.cassandra.io.ISerializer;
 import org.apache.cassandra.io.util.DataInputPlus;
 import org.apache.cassandra.io.util.DataOutputPlus;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.ObjectSizes;
 
 /**
@@ -80,12 +80,12 @@ public class DeletionTime implements 
Comparable<DeletionTime>, IMeasurableMemory
         return markedForDeleteAt() == Long.MIN_VALUE && localDeletionTime() == 
Integer.MAX_VALUE;
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
         // localDeletionTime is basically metadata about the deletion time that tells us when it's ok to purge it.
         // It's thus intrinsically local information and shouldn't be part of the digest (which exists for
         // cross-node comparisons).
-        FBUtilities.updateWithLong(digest, markedForDeleteAt());
+        HashingUtils.updateWithLong(hasher, markedForDeleteAt());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/LivenessInfo.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/LivenessInfo.java 
b/src/java/org/apache/cassandra/db/LivenessInfo.java
index b581f78..97b8b3f 100644
--- a/src/java/org/apache/cassandra/db/LivenessInfo.java
+++ b/src/java/org/apache/cassandra/db/LivenessInfo.java
@@ -18,10 +18,11 @@
 package org.apache.cassandra.db;
 
 import java.util.Objects;
-import java.security.MessageDigest;
+
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.serializers.MarshalException;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 /**
  * Stores the information relating to the liveness of the primary key columns 
of a row.
@@ -141,11 +142,11 @@ public class LivenessInfo
     /**
      * Adds this liveness information to the provided digest.
      *
-     * @param digest the digest to add this liveness information to.
+     * @param hasher the hasher to add this liveness information to.
      */
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
-        FBUtilities.updateWithLong(digest, timestamp());
+        HashingUtils.updateWithLong(hasher, timestamp());
     }
 
     /**
@@ -268,11 +269,11 @@ public class LivenessInfo
         }
 
         @Override
-        public void digest(MessageDigest digest)
+        public void digest(Hasher hasher)
         {
-            super.digest(digest);
-            FBUtilities.updateWithInt(digest, localExpirationTime);
-            FBUtilities.updateWithInt(digest, ttl);
+            super.digest(hasher);
+            HashingUtils.updateWithInt(hasher, localExpirationTime);
+            HashingUtils.updateWithInt(hasher, ttl);
         }
 
         @Override

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/ReadResponse.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/ReadResponse.java 
b/src/java/org/apache/cassandra/db/ReadResponse.java
index 48cd811..2c06f65 100644
--- a/src/java/org/apache/cassandra/db/ReadResponse.java
+++ b/src/java/org/apache/cassandra/db/ReadResponse.java
@@ -19,9 +19,9 @@ package org.apache.cassandra.db;
 
 import java.io.*;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.db.filter.ColumnFilter;
 import org.apache.cassandra.db.partitions.*;
@@ -33,7 +33,7 @@ import org.apache.cassandra.io.util.DataOutputBuffer;
 import org.apache.cassandra.io.util.DataOutputPlus;
 import org.apache.cassandra.net.MessagingService;
 import org.apache.cassandra.utils.ByteBufferUtil;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 public abstract class ReadResponse
 {
@@ -89,9 +89,9 @@ public abstract class ReadResponse
 
     protected static ByteBuffer makeDigest(UnfilteredPartitionIterator 
iterator, ReadCommand command)
     {
-        MessageDigest digest = FBUtilities.threadLocalMD5Digest();
-        UnfilteredPartitionIterators.digest(iterator, digest, 
command.digestVersion());
-        return ByteBuffer.wrap(digest.digest());
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        UnfilteredPartitionIterators.digest(iterator, hasher, 
command.digestVersion());
+        return ByteBuffer.wrap(hasher.hash().asBytes());
     }
 
     private static class DigestResponse extends ReadResponse

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/context/CounterContext.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/context/CounterContext.java 
b/src/java/org/apache/cassandra/db/context/CounterContext.java
index 29e5cfc..9b34231 100644
--- a/src/java/org/apache/cassandra/db/context/CounterContext.java
+++ b/src/java/org/apache/cassandra/db/context/CounterContext.java
@@ -18,11 +18,11 @@
 package org.apache.cassandra.db.context;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.ArrayList;
 import java.util.List;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.hash.Hasher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -684,17 +684,17 @@ public class CounterContext
     }
 
     /**
-     * Update a MessageDigest with the content of a context.
+     * Update a {@link Hasher} with the content of a context.
      * Note that this skips the header entirely since the header information
      * has local meaning only, while digests are meant for comparison across
      * nodes. This means in particular that we always have:
      *  updateDigest(ctx) == updateDigest(clearAllLocal(ctx))
      */
-    public void updateDigest(MessageDigest message, ByteBuffer context)
+    public void updateDigest(Hasher hasher, ByteBuffer context)
     {
         ByteBuffer dup = context.duplicate();
         dup.position(context.position() + headerLength(context));
-        message.update(dup);
+        HashingUtils.updateBytes(hasher, dup);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/partitions/UnfilteredPartitionIterators.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/cassandra/db/partitions/UnfilteredPartitionIterators.java 
b/src/java/org/apache/cassandra/db/partitions/UnfilteredPartitionIterators.java
index 2d67e21..dff6dae 100644
--- 
a/src/java/org/apache/cassandra/db/partitions/UnfilteredPartitionIterators.java
+++ 
b/src/java/org/apache/cassandra/db/partitions/UnfilteredPartitionIterators.java
@@ -19,9 +19,10 @@ package org.apache.cassandra.db.partitions;
 
 import java.io.IOError;
 import java.io.IOException;
-import java.security.MessageDigest;
 import java.util.*;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.db.*;
 import org.apache.cassandra.db.filter.ColumnFilter;
 import org.apache.cassandra.db.rows.*;
@@ -234,10 +235,10 @@ public abstract class UnfilteredPartitionIterators
      * Digests the provided iterator.
      *
      * @param iterator the iterator to digest.
-     * @param digest the {@code MessageDigest} to use for the digest.
+     * @param hasher the {@link Hasher} to use for the digest.
      * @param version the messaging protocol to use when producing the digest.
      */
-    public static void digest(UnfilteredPartitionIterator iterator, 
MessageDigest digest, int version)
+    public static void digest(UnfilteredPartitionIterator iterator, Hasher 
hasher, int version)
     {
         try (UnfilteredPartitionIterator iter = iterator)
         {
@@ -245,7 +246,7 @@ public abstract class UnfilteredPartitionIterators
             {
                 try (UnfilteredRowIterator partition = iter.next())
                 {
-                    UnfilteredRowIterators.digest(partition, digest, version);
+                    UnfilteredRowIterators.digest(partition, hasher, version);
                 }
             }
         }

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/AbstractCell.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/AbstractCell.java 
b/src/java/org/apache/cassandra/db/rows/AbstractCell.java
index 73a3b74..4946a46 100644
--- a/src/java/org/apache/cassandra/db/rows/AbstractCell.java
+++ b/src/java/org/apache/cassandra/db/rows/AbstractCell.java
@@ -18,9 +18,10 @@
 package org.apache.cassandra.db.rows;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.Objects;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.schema.ColumnMetadata;
 import org.apache.cassandra.db.DeletionPurger;
 import org.apache.cassandra.db.TypeSizes;
@@ -28,7 +29,7 @@ import org.apache.cassandra.db.context.CounterContext;
 import org.apache.cassandra.db.marshal.AbstractType;
 import org.apache.cassandra.db.marshal.CollectionType;
 import org.apache.cassandra.serializers.MarshalException;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.memory.AbstractAllocator;
 
 /**
@@ -119,22 +120,22 @@ public abstract class AbstractCell extends Cell
                + (path == null ? 0 : path.dataSize());
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
         if (isCounterCell())
         {
-            CounterContext.instance().updateDigest(digest, value());
+            CounterContext.instance().updateDigest(hasher, value());
         }
         else
         {
-            digest.update(value().duplicate());
+            HashingUtils.updateBytes(hasher, value().duplicate());
         }
 
-        FBUtilities.updateWithLong(digest, timestamp());
-        FBUtilities.updateWithInt(digest, ttl());
-        FBUtilities.updateWithBoolean(digest, isCounterCell());
+        HashingUtils.updateWithLong(hasher, timestamp());
+        HashingUtils.updateWithInt(hasher, ttl());
+        HashingUtils.updateWithBoolean(hasher, isCounterCell());
         if (path() != null)
-            path().digest(digest);
+            path().digest(hasher);
     }
 
     public void validate()

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/AbstractRow.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/AbstractRow.java 
b/src/java/org/apache/cassandra/db/rows/AbstractRow.java
index c986d63..211b13f 100644
--- a/src/java/org/apache/cassandra/db/rows/AbstractRow.java
+++ b/src/java/org/apache/cassandra/db/rows/AbstractRow.java
@@ -17,7 +17,6 @@
 package org.apache.cassandra.db.rows;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.AbstractCollection;
 import java.util.Objects;
 import java.util.function.Function;
@@ -25,13 +24,14 @@ import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
 
 import com.google.common.collect.Iterables;
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.db.*;
 import org.apache.cassandra.db.marshal.CollectionType;
 import org.apache.cassandra.db.marshal.UserType;
 import org.apache.cassandra.serializers.MarshalException;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 /**
  * Base abstract class for {@code Row} implementations.
@@ -61,16 +61,16 @@ public abstract class AbstractRow extends 
AbstractCollection<ColumnData> impleme
         return clustering() == Clustering.STATIC_CLUSTERING;
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
-        FBUtilities.updateWithByte(digest, kind().ordinal());
-        clustering().digest(digest);
+        HashingUtils.updateWithByte(hasher, kind().ordinal());
+        clustering().digest(hasher);
 
-        deletion().digest(digest);
-        primaryKeyLivenessInfo().digest(digest);
+        deletion().digest(hasher);
+        primaryKeyLivenessInfo().digest(hasher);
 
         for (ColumnData cd : this)
-            cd.digest(digest);
+            cd.digest(hasher);
     }
 
     public void validateData(TableMetadata metadata)

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/CellPath.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/CellPath.java 
b/src/java/org/apache/cassandra/db/rows/CellPath.java
index 91a5217..94fa8e7 100644
--- a/src/java/org/apache/cassandra/db/rows/CellPath.java
+++ b/src/java/org/apache/cassandra/db/rows/CellPath.java
@@ -19,12 +19,14 @@ package org.apache.cassandra.db.rows;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.Objects;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.io.util.DataOutputPlus;
 import org.apache.cassandra.io.util.DataInputPlus;
 import org.apache.cassandra.utils.ByteBufferUtil;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.ObjectSizes;
 import org.apache.cassandra.utils.memory.AbstractAllocator;
 
@@ -54,10 +56,10 @@ public abstract class CellPath
         return size;
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
         for (int i = 0; i < size(); i++)
-            digest.update(get(i).duplicate());
+            HashingUtils.updateBytes(hasher, get(i).duplicate());
     }
 
     public abstract CellPath copy(AbstractAllocator allocator);

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/ColumnData.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/ColumnData.java 
b/src/java/org/apache/cassandra/db/rows/ColumnData.java
index 166a805..ccfcfa5 100644
--- a/src/java/org/apache/cassandra/db/rows/ColumnData.java
+++ b/src/java/org/apache/cassandra/db/rows/ColumnData.java
@@ -17,9 +17,10 @@
  */
 package org.apache.cassandra.db.rows;
 
-import java.security.MessageDigest;
 import java.util.Comparator;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.schema.ColumnMetadata;
 import org.apache.cassandra.db.DeletionPurger;
 import org.apache.cassandra.db.partitions.PartitionUpdate;
@@ -67,9 +68,9 @@ public abstract class ColumnData
     /**
      * Adds the data to the provided digest.
      *
-     * @param digest the {@code MessageDigest} to add the data to.
+     * @param hasher the {@link Hasher} to add the data to.
      */
-    public abstract void digest(MessageDigest digest);
+    public abstract void digest(Hasher hasher);
 
     /**
     * Returns a copy of the data where all timestamps for live data have been replaced by {@code newTimestamp} and

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/ComplexColumnData.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/ComplexColumnData.java 
b/src/java/org/apache/cassandra/db/rows/ComplexColumnData.java
index 79bb00e..57851d8 100644
--- a/src/java/org/apache/cassandra/db/rows/ComplexColumnData.java
+++ b/src/java/org/apache/cassandra/db/rows/ComplexColumnData.java
@@ -18,11 +18,11 @@
 package org.apache.cassandra.db.rows;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.Iterator;
 import java.util.Objects;
 
 import com.google.common.base.Function;
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.db.DeletionPurger;
 import org.apache.cassandra.db.DeletionTime;
@@ -129,13 +129,13 @@ public class ComplexColumnData extends ColumnData 
implements Iterable<Cell>
             cell.validate();
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
         if (!complexDeletion.isLive())
-            complexDeletion.digest(digest);
+            complexDeletion.digest(hasher);
 
         for (Cell cell : this)
-            cell.digest(digest);
+            cell.digest(hasher);
     }
 
     public ComplexColumnData markCounterLocalToBeCleared()

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundMarker.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundMarker.java 
b/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundMarker.java
index fd80b36..a1f2b2c 100644
--- a/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundMarker.java
+++ b/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundMarker.java
@@ -18,9 +18,10 @@
 package org.apache.cassandra.db.rows;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.Objects;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.db.*;
 import org.apache.cassandra.utils.memory.AbstractAllocator;
@@ -126,10 +127,10 @@ public class RangeTombstoneBoundMarker extends 
AbstractRangeTombstoneMarker<Clus
         return new RangeTombstoneBoundMarker(clustering(), newDeletionTime);
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
-        bound.digest(digest);
-        deletion.digest(digest);
+        bound.digest(hasher);
+        deletion.digest(hasher);
     }
 
     public String toString(TableMetadata metadata)

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundaryMarker.java
----------------------------------------------------------------------
diff --git 
a/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundaryMarker.java 
b/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundaryMarker.java
index 1038787..60b7dab 100644
--- a/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundaryMarker.java
+++ b/src/java/org/apache/cassandra/db/rows/RangeTombstoneBoundaryMarker.java
@@ -18,9 +18,10 @@
 package org.apache.cassandra.db.rows;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
 import java.util.Objects;
 
+import com.google.common.hash.Hasher;
+
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.db.*;
 import org.apache.cassandra.utils.memory.AbstractAllocator;
@@ -144,11 +145,11 @@ public class RangeTombstoneBoundaryMarker extends 
AbstractRangeTombstoneMarker<C
         return new RangeTombstoneBoundMarker(openBound(reversed), 
startDeletion);
     }
 
-    public void digest(MessageDigest digest)
+    public void digest(Hasher hasher)
     {
-        bound.digest(digest);
-        endDeletion.digest(digest);
-        startDeletion.digest(digest);
+        bound.digest(hasher);
+        endDeletion.digest(hasher);
+        startDeletion.digest(hasher);
     }
 
     public String toString(TableMetadata metadata)

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/Row.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/Row.java 
b/src/java/org/apache/cassandra/db/rows/Row.java
index 381f290..f922ffb 100644
--- a/src/java/org/apache/cassandra/db/rows/Row.java
+++ b/src/java/org/apache/cassandra/db/rows/Row.java
@@ -18,17 +18,17 @@
 package org.apache.cassandra.db.rows;
 
 import java.util.*;
-import java.security.MessageDigest;
 import java.util.function.Consumer;
 
 import com.google.common.base.Predicate;
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.db.*;
 import org.apache.cassandra.db.filter.ColumnFilter;
 import org.apache.cassandra.schema.ColumnMetadata;
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.service.paxos.Commit;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.MergeIterator;
 import org.apache.cassandra.utils.SearchIterator;
 import org.apache.cassandra.utils.btree.BTree;
@@ -376,10 +376,10 @@ public interface Row extends Unfiltered, 
Collection<ColumnData>
             return time.deletes(cell);
         }
 
-        public void digest(MessageDigest digest)
+        public void digest(Hasher hasher)
         {
-            time.digest(digest);
-            FBUtilities.updateWithBoolean(digest, isShadowable);
+            time.digest(hasher);
+            HashingUtils.updateWithBoolean(hasher, isShadowable);
         }
 
         public int dataSize()

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/RowIterators.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/RowIterators.java 
b/src/java/org/apache/cassandra/db/rows/RowIterators.java
index b155d68..d340777 100644
--- a/src/java/org/apache/cassandra/db/rows/RowIterators.java
+++ b/src/java/org/apache/cassandra/db/rows/RowIterators.java
@@ -17,15 +17,14 @@
  */
 package org.apache.cassandra.db.rows;
 
-import java.security.MessageDigest;
-
+import com.google.common.hash.Hasher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.cassandra.db.filter.ColumnFilter;
 import org.apache.cassandra.db.transform.Transformation;
 import org.apache.cassandra.schema.TableMetadata;
-import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 /**
  * Static methods to work with row iterators.
@@ -36,20 +35,20 @@ public abstract class RowIterators
 
     private RowIterators() {}
 
-    public static void digest(RowIterator iterator, MessageDigest digest)
+    public static void digest(RowIterator iterator, Hasher hasher)
     {
         // TODO: we're not computing the digest the same way as old nodes. This is
         // currently ok as this is only used for the schema digest and there is no exchange
         // of schema digests between different versions. If this changes, however,
         // we'll need to agree on a version.
-        digest.update(iterator.partitionKey().getKey().duplicate());
-        iterator.columns().regulars.digest(digest);
-        iterator.columns().statics.digest(digest);
-        FBUtilities.updateWithBoolean(digest, iterator.isReverseOrder());
-        iterator.staticRow().digest(digest);
+        HashingUtils.updateBytes(hasher, 
iterator.partitionKey().getKey().duplicate());
+        iterator.columns().regulars.digest(hasher);
+        iterator.columns().statics.digest(hasher);
+        HashingUtils.updateWithBoolean(hasher, iterator.isReverseOrder());
+        iterator.staticRow().digest(hasher);
 
         while (iterator.hasNext())
-            iterator.next().digest(digest);
+            iterator.next().digest(hasher);
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/Unfiltered.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/Unfiltered.java 
b/src/java/org/apache/cassandra/db/rows/Unfiltered.java
index e1158e2..3a65f4e 100644
--- a/src/java/org/apache/cassandra/db/rows/Unfiltered.java
+++ b/src/java/org/apache/cassandra/db/rows/Unfiltered.java
@@ -17,7 +17,7 @@
  */
 package org.apache.cassandra.db.rows;
 
-import java.security.MessageDigest;
+import com.google.common.hash.Hasher;
 
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.db.Clusterable;
@@ -39,11 +39,11 @@ public interface Unfiltered extends Clusterable
     public Kind kind();
 
     /**
-     * Digest the atom using the provided {@code MessageDigest}.
+     * Digest the atom using the provided {@link Hasher}.
      *
-     * @param digest the {@code MessageDigest} to use.
+     * @param hasher the {@link Hasher} to use.
      */
-    public void digest(MessageDigest digest);
+    public void digest(Hasher hasher);
 
     /**
      * Validate the data of this atom.

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/db/rows/UnfilteredRowIterators.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/db/rows/UnfilteredRowIterators.java 
b/src/java/org/apache/cassandra/db/rows/UnfilteredRowIterators.java
index 30e9da1..e1c6685 100644
--- a/src/java/org/apache/cassandra/db/rows/UnfilteredRowIterators.java
+++ b/src/java/org/apache/cassandra/db/rows/UnfilteredRowIterators.java
@@ -18,8 +18,8 @@
 package org.apache.cassandra.db.rows;
 
 import java.util.*;
-import java.security.MessageDigest;
 
+import com.google.common.hash.Hasher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,6 +33,7 @@ import org.apache.cassandra.io.util.FileUtils;
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.serializers.MarshalException;
 import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.IMergeIterator;
 import org.apache.cassandra.utils.MergeIterator;
 
@@ -143,14 +144,14 @@ public abstract class UnfilteredRowIterators
      * Digests the partition represented by the provided iterator.
      *
      * @param iterator the iterator to digest.
-     * @param digest the {@code MessageDigest} to use for the digest.
+     * @param hasher the {@link Hasher} to use for the digest.
      * @param version the messaging protocol to use when producing the digest.
      */
-    public static void digest(UnfilteredRowIterator iterator, MessageDigest 
digest, int version)
+    public static void digest(UnfilteredRowIterator iterator, Hasher hasher, 
int version)
     {
-        digest.update(iterator.partitionKey().getKey().duplicate());
-        iterator.partitionLevelDeletion().digest(digest);
-        iterator.columns().regulars.digest(digest);
+        HashingUtils.updateBytes(hasher, 
iterator.partitionKey().getKey().duplicate());
+        iterator.partitionLevelDeletion().digest(hasher);
+        iterator.columns().regulars.digest(hasher);
         // When serializing an iterator, we skip the static columns if the iterator has no static row, even if the
         // columns() object itself has some (the columns() is a superset of what the iterator actually contains, and
         // will correspond to the queried columns pre-serialization). So we must avoid taking the static column names
@@ -162,14 +163,14 @@ public abstract class UnfilteredRowIterators
         // different), but removing them entirely is strictly speaking a breaking change (it would create mismatches on
         // upgrade) so we can only do so on the next protocol version bump.
         if (iterator.staticRow() != Rows.EMPTY_STATIC_ROW)
-            iterator.columns().statics.digest(digest);
-        FBUtilities.updateWithBoolean(digest, iterator.isReverseOrder());
-        iterator.staticRow().digest(digest);
+            iterator.columns().statics.digest(hasher);
+        HashingUtils.updateWithBoolean(hasher, iterator.isReverseOrder());
+        iterator.staticRow().digest(hasher);
 
         while (iterator.hasNext())
         {
             Unfiltered unfiltered = iterator.next();
-            unfiltered.digest(digest);
+            unfiltered.digest(hasher);
         }
     }
 

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/dht/RandomPartitioner.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/dht/RandomPartitioner.java 
b/src/java/org/apache/cassandra/dht/RandomPartitioner.java
index 82c2493..77cbfc5 100644
--- a/src/java/org/apache/cassandra/dht/RandomPartitioner.java
+++ b/src/java/org/apache/cassandra/dht/RandomPartitioner.java
@@ -20,6 +20,7 @@ package org.apache.cassandra.dht;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.ByteBuffer;
+import java.security.MessageDigest;
 import java.util.*;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -33,6 +34,7 @@ import 
org.apache.cassandra.db.marshal.PartitionerDefinedOrder;
 import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.GuidGenerator;
+import org.apache.cassandra.utils.HashingUtils;
 import org.apache.cassandra.utils.ObjectSizes;
 import org.apache.cassandra.utils.Pair;
 
@@ -45,7 +47,30 @@ public class RandomPartitioner implements IPartitioner
     public static final BigIntegerToken MINIMUM = new BigIntegerToken("-1");
     public static final BigInteger MAXIMUM = new BigInteger("2").pow(127);
 
-    private static final int HEAP_SIZE = (int) ObjectSizes.measureDeep(new 
BigIntegerToken(FBUtilities.hashToBigInteger(ByteBuffer.allocate(1))));
+    /**
+     * Maintain a separate threadlocal message digest, exclusively for token 
hashing. This is necessary because
+     * when Tracing is enabled and using the default tracing implementation, 
creating the mutations for the trace
+     * events involves tokenizing the partition keys. This happens multiple 
times whilst servicing a ReadCommand,
+     * and so can interfere with the stateful digest calculation if the node 
is a replica producing a digest response.
+     */
+    private static final ThreadLocal<MessageDigest> localMD5Digest = new 
ThreadLocal<MessageDigest>()
+    {
+        @Override
+        protected MessageDigest initialValue()
+        {
+            return HashingUtils.newMessageDigest("MD5");
+        }
+
+        @Override
+        public MessageDigest get()
+        {
+            MessageDigest digest = super.get();
+            digest.reset();
+            return digest;
+        }
+    };
+
+    private static final int HEAP_SIZE = (int) ObjectSizes.measureDeep(new 
BigIntegerToken(hashToBigInteger(ByteBuffer.allocate(1))));
 
     public static final RandomPartitioner instance = new RandomPartitioner();
     public static final AbstractType<?> partitionOrdering = new 
PartitionerDefinedOrder(instance);
@@ -111,7 +136,7 @@ public class RandomPartitioner implements IPartitioner
 
     public BigIntegerToken getRandomToken()
     {
-        BigInteger token = 
FBUtilities.hashToBigInteger(GuidGenerator.guidAsBytes());
+        BigInteger token = hashToBigInteger(GuidGenerator.guidAsBytes());
         if ( token.signum() == -1 )
             token = token.multiply(BigInteger.valueOf(-1L));
         return new BigIntegerToken(token);
@@ -119,7 +144,7 @@ public class RandomPartitioner implements IPartitioner
 
     public BigIntegerToken getRandomToken(Random random)
     {
-        BigInteger token = 
FBUtilities.hashToBigInteger(GuidGenerator.guidAsBytes(random, 
"host/127.0.0.1", 0));
+        BigInteger token = hashToBigInteger(GuidGenerator.guidAsBytes(random, 
"host/127.0.0.1", 0));
         if ( token.signum() == -1 )
             token = token.multiply(BigInteger.valueOf(-1L));
         return new BigIntegerToken(token);
@@ -223,7 +248,7 @@ public class RandomPartitioner implements IPartitioner
     {
         if (key.remaining() == 0)
             return MINIMUM;
-        return new BigIntegerToken(FBUtilities.hashToBigInteger(key));
+        return new BigIntegerToken(hashToBigInteger(key));
     }
 
     public Map<Token, Float> describeOwnership(List<Token> sortedTokens)
@@ -260,6 +285,17 @@ public class RandomPartitioner implements IPartitioner
         return ownerships;
     }
 
+    private static BigInteger hashToBigInteger(ByteBuffer data)
+    {
+        MessageDigest messageDigest = localMD5Digest.get();
+        if (data.hasArray())
+            messageDigest.update(data.array(), data.arrayOffset() + 
data.position(), data.remaining());
+        else
+            messageDigest.update(data.duplicate());
+
+        return new BigInteger(messageDigest.digest()).abs();
+    }
+
     public Token getMaximumToken()
     {
         return new BigIntegerToken(MAXIMUM);

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/net/MessagingService.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/net/MessagingService.java 
b/src/java/org/apache/cassandra/net/MessagingService.java
index 6caada1..2a44e68 100644
--- a/src/java/org/apache/cassandra/net/MessagingService.java
+++ b/src/java/org/apache/cassandra/net/MessagingService.java
@@ -43,6 +43,7 @@ import javax.management.ObjectName;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
+
 import org.cliffc.high_scale_lib.NonBlockingHashMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/repair/Validator.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/repair/Validator.java 
b/src/java/org/apache/cassandra/repair/Validator.java
index f9556d6..9000dd9 100644
--- a/src/java/org/apache/cassandra/repair/Validator.java
+++ b/src/java/org/apache/cassandra/repair/Validator.java
@@ -18,15 +18,20 @@
 package org.apache.cassandra.repair;
 
 import java.net.InetAddress;
-import java.security.MessageDigest;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.hash.Funnel;
+import com.google.common.hash.HashCode;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+
 import org.apache.cassandra.concurrent.Stage;
 import org.apache.cassandra.concurrent.StageManager;
 import org.apache.cassandra.db.ColumnFamilyStore;
@@ -43,6 +48,7 @@ import org.apache.cassandra.utils.FBUtilities;
 import org.apache.cassandra.utils.MerkleTree;
 import org.apache.cassandra.utils.MerkleTree.RowHash;
 import org.apache.cassandra.utils.MerkleTrees;
+import org.apache.cassandra.utils.ObjectSizes;
 
 /**
  * Handles the building of a merkle tree for a column family.
@@ -185,54 +191,106 @@ public class Validator implements Runnable
         return range.contains(t);
     }
 
-    static class CountingDigest extends MessageDigest
+    static class CountingHasher implements Hasher
     {
         private long count;
-        private MessageDigest underlying;
+        private final Hasher underlying;
 
-        public CountingDigest(MessageDigest underlying)
+        CountingHasher(Hasher underlying)
         {
-            super(underlying.getAlgorithm());
             this.underlying = underlying;
         }
 
-        @Override
-        protected void engineUpdate(byte input)
+        public Hasher putByte(byte b)
         {
-            underlying.update(input);
             count += 1;
+            return underlying.putByte(b);
+        }
+
+        public Hasher putBytes(byte[] bytes)
+        {
+            count += bytes.length;
+            return underlying.putBytes(bytes);
+        }
+
+        public Hasher putBytes(byte[] bytes, int offset, int length)
+        {
+            count += length;
+            return underlying.putBytes(bytes, offset, length);
+        }
+
+        public Hasher putShort(short i)
+        {
+            count += Short.BYTES;
+            return underlying.putShort(i);
+        }
+
+        public Hasher putInt(int i)
+        {
+            count += Integer.BYTES;
+            return underlying.putInt(i);
+        }
+
+        public Hasher putLong(long l)
+        {
+            count += Long.BYTES;
+            return underlying.putLong(l);
+        }
+
+        public Hasher putFloat(float v)
+        {
+            count += Float.BYTES;
+            return underlying.putFloat(v);
         }
 
-        @Override
-        protected void engineUpdate(byte[] input, int offset, int len)
+        public Hasher putDouble(double v)
         {
-            underlying.update(input, offset, len);
-            count += len;
+            count += Double.BYTES;
+            return underlying.putDouble(v);
         }
 
-        @Override
-        protected byte[] engineDigest()
+        public Hasher putBoolean(boolean b)
         {
-            return underlying.digest();
+            count += Byte.BYTES;
+            return underlying.putBoolean(b);
         }
 
-        @Override
-        protected void engineReset()
+        public Hasher putChar(char c)
         {
-            underlying.reset();
+            count += Character.BYTES;
+            return underlying.putChar(c);
         }
 
+        public Hasher putUnencodedChars(CharSequence charSequence)
+        {
+            throw new UnsupportedOperationException();
+        }
+
+        public Hasher putString(CharSequence charSequence, Charset charset)
+        {
+            throw new UnsupportedOperationException();
+        }
+
+        public <T> Hasher putObject(T t, Funnel<? super T> funnel)
+        {
+            throw new UnsupportedOperationException();
+        }
+
+        public HashCode hash()
+        {
+            return underlying.hash();
+        }
     }
 
     private MerkleTree.RowHash rowHash(UnfilteredRowIterator partition)
     {
         validated++;
         // MerkleTree uses XOR internally, so we want lots of output bits here
-        CountingDigest digest = new 
CountingDigest(FBUtilities.newMessageDigest("SHA-256"));
-        UnfilteredRowIterators.digest(partition, digest, 
MessagingService.current_version);
+        CountingHasher hasher = new 
CountingHasher(Hashing.sha256().newHasher());
+        UnfilteredRowIterators.digest(partition, hasher, 
MessagingService.current_version);
         // only return new hash for merkle tree in case digest was updated - 
see CASSANDRA-8979
-        return digest.count > 0
-             ? new MerkleTree.RowHash(partition.partitionKey().getToken(), 
digest.digest(), digest.count)
+        return hasher.count > 0
+             ? new MerkleTree.RowHash(partition.partitionKey().getToken(), 
hasher.hash().asBytes(), hasher.count)
              : null;
     }
 

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/schema/SchemaConstants.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/schema/SchemaConstants.java 
b/src/java/org/apache/cassandra/schema/SchemaConstants.java
index d92f09c..e51a31b 100644
--- a/src/java/org/apache/cassandra/schema/SchemaConstants.java
+++ b/src/java/org/apache/cassandra/schema/SchemaConstants.java
@@ -18,13 +18,16 @@
 
 package org.apache.cassandra.schema;
 
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+import java.util.UUID;
 import java.util.regex.Pattern;
 
 import com.google.common.collect.ImmutableSet;
 
+import org.apache.cassandra.utils.HashingUtils;
+
 public final class SchemaConstants
 {
     public static final Pattern PATTERN_WORD_CHARS = Pattern.compile("\\w+");
@@ -63,14 +66,7 @@ public final class SchemaConstants
 
     static
     {
-        try
-        {
-            emptyVersion = 
UUID.nameUUIDFromBytes(MessageDigest.getInstance("MD5").digest());
-        }
-        catch (NoSuchAlgorithmException e)
-        {
-            throw new AssertionError();
-        }
+        emptyVersion = 
UUID.nameUUIDFromBytes(HashingUtils.CURRENT_HASH_FUNCTION.newHasher().hash().asBytes());
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/schema/SchemaKeyspace.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/schema/SchemaKeyspace.java 
b/src/java/org/apache/cassandra/schema/SchemaKeyspace.java
index 91583bc..460fb9f 100644
--- a/src/java/org/apache/cassandra/schema/SchemaKeyspace.java
+++ b/src/java/org/apache/cassandra/schema/SchemaKeyspace.java
@@ -19,18 +19,18 @@ package org.apache.cassandra.schema;
 
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
 
 import com.google.common.collect.*;
 import com.google.common.collect.Maps;
+import com.google.common.hash.Hasher;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.cassandra.config.*;
 import org.apache.cassandra.cql3.statements.CreateTableStatement;
+import org.apache.cassandra.net.MessagingService;
 import org.apache.cassandra.schema.ColumnMetadata.ClusteringOrder;
 import org.apache.cassandra.cql3.*;
 import org.apache.cassandra.cql3.functions.*;
@@ -45,6 +45,7 @@ import 
org.apache.cassandra.exceptions.InvalidRequestException;
 import org.apache.cassandra.transport.ProtocolVersion;
 import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 import static java.lang.String.format;
 
@@ -308,15 +309,7 @@ public final class SchemaKeyspace
      */
     static UUID calculateSchemaDigest()
     {
-        MessageDigest digest;
-        try
-        {
-            digest = MessageDigest.getInstance("MD5");
-        }
-        catch (NoSuchAlgorithmException e)
-        {
-            throw new RuntimeException(e);
-        }
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
 
         for (String table : ALL)
         {
@@ -334,12 +327,12 @@ public final class SchemaKeyspace
                     try (RowIterator partition = schema.next())
                     {
                         if 
(!isSystemKeyspaceSchemaPartition(partition.partitionKey()))
-                            RowIterators.digest(partition, digest);
+                            RowIterators.digest(partition, hasher);
                     }
                 }
             }
         }
-        return UUID.nameUUIDFromBytes(digest.digest());
+        return UUID.nameUUIDFromBytes(hasher.hash().asBytes());
     }
 
     /**

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/utils/FBUtilities.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/utils/FBUtilities.java 
b/src/java/org/apache/cassandra/utils/FBUtilities.java
index f45a1ab..3faa034 100644
--- a/src/java/org/apache/cassandra/utils/FBUtilities.java
+++ b/src/java/org/apache/cassandra/utils/FBUtilities.java
@@ -22,8 +22,6 @@ import java.lang.reflect.Field;
 import java.math.BigInteger;
 import java.net.*;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
 import java.util.*;
 import java.util.concurrent.*;
 import java.util.zip.CRC32;
@@ -40,7 +38,6 @@ import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import io.netty.util.concurrent.FastThreadLocal;
 import org.apache.cassandra.auth.IAuthenticator;
 import org.apache.cassandra.auth.IAuthorizer;
 import org.apache.cassandra.auth.IRoleManager;
@@ -92,36 +89,8 @@ public class FBUtilities
             return Runtime.getRuntime().availableProcessors();
     }
 
-    private static final FastThreadLocal<MessageDigest> localMD5Digest = new 
FastThreadLocal<MessageDigest>()
-    {
-        @Override
-        protected MessageDigest initialValue()
-        {
-            return newMessageDigest("MD5");
-        }
-    };
-
     public static final int MAX_UNSIGNED_SHORT = 0xFFFF;
 
-    public static MessageDigest threadLocalMD5Digest()
-    {
-        MessageDigest md = localMD5Digest.get();
-        md.reset();
-        return md;
-    }
-
-    public static MessageDigest newMessageDigest(String algorithm)
-    {
-        try
-        {
-            return MessageDigest.getInstance(algorithm);
-        }
-        catch (NoSuchAlgorithmException nsae)
-        {
-            throw new RuntimeException("the requested digest algorithm (" + 
algorithm + ") is not available", nsae);
-        }
-    }
-
     /**
      * Please use getBroadcastAddress instead. You need this only when you 
have to listen/connect.
      */
@@ -271,25 +240,6 @@ public class FBUtilities
         return out;
     }
 
-    public static byte[] hash(ByteBuffer... data)
-    {
-        MessageDigest messageDigest = localMD5Digest.get();
-        for (ByteBuffer block : data)
-        {
-            if (block.hasArray())
-                messageDigest.update(block.array(), block.arrayOffset() + 
block.position(), block.remaining());
-            else
-                messageDigest.update(block.duplicate());
-        }
-
-        return messageDigest.digest();
-    }
-
-    public static BigInteger hashToBigInteger(ByteBuffer data)
-    {
-        return new BigInteger(hash(data)).abs();
-    }
-
     public static void sortSampledKeys(List<DecoratedKey> keys, Range<Token> 
range)
     {
         if (range.left.compareTo(range.right) >= 0)
@@ -830,42 +780,6 @@ public class FBUtilities
         return historyDir;
     }
 
-    public static void updateWithShort(MessageDigest digest, int val)
-    {
-        digest.update((byte) ((val >> 8) & 0xFF));
-        digest.update((byte) (val & 0xFF));
-    }
-
-    public static void updateWithByte(MessageDigest digest, int val)
-    {
-        digest.update((byte) (val & 0xFF));
-    }
-
-    public static void updateWithInt(MessageDigest digest, int val)
-    {
-        digest.update((byte) ((val >>> 24) & 0xFF));
-        digest.update((byte) ((val >>> 16) & 0xFF));
-        digest.update((byte) ((val >>>  8) & 0xFF));
-        digest.update((byte) ((val >>> 0) & 0xFF));
-    }
-
-    public static void updateWithLong(MessageDigest digest, long val)
-    {
-        digest.update((byte) ((val >>> 56) & 0xFF));
-        digest.update((byte) ((val >>> 48) & 0xFF));
-        digest.update((byte) ((val >>> 40) & 0xFF));
-        digest.update((byte) ((val >>> 32) & 0xFF));
-        digest.update((byte) ((val >>> 24) & 0xFF));
-        digest.update((byte) ((val >>> 16) & 0xFF));
-        digest.update((byte) ((val >>>  8) & 0xFF));
-        digest.update((byte)  ((val >>> 0) & 0xFF));
-    }
-
-    public static void updateWithBoolean(MessageDigest digest, boolean val)
-    {
-        updateWithByte(digest, val ? 0 : 1);
-    }
-
     public static void closeAll(List<? extends AutoCloseable> l) throws 
Exception
     {
         Exception toThrow = null;

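The helpers removed here are not lost: the updateWith* family reappears in HashingUtils below, and the MD5-over-ByteBuffer hashing that RandomPartitioner depends on stays MD5-based elsewhere in the patch, since that partitioner can never change its hash function. As a reference point only, a rough Guava-flavoured equivalent of what the removed FBUtilities helpers did (a sketch, not the code this patch actually adds elsewhere):

    import java.math.BigInteger;
    import java.nio.ByteBuffer;

    import com.google.common.hash.Hasher;
    import com.google.common.hash.Hashing;

    public class GuavaHashSketch
    {
        // rough equivalent of the removed FBUtilities.hash(ByteBuffer...), kept on MD5
        public static byte[] hash(ByteBuffer... data)
        {
            Hasher hasher = Hashing.md5().newHasher();
            for (ByteBuffer block : data)
            {
                ByteBuffer dup = block.duplicate(); // leave the caller's position untouched
                if (dup.hasArray())
                    hasher.putBytes(dup.array(), dup.arrayOffset() + dup.position(), dup.remaining());
                else
                {
                    byte[] tmp = new byte[dup.remaining()];
                    dup.get(tmp);
                    hasher.putBytes(tmp);
                }
            }
            return hasher.hash().asBytes();
        }

        // rough equivalent of the removed FBUtilities.hashToBigInteger(ByteBuffer)
        public static BigInteger hashToBigInteger(ByteBuffer data)
        {
            return new BigInteger(hash(data)).abs();
        }
    }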
http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/utils/GuidGenerator.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/utils/GuidGenerator.java 
b/src/java/org/apache/cassandra/utils/GuidGenerator.java
index 2209f6a..b8c3732 100644
--- a/src/java/org/apache/cassandra/utils/GuidGenerator.java
+++ b/src/java/org/apache/cassandra/utils/GuidGenerator.java
@@ -87,7 +87,7 @@ public class GuidGenerator
                         .append(Long.toString(rand));
 
         String valueBeforeMD5 = sbValueBeforeMD5.toString();
-        return 
ByteBuffer.wrap(FBUtilities.threadLocalMD5Digest().digest(valueBeforeMD5.getBytes()));
+        return 
ByteBuffer.wrap(MD5Digest.threadLocalMD5Digest().digest(valueBeforeMD5.getBytes()));
     }
 
     public static ByteBuffer guidAsBytes()

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/utils/HashingUtils.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/utils/HashingUtils.java 
b/src/java/org/apache/cassandra/utils/HashingUtils.java
new file mode 100644
index 0000000..9e65a5d
--- /dev/null
+++ b/src/java/org/apache/cassandra/utils/HashingUtils.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.cassandra.utils;
+
+import java.nio.ByteBuffer;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+
+import com.google.common.hash.HashFunction;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+
+public class HashingUtils
+{
+    public static final HashFunction CURRENT_HASH_FUNCTION = Hashing.md5();
+
+    public static MessageDigest newMessageDigest(String algorithm)
+    {
+        try
+        {
+            return MessageDigest.getInstance(algorithm);
+        }
+        catch (NoSuchAlgorithmException nsae)
+        {
+            throw new RuntimeException("the requested digest algorithm (" + 
algorithm + ") is not available", nsae);
+        }
+    }
+
+    public static void updateBytes(Hasher hasher, ByteBuffer input)
+    {
+        if (!input.hasRemaining())
+            return;
+
+        if (input.hasArray())
+        {
+            byte[] b = input.array();
+            int ofs = input.arrayOffset();
+            int pos = input.position();
+            int lim = input.limit();
+            hasher.putBytes(b, ofs + pos, lim - pos);
+            input.position(lim);
+        }
+        else
+        {
+            int len = input.remaining();
+            int n = Math.min(len, 1 << 12); // either the remaining amount or 
4kb
+            byte[] tempArray = new byte[n];
+            while (len > 0)
+            {
+                int chunk = Math.min(len, tempArray.length);
+                input.get(tempArray, 0, chunk);
+                hasher.putBytes(tempArray, 0, chunk);
+                len -= chunk;
+            }
+        }
+    }
+
+    public static void updateWithShort(Hasher hasher, int val)
+    {
+        hasher.putByte((byte) ((val >> 8) & 0xFF));
+        hasher.putByte((byte) (val & 0xFF));
+    }
+
+    public static void updateWithByte(Hasher hasher, int val)
+    {
+        hasher.putByte((byte) (val & 0xFF));
+    }
+
+    public static void updateWithInt(Hasher hasher, int val)
+    {
+        hasher.putByte((byte) ((val >>> 24) & 0xFF));
+        hasher.putByte((byte) ((val >>> 16) & 0xFF));
+        hasher.putByte((byte) ((val >>>  8) & 0xFF));
+        hasher.putByte((byte) ((val >>> 0) & 0xFF));
+    }
+
+    public static void updateWithLong(Hasher hasher, long val)
+    {
+        hasher.putByte((byte) ((val >>> 56) & 0xFF));
+        hasher.putByte((byte) ((val >>> 48) & 0xFF));
+        hasher.putByte((byte) ((val >>> 40) & 0xFF));
+        hasher.putByte((byte) ((val >>> 32) & 0xFF));
+        hasher.putByte((byte) ((val >>> 24) & 0xFF));
+        hasher.putByte((byte) ((val >>> 16) & 0xFF));
+        hasher.putByte((byte) ((val >>>  8) & 0xFF));
+        hasher.putByte((byte)  ((val >>> 0) & 0xFF));
+    }
+
+    public static void updateWithBoolean(Hasher hasher, boolean val)
+    {
+        updateWithByte(hasher, val ? 0 : 1);
+    }
+}
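A minimal usage sketch of the new utility, showing how a digest is now assembled from primitives plus a ByteBuffer; the class and method names below are made up for illustration, only the HashingUtils/Hasher calls come from the patch:

    import java.nio.ByteBuffer;

    import com.google.common.hash.Hasher;

    import org.apache.cassandra.utils.HashingUtils;

    public class HashingUtilsExample
    {
        public static byte[] digestOf(long timestamp, boolean expired, ByteBuffer value)
        {
            // CURRENT_HASH_FUNCTION is MD5 today; callers only depend on the Hasher API,
            // so the underlying hash function can change without touching call sites.
            Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
            HashingUtils.updateWithLong(hasher, timestamp);
            HashingUtils.updateWithBoolean(hasher, expired);
            HashingUtils.updateBytes(hasher, value.duplicate()); // duplicate: updateBytes advances the position
            return hasher.hash().asBytes();
        }
    }

Note that updateBytes copies direct (off-heap) buffers through a temporary array of at most 4KB, so large off-heap values are hashed without a full-size intermediate copy.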

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/utils/MD5Digest.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/utils/MD5Digest.java 
b/src/java/org/apache/cassandra/utils/MD5Digest.java
index 4e736dc..5c0c1de 100644
--- a/src/java/org/apache/cassandra/utils/MD5Digest.java
+++ b/src/java/org/apache/cassandra/utils/MD5Digest.java
@@ -19,9 +19,9 @@ package org.apache.cassandra.utils;
 
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
 import java.util.Arrays;
 
-
 /**
  * The result of the computation of an MD5 digest.
  *
@@ -32,6 +32,32 @@ import java.util.Arrays;
  */
 public class MD5Digest
 {
+    /**
+     * In the interest of not breaking things, we consciously keep this single remaining instance
+     * of MessageDigest around for GuidGenerator (which is only ever used by RandomPartitioner)
+     * and for some client native transport methods, where the protocol ties us to MD5.
+     * Because RandomPartitioner will always use MD5 and cannot change, we can switch all other
+     * digest usage over to Guava's Hasher, making the hash function used for message digests
+     * etc. swappable, without regressing on performance or behaviour in RandomPartitioner's
+     * usage of MD5 and MessageDigest.
+     */
+    private static final ThreadLocal<MessageDigest> localMD5Digest = new 
ThreadLocal<MessageDigest>()
+    {
+        @Override
+        protected MessageDigest initialValue()
+        {
+            return HashingUtils.newMessageDigest("MD5");
+        }
+
+        @Override
+        public MessageDigest get()
+        {
+            MessageDigest digest = super.get();
+            digest.reset();
+            return digest;
+        }
+    };
+
     public final byte[] bytes;
     private final int hashCode;
 
@@ -48,7 +74,7 @@ public class MD5Digest
 
     public static MD5Digest compute(byte[] toHash)
     {
-        return new 
MD5Digest(FBUtilities.threadLocalMD5Digest().digest(toHash));
+        return new MD5Digest(localMD5Digest.get().digest(toHash));
     }
 
     public static MD5Digest compute(String toHash)
@@ -82,4 +108,9 @@ public class MD5Digest
     {
         return Hex.bytesToHex(bytes);
     }
+
+    public static MessageDigest threadLocalMD5Digest()
+    {
+        return localMD5Digest.get();
+    }
 }
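One detail worth noting in the hunk above: the overridden get() resets the MessageDigest before returning it, so the few remaining direct callers no longer need to remember to call reset() themselves. A small sketch of that calling pattern (the wrapper class and input here are hypothetical):

    import java.nio.charset.StandardCharsets;

    import org.apache.cassandra.utils.MD5Digest;

    public class RawMd5Example
    {
        public static byte[] rawMd5(String input)
        {
            // threadLocalMD5Digest() hands back an already-reset, thread-confined MessageDigest
            return MD5Digest.threadLocalMD5Digest().digest(input.getBytes(StandardCharsets.UTF_8));
        }
    }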

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/src/java/org/apache/cassandra/utils/UUIDGen.java
----------------------------------------------------------------------
diff --git a/src/java/org/apache/cassandra/utils/UUIDGen.java 
b/src/java/org/apache/cassandra/utils/UUIDGen.java
index 37f08bf..94b5832 100644
--- a/src/java/org/apache/cassandra/utils/UUIDGen.java
+++ b/src/java/org/apache/cassandra/utils/UUIDGen.java
@@ -19,8 +19,6 @@ package org.apache.cassandra.utils;
 
 import java.net.InetAddress;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
 import java.security.SecureRandom;
 import java.util.Collection;
 import java.util.Random;
@@ -29,6 +27,8 @@ import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.TimeUnit;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
 import com.google.common.primitives.Ints;
 
 /**
@@ -379,29 +379,22 @@ public class UUIDGen
 
     private static byte[] hash(Collection<InetAddress> data)
     {
-        try
-        {
-            // Identify the host.
-            MessageDigest messageDigest = MessageDigest.getInstance("MD5");
-            for(InetAddress addr : data)
-                messageDigest.update(addr.getAddress());
-
-            // Identify the process on the load: we use both the PID and class 
loader hash.
-            long pid = NativeLibrary.getProcessID();
-            if (pid < 0)
-                pid = new Random(System.currentTimeMillis()).nextLong();
-            FBUtilities.updateWithLong(messageDigest, pid);
-
-            ClassLoader loader = UUIDGen.class.getClassLoader();
-            int loaderId = loader != null ? System.identityHashCode(loader) : 
0;
-            FBUtilities.updateWithInt(messageDigest, loaderId);
-
-            return messageDigest.digest();
-        }
-        catch (NoSuchAlgorithmException nsae)
-        {
-            throw new RuntimeException("MD5 digest algorithm is not 
available", nsae);
-        }
+        // Identify the host.
+        Hasher hasher = Hashing.md5().newHasher();
+        for(InetAddress addr : data)
+            hasher.putBytes(addr.getAddress());
+
+        // Identify the process on the host: we use both the PID and class loader hash.
+        long pid = NativeLibrary.getProcessID();
+        if (pid < 0)
+            pid = new Random(System.currentTimeMillis()).nextLong();
+        HashingUtils.updateWithLong(hasher, pid);
+
+        ClassLoader loader = UUIDGen.class.getClassLoader();
+        int loaderId = loader != null ? System.identityHashCode(loader) : 0;
+        HashingUtils.updateWithInt(hasher, loaderId);
+
+        return hasher.hash().asBytes();
     }
 }
 

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/test/microbench/org/apache/cassandra/test/microbench/HashingBench.java
----------------------------------------------------------------------
diff --git 
a/test/microbench/org/apache/cassandra/test/microbench/HashingBench.java 
b/test/microbench/org/apache/cassandra/test/microbench/HashingBench.java
new file mode 100644
index 0000000..b6c81a6
--- /dev/null
+++ b/test/microbench/org/apache/cassandra/test/microbench/HashingBench.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.cassandra.test.microbench;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Random;
+import java.util.concurrent.TimeUnit;
+
+import com.google.common.hash.Hasher;
+import com.google.common.hash.Hashing;
+
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Measurement;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Param;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.Setup;
+import org.openjdk.jmh.annotations.State;
+import org.openjdk.jmh.annotations.Threads;
+import org.openjdk.jmh.annotations.Warmup;
+
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(TimeUnit.NANOSECONDS)
+@Warmup(iterations = 3, time = 1, timeUnit = TimeUnit.SECONDS)
+@Measurement(iterations = 5, time = 2, timeUnit = TimeUnit.SECONDS)
+@Fork(value = 1,jvmArgsAppend = { "-Xmx512M", "-Djmh.executor=CUSTOM", 
"-Djmh.executor.class=org.apache.cassandra.test.microbench.FastThreadExecutor"})
+@Threads(4) // make sure this matches the number of _physical_cores_
+@State(Scope.Benchmark)
+public class HashingBench
+{
+    private static final Random random = new Random(12345678);
+
+    private static final MessageDigest messageDigest;
+    static
+    {
+        try
+        {
+            messageDigest = MessageDigest.getInstance("MD5");
+        }
+        catch (NoSuchAlgorithmException nsae)
+        {
+            throw new RuntimeException("MD5 digest algorithm is not 
available", nsae);
+        }
+    }
+
+
+    // intentionally not on power-of-2 values
+    @Param({ "31", "131", "517", "2041" })
+    private int bufferSize;
+
+    private byte[] array;
+
+    @Setup
+    public void setup() throws NoSuchAlgorithmException
+    {
+        array = new byte[bufferSize];
+        random.nextBytes(array);
+    }
+
+    @Benchmark
+    public byte[] benchMessageDigestMD5()
+    {
+        try
+        {
+            MessageDigest clone = (MessageDigest) messageDigest.clone();
+            clone.update(array);
+            return clone.digest();
+        }
+        catch (CloneNotSupportedException e)
+        {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @Benchmark
+    public byte[] benchHasherMD5()
+    {
+        Hasher md5Hasher = Hashing.md5().newHasher();
+        md5Hasher.putBytes(array);
+        return md5Hasher.hash().asBytes();
+    }
+
+    @Benchmark
+    public byte[] benchHasherMurmur3_128()
+    {
+        Hasher murmur3_128Hasher = Hashing.murmur3_128().newHasher();
+        murmur3_128Hasher.putBytes(array);
+        return murmur3_128Hasher.hash().asBytes();
+    }
+}
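The benchmark compares the two MD5 code paths the patch cares about; the migration assumes both produce identical bytes for the same input. A quick stand-alone check of that assumption (not part of the patch, input string chosen arbitrarily):

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.util.Arrays;

    import com.google.common.hash.Hashing;

    public class Md5EquivalenceCheck
    {
        public static void main(String[] args) throws Exception
        {
            byte[] input = "cassandra".getBytes(StandardCharsets.UTF_8);
            byte[] viaMessageDigest = MessageDigest.getInstance("MD5").digest(input);
            byte[] viaGuavaHasher = Hashing.md5().newHasher().putBytes(input).hash().asBytes();
            // expected: true - both paths wrap the same MD5 algorithm
            System.out.println(Arrays.equals(viaMessageDigest, viaGuavaHasher));
        }
    }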

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/test/unit/org/apache/cassandra/cache/CacheProviderTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/cache/CacheProviderTest.java 
b/test/unit/org/apache/cassandra/cache/CacheProviderTest.java
index 1aa536e..7b8ef94 100644
--- a/test/unit/org/apache/cassandra/cache/CacheProviderTest.java
+++ b/test/unit/org/apache/cassandra/cache/CacheProviderTest.java
@@ -19,12 +19,12 @@
 package org.apache.cassandra.cache;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 
+import com.google.common.hash.Hasher;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -45,6 +45,7 @@ import org.apache.cassandra.schema.KeyspaceParams;
 import org.apache.cassandra.schema.TableId;
 import org.apache.cassandra.schema.TableMetadata;
 import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
@@ -104,18 +105,11 @@ public class CacheProviderTest
     private void assertDigests(IRowCacheEntry one, CachedBTreePartition two)
     {
         assertTrue(one instanceof CachedBTreePartition);
-        try
-        {
-            MessageDigest d1 = MessageDigest.getInstance("MD5");
-            MessageDigest d2 = MessageDigest.getInstance("MD5");
-            UnfilteredRowIterators.digest(((CachedBTreePartition) 
one).unfilteredIterator(), d1, MessagingService.current_version);
-            UnfilteredRowIterators.digest(((CachedBTreePartition) 
two).unfilteredIterator(), d2, MessagingService.current_version);
-            assertTrue(MessageDigest.isEqual(d1.digest(), d2.digest()));
-        }
-        catch (NoSuchAlgorithmException e)
-        {
-            throw new RuntimeException(e);
-        }
+        Hasher h1 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        Hasher h2 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        UnfilteredRowIterators.digest(((CachedBTreePartition) 
one).unfilteredIterator(), h1, MessagingService.current_version);
+        UnfilteredRowIterators.digest(((CachedBTreePartition) 
two).unfilteredIterator(), h2, MessagingService.current_version);
+        Assert.assertEquals(h1.hash(), h2.hash());
     }
 
     private void concurrentCase(final CachedBTreePartition partition, final 
ICache<MeasureableString, IRowCacheEntry> cache) throws InterruptedException

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/test/unit/org/apache/cassandra/db/CounterCellTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/db/CounterCellTest.java 
b/test/unit/org/apache/cassandra/db/CounterCellTest.java
index b10a9c7..b410427 100644
--- a/test/unit/org/apache/cassandra/db/CounterCellTest.java
+++ b/test/unit/org/apache/cassandra/db/CounterCellTest.java
@@ -19,15 +19,15 @@
 package org.apache.cassandra.db;
 
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.util.Arrays;
 
+import com.google.common.hash.Hasher;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 import org.apache.cassandra.SchemaLoader;
+import org.apache.cassandra.net.MessagingService;
 import org.apache.cassandra.schema.ColumnMetadata;
 import org.apache.cassandra.db.rows.BufferCell;
 import org.apache.cassandra.db.rows.Cell;
@@ -261,8 +261,8 @@ public class CounterCellTest
         ColumnFamilyStore cfs = 
Keyspace.open(KEYSPACE1).getColumnFamilyStore(COUNTER1);
         ByteBuffer col = ByteBufferUtil.bytes("val");
 
-        MessageDigest digest1 = MessageDigest.getInstance("md5");
-        MessageDigest digest2 = MessageDigest.getInstance("md5");
+        Hasher hasher1 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        Hasher hasher2 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
 
         CounterContext.ContextState state = 
CounterContext.ContextState.allocate(0, 2, 2);
         state.writeRemote(CounterId.fromInt(1), 4L, 4L);
@@ -275,9 +275,9 @@ public class CounterCellTest
         ColumnMetadata cDef = cfs.metadata().getColumn(col);
         Cell cleared = BufferCell.live(cDef, 5, 
CounterContext.instance().clearAllLocal(state.context));
 
-        original.digest(digest1);
-        cleared.digest(digest2);
+        original.digest(hasher1);
+        cleared.digest(hasher2);
 
-        assert Arrays.equals(digest1.digest(), digest2.digest());
+        Assert.assertEquals(hasher1.hash(), hasher2.hash());
     }
 }

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/test/unit/org/apache/cassandra/db/PartitionTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/db/PartitionTest.java 
b/test/unit/org/apache/cassandra/db/PartitionTest.java
index c1796be..38fb38a 100644
--- a/test/unit/org/apache/cassandra/db/PartitionTest.java
+++ b/test/unit/org/apache/cassandra/db/PartitionTest.java
@@ -19,10 +19,10 @@
 package org.apache.cassandra.db;
 
 import java.io.IOException;
-import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
-import java.util.Arrays;
 
+import com.google.common.hash.Hasher;
+import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.apache.cassandra.schema.ColumnMetadata;
@@ -40,6 +40,7 @@ import org.apache.cassandra.Util;
 import org.apache.cassandra.schema.KeyspaceParams;
 import org.apache.cassandra.utils.ByteBufferUtil;
 import org.apache.cassandra.utils.FBUtilities;
+import org.apache.cassandra.utils.HashingUtils;
 
 import static junit.framework.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
@@ -137,28 +138,28 @@ public class PartitionTest
             ImmutableBTreePartition p1 = Util.getOnlyPartitionUnfiltered(cmd1);
             ImmutableBTreePartition p2 = Util.getOnlyPartitionUnfiltered(cmd2);
 
-            MessageDigest digest1 = MessageDigest.getInstance("MD5");
-            MessageDigest digest2 = MessageDigest.getInstance("MD5");
-            UnfilteredRowIterators.digest(p1.unfilteredIterator(), digest1, 
version);
-            UnfilteredRowIterators.digest(p2.unfilteredIterator(), digest2, 
version);
-            assertFalse(Arrays.equals(digest1.digest(), digest2.digest()));
+            Hasher hasher1 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+            Hasher hasher2 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+            UnfilteredRowIterators.digest(p1.unfilteredIterator(), hasher1, 
version);
+            UnfilteredRowIterators.digest(p2.unfilteredIterator(), hasher2, 
version);
+            Assert.assertFalse(hasher1.hash().equals(hasher2.hash()));
 
             p1 = Util.getOnlyPartitionUnfiltered(Util.cmd(cfs, 
"key2").build());
             p2 = Util.getOnlyPartitionUnfiltered(Util.cmd(cfs, 
"key2").build());
-            digest1 = MessageDigest.getInstance("MD5");
-            digest2 = MessageDigest.getInstance("MD5");
-            UnfilteredRowIterators.digest(p1.unfilteredIterator(), digest1, 
version);
-            UnfilteredRowIterators.digest(p2.unfilteredIterator(), digest2, 
version);
-            assertTrue(Arrays.equals(digest1.digest(), digest2.digest()));
+            hasher1 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+            hasher2 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+            UnfilteredRowIterators.digest(p1.unfilteredIterator(), hasher1, 
version);
+            UnfilteredRowIterators.digest(p2.unfilteredIterator(), hasher2, 
version);
+            Assert.assertEquals(hasher1.hash(), hasher2.hash());
 
             p1 = Util.getOnlyPartitionUnfiltered(Util.cmd(cfs, 
"key2").build());
             RowUpdateBuilder.deleteRow(cfs.metadata(), 6, "key2", 
"c").applyUnsafe();
             p2 = Util.getOnlyPartitionUnfiltered(Util.cmd(cfs, 
"key2").build());
-            digest1 = MessageDigest.getInstance("MD5");
-            digest2 = MessageDigest.getInstance("MD5");
-            UnfilteredRowIterators.digest(p1.unfilteredIterator(), digest1, 
version);
-            UnfilteredRowIterators.digest(p2.unfilteredIterator(), digest2, 
version);
-            assertFalse(Arrays.equals(digest1.digest(), digest2.digest()));
+            hasher1 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+            hasher2 = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+            UnfilteredRowIterators.digest(p1.unfilteredIterator(), hasher1, 
version);
+            UnfilteredRowIterators.digest(p2.unfilteredIterator(), hasher2, 
version);
+            Assert.assertFalse(hasher1.hash().equals(hasher2.hash()));
         }
         finally
         {

http://git-wip-us.apache.org/repos/asf/cassandra/blob/30ed83d9/test/unit/org/apache/cassandra/utils/HashingUtilsTest.java
----------------------------------------------------------------------
diff --git a/test/unit/org/apache/cassandra/utils/HashingUtilsTest.java 
b/test/unit/org/apache/cassandra/utils/HashingUtilsTest.java
new file mode 100644
index 0000000..3988903
--- /dev/null
+++ b/test/unit/org/apache/cassandra/utils/HashingUtilsTest.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.cassandra.utils;
+
+import java.nio.ByteBuffer;
+
+import com.google.common.hash.Hasher;
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HashingUtilsTest
+{
+    private static final Logger logger = 
LoggerFactory.getLogger(HashingUtilsTest.class);
+
+    @Test
+    public void hashEmptyBytes() throws Exception {
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        HashingUtils.updateBytes(hasher, ByteBuffer.wrap(new byte[]{}));
+        String md5HashInHexOfEmptyByteArray = 
ByteBufferUtil.bytesToHex(ByteBuffer.wrap(hasher.hash().asBytes()));
+        Assert.assertEquals("d41d8cd98f00b204e9800998ecf8427e", 
md5HashInHexOfEmptyByteArray);
+    }
+
+    @Test
+    public void hashBytesFromTinyDirectByteBuffer() throws Exception {
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        ByteBuffer directBuf = ByteBuffer.allocateDirect(8);
+        directBuf.putLong(5L);
+        directBuf.position(0);
+        HashingUtils.updateBytes(hasher, directBuf);
+
+        String md5HashInHexOfDirectByteBuffer = 
ByteBufferUtil.bytesToHex(ByteBuffer.wrap(hasher.hash().asBytes()));
+        Assert.assertEquals("aaa07454fa93ed2d37b4c5da9f2f87fd", 
md5HashInHexOfDirectByteBuffer);
+    }
+
+    @Test
+    public void hashBytesFromLargerDirectByteBuffer() throws Exception {
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        ByteBuffer directBuf = ByteBuffer.allocateDirect(1024);
+        for (int i = 0; i < 100; i++) {
+            directBuf.putInt(i);
+        }
+        directBuf.position(0);
+        HashingUtils.updateBytes(hasher, directBuf);
+
+        String md5HashInHexOfDirectByteBuffer = 
ByteBufferUtil.bytesToHex(ByteBuffer.wrap(hasher.hash().asBytes()));
+        Assert.assertEquals("daf10ea8894783b1b2618309494cde21", 
md5HashInHexOfDirectByteBuffer);
+    }
+
+    @Test
+    public void hashBytesFromTinyOnHeapByteBuffer() throws Exception {
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        ByteBuffer onHeapBuf = ByteBuffer.allocate(8);
+        onHeapBuf.putLong(5L);
+        onHeapBuf.position(0);
+        HashingUtils.updateBytes(hasher, onHeapBuf);
+
+        String md5HashInHexOfOnHeapByteBuffer = ByteBufferUtil.bytesToHex(ByteBuffer.wrap(hasher.hash().asBytes()));
+        Assert.assertEquals("aaa07454fa93ed2d37b4c5da9f2f87fd", md5HashInHexOfOnHeapByteBuffer);
+    }
+
+    @Test
+    public void hashBytesFromLargerOnHeapByteBuffer() throws Exception {
+        Hasher hasher = HashingUtils.CURRENT_HASH_FUNCTION.newHasher();
+        ByteBuffer onHeapBuf = ByteBuffer.allocate(1024);
+        for (int i = 0; i < 100; i++) {
+            onHeapBuf.putInt(i);
+        }
+        onHeapBuf.position(0);
+        HashingUtils.updateBytes(hasher, onHeapBuf);
+
+        String md5HashInHexOfOnHeapByteBuffer = ByteBufferUtil.bytesToHex(ByteBuffer.wrap(hasher.hash().asBytes()));
+        Assert.assertEquals("daf10ea8894783b1b2618309494cde21", md5HashInHexOfOnHeapByteBuffer);
+    }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@cassandra.apache.org
For additional commands, e-mail: commits-h...@cassandra.apache.org
