This is an automated email from the ASF dual-hosted git repository.

ifesdjeen pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/cassandra.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 52396ec1fc Simplify and deduplicate Harry ModelChecker
52396ec1fc is described below

commit 52396ec1fc4d08a988714704ff0cbb796ee06f7f
Author: Alex Petrov <oleksandr.pet...@gmail.com>
AuthorDate: Fri Jul 19 14:44:31 2024 +0200

    Simplify and deduplicate Harry ModelChecker
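
    The test/log copy of ModelChecker (and its RngUtils helper) is removed in
    favour of the Harry ModelChecker, which now carries an explicit SUT type
    parameter and threads an EntropySource into each step, so tests no longer
    need to capture their own rng. A rough sketch of the consolidated usage,
    mirroring the updated HistoryBuilderIntegrationTest below (schema, rf,
    tracker and sut setup elided; illustrative, not verbatim):

        ModelChecker<HistoryBuilder, Void> modelChecker = new ModelChecker<>();
        modelChecker.init(new HistoryBuilder(SEED, maxPartitionSize, 10, schema, rf))
                    .step((history, rng) -> history.insert(rng.nextInt(maxPartitionSize)))
                    .step((history, rng) -> history.deleteRow(rng.nextInt(maxPartitionSize)))
                    .exitCondition((history) -> {
                        Model model = history.quiescentChecker(tracker, sut);
                        for (Long pd : history.visitedPds())
                            model.validate(Query.selectAllColumns(history.schema(), pd, false));
                        return true;
                    })
                    .run(STEPS_PER_ITERATION, SEED, new JdkRandomEntropySource(new Random(SEED)));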
    
    Patch by Alex Petrov; reviewed by Caleb Rackliffe for CASSANDRA-19788.
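
    The patch also adds a non-interrupting shutdown path to InfiniteLoopExecutor.
    An illustrative sketch of the new entry points, based on the first hunk below
    (executor construction elided):

        executor.shutdownGracefully(); // move to SHUTTING_DOWN without interrupting the loop thread
        executor.shutdown();           // unchanged behaviour: delegates to shutdown(true)
        executor.shutdown(false);      // request shutdown but skip the thread interrupt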
---
 .../cassandra/concurrent/InfiniteLoopExecutor.java |  13 +-
 .../test/log/MetadataChangeSimulationTest.java     |  10 +-
 .../distributed/test/log/ModelChecker.java         | 307 ---------------------
 .../test/log/PlacementSimulatorTest.java           |   7 +-
 .../cassandra/distributed/test/log/RngUtils.java   | 106 -------
 .../harry/examples/RangeTombstoneBurnTest.java     |  55 ++--
 .../fuzz/harry/examples/RepairBurnTest.java        |  11 +-
 .../fuzz/harry/integration/ddl/SchemaGenTest.java  |   5 +-
 .../dsl/HistoryBuilderIntegrationTest.java         |  92 +++---
 .../HistoryBuilderOverridesIntegrationTest.java    |   8 +-
 .../model/QuiescentCheckerIntegrationTest.java     |  24 +-
 .../model/ReconcilerIntegrationTest.java           |   2 +-
 .../model/reconciler/SimpleReconcilerTest.java     |   2 +-
 .../cassandra/fuzz/sai/SingleNodeSAITest.java      |   2 +-
 .../cassandra/harry/checker/ModelChecker.java      | 287 +++++++++++++++----
 .../apache/cassandra/harry/dsl/HistoryBuilder.java |  18 +-
 .../harry/dsl/SingleOperationVisitBuilder.java     |   2 +-
 .../cassandra/harry/model/QuiescentChecker.java    |  36 ++-
 .../model/QuiescentLocalStateCheckerBase.java      |   2 +-
 .../apache/cassandra/harry/model/SelectHelper.java |  14 +-
 .../harry/model/reconciler/PartitionState.java     |   2 +-
 .../harry/model/reconciler/Reconciler.java         |  31 ++-
 .../apache/cassandra/harry/operations/Query.java   |  80 +++++-
 .../cassandra/harry/operations/QueryGenerator.java |   2 +-
 .../harry/visitors/AllPartitionsValidator.java     |  14 +-
 .../harry/visitors/CorruptingVisitor.java          |   2 +-
 .../cassandra/harry/visitors/SingleValidator.java  |   2 +-
 .../simulator/test/HarrySimulatorTest.java         |   2 +-
 .../tcm/sequences/ProgressBarrierTest.java         |   4 +-
 29 files changed, 496 insertions(+), 646 deletions(-)

diff --git a/src/java/org/apache/cassandra/concurrent/InfiniteLoopExecutor.java b/src/java/org/apache/cassandra/concurrent/InfiniteLoopExecutor.java
index 51c5f9f69e..ac10a70c30 100644
--- a/src/java/org/apache/cassandra/concurrent/InfiniteLoopExecutor.java
+++ b/src/java/org/apache/cassandra/concurrent/InfiniteLoopExecutor.java
@@ -149,10 +149,21 @@ public class InfiniteLoopExecutor implements Interruptible
         interruptHandler.accept(thread);
     }
 
+    public void shutdownGracefully()
+    {
+        stateUpdater.updateAndGet(this, cur -> cur != TERMINATED && cur != SHUTTING_DOWN_NOW ? SHUTTING_DOWN : cur);
+    }
+
     public void shutdown()
+    {
+        shutdown(true);
+    }
+
+    public void shutdown(boolean interrupt)
     {
         stateUpdater.updateAndGet(this, cur -> cur != TERMINATED && cur != SHUTTING_DOWN_NOW ? SHUTTING_DOWN : cur);
-        interruptHandler.accept(thread);
+        if (interrupt)
+            interruptHandler.accept(thread);
     }
 
     public Object shutdownNow()
diff --git a/test/distributed/org/apache/cassandra/distributed/test/log/MetadataChangeSimulationTest.java b/test/distributed/org/apache/cassandra/distributed/test/log/MetadataChangeSimulationTest.java
index 26d1cff7ac..d5c25fe6d2 100644
--- a/test/distributed/org/apache/cassandra/distributed/test/log/MetadataChangeSimulationTest.java
+++ b/test/distributed/org/apache/cassandra/distributed/test/log/MetadataChangeSimulationTest.java
@@ -35,7 +35,6 @@ import java.util.function.Supplier;
 import java.util.stream.Collectors;
 
 import com.google.common.collect.Sets;
-import org.apache.cassandra.harry.sut.TokenPlacementModel.DCReplicas;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -48,7 +47,10 @@ import org.apache.cassandra.dht.Murmur3Partitioner;
 import org.apache.cassandra.dht.Murmur3Partitioner.LongToken;
 import org.apache.cassandra.dht.Range;
 import org.apache.cassandra.dht.Token;
+import org.apache.cassandra.harry.checker.ModelChecker;
+import org.apache.cassandra.harry.gen.EntropySource;
 import org.apache.cassandra.harry.sut.TokenPlacementModel;
+import org.apache.cassandra.harry.sut.TokenPlacementModel.DCReplicas;
 import org.apache.cassandra.locator.CMSPlacementStrategy;
 import org.apache.cassandra.locator.EndpointsForRange;
 import org.apache.cassandra.locator.InetAddressAndPort;
@@ -646,17 +648,17 @@ public class MetadataChangeSimulationTest extends CMSTestBase
         return pair(newState, node);
     }
 
-    private Node getRemovalCandidate(ModelState state, ModelChecker.EntropySource entropySource)
+    private Node getRemovalCandidate(ModelState state, EntropySource entropySource)
     {
         return getCandidate(state, entropySource);
     }
 
-    private Node getMoveCandidate(ModelState state, ModelChecker.EntropySource entropySource)
+    private Node getMoveCandidate(ModelState state, EntropySource entropySource)
     {
         return getCandidate(state, entropySource);
     }
 
-    private Node getCandidate(ModelState modelState, ModelChecker.EntropySource entropySource)
+    private Node getCandidate(ModelState modelState, EntropySource entropySource)
     {
         List<String> dcs = new ArrayList<>(modelState.simulatedPlacements.rf.asMap().keySet());
         while (!dcs.isEmpty())
diff --git a/test/distributed/org/apache/cassandra/distributed/test/log/ModelChecker.java b/test/distributed/org/apache/cassandra/distributed/test/log/ModelChecker.java
deleted file mode 100644
index a601b415ce..0000000000
--- a/test/distributed/org/apache/cassandra/distributed/test/log/ModelChecker.java
+++ /dev/null
@@ -1,307 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.cassandra.distributed.test.log;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-import java.util.function.BiConsumer;
-
-public class ModelChecker<STATE, SUT>
-{
-    private final List<StepExecutor<STATE, SUT>> steps;
-    private final List<Precondition<STATE, SUT>> invariants;
-    private Precondition<STATE, SUT> exitCondition;
-    private BiConsumer<STATE, SUT> beforeAll;
-    private Pair<STATE, SUT> init;
-
-    public ModelChecker()
-    {
-        steps = new ArrayList<>();
-        invariants = new ArrayList<>();
-    }
-
-    public void run() throws Throwable
-    {
-        run(0, Integer.MAX_VALUE);
-    }
-
-    public void run(int minSteps, int maxSteps) throws Throwable
-    {
-        assert exitCondition != null : "Exit condition is not specified";
-        assert init != null : "Initial condition is not specified";
-
-        Ref<Pair<STATE, SUT>> state = new Ref<>(init);
-        EntropySource entropySource = new FakeEntropySource(new Random(88));
-        if (beforeAll != null)
-            beforeAll.accept(state.get().l, state.get().r);
-
-        for (int i = 0; i < maxSteps; i++)
-        {
-            if (i > minSteps && exitCondition.test(state.get()))
-                return;
-
-            // TODO: add randomisation / probability for triggering a specific step
-            steps.get(entropySource.nextInt(steps.size())).execute(state, entropySource.derive());
-            for (Precondition<STATE, SUT> invariant : invariants)
-                invariant.test(state.get());
-        }
-    }
-    public ModelChecker<STATE, SUT> init(STATE state, SUT sut)
-    {
-        this.init = new Pair<>(state, sut);
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> beforeAll(BiConsumer<STATE, SUT> precondition)
-    {
-        this.beforeAll = precondition;
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> exitCondition(Precondition<STATE, SUT> precondition)
-    {
-        this.exitCondition = precondition;
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> step(Precondition<STATE, SUT> precondition, Step<STATE, SUT> step)
-    {
-        steps.add((ref, entropySource) -> {
-            ref.map(state -> {
-                if (!precondition.test(state))
-                    return state;
-
-                Pair<STATE, SUT> next = step.next(state.l, state.r, entropySource);
-                if (next == Pair.unchanged())
-                    return state;
-                else
-                    return next;
-            });
-        });
-
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> invariant(Precondition<STATE, SUT> invariant)
-    {
-        invariants.add(invariant);
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> step(Step<STATE, SUT> step)
-    {
-        return step(Precondition.alwaysTrue(), step);
-    }
-
-    public ModelChecker<STATE, SUT> step(StatePrecondition<STATE> precondition, ThrowingFunction<STATE, STATE> step)
-    {
-        steps.add((ref, entropySource) -> {
-            ref.map(state -> {
-                if (!precondition.test(state.l))
-                    return state;
-
-                STATE newState = step.apply(state.l);
-                if (state.l == newState)
-                    return state;
-
-                return state.next(newState);
-            });
-        });
-
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> step(StatePrecondition<STATE> precondition, ThrowingBiFunction<STATE, SUT, STATE> step)
-    {
-        steps.add((ref, entropySource) -> {
-            ref.map(state -> {
-                if (!precondition.test(state.l))
-                    return state;
-
-                STATE newState = step.apply(state.l, state.r);
-                if (state.l == newState)
-                    return state;
-
-                return state.next(newState);
-            });
-        });
-
-        return this;
-    }
-
-    public ModelChecker<STATE, SUT> step(ThrowingFunction<STATE, STATE> step)
-    {
-        return step((t, sut, entropySource) -> {
-            return new Pair<>(step.apply(t), sut);
-        });
-    }
-
-    static interface StepExecutor<STATE, SUT>
-    {
-        void execute(Ref<Pair<STATE, SUT>> state, EntropySource entropySource) throws Throwable;
-    }
-
-    public static interface StatePrecondition<STATE>
-    {
-        boolean test(STATE state) throws Throwable;
-    }
-
-    public static interface Precondition<STATE, SUT>
-    {
-        boolean test(STATE state, SUT sut) throws Throwable;
-
-        default boolean test(Pair<STATE, SUT> state) throws Throwable
-        {
-            return test(state.l, state.r);
-        }
-
-        public static <STATE, SUT> Precondition<STATE, SUT> alwaysTrue()
-        {
-            return (a,b) -> true;
-        }
-    }
-
-    public static interface Step<STATE, SUT>
-    {
-        Pair<STATE, SUT> next(STATE t, SUT sut, EntropySource entropySource) throws Throwable;
-    }
-
-    public static interface ThrowingFunction<I, O>
-    {
-        O apply(I t) throws Throwable;
-    }
-
-    public static interface ThrowingBiFunction<I1, I2, O>
-    {
-        O apply(I1 t1, I2 t2) throws Throwable;
-    }
-
-    // Borrowed from Harry
-    public static interface EntropySource
-    {
-        long next();
-        // We derive from entropy source here to avoid letting the step change state for other states
-        // For example, if you start drawing more entropy bits from one of the steps, but won't change
-        // other steps, their states won't change either.
-        EntropySource derive();
-
-        Random seededRandom();
-        default long[] next(int n)
-        {
-            long[] next = new long[n];
-            for (int i = 0; i < n; i++)
-                next[i] = next();
-            return next;
-        }
-
-        default int nextInt()
-        {
-            return RngUtils.asInt(next());
-        }
-
-        default int nextInt(int max)
-        {
-            return RngUtils.asInt(next(), max);
-        }
-
-        default int nextInt(int min, int max)
-        {
-            return RngUtils.asInt(next(), min, max);
-        }
-
-        default boolean nextBoolean()
-        {
-            return RngUtils.asBoolean(next());
-        }
-    }
-
-    public static class FakeEntropySource implements EntropySource
-    {
-        private final Random rng;
-
-        public FakeEntropySource(Random rng)
-        {
-            this.rng = rng;
-        }
-
-        public long next()
-        {
-            return rng.nextLong();
-        }
-
-        public EntropySource derive()
-        {
-            return new FakeEntropySource(new Random(rng.nextLong()));
-        }
-
-        public Random seededRandom()
-        {
-            return new Random(rng.nextLong());
-        }
-    }
-
-    public static class Ref<T>
-    {
-        public T ref;
-
-        public Ref(T init)
-        {
-            this.ref = init;
-        }
-
-        public T get()
-        {
-            return ref;
-        }
-
-        public void set(T v)
-        {
-            this.ref = v;
-        }
-
-        public void map(ThrowingFunction<T, T> fn) throws Throwable
-        {
-            this.ref = fn.apply(ref);
-        }
-    }
-
-    public static class Pair<L, R>
-    {
-        private static Pair<?,?> UNCHANGED = new Pair<>(null,null);
-        public static <L, R> Pair<L, R> unchanged()
-        {
-            return (Pair<L, R>) UNCHANGED;
-        }
-
-        public final L l;
-        public final R r;
-
-        public Pair(L l, R r)
-        {
-            this.l = l;
-            this.r = r;
-        }
-
-        public Pair<L, R> next(L state)
-        {
-            return new Pair<>(state, this.r);
-        }
-    }
-}
diff --git a/test/distributed/org/apache/cassandra/distributed/test/log/PlacementSimulatorTest.java b/test/distributed/org/apache/cassandra/distributed/test/log/PlacementSimulatorTest.java
index 386af0d3a0..2ea853bf01 100644
--- a/test/distributed/org/apache/cassandra/distributed/test/log/PlacementSimulatorTest.java
+++ b/test/distributed/org/apache/cassandra/distributed/test/log/PlacementSimulatorTest.java
@@ -29,10 +29,11 @@ import java.util.Random;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;
 
-import org.apache.cassandra.harry.sut.TokenPlacementModel.Replica;
 import org.junit.Test;
 
+import org.apache.cassandra.harry.checker.ModelChecker;
 import org.apache.cassandra.harry.sut.TokenPlacementModel;
+import org.apache.cassandra.harry.sut.TokenPlacementModel.Replica;
 
 import static org.apache.cassandra.distributed.test.log.PlacementSimulator.SimulatedPlacements;
 import static org.apache.cassandra.distributed.test.log.PlacementSimulator.Transformations;
@@ -283,7 +284,7 @@ public class PlacementSimulatorTest
                              if (operationCounter.getAndIncrement() % rf.total() == 1)
                               {
                                  // randomly schedule either decommission or replacement of an existing node
-                                  Node toRemove = state.nodes.get(rng.nextInt(0, state.nodes.size() - 1));
+                                  Node toRemove = state.nodes.get(rng.nextInt(0, state.nodes.size()));
                                  state = state.withStashed(rng.nextBoolean()
                                                            ? replace(state, toRemove, factory.make(addressCounter.incrementAndGet(), 1, 1).overrideToken(toRemove.token()))
                                                            : leave(state, toRemove));
@@ -299,7 +300,7 @@ public class PlacementSimulatorTest
                           })
                     .step((state, sut) -> !state.stashedStates.isEmpty(),
                           (state, sut, rng) -> {
-                              int idx = rng.nextInt(0, state.stashedStates.size() - 1);
+                              int idx = rng.nextInt(0, state.stashedStates.size());
                              state = state.stashedStates.get(idx).advance(state);
                               return new ModelChecker.Pair<>(state, sut);
                           })
diff --git a/test/distributed/org/apache/cassandra/distributed/test/log/RngUtils.java b/test/distributed/org/apache/cassandra/distributed/test/log/RngUtils.java
deleted file mode 100644
index d2cf69bd3e..0000000000
--- a/test/distributed/org/apache/cassandra/distributed/test/log/RngUtils.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.cassandra.distributed.test.log;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-//TODO: this is borrowed from Harry, temporarily
-public class RngUtils
-{
-    private static final Logger logger = LoggerFactory.getLogger(RngUtils.class);
-
-    private static final long CONSTANT = 0x2545F4914F6CDD1DL;
-    public static long next(long input)
-    {
-        if (input == 0)
-            return next(CONSTANT);
-
-        return xorshift64star(input);
-    }
-
-    public static long xorshift64star(long input)
-    {
-        input ^= input >> 12;
-        input ^= input << 25; // b
-        input ^= input >> 27; // c
-        return input * CONSTANT;
-    }
-
-    public static long[] next(long current, int n)
-    {
-        long[] next = new long[n];
-        for (int i = 0; i < n; i++)
-        {
-            current = next(current);
-            next[i] = current;
-        }
-        return next;
-    }
-
-    public static byte[] asBytes(long current)
-    {
-        byte[] bytes = new byte[Long.BYTES];
-        for (int i = 0; i < Long.BYTES; i++)
-        {
-            bytes[i] = (byte) (current & 0xFF);
-            current >>= current;
-        }
-        return bytes;
-    }
-
-    public static byte asByte(long current)
-    {
-        return (byte) current;
-    }
-
-    public static int asInt(long current)
-    {
-        return (int) current;
-    }
-
-    // TODO: this needs some improvement
-    public static int asInt(long current, int max)
-    {
-        return Math.abs((int) current % max);
-    }
-
-    // Generate a value in [min, max] range: from min _inclusive_ to max _inclusive_.
-    public static int asInt(long current, int min, int max)
-    {
-        if (min == max)
-            return min;
-        return min + asInt(current, max - min);
-    }
-
-    public static boolean asBoolean(long current)
-    {
-        return (current & 1) == 1;
-    }
-
-    public static float asFloat(long current)
-    {
-        return Float.intBitsToFloat((int) current);
-    }
-
-    public static double asDouble(long current)
-    {
-        return Double.longBitsToDouble(current);
-    }
-}
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/examples/RangeTombstoneBurnTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/examples/RangeTombstoneBurnTest.java
index b67813b0a5..ddddb57c09 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/examples/RangeTombstoneBurnTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/examples/RangeTombstoneBurnTest.java
@@ -65,7 +65,7 @@ public class RangeTombstoneBurnTest extends IntegrationTestBase
 
             for (int iteration = 0; iteration < ITERATIONS; iteration++)
             {
-                ModelChecker<ReplayingHistoryBuilder> modelChecker = new ModelChecker<>();
+                ModelChecker<ReplayingHistoryBuilder, Void> modelChecker = new ModelChecker<>();
                 EntropySource entropySource = new JdkRandomEntropySource(iteration);
 
                 int maxPartitionSize = entropySource.nextInt(1, 1 << entropySource.nextInt(5, 11));
@@ -84,52 +84,45 @@ public class RangeTombstoneBurnTest extends IntegrationTestBase
 
                 DataTracker tracker = new DefaultDataTracker();
                 modelChecker.init(new ReplayingHistoryBuilder(seed, maxPartitionSize, STEPS_PER_ITERATION, new DefaultDataTracker(), sut, schema, rf, SystemUnderTest.ConsistencyLevel.ALL))
-                            .step((history) -> {
-                                      int rowIdx = 
entropySource.nextInt(maxPartitionSize);
-                                      int partitionIdx = 
partitions[entropySource.nextInt(partitions.length)];
+                            .step((history, rng) -> {
+                                      int rowIdx = 
rng.nextInt(maxPartitionSize);
+                                      int partitionIdx = 
partitions[rng.nextInt(partitions.length)];
                                       
history.visitPartition(partitionIdx).insert(rowIdx);
-                                      return history;
                                   })
-                            .step((history) -> entropySource.nextFloat() > 
deleteRowChance,
-                                  (history) -> {
-                                      int partitionIdx = 
partitions[entropySource.nextInt(partitions.length)];
+                            .step((history, rng) -> rng.nextFloat() > 
deleteRowChance,
+                                  (history, rng) -> {
+                                      int partitionIdx = 
partitions[rng.nextInt(partitions.length)];
                                       
history.visitPartition(partitionIdx).deleteRow();
-                                      return history;
                                   })
-                            .step((history) -> entropySource.nextFloat() > 
deleteRowChance,
-                                  (history) -> {
-                                      int partitionIdx = 
partitions[entropySource.nextInt(partitions.length)];
+                            .step((history, rng) -> rng.nextFloat() > 
deleteRowChance,
+                                  (history, rng) -> {
+                                      int partitionIdx = 
partitions[rng.nextInt(partitions.length)];
                                       
history.visitPartition(partitionIdx).deleteColumns();
-                                      return history;
                                   })
-                            .step((history) -> entropySource.nextFloat() > 
deletePartitionChance,
-                                  (history) -> {
-                                      int partitionIdx = 
partitions[entropySource.nextInt(partitions.length)];
+                            .step((history, rng) -> rng.nextFloat() > 
deletePartitionChance,
+                                  (history, rng) -> {
+                                      int partitionIdx = 
partitions[rng.nextInt(partitions.length)];
                                       
history.visitPartition(partitionIdx).deletePartition();
-                                      return history;
                                   })
-                            .step((history) -> entropySource.nextFloat() > 
flushChance,
-                                  (history) -> {
+                            .step((history, rng) -> rng.nextFloat() > 
flushChance,
+                                  (history, rng) -> {
                                       cluster.get(1).nodetool("flush", 
schema.keyspace, schema.table);
                                       flushes.incrementAndGet();
-                                      return history;
                                   })
-                            .step((history) -> entropySource.nextFloat() > 
deleteRangeChance,
-                                  (history) -> {
-                                      int partitionIdx = 
partitions[entropySource.nextInt(partitions.length)];
+                            .step((history, rng) -> rng.nextFloat() > 
deleteRangeChance,
+                                  (history, rng) -> {
+                                      int partitionIdx = 
partitions[rng.nextInt(partitions.length)];
                                       
history.visitPartition(partitionIdx).deleteRowSlice();
-                                      return history;
                                   })
-                            .step((history) -> entropySource.nextFloat() > 
deleteRangeChance,
-                                  (history) -> {
-                                      int row1 = 
entropySource.nextInt(maxPartitionSize);
-                                      int row2 = 
entropySource.nextInt(maxPartitionSize);
-                                      int partitionIdx = 
partitions[entropySource.nextInt(partitions.length)];
+                            .step((history, rng) -> rng.nextFloat() > 
deleteRangeChance,
+                                  (history, rng) -> {
+                                      int row1 = rng.nextInt(maxPartitionSize);
+                                      int row2 = rng.nextInt(maxPartitionSize);
+                                      int partitionIdx = 
partitions[rng.nextInt(partitions.length)];
                                       
history.visitPartition(partitionIdx).deleteRowRange(Math.min(row1, row2),
                                                                                
           Math.max(row1, row2),
                                                                                
           entropySource.nextBoolean(),
                                                                                
           entropySource.nextBoolean());
-                                      return history;
                                   })
                             .afterAll((history) -> {
                                 // Sanity check
@@ -137,7 +130,7 @@ public class RangeTombstoneBurnTest extends IntegrationTestBase
                                                  partitions);
                                 history.validate(partitions);
                             })
-                            .run(STEPS_PER_ITERATION, seed);
+                            .run(STEPS_PER_ITERATION, seed, entropySource);
             }
         }
     }
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/examples/RepairBurnTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/examples/RepairBurnTest.java
index 4092d6a02f..19f2997d69 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/examples/RepairBurnTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/examples/RepairBurnTest.java
@@ -82,8 +82,7 @@ public class RepairBurnTest extends IntegrationTestBase
         sut.schemaChange("CREATE KEYSPACE " + schema.keyspace + " WITH 
replication = {'class': 'SimpleStrategy', 'replication_factor': 3};");
         sut.schemaChange(schema.compile().cql());
 
-        ModelChecker<HistoryBuilder> modelChecker = new ModelChecker<>();
-        JdkRandomEntropySource rng = new JdkRandomEntropySource(new 
Random(seed));
+        ModelChecker<HistoryBuilder, Void> modelChecker = new ModelChecker<>();
         DataTracker tracker = new DefaultDataTracker();
 
         TokenPlacementModel.ReplicationFactor rf = new 
TokenPlacementModel.SimpleReplicationFactor(3);
@@ -92,7 +91,7 @@ public class RepairBurnTest extends IntegrationTestBase
         int partitions = 1000;
 
         modelChecker.init(new HistoryBuilder(seed, maxPartitionSize, 10, 
schema, rf))
-                    .step((history) -> {
+                    .step((history, rng) -> {
                         history.visitPartition(rng.nextInt(partitions),
                                                (ps) -> {
                                                    Object[][] clusterings = 
new Object[maxPartitionSize][];
@@ -113,7 +112,7 @@ public class RepairBurnTest extends IntegrationTestBase
                                                })
                                .insert(rng.nextInt(maxPartitionSize));
                     })
-                    .step((history) -> {
+                    .step((history, rng) -> {
                         history.visitPartition(rng.nextInt(partitions))
                                .deleteRow(rng.nextInt(maxPartitionSize));
                     })
@@ -129,10 +128,10 @@ public class RepairBurnTest extends IntegrationTestBase
                         Model model = history.quiescentLocalChecker(tracker, 
sut);
 
                         for (Long pd : history.visitedPds())
-                            
model.validate(Query.selectPartition(history.schema(), pd, false));
+                            
model.validate(Query.selectAllColumns(history.schema(), pd, false));
 
                         return true;
                     })
-                    .run(Integer.MAX_VALUE, seed);
+                    .run(Integer.MAX_VALUE, seed, new 
JdkRandomEntropySource(new Random(seed)));
     }
 }
\ No newline at end of file
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/integration/ddl/SchemaGenTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/integration/ddl/SchemaGenTest.java
index 8334ae4911..c39421fb84 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/integration/ddl/SchemaGenTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/integration/ddl/SchemaGenTest.java
@@ -61,7 +61,6 @@ public class SchemaGenTest extends CQLTester
                                                                           
.staticColumnCount(0, 10)
                                                                           
.generator();
 
-
         TestRunner.test(gen,
                         schemaDefinition -> {
                             String tableDef = schemaDefinition.compile().cql();
@@ -69,9 +68,9 @@ public class SchemaGenTest extends CQLTester
 
                             try
                             {
-                                CompiledStatement statement = 
Query.selectPartition(schemaDefinition, 1, false).toSelectStatement();
+                                CompiledStatement statement = 
Query.selectAllColumns(schemaDefinition, 1, false).toSelectStatement();
                                 execute(statement.cql(), statement.bindings());
-                                statement = 
Query.selectPartition(schemaDefinition, 1, true).toSelectStatement();
+                                statement = 
Query.selectAllColumns(schemaDefinition, 1, true).toSelectStatement();
                                 execute(statement.cql(), statement.bindings());
                             }
                             catch (Throwable t)
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderIntegrationTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderIntegrationTest.java
index e64835ac6f..bd0d2533a9 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderIntegrationTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderIntegrationTest.java
@@ -56,44 +56,43 @@ public class HistoryBuilderIntegrationTest extends IntegrationTestBase
             beforeEach();
             sut.schemaChange(schema.compile().cql());
 
-            ModelChecker<SingleOperationBuilder> modelChecker = new 
ModelChecker<>();
-            JdkRandomEntropySource rng = new JdkRandomEntropySource(new 
Random(SEED));
+            ModelChecker<SingleOperationBuilder, Void> modelChecker = new 
ModelChecker<>();
 
             TokenPlacementModel.ReplicationFactor rf = new 
TokenPlacementModel.SimpleReplicationFactor(1);
 
             int maxPartitionSize = 100;
             modelChecker.init(new HistoryBuilder(SEED, maxPartitionSize, 10, 
schema, rf))
                         .step((history) -> {
-                            return history.insert();
+                            history.insert();
                         })
-                        .step((history) -> {
-                            return 
history.insert(rng.nextInt(maxPartitionSize));
+                        .step((history, rng) -> {
+                            history.insert(rng.nextInt(maxPartitionSize));
                         })
-                        .step((history) -> {
+                        .step((history, rng) -> {
                             int row = rng.nextInt(maxPartitionSize);
                             long[] vIdxs = new 
long[schema.regularColumns.size()];
                             for (int j = 0; j < schema.regularColumns.size(); 
j++)
                                 vIdxs[j] = rng.nextInt(20);
 
-                            return history.insert(row, vIdxs);
+                            history.insert(row, vIdxs);
                         })
                         .step((history) -> {
-                            return history.deleteRow();
+                            history.deleteRow();
                         })
-                        .step((history) -> {
-                            return 
history.deleteRow(rng.nextInt(maxPartitionSize));
+                        .step((history, rng) -> {
+                            history.deleteRow(rng.nextInt(maxPartitionSize));
                         })
                         .step(SingleOperationBuilder::deletePartition)
                         .step(SingleOperationBuilder::deleteColumns)
                         .step(SingleOperationBuilder::deleteRowSlice)
                         .step((history) -> {
-                            return history.deleteRowRange();
+                            history.deleteRowRange();
                         })
-                        .step((history) -> {
-                            return 
history.deleteRowRange(rng.nextInt(maxPartitionSize),
-                                                          
rng.nextInt(maxPartitionSize),
-                                                          rng.nextBoolean(),
-                                                          rng.nextBoolean());
+                        .step((history, rng) -> {
+                            
history.deleteRowRange(rng.nextInt(maxPartitionSize),
+                                                   
rng.nextInt(maxPartitionSize),
+                                                   rng.nextBoolean(),
+                                                   rng.nextBoolean());
                         })
                         .step((history) -> history instanceof HistoryBuilder,
                               (history) -> ((HistoryBuilder) 
history).beginBatch())
@@ -113,11 +112,11 @@ public class HistoryBuilderIntegrationTest extends IntegrationTestBase
                             Model model = 
historyBuilder.quiescentChecker(tracker, sut);
 
                             for (Long pd : historyBuilder.visitedPds())
-                                
model.validate(Query.selectPartition(historyBuilder.schema(), pd,false));
+                                
model.validate(Query.selectAllColumns(historyBuilder.schema(), pd, false));
 
                             return true;
                         })
-                        .run(STEPS_PER_ITERATION, SEED);
+                        .run(STEPS_PER_ITERATION, SEED, new 
JdkRandomEntropySource(new Random(SEED)));
         }
     }
 
@@ -132,49 +131,24 @@ public class HistoryBuilderIntegrationTest extends IntegrationTestBase
             beforeEach();
             sut.schemaChange(schema.compile().cql());
 
-            ModelChecker<HistoryBuilder> modelChecker = new ModelChecker<>();
-            JdkRandomEntropySource rng = new JdkRandomEntropySource(new 
Random(SEED));
+            ModelChecker<HistoryBuilder, Void> modelChecker = new 
ModelChecker<>();
 
             TokenPlacementModel.ReplicationFactor rf = new 
TokenPlacementModel.SimpleReplicationFactor(1);
 
             int maxPartitionSize = 10;
             modelChecker.init(new HistoryBuilder(SEED, maxPartitionSize, 10, 
schema, rf))
-                        .beforeAll((history) -> {
+                        .beforeAll((history, rng) -> {
                             for (int i = 0; i < MAX_PARTITIONS; i++)
-                                
history.forPartition(i).ensureClustering(schema.ckGenerator.inflate(rng.nextLong()));
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .insert();
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .insert(rng.nextInt(maxPartitionSize));
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .deleteRow();
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .deleteRow(rng.nextInt(maxPartitionSize));
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .deletePartition();
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .deleteColumns();
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .deleteRowRange();
-                        })
-                        .step((history) -> {
-                            history.visitPartition(rng.nextInt(MAX_PARTITIONS))
-                                   .deleteRowSlice();
-                        })
+                                
history.forPartition(i).ensureClustering(schema.ckGenerator.inflate(rng.next()));
+                        })
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).insert())
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).insert(rng.nextInt(maxPartitionSize)))
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).deleteRow())
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).deleteRow(rng.nextInt(maxPartitionSize)))
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).deletePartition())
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).deleteColumns())
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).deleteRowRange())
+                        .step((history, rng) -> 
history.visitPartition(rng.nextInt(MAX_PARTITIONS)).deleteRowSlice())
                         .exitCondition((history) -> {
                             ReplayingVisitor visitor = 
history.visitor(tracker, sut, SystemUnderTest.ConsistencyLevel.ALL);
                             visitor.replayAll();
@@ -185,11 +159,15 @@ public class HistoryBuilderIntegrationTest extends IntegrationTestBase
                             Model model = history.quiescentChecker(tracker, 
sut);
 
                             for (Long pd : history.visitedPds())
-                                
model.validate(Query.selectPartition(history.schema(), pd,false));
+                            {
+                                
model.validate(Query.selectAllColumns(history.schema(), pd, false));
+                                
model.validate(Query.selectAllColumnsWildcard(history.schema(), pd, false));
+                            }
+
 
                             return true;
                         })
-                        .run(STEPS_PER_ITERATION, SEED);
+                        .run(STEPS_PER_ITERATION, SEED, new 
JdkRandomEntropySource(new Random(SEED)));
         }
     }
 }
\ No newline at end of file
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderOverridesIntegrationTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderOverridesIntegrationTest.java
index 7625ac9f32..a6cfb74cd8 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderOverridesIntegrationTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/integration/dsl/HistoryBuilderOverridesIntegrationTest.java
@@ -87,7 +87,7 @@ public class HistoryBuilderOverridesIntegrationTest extends IntegrationTestBase
 
         history.visitor(tracker, sut, 
SystemUnderTest.ConsistencyLevel.ALL).replayAll();
 
-        Object[][] res = sut.execute(Query.selectPartition(history.schema(), 
history.visitedPds().get(0), false).toSelectStatement(),
+        Object[][] res = sut.execute(Query.selectAllColumns(history.schema(), 
history.visitedPds().get(0), false).toSelectStatement(),
                                      SystemUnderTest.ConsistencyLevel.ALL);
         int found = 0;
         for (Object[] row : res)
@@ -132,7 +132,7 @@ public class HistoryBuilderOverridesIntegrationTest extends IntegrationTestBase
                 visitor.replayAll();
                 long visitedPd = history.forPartition(pdIdx).pd();
                 {
-                    Object[][] res = 
sut.execute(Query.selectPartition(history.schema(), visitedPd, 
false).toSelectStatement(),
+                    Object[][] res = 
sut.execute(Query.selectAllColumns(history.schema(), visitedPd, 
false).toSelectStatement(),
                                                  
SystemUnderTest.ConsistencyLevel.ALL);
 
                     int found = 0;
@@ -201,7 +201,7 @@ public class HistoryBuilderOverridesIntegrationTest extends IntegrationTestBase
                     visitor.replayAll();
                     long visitedPd = history.forPartition(pdIdx).pd();
                     {
-                        Object[][] res = 
sut.execute(Query.selectPartition(history.schema(), visitedPd, 
false).toSelectStatement(),
+                        Object[][] res = 
sut.execute(Query.selectAllColumns(history.schema(), visitedPd, 
false).toSelectStatement(),
                                                      
SystemUnderTest.ConsistencyLevel.ALL);
 
                         int found = 0;
@@ -333,7 +333,7 @@ public class HistoryBuilderOverridesIntegrationTest extends IntegrationTestBase
                 history.validateAll(tracker, sut);
 
                 long visitedPd = history.forPartition(pdIdx).pd();
-                Object[][] res = 
sut.execute(Query.selectPartition(history.schema(), visitedPd, 
false).toSelectStatement(),
+                Object[][] res = 
sut.execute(Query.selectAllColumns(history.schema(), visitedPd, 
false).toSelectStatement(),
                                              
SystemUnderTest.ConsistencyLevel.ALL);
 
                 for (int i = 0; i < res.length; i++)
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/QuiescentCheckerIntegrationTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/QuiescentCheckerIntegrationTest.java
index ffb0177897..e88dd30d7e 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/QuiescentCheckerIntegrationTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/QuiescentCheckerIntegrationTest.java
@@ -109,9 +109,9 @@ public class QuiescentCheckerIntegrationTest extends ModelTestBase
                                                                                
                    run.clock,
                                                                                
                    HideRowCorruptor::new);
 
-                         Query query = Query.selectPartition(run.schemaSpec,
-                                                             
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
-                                                             false);
+                         Query query = Query.selectAllColumns(run.schemaSpec,
+                                                              
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
+                                                              false);
 
                          return corruptor.maybeCorrupt(query, run.sut);
                      },
@@ -137,9 +137,9 @@ public class QuiescentCheckerIntegrationTest extends ModelTestBase
                                                                                
      run.tracker,
                                                                                
      run.descriptorSelector);
 
-                         return 
corruptor.maybeCorrupt(Query.selectPartition(run.schemaSpec,
-                                                                             
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
-                                                                             
false),
+                         return 
corruptor.maybeCorrupt(Query.selectAllColumns(run.schemaSpec,
+                                                                              
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
+                                                                              
false),
                                                        run.sut);
                      },
                      (t, run) -> {
@@ -163,9 +163,9 @@ public class QuiescentCheckerIntegrationTest extends ModelTestBase
                                                                                
                    run.clock,
                                                                                
                    HideValueCorruptor::new);
 
-                         return 
corruptor.maybeCorrupt(Query.selectPartition(run.schemaSpec,
-                                                                             
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
-                                                                             
false),
+                         return 
corruptor.maybeCorrupt(Query.selectAllColumns(run.schemaSpec,
+                                                                              
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
+                                                                              
false),
                                                        run.sut);
                      },
                      (t, run) -> {
@@ -189,9 +189,9 @@ public class QuiescentCheckerIntegrationTest extends ModelTestBase
                                                                                
                    run.clock,
                                                                                
                    ChangeValueCorruptor::new);
 
-                         return 
corruptor.maybeCorrupt(Query.selectPartition(run.schemaSpec,
-                                                                             
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
-                                                                             
false),
+                         return 
corruptor.maybeCorrupt(Query.selectAllColumns(run.schemaSpec,
+                                                                              
run.pdSelector.pd(CORRUPT_LTS, run.schemaSpec),
+                                                                              
false),
                                                        run.sut);
                      },
                      (t, run) -> {
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/ReconcilerIntegrationTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/ReconcilerIntegrationTest.java
index 54fe5fe8a6..707fc70ca0 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/ReconcilerIntegrationTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/ReconcilerIntegrationTest.java
@@ -32,7 +32,7 @@ import org.apache.cassandra.harry.tracker.DefaultDataTracker;
 
 public class ReconcilerIntegrationTest extends IntegrationTestBase
 {
-    private static final long SEED = 1; // 88
+    private static final long SEED = 1;
 
     @Test
     public void testTrackingWithStatics() throws Throwable
diff --git a/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/reconciler/SimpleReconcilerTest.java b/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/reconciler/SimpleReconcilerTest.java
index d240ecf037..3f3d4135e1 100644
--- a/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/reconciler/SimpleReconcilerTest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/harry/integration/model/reconciler/SimpleReconcilerTest.java
@@ -164,7 +164,7 @@ public class SimpleReconcilerTest extends IntegrationTestBase
                 {
                     Query query;
 
-                    query = Query.selectPartition(schema, pd, reverse);
+                    query = Query.selectAllColumns(schema, pd, reverse);
 
                     QuiescentChecker.validate(schema,
                                               run.tracker,
diff --git a/test/distributed/org/apache/cassandra/fuzz/sai/SingleNodeSAITest.java b/test/distributed/org/apache/cassandra/fuzz/sai/SingleNodeSAITest.java
index 09b215d710..bbe4ce8398 100644
--- a/test/distributed/org/apache/cassandra/fuzz/sai/SingleNodeSAITest.java
+++ b/test/distributed/org/apache/cassandra/fuzz/sai/SingleNodeSAITest.java
@@ -246,7 +246,7 @@ public class SingleNodeSAITest extends IntegrationTestBase
                     {
                         logger.debug("Partition index = {}, run = {}, j = {}, 
i = {}", partitionIndex, run, j, i);
 
-                        Query partitionQuery = Query.selectPartition(schema, pd, false);
+                        Query partitionQuery = Query.selectAllColumns(schema, pd, false);
                         QuiescentChecker.validate(schema,
                                                   tracker,
                                                   columns,
diff --git a/test/harry/main/org/apache/cassandra/harry/checker/ModelChecker.java b/test/harry/main/org/apache/cassandra/harry/checker/ModelChecker.java
index 74bf441000..a5dc2ffd25 100644
--- a/test/harry/main/org/apache/cassandra/harry/checker/ModelChecker.java
+++ b/test/harry/main/org/apache/cassandra/harry/checker/ModelChecker.java
@@ -20,20 +20,22 @@ package org.apache.cassandra.harry.checker;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Random;
+import java.util.function.BiConsumer;
+import java.util.function.BiPredicate;
 import java.util.function.Consumer;
+import java.util.function.Predicate;
 
 import org.apache.cassandra.harry.gen.EntropySource;
 import org.apache.cassandra.harry.gen.rng.JdkRandomEntropySource;
 
-public class ModelChecker<STATE>
+public class ModelChecker<STATE, SUT>
 {
-    private final List<StepExecutor<STATE>> steps;
-    private final List<Precondition<STATE>> invariants;
-    private Precondition<STATE> exitCondition;
-    private Consumer<STATE> beforeAll;
-    private Consumer<STATE> afterAll;
-    private STATE init;
+    private final List<StepExecutor<STATE, SUT>> steps;
+    private final List<Precondition<STATE, SUT>> invariants;
+    private Precondition<STATE, SUT> exitCondition;
+    private Step<STATE, SUT> beforeAll;
+    private Step<STATE, SUT> afterAll;
+    private Pair<STATE, SUT> init;
 
     public ModelChecker()
     {
@@ -43,113 +45,139 @@ public class ModelChecker<STATE>
 
     public void run() throws Throwable
     {
-        run(Integer.MAX_VALUE, System.currentTimeMillis());
+        run(0, Long.MAX_VALUE, new JdkRandomEntropySource(System.currentTimeMillis()));
     }
 
-    public void run(int maxSteps, long seed) throws Throwable
+    public void run(int minSteps, long maxSteps) throws Throwable
+    {
+        run(minSteps, maxSteps, new JdkRandomEntropySource(System.currentTimeMillis()));
+    }
+
+    public void run(int minSteps, long maxSteps, EntropySource entropySource) throws Throwable
     {
         assert init != null : "Initial condition is not specified";
 
-        Ref<STATE> state = new Ref<>(init);
-        EntropySource entropySource = new JdkRandomEntropySource(new Random(seed));
+        Ref<Pair<STATE, SUT>> state = new Ref<>(init, Pair.unchanged());
         if (beforeAll != null)
-            beforeAll.accept(state.get());
+            state.map((s) -> beforeAll.next(s.l, s.r, entropySource));
 
         for (int i = 0; i < maxSteps; i++)
         {
-            if (exitCondition != null && exitCondition.test(state.get()))
+            if (i > minSteps && exitCondition.test(state.get()))
                 return;
 
             // TODO: add randomisation / probability for triggering a specific step
             steps.get(entropySource.nextInt(steps.size())).execute(state, entropySource.derive());
-            for (Precondition<STATE> invariant : invariants)
+            for (Precondition<STATE, SUT> invariant : invariants)
                 invariant.test(state.get());
         }
 
         if (afterAll != null)
-            afterAll.accept(state.get());
+            state.map((s) -> afterAll.next(s.l, s.r, entropySource));
     }
 
-    public ModelChecker<STATE> init(STATE state)
+    public ModelChecker<STATE, SUT> init(STATE state, SUT sut)
     {
-        this.init = state;
+        this.init = new Pair<>(state, sut);
         return this;
     }
 
-    public ModelChecker<STATE> beforeAll(Consumer<STATE> precondition)
+    public Simple init(STATE state)
+    {
+        Simple simple = new Simple();
+        simple.init(state);
+        return simple;
+    }
+
+    @SuppressWarnings("unused")
+    public ModelChecker<STATE, SUT> beforeAll(Step<STATE, SUT> beforeAll)
     {
-        this.beforeAll = precondition;
+        this.beforeAll = beforeAll;
         return this;
     }
 
-    public ModelChecker<STATE> afterAll(Consumer<STATE> postcondition)
+    @SuppressWarnings("unused")
+    public ModelChecker<STATE, SUT> afterAll(Step<STATE, SUT> afterAll)
     {
-        this.afterAll = postcondition;
+        this.afterAll = afterAll;
         return this;
     }
 
-    public ModelChecker<STATE> exitCondition(Precondition<STATE> precondition)
+    public ModelChecker<STATE, SUT> exitCondition(Precondition<STATE, SUT> precondition)
     {
         this.exitCondition = precondition;
         return this;
     }
 
-    public ModelChecker<STATE> step(Precondition<STATE> precondition, Step<STATE> step)
+    public ModelChecker<STATE, SUT> step(Precondition<STATE, SUT> precondition, Step<STATE, SUT> step)
     {
         steps.add((ref, entropySource) -> {
             ref.map(state -> {
                 if (!precondition.test(state))
                     return state;
 
-                return step.next(state, entropySource);
+                return step.next(state.l, state.r, entropySource);
             });
         });
 
         return this;
     }
 
-    public ModelChecker<STATE> invariant(Precondition<STATE> invariant)
+    public ModelChecker<STATE, SUT> invariant(Precondition<STATE, SUT> invariant)
     {
         invariants.add(invariant);
         return this;
     }
 
-    public ModelChecker<STATE> step(Step<STATE> step)
+    public ModelChecker<STATE, SUT> step(Step<STATE, SUT> step)
     {
         return step(Precondition.alwaysTrue(), step);
     }
 
-    public ModelChecker<STATE> step(StatePrecondition<STATE> precondition, ThrowingFunction<STATE, STATE> step)
+    public ModelChecker<STATE, SUT> step(StatePrecondition<STATE> precondition, ThrowingFunction<STATE, STATE> step)
     {
         steps.add((ref, entropySource) -> {
             ref.map(state -> {
-                if (!precondition.test(state))
+                if (!precondition.test(state.l))
+                    return state;
+
+                STATE newState = step.apply(state.l);
+                if (state.l == newState)
                     return state;
 
-                return step.apply(state);
+                return state.next(newState);
             });
         });
 
         return this;
     }
 
-    public ModelChecker<STATE> step(ThrowingConsumer<STATE> step)
+    public ModelChecker<STATE, SUT> step(StatePrecondition<STATE> precondition, ThrowingBiFunction<STATE, SUT, STATE> step)
     {
-        return step((t, entropySource) -> {
-            step.consume(t);
-            return t;
+        steps.add((ref, entropySource) -> {
+            ref.map(state -> {
+                if (!precondition.test(state.l))
+                    return state;
+
+                STATE newState = step.apply(state.l, state.r);
+                if (state.l == newState)
+                    return state;
+
+                return state.next(newState);
+            });
         });
+
+        return this;
     }
-    public ModelChecker<STATE> step(ThrowingFunction<STATE, STATE> step)
+
+    public ModelChecker<STATE, SUT> step(ThrowingFunction<STATE, STATE> step)
     {
-        return step((t, entropySource) -> {
-            return step.apply(t);
-        });
+        return step((t, sut, entropySource) -> new Pair<>(step.apply(t), sut));
     }
 
-    interface StepExecutor<STATE>
+    interface StepExecutor<STATE, SUT>
     {
-        void execute(Ref<STATE> state, EntropySource entropySource) throws Throwable;
+        void execute(Ref<Pair<STATE, SUT>> state, EntropySource entropySource) throws Throwable;
     }
 
     public interface StatePrecondition<STATE>
@@ -157,24 +185,34 @@ public class ModelChecker<STATE>
         boolean test(STATE state) throws Throwable;
     }
 
-    public interface Precondition<STATE>
+    public interface Precondition<STATE, SUT>
     {
-        boolean test(STATE state) throws Throwable;
+        boolean test(STATE state, SUT sut) throws Throwable;
 
-        static <STATE> Precondition<STATE> alwaysTrue()
+        default boolean test(Pair<STATE, SUT> state) throws Throwable
         {
-            return (a) -> true;
+            return test(state.l, state.r);
+        }
+
+        static <STATE, SUT> Precondition<STATE, SUT> alwaysTrue()
+        {
+            return (a, b) -> true;
         }
     }
 
-    public interface Step<STATE>
+    public interface Step<STATE, SUT>
     {
-        STATE next(STATE t, EntropySource entropySource) throws Throwable;
+        Pair<STATE, SUT> next(STATE t, SUT sut, EntropySource entropySource) throws Throwable;
     }
 
     public interface ThrowingConsumer<I>
     {
-        void consume(I t) throws Throwable;
+        void accept(I t) throws Throwable;
+    }
+
+    public interface ThrowingBiConsumer<I1, I2>
+    {
+        void accept(I1 t1, I2 t2) throws Throwable;
     }
 
     public interface ThrowingFunction<I, O>
@@ -187,13 +225,15 @@ public class ModelChecker<STATE>
         O apply(I1 t1, I2 t2) throws Throwable;
     }
 
-    public static class Ref<T>
+    private static class Ref<T>
     {
         public T ref;
+        private final T unchanged;
 
-        public Ref(T init)
+        public Ref(T init, T unchanged)
         {
             this.ref = init;
+            this.unchanged = unchanged;
         }
 
         public T get()
@@ -203,29 +243,160 @@ public class ModelChecker<STATE>
 
         public void set(T v)
         {
+            if (v == unchanged)
+                return;
             this.ref = v;
         }
 
         public void map(ThrowingFunction<T, T> fn) throws Throwable
         {
-            this.ref = fn.apply(ref);
+            set(fn.apply(ref));
         }
     }
 
-    public static class State<MODEL, SUT>
+    public static class Pair<L, R>
     {
-        public final MODEL model;
-        public final SUT sut;
+        private static final Pair<?, ?> UNCHANGED = new Pair<>(null, null);
+
+        public static <L, R> Pair<L, R> unchanged()
+        {
+            return (Pair<L, R>) UNCHANGED;
+        }
+
+        public final L l;
+        public final R r;
+
+        public Pair(L l, R r)
+        {
+            this.l = l;
+            this.r = r;
+        }
+
+        public Pair<L, R> next(L state)
+        {
+            return new Pair<>(state, this.r);
+        }
+    }
+
+    public class Simple
+    {
+        public Simple init(STATE state)
+        {
+            ModelChecker.this.init = new Pair<>(state, null);
+            return this;
+        }
+
+        @SuppressWarnings("unused")
+        public Simple beforeAll(ThrowingConsumer<STATE> beforeAll)
+        {
+            ModelChecker.this.beforeAll = (t, sut, entropySource) -> {
+                beforeAll.accept(t);
+                return Pair.unchanged();
+            };
+            return this;
+        }
+
+        public Simple beforeAll(ThrowingBiConsumer<STATE, EntropySource> beforeAll)
+        {
+            ModelChecker.this.beforeAll = (t, sut, entropySource) -> {
+                beforeAll.accept(t, entropySource);
+                return Pair.unchanged();
+            };
+            return this;
+        }
+
+        @SuppressWarnings("unused")
+        public Simple beforeAll(ThrowingFunction<STATE, STATE> beforeAll)
+        {
+            ModelChecker.this.beforeAll = (t, sut, entropySource) -> new Pair<>(beforeAll.apply(t), sut);
+            return this;
+        }
+
+        public Simple afterAll(ThrowingConsumer<STATE> afterAll)
+        {
+            ModelChecker.this.afterAll = (t, sut, entropySource) -> {
+                afterAll.accept(t);
+                return Pair.unchanged();
+            };
+            return this;
+        }
+
+        @SuppressWarnings("unused")
+        public Simple afterAll(ThrowingFunction<STATE, STATE> afterAll)
+        {
+            ModelChecker.this.afterAll = (t, sut, entropySource) -> new Pair(afterAll.apply(t), sut);
+            return this;
+        }
+
+        public Simple exitCondition(Predicate<STATE> precondition)
+        {
+            ModelChecker.this.exitCondition = (state, sut) -> precondition.test(state);
+            return this;
+        }
+
+        @SuppressWarnings("unused")
+        public Simple invariant(Predicate<STATE> invariant)
+        {
+            invariants.add((state, sut) -> invariant.test(state));
+            return this;
+        }
+
+        public Simple step(ThrowingFunction<STATE, STATE> step)
+        {
+            ModelChecker.this.step((state, sut, entropySource) -> new Pair<>(step.apply(state), sut));
+            return this;
+        }
+
+        public Simple step(ThrowingConsumer<STATE> step)
+        {
+            ModelChecker.this.step((state, sut, entropySource) -> {
+                step.accept(state);
+                return Pair.unchanged();
+            });
+            return this;
+        }
+
+        public Simple step(ThrowingBiConsumer<STATE, EntropySource> step)
+        {
+            ModelChecker.this.step((state, sut, entropySource) -> {
+                step.accept(state, entropySource);
+                return Pair.unchanged();
+            });
+            return this;
+        }
+
+        public Simple step(BiPredicate<STATE, EntropySource> precondition, BiConsumer<STATE, EntropySource> step)
+        {
+            ModelChecker.this.step((state, sut, entropySource) -> {
+                if (!precondition.test(state, entropySource))
+                    return Pair.unchanged();
+
+                step.accept(state, entropySource);
+                return Pair.unchanged();
+            });
+
+            return this;
+        }
+
+        public Simple step(Predicate<STATE> precondition, Consumer<STATE> step)
+        {
+            ModelChecker.this.step((state, ignore) -> precondition.test(state),
+                                   (t, sut, entropySource) -> {
+                                       step.accept(t);
+                                       return Pair.unchanged();
+                                   });
+
+            return this;
+        }
 
-        public State(MODEL model, SUT sut)
+        public void run() throws Throwable
         {
-            this.model = model;
-            this.sut = sut;
+            ModelChecker.this.run();
         }
 
-        public State<MODEL, SUT> next(MODEL state)
+        public void run(int minSteps, long maxSteps, EntropySource entropySource) throws Throwable
         {
-            return new State<>(state, this.sut);
+            ModelChecker.this.run(minSteps, maxSteps, entropySource);
         }
     }
 }
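
For reference, the reworked checker is normally driven through the Simple facade returned by init(STATE). A minimal sketch, assuming a List-based model state, an arbitrary seed, and the JdkRandomEntropySource package location (none of which are part of the patch):

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.cassandra.harry.checker.ModelChecker;
    import org.apache.cassandra.harry.gen.rng.JdkRandomEntropySource; // package assumed

    public class ModelCheckerSketch
    {
        public static void main(String[] args) throws Throwable
        {
            ModelChecker<List<Integer>, Void> checker = new ModelChecker<>();
            checker.init(new ArrayList<Integer>())                            // Simple facade; no SUT involved
                   .step((state, entropy) -> state.add(entropy.nextInt(100))) // mutates the model state in place
                   .invariant(state -> state.size() <= 10_000)                // evaluated after every step
                   .exitCondition(state -> state.size() >= 1_000)             // consulted only once minSteps have run
                   .run(100, 10_000, new JdkRandomEntropySource(1L));         // minSteps, maxSteps, entropy source
        }
    }
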
diff --git a/test/harry/main/org/apache/cassandra/harry/dsl/HistoryBuilder.java b/test/harry/main/org/apache/cassandra/harry/dsl/HistoryBuilder.java
index a8503b9f98..a5d5fdafe2 100644
--- a/test/harry/main/org/apache/cassandra/harry/dsl/HistoryBuilder.java
+++ b/test/harry/main/org/apache/cassandra/harry/dsl/HistoryBuilder.java
@@ -29,6 +29,7 @@ import java.util.NavigableSet;
 import java.util.Set;
 import java.util.TreeSet;
 import java.util.function.Consumer;
+import java.util.function.Function;
 import java.util.function.LongSupplier;
 
 import org.apache.cassandra.harry.clock.ApproximateClock;
@@ -521,8 +522,8 @@ public class HistoryBuilder implements Iterable<ReplayingVisitor.Visit>, SingleO
             long pd = presetSelector.pdAtPosition(partitionIdx);
             if (presetSelector.minLtsFor(pd) < 0)
                 continue;
-            model.validate(Query.selectPartition(schema, pd, false));
-            model.validate(Query.selectPartition(schema, pd, true));
+            model.validate(Query.selectAllColumns(schema, pd, false));
+            model.validate(Query.selectAllColumns(schema, pd, true));
         }
     }
 
@@ -535,8 +536,17 @@ public class HistoryBuilder implements Iterable<ReplayingVisitor.Visit>, SingleO
     {
         for (Long pd : partitionStates.keySet())
         {
-            model.validate(Query.selectPartition(schema, pd, false));
-            model.validate(Query.selectPartition(schema, pd, true));
+            model.validate(Query.selectAllColumns(schema, pd, false));
+            model.validate(Query.selectAllColumns(schema, pd, true));
+        }
+    }
+
+    public void validateAll(Model model, Function<Long, List<Query>> queries)
+    {
+        for (Long pd : partitionStates.keySet())
+        {
+            for (Query query : queries.apply(pd))
+                model.validate(query);
         }
     }
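
The new validateAll overload takes a per-partition query factory, so the same partitions can be validated under several read shapes. A hedged usage fragment, assuming history (HistoryBuilder), model (Model) and schema (SchemaSpec) already exist in the surrounding test:

    import java.util.Arrays;

    import org.apache.cassandra.harry.operations.Query;

    // history, model and schema are assumed to come from the enclosing test setup
    history.validateAll(model,
                        pd -> Arrays.asList(Query.selectAllColumns(schema, pd, false),
                                            Query.selectAllColumnsWildcard(schema, pd, true)));
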
 
diff --git a/test/harry/main/org/apache/cassandra/harry/dsl/SingleOperationVisitBuilder.java b/test/harry/main/org/apache/cassandra/harry/dsl/SingleOperationVisitBuilder.java
index ce25566a24..33eca3238c 100644
--- a/test/harry/main/org/apache/cassandra/harry/dsl/SingleOperationVisitBuilder.java
+++ b/test/harry/main/org/apache/cassandra/harry/dsl/SingleOperationVisitBuilder.java
@@ -170,7 +170,7 @@ class SingleOperationVisitBuilder implements SingleOperationBuilder
     {
         int opId = opIdCounter++;
         operations.add(new GeneratingVisitor.GeneratedDeleteOp(lts, pd, opId, OpSelectors.OperationKind.DELETE_PARTITION,
-                                                               Query.selectPartition(schema, pd, false)));
+                                                               Query.selectAllColumns(schema, pd, false)));
         end();
         return this;
     }
diff --git a/test/harry/main/org/apache/cassandra/harry/model/QuiescentChecker.java b/test/harry/main/org/apache/cassandra/harry/model/QuiescentChecker.java
index 9c8a24398d..64af6520d5 100644
--- a/test/harry/main/org/apache/cassandra/harry/model/QuiescentChecker.java
+++ b/test/harry/main/org/apache/cassandra/harry/model/QuiescentChecker.java
@@ -152,7 +152,7 @@ public class QuiescentChecker implements Model
 
             assertStaticRow(partitionState, actualRows,
                             adjustForSelection(partitionState.staticRow(), schema, selection, true),
-                            actualRowState, query, trackerState, schema, isWildcardQuery);
+                            actualRowState, query, trackerState, schema);
         }
 
         while (actual.hasNext() && expected.hasNext())
@@ -178,7 +178,7 @@ public class QuiescentChecker implements Model
                                              actualRowState, query.toSelectStatement());
             }
 
-            if (!Arrays.equals(actualRowState.vds, expectedRowState.vds))
+            if (!Arrays.equals(expectedRowState.vds, actualRowState.vds))
                 throw new ValidationException(trackerState,
                                               partitionState.toString(schema),
                                               toString(actualRows),
@@ -190,8 +190,7 @@ public class QuiescentChecker implements Model
                                               descriptorsToString(actualRowState.vds), actualRowState,
                                               query.toSelectStatement());
 
-            // Wildcard queries do not include timestamps
-            if (!isWildcardQuery && !Arrays.equals(actualRowState.lts, expectedRowState.lts))
+            if (!ltsEqual(expectedRowState.lts, actualRowState.lts))
                 throw new ValidationException(trackerState,
                                               partitionState.toString(schema),
                                               toString(actualRows),
@@ -206,7 +205,7 @@ public class QuiescentChecker implements Model
             if (partitionState.staticRow() != null || actualRowState.hasStaticColumns())
             {
                 Reconciler.RowState expectedStaticRowState = adjustForSelection(partitionState.staticRow(), schema, selection, true);
-                assertStaticRow(partitionState, actualRows, expectedStaticRowState, actualRowState, query, trackerState, schema, isWildcardQuery);
+                assertStaticRow(partitionState, actualRows, expectedStaticRowState, actualRowState, query, trackerState, schema);
             }
         }
 
@@ -226,14 +225,34 @@ public class QuiescentChecker implements Model
         }
     }
 
+    public static boolean ltsEqual(long[] expected, long[] actual)
+    {
+        if (actual == expected)
+            return true;
+        if (actual == null || expected == null)
+            return false;
+
+        int length = actual.length;
+        if (expected.length != length)
+            return false;
+
+        for (int i = 0; i < actual.length; i++)
+        {
+            if (actual[i] == NO_TIMESTAMP)
+                continue;
+            if (actual[i] != expected[i])
+                return false;
+        }
+        return true;
+    }
+
     public static void assertStaticRow(PartitionState partitionState,
                                        List<ResultSetRow> actualRows,
                                        Reconciler.RowState staticRow,
                                        ResultSetRow actualRowState,
                                        Query query,
                                        String trackerState,
-                                       SchemaSpec schemaSpec,
-                                       boolean isWildcardQuery)
+                                       SchemaSpec schemaSpec)
     {
         if (!Arrays.equals(staticRow.vds, actualRowState.sds))
             throw new ValidationException(trackerState,
@@ -247,7 +266,7 @@ public class QuiescentChecker implements Model
                                           descriptorsToString(actualRowState.sds), actualRowState,
                                           query.toSelectStatement());
 
-        if (!isWildcardQuery && !Arrays.equals(staticRow.lts, actualRowState.slts))
+        if (!ltsEqual(staticRow.lts, actualRowState.slts))
             throw new ValidationException(trackerState,
                                           partitionState.toString(schemaSpec),
                                           toString(actualRows),
@@ -276,6 +295,7 @@ public class QuiescentChecker implements Model
         }
         return sb.toString();
     }
+
     public static String toString(Collection<Reconciler.RowState> collection, SchemaSpec schema)
     {
         StringBuilder builder = new StringBuilder();
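
The relaxed comparison in ltsEqual treats a NO_TIMESTAMP entry on the actual side as "not fetched", which is what lets wildcard reads (no WRITETIME columns) validate against the reconciler state. A small illustrative fragment with made-up values:

    import org.apache.cassandra.harry.model.Model;
    import org.apache.cassandra.harry.model.QuiescentChecker;

    long[] expected = { 5L, 7L };
    long[] partial  = { Model.NO_TIMESTAMP, 7L };           // e.g. a column read without WRITETIME
    long[] wrong    = { 5L, 8L };

    assert QuiescentChecker.ltsEqual(expected, partial);    // true: missing timestamps are ignored
    assert !QuiescentChecker.ltsEqual(expected, wrong);     // false: a real mismatch still fails
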
diff --git a/test/harry/main/org/apache/cassandra/harry/model/QuiescentLocalStateCheckerBase.java b/test/harry/main/org/apache/cassandra/harry/model/QuiescentLocalStateCheckerBase.java
index 3a611bc7de..54ea8638b9 100644
--- a/test/harry/main/org/apache/cassandra/harry/model/QuiescentLocalStateCheckerBase.java
+++ b/test/harry/main/org/apache/cassandra/harry/model/QuiescentLocalStateCheckerBase.java
@@ -77,7 +77,7 @@ public abstract class QuiescentLocalStateCheckerBase extends QuiescentChecker
         TokenPlacementModel.ReplicatedRanges ring = getRing();
 
         for (int lts = 0; lts < clock.peek(); lts++)
-            validate(Query.selectPartition(schema, pdSelector.pd(lts, schema), false), ring);
+            validate(Query.selectAllColumns(schema, pdSelector.pd(lts, schema), false), ring);
     }
 
     @Override
diff --git a/test/harry/main/org/apache/cassandra/harry/model/SelectHelper.java b/test/harry/main/org/apache/cassandra/harry/model/SelectHelper.java
index 8f2c6f5a4b..b2888afa19 100644
--- a/test/harry/main/org/apache/cassandra/harry/model/SelectHelper.java
+++ b/test/harry/main/org/apache/cassandra/harry/model/SelectHelper.java
@@ -295,17 +295,21 @@ public class SelectHelper
         }
 
         long[] slts = new long[schema.staticColumns.size()];
-        for (int i = 0; i < slts.length; i++)
+        Arrays.fill(slts, Model.NO_TIMESTAMP);
+        for (int i = 0, sltsBase = schema.allColumns.size(); i < slts.length && sltsBase + i < result.length; i++)
         {
             Object v = result[schema.allColumns.size() + i];
-            slts[i] = v == null ? Model.NO_TIMESTAMP : clock.lts((long) v);
+            if (v != null)
+                slts[i] = clock.lts((long) v);
         }
 
         long[] lts = new long[schema.regularColumns.size()];
-        for (int i = 0; i < lts.length; i++)
+        Arrays.fill(lts, Model.NO_TIMESTAMP);
+        for (int i = 0, ltsBase = schema.allColumns.size() + slts.length; i < lts.length && ltsBase + i < result.length; i++)
         {
-            Object v = result[schema.allColumns.size() + slts.length + i];
-            lts[i] = v == null ? Model.NO_TIMESTAMP : clock.lts((long) v);
+            Object v = result[ltsBase + i];
+            if (v != null)
+                lts[i] = clock.lts((long) v);
         }
 
         return new ResultSetRow(isDeflatable(partitionKey) ? schema.deflatePartitionKey(partitionKey) : UNSET_DESCR,
diff --git a/test/harry/main/org/apache/cassandra/harry/model/reconciler/PartitionState.java b/test/harry/main/org/apache/cassandra/harry/model/reconciler/PartitionState.java
index 4792de638d..3484b1cdc4 100644
--- a/test/harry/main/org/apache/cassandra/harry/model/reconciler/PartitionState.java
+++ b/test/harry/main/org/apache/cassandra/harry/model/reconciler/PartitionState.java
@@ -300,7 +300,7 @@ public class PartitionState implements Iterable<Reconciler.RowState>
         sb.append("Skipped LTS: " + skippedLts).append("\n");
 
         if (staticRow != null)
-            sb.append("Static row: " + 
staticRow.toString(schema)).append("\n");
+            sb.append("Static row:\n" + 
staticRow.toString(schema)).append("\n");
 
         for (Reconciler.RowState row : rows.values())
             sb.append(row.toString(schema)).append("\n");
diff --git a/test/harry/main/org/apache/cassandra/harry/model/reconciler/Reconciler.java b/test/harry/main/org/apache/cassandra/harry/model/reconciler/Reconciler.java
index fb0f690490..23ba8a41b5 100644
--- a/test/harry/main/org/apache/cassandra/harry/model/reconciler/Reconciler.java
+++ b/test/harry/main/org/apache/cassandra/harry/model/reconciler/Reconciler.java
@@ -316,16 +316,27 @@ public class Reconciler
 
         public String toString(SchemaSpec schema)
         {
-            return " rowStateRow("
-                   + partitionState.pd +
-                   "L, " + cd +
-                   (partitionState.staticRow == null ? "" : ", values(" + StringUtils.toString(partitionState.staticRow.vds) + ")") +
-                   (partitionState.staticRow == null ? "" : ", lts(" + StringUtils.toString(partitionState.staticRow.lts) + ")") +
-                   ", values(" + StringUtils.toString(vds) + ")" +
-                   ", lts(" + StringUtils.toString(lts) + ")" +
-                   (schema == null ? "" : ", clustering=" + (cd == STATIC_CLUSTERING ? "static" : Arrays.toString(schema.inflateClusteringKey(cd)))) +
-                   (schema == null ? "" : ", values=" + Arrays.toString(cd == STATIC_CLUSTERING ? schema.inflateStaticColumns(vds) : schema.inflateRegularColumns(vds))) +
-                   ")";
+            if (cd == STATIC_CLUSTERING)
+            {
+                return " rowStateRow("
+                       + partitionState.pd +
+                       "L, " + cd + "L" +
+                       ", statics(" + 
StringUtils.toString(partitionState.staticRow.vds) + ")" +
+                       ", lts(" + 
StringUtils.toString(partitionState.staticRow.lts) + ")";
+            }
+            else
+            {
+                return " rowStateRow("
+                       + partitionState.pd +
+                       "L, " + cd +
+                       (partitionState.staticRow == null ? "" : ", statics(" + StringUtils.toString(partitionState.staticRow.vds) + ")") +
+                       (partitionState.staticRow == null ? "" : ", lts(" + StringUtils.toString(partitionState.staticRow.lts) + ")") +
+                       ", values(" + StringUtils.toString(vds) + ")" +
+                       ", lts(" + StringUtils.toString(lts) + ")" +
+                       (schema == null ? "" : ", clustering=" + Arrays.toString(schema.inflateClusteringKey(cd))) +
+                       (schema == null ? "" : ", values=" + Arrays.toString(schema.inflateRegularColumns(vds))) +
+                       ")";
+            }
         }
     }
 }
\ No newline at end of file
diff --git a/test/harry/main/org/apache/cassandra/harry/operations/Query.java b/test/harry/main/org/apache/cassandra/harry/operations/Query.java
index 92ae2995ce..2ca8a429ee 100644
--- a/test/harry/main/org/apache/cassandra/harry/operations/Query.java
+++ b/test/harry/main/org/apache/cassandra/harry/operations/Query.java
@@ -47,8 +47,14 @@ public abstract class Query
     public final Map<String, List<Relation>> relationsMap;
     public final SchemaSpec schemaSpec;
     public final QueryKind queryKind;
+    public final Selection selection;
 
     public Query(QueryKind kind, long pd, boolean reverse, List<Relation> relations, SchemaSpec schemaSpec)
     {
+        this(kind, pd, reverse, relations, schemaSpec, new Columns(schemaSpec.allColumnsSet, true));
+    }
+
+    public Query(QueryKind kind, long pd, boolean reverse, List<Relation> relations, SchemaSpec schemaSpec, Selection selection)
     {
         this.queryKind = kind;
         this.pd = pd;
@@ -58,6 +64,7 @@ public abstract class Query
         for (Relation relation : relations)
             this.relationsMap.computeIfAbsent(relation.column(), column -> new ArrayList<>()).add(relation);
         this.schemaSpec = schemaSpec;
+        this.selection = selection;
     }
 
     // TODO: pd, values, filtering?
@@ -88,9 +95,9 @@ public abstract class Query
 
     public static class SinglePartitionQuery extends Query
     {
-        public SinglePartitionQuery(QueryKind kind, long pd, boolean reverse, List<Relation> allRelations, SchemaSpec schemaSpec)
+        public SinglePartitionQuery(QueryKind kind, long pd, boolean reverse, List<Relation> allRelations, SchemaSpec schemaSpec, Selection selection)
         {
-            super(kind, pd, reverse, allRelations, schemaSpec);
+            super(kind, pd, reverse, allRelations, schemaSpec, selection);
         }
 
         public boolean matchCd(long cd)
@@ -244,19 +251,14 @@ public abstract class Query
         }
     }
 
-    public CompiledStatement toWildcardSelectStatement()
-    {
-        return SelectHelper.select(schemaSpec, pd, null, reverse, false);
-    }
-
     public CompiledStatement toSelectStatement()
     {
-        return SelectHelper.select(schemaSpec, pd, schemaSpec.allColumnsSet, relations, reverse, true);
+        return SelectHelper.select(schemaSpec, pd, selection.columns(), relations, reverse, selection.includeTimestamp());
     }
 
     public CompiledStatement toSelectStatement(boolean includeWriteTime)
     {
-        return SelectHelper.select(schemaSpec, pd, schemaSpec.allColumnsSet, relations, reverse, includeWriteTime);
+        return SelectHelper.select(schemaSpec, pd, selection.columns(), relations, reverse, includeWriteTime);
     }
 
     public CompiledStatement toSelectStatement(Set<ColumnSpec<?>> columns, boolean includeWriteTime)
@@ -271,13 +273,24 @@ public abstract class Query
 
     public abstract DescriptorRanges.DescriptorRange toRange(long ts);
 
-    public static Query selectPartition(SchemaSpec schemaSpec, long pd, boolean reverse)
+    public static Query selectAllColumns(SchemaSpec schemaSpec, long pd, boolean reverse)
+    {
+        return selectPartition(schemaSpec, pd, reverse, new Columns(schemaSpec.allColumnsSet, true));
+    }
+
+    public static Query selectAllColumnsWildcard(SchemaSpec schemaSpec, long pd, boolean reverse)
+    {
+        return selectPartition(schemaSpec, pd, reverse, Wildcard.instance);
+    }
+
+    public static Query selectPartition(SchemaSpec schemaSpec, long pd, boolean reverse, Selection selection)
     {
         return new Query.SinglePartitionQuery(Query.QueryKind.SINGLE_PARTITION,
                                               pd,
                                               reverse,
                                               Collections.emptyList(),
-                                              schemaSpec);
+                                              schemaSpec,
+                                              selection);
     }
 
     public static Query singleClustering(SchemaSpec schema, long pd, long cd, boolean reverse)
@@ -498,4 +511,47 @@ public abstract class Query
         // Such queries only make sense if written partition actually has clusterings that have intersecting parts.
         CLUSTERING_RANGE
     }
-}
+
+    public interface Selection
+    {
+        Set<ColumnSpec<?>> columns();
+        boolean includeTimestamp();
+    }
+
+    public static class Wildcard implements Selection
+    {
+        public static final Wildcard instance = new Wildcard();
+
+        public Set<ColumnSpec<?>> columns()
+        {
+            return null;
+        }
+
+        public boolean includeTimestamp()
+        {
+            return false;
+        }
+    }
+
+    public static class Columns implements Selection
+    {
+        private Set<ColumnSpec<?>> columns;
+        private boolean includeTimestamp;
+
+        public Columns(Set<ColumnSpec<?>> columns, boolean includeTimestamp)
+        {
+            this.columns = columns;
+            this.includeTimestamp = includeTimestamp;
+        }
+
+        public Set<ColumnSpec<?>> columns()
+        {
+            return columns;
+        }
+
+        public boolean includeTimestamp()
+        {
+            return includeTimestamp;
+        }
+    }
+}
\ No newline at end of file
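
The Selection hook separates "which columns" from "whether WRITETIME is fetched": selectAllColumns keeps the previous behaviour, the wildcard variant maps to a plain SELECT * without timestamps, and Columns lets a caller restrict both. A hedged sketch, assuming schema (SchemaSpec) and pd (partition descriptor) are in scope:

    import org.apache.cassandra.harry.operations.Query;

    // schema and pd are assumed to come from the enclosing test
    Query full     = Query.selectAllColumns(schema, pd, false);            // all columns + WRITETIME
    Query wildcard = Query.selectAllColumnsWildcard(schema, pd, false);    // SELECT *, timestamps skipped
    Query noTs     = Query.selectPartition(schema, pd, false,
                                           new Query.Columns(schema.allColumnsSet, false));
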
diff --git a/test/harry/main/org/apache/cassandra/harry/operations/QueryGenerator.java b/test/harry/main/org/apache/cassandra/harry/operations/QueryGenerator.java
index 269f60a903..aff09e3cfc 100644
--- a/test/harry/main/org/apache/cassandra/harry/operations/QueryGenerator.java
+++ b/test/harry/main/org/apache/cassandra/harry/operations/QueryGenerator.java
@@ -141,7 +141,7 @@ public class QueryGenerator
 
     public Query singlePartition(long pd, boolean reverse)
     {
-        return Query.selectPartition(schema, pd, reverse);
+        return Query.selectAllColumns(schema, pd, reverse);
     }
 
     public Query singleClustering(long pd, long cd, boolean reverse)
diff --git a/test/harry/main/org/apache/cassandra/harry/visitors/AllPartitionsValidator.java b/test/harry/main/org/apache/cassandra/harry/visitors/AllPartitionsValidator.java
index 30037b67d9..8a44b6a2a8 100644
--- a/test/harry/main/org/apache/cassandra/harry/visitors/AllPartitionsValidator.java
+++ b/test/harry/main/org/apache/cassandra/harry/visitors/AllPartitionsValidator.java
@@ -21,12 +21,15 @@ package org.apache.cassandra.harry.visitors;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 
+import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.cassandra.concurrent.ExecutorFactory;
+import org.apache.cassandra.concurrent.InfiniteLoopExecutor;
 import org.apache.cassandra.concurrent.Interruptible;
 import org.apache.cassandra.harry.core.Configuration;
 import org.apache.cassandra.harry.core.MetricReporter;
@@ -113,7 +116,7 @@ public class AllPartitionsValidator implements Visitor
                                                                                                  {
                                                                                                      for (boolean reverse : new boolean[]{ true, false })
                                                                                                      {
-                                                                                                         Query query = Query.selectPartition(schema, pdSelector.pd(pdSelector.minLtsAt(pos), schema), reverse);
+                                                                                                         Query query = Query.selectAllColumns(schema, pdSelector.pd(pdSelector.minLtsAt(pos), schema), reverse);
                                                                                                          model.validate(query);
                                                                                                          queryLogger.logSelectQuery((int)pos, query);
                                                                                                      }
@@ -129,7 +132,12 @@ public class AllPartitionsValidator implements Visitor
 
         interrupt.awaitUninterruptibly();
 
-        Runner.shutdown(threads::stream);
+        for (Interruptible thread : threads)
+        {
+            ((InfiniteLoopExecutor)thread).shutdown(false);
+            Assert.assertTrue(thread.awaitTermination(1, TimeUnit.MINUTES));
+        }
+
         if (!errors.isEmpty())
             Runner.mergeAndThrow(errors);
     }
@@ -145,6 +153,4 @@ public class AllPartitionsValidator implements Visitor
             throw new RuntimeException(e);
         }
     }
-
-
 }
\ No newline at end of file
diff --git a/test/harry/main/org/apache/cassandra/harry/visitors/CorruptingVisitor.java b/test/harry/main/org/apache/cassandra/harry/visitors/CorruptingVisitor.java
index 1fcd93943e..043a0804d9 100644
--- a/test/harry/main/org/apache/cassandra/harry/visitors/CorruptingVisitor.java
+++ b/test/harry/main/org/apache/cassandra/harry/visitors/CorruptingVisitor.java
@@ -77,7 +77,7 @@ public class CorruptingVisitor implements Visitor
         long pd = run.pdSelector.pd(random.nextInt((int) maxPos), run.schemaSpec);
         try
         {
-            boolean success = corruptor.maybeCorrupt(Query.selectPartition(run.schemaSpec, pd, false),
+            boolean success = corruptor.maybeCorrupt(Query.selectAllColumns(run.schemaSpec, pd, false),
                                                      run.sut);
             logger.info("{} tried to corrupt a partition with a pd {}@{} my 
means of {}", success ? "Successfully" : "Unsuccessfully", pd, lts, 
corruptor.getClass());
         }
diff --git a/test/harry/main/org/apache/cassandra/harry/visitors/SingleValidator.java b/test/harry/main/org/apache/cassandra/harry/visitors/SingleValidator.java
index 0186e991e3..3783958e4d 100644
--- a/test/harry/main/org/apache/cassandra/harry/visitors/SingleValidator.java
+++ b/test/harry/main/org/apache/cassandra/harry/visitors/SingleValidator.java
@@ -52,7 +52,7 @@ public class SingleValidator implements Visitor
 
         for (boolean reverse : new boolean[]{ true, false })
         {
-            model.validate(Query.selectPartition(run.schemaSpec, run.pdSelector.pd(lts, run.schemaSpec), reverse));
+            model.validate(Query.selectAllColumns(run.schemaSpec, run.pdSelector.pd(lts, run.schemaSpec), reverse));
         }
 
         for (Query.QueryKind queryKind : new Query.QueryKind[]{ Query.QueryKind.CLUSTERING_RANGE, Query.QueryKind.CLUSTERING_SLICE, Query.QueryKind.SINGLE_CLUSTERING })
diff --git a/test/simulator/test/org/apache/cassandra/simulator/test/HarrySimulatorTest.java b/test/simulator/test/org/apache/cassandra/simulator/test/HarrySimulatorTest.java
index 358faec3de..e98b8ddd0d 100644
--- a/test/simulator/test/org/apache/cassandra/simulator/test/HarrySimulatorTest.java
+++ b/test/simulator/test/org/apache/cassandra/simulator/test/HarrySimulatorTest.java
@@ -801,7 +801,7 @@ public class HarrySimulatorTest
                                             {
                                                long minLts = simulation.harryRun.pdSelector.minLtsAt(position);
                                                long pd = simulation.harryRun.pdSelector.pd(minLts, simulation.harryRun.schemaSpec);
-                                                Query query = Query.selectPartition(simulation.harryRun.schemaSpec, pd, false);
+                                                Query query = Query.selectAllColumns(simulation.harryRun.schemaSpec, pd, false);
                                                actions.add(new HarryValidatingQuery(simulation.simulated, simulation.cluster, rf,
                                                                                     simulation.harryRun, owernship, query));
                                             }
diff --git a/test/unit/org/apache/cassandra/tcm/sequences/ProgressBarrierTest.java b/test/unit/org/apache/cassandra/tcm/sequences/ProgressBarrierTest.java
index 278584fa6d..92610ce6d3 100644
--- a/test/unit/org/apache/cassandra/tcm/sequences/ProgressBarrierTest.java
+++ b/test/unit/org/apache/cassandra/tcm/sequences/ProgressBarrierTest.java
@@ -37,12 +37,12 @@ import org.apache.cassandra.config.DatabaseDescriptor;
 import org.apache.cassandra.db.ConsistencyLevel;
 import org.apache.cassandra.distributed.api.IIsolatedExecutor;
 import org.apache.cassandra.distributed.test.log.CMSTestBase;
-import org.apache.cassandra.distributed.test.log.RngUtils;
 import org.apache.cassandra.exceptions.RequestFailureReason;
 import org.apache.cassandra.harry.gen.EntropySource;
 import org.apache.cassandra.harry.gen.Surjections;
 import org.apache.cassandra.harry.gen.rng.PCGFastPure;
 import org.apache.cassandra.harry.gen.rng.PcgRSUFast;
+import org.apache.cassandra.harry.gen.rng.RngUtils;
 import org.apache.cassandra.harry.sut.TokenPlacementModel;
 import org.apache.cassandra.locator.InetAddressAndPort;
 import org.apache.cassandra.net.ConnectionType;
@@ -308,6 +308,8 @@ public class ProgressBarrierTest extends CMSTestBase
                public <REQ> void send(Message<REQ> message, InetAddressAndPort to) {}
                public <REQ, RSP> void sendWithCallback(Message<REQ> message, InetAddressAndPort to, RequestCallback<RSP> cb, ConnectionType specifyConnection) {}
                public <REQ, RSP> Future<Message<RSP>> sendWithResult(Message<REQ> message, InetAddressAndPort to) { return null; }
+
+                @Override
                 public <V> void respond(V response, Message<?> message) {}
             };
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@cassandra.apache.org
For additional commands, e-mail: commits-h...@cassandra.apache.org