[GitHub] [kafka] ijuma commented on a change in pull request #10324: MINOR: Add a few more benchmark for the timeline map

2021-03-16 Thread GitBox


ijuma commented on a change in pull request #10324:
URL: https://github.com/apache/kafka/pull/10324#discussion_r595472914



##
File path: 
jmh-benchmarks/src/main/java/org/apache/kafka/jmh/timeline/TimelineHashMapBenchmark.java
##
@@ -44,33 +49,126 @@
 public class TimelineHashMapBenchmark {
 private final static int NUM_ENTRIES = 1_000_000;
 
+@State(Scope.Thread)
+public static class HashMapInput {
+public HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new HashMap<>(keys.size());
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class ImmutableMapInput {
+scala.collection.immutable.HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new scala.collection.immutable.HashMap<>();
+for (Integer key : keys) {
+map = map.updated(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class TimelineMapInput {
+public SnapshotRegistry snapshotRegistry;
+public TimelineHashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+snapshotRegistry = new SnapshotRegistry(new LogContext());
+map = new TimelineHashMap<>(snapshotRegistry, keys.size());
+
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class TimelineMapSnapshotInput {
+public SnapshotRegistry snapshotRegistry;
+public TimelineHashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+snapshotRegistry = new SnapshotRegistry(new LogContext());
+map = new TimelineHashMap<>(snapshotRegistry, keys.size());
+
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+int count = 0;
+for (Integer key : keys) {
+if (count % 1_000 == 0) {
+snapshotRegistry.deleteSnapshotsUpTo(count - 10_000);
+snapshotRegistry.createSnapshot(count);
+}
+map.put(key, String.valueOf(key));
+count++;
+}
+
+Collections.shuffle(keys);
+}
+}
+
+
 @Benchmark
 public Map<Integer, String> testAddEntriesInHashMap() {
-HashMap<Integer, String> map = new HashMap<>(NUM_ENTRIES);
+HashMap<Integer, String> map = new HashMap<>();
 for (int i = 0; i < NUM_ENTRIES; i++) {
 int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
 map.put(key, String.valueOf(key));
 }
+
+return map;
+}
+
+@Benchmark
+public scala.collection.immutable.HashMap<Integer, String> testAddEntriesInImmutableMap() {
+scala.collection.immutable.HashMap<Integer, String> map = new scala.collection.immutable.HashMap<>();
+for (int i = 0; i < NUM_ENTRIES; i++) {
+int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
+map = map.updated(key, String.valueOf(key));
+}
+
 return map;
 }
 
 @Benchmark
 public Map<Integer, String> testAddEntriesInTimelineMap() {
 SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
-TimelineHashMap<Integer, String> map =
-new TimelineHashMap<>(snapshotRegistry, NUM_ENTRIES);
+TimelineHashMap<Integer, String> map = new TimelineHashMap<>(snapshotRegistry, 16);
 for (int i = 0; i < NUM_ENTRIES; i++) {
 int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));

Review comment:
   Hmm, I'd just generate the randoms during set-up and add them to an 
array.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org




[GitHub] [kafka] ijuma commented on a change in pull request #10324: MINOR: Add a few more benchmark for the timeline map

2021-03-16 Thread GitBox


ijuma commented on a change in pull request #10324:
URL: https://github.com/apache/kafka/pull/10324#discussion_r595397258



##
File path: 
jmh-benchmarks/src/main/java/org/apache/kafka/jmh/timeline/TimelineHashMapBenchmark.java
##
@@ -44,33 +49,126 @@
 public class TimelineHashMapBenchmark {
 private final static int NUM_ENTRIES = 1_000_000;
 
+@State(Scope.Thread)
+public static class HashMapInput {
+public HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new HashMap<>(keys.size());
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class ImmutableMapInput {
+scala.collection.immutable.HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new scala.collection.immutable.HashMap<>();
+for (Integer key : keys) {
+map = map.updated(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class TimelineMapInput {
+public SnapshotRegistry snapshotRegistry;
+public TimelineHashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+snapshotRegistry = new SnapshotRegistry(new LogContext());
+map = new TimelineHashMap<>(snapshotRegistry, keys.size());
+
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class TimelineMapSnapshotInput {
+public SnapshotRegistry snapshotRegistry;
+public TimelineHashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+snapshotRegistry = new SnapshotRegistry(new LogContext());
+map = new TimelineHashMap<>(snapshotRegistry, keys.size());
+
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+int count = 0;
+for (Integer key : keys) {
+if (count % 1_000 == 0) {
+snapshotRegistry.deleteSnapshotsUpTo(count - 10_000);
+snapshotRegistry.createSnapshot(count);
+}
+map.put(key, String.valueOf(key));
+count++;
+}
+
+Collections.shuffle(keys);
+}
+}
+
+
 @Benchmark
 public Map<Integer, String> testAddEntriesInHashMap() {
-HashMap<Integer, String> map = new HashMap<>(NUM_ENTRIES);
+HashMap<Integer, String> map = new HashMap<>();
 for (int i = 0; i < NUM_ENTRIES; i++) {
 int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
 map.put(key, String.valueOf(key));
 }
+
+return map;
+}
+
+@Benchmark
+public scala.collection.immutable.HashMap<Integer, String> testAddEntriesInImmutableMap() {
+scala.collection.immutable.HashMap<Integer, String> map = new scala.collection.immutable.HashMap<>();
+for (int i = 0; i < NUM_ENTRIES; i++) {
+int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
+map = map.updated(key, String.valueOf(key));
+}
+
 return map;
 }
 
 @Benchmark
 public Map<Integer, String> testAddEntriesInTimelineMap() {
 SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
-TimelineHashMap<Integer, String> map =
-new TimelineHashMap<>(snapshotRegistry, NUM_ENTRIES);
+TimelineHashMap<Integer, String> map = new TimelineHashMap<>(snapshotRegistry, 16);
 for (int i = 0; i < NUM_ENTRIES; i++) {
 int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));

Review comment:
   Why are we doing these things?

##
File path: 
jmh-benchmarks/src/main/java/org/apache/kafka/jmh/timeline/TimelineHashMapBenchmark.java
##
@@ -44,33 +49,126 @@
 public class TimelineHashMapBenchmark {
 private final static int NUM_ENTRIES = 1_000_000;
 
+@State(Scope.Thread)
+public static class HashMapInput {
+public HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new HashMap<>(keys.size());
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class ImmutableMapInput {
+scala.collection.immutable.HashMap<Integer, String> map;
+

[GitHub] [kafka] ijuma commented on a change in pull request #10324: MINOR: Add a few more benchmark for the timeline map

2021-03-16 Thread GitBox


ijuma commented on a change in pull request #10324:
URL: https://github.com/apache/kafka/pull/10324#discussion_r595396880



##
File path: 
jmh-benchmarks/src/main/java/org/apache/kafka/jmh/timeline/TimelineHashMapBenchmark.java
##
@@ -44,33 +49,126 @@
 public class TimelineHashMapBenchmark {
 private final static int NUM_ENTRIES = 1_000_000;
 
+@State(Scope.Thread)
+public static class HashMapInput {
+public HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new HashMap<>(keys.size());
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class ImmutableMapInput {
+scala.collection.immutable.HashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+map = new scala.collection.immutable.HashMap<>();
+for (Integer key : keys) {
+map = map.updated(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class TimelineMapInput {
+public SnapshotRegistry snapshotRegistry;
+public TimelineHashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+snapshotRegistry = new SnapshotRegistry(new LogContext());
+map = new TimelineHashMap<>(snapshotRegistry, keys.size());
+
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+Collections.shuffle(keys);
+}
+}
+
+@State(Scope.Thread)
+public static class TimelineMapSnapshotInput {
+public SnapshotRegistry snapshotRegistry;
+public TimelineHashMap<Integer, String> map;
+public final List<Integer> keys = createKeys(NUM_ENTRIES);
+
+@Setup(Level.Invocation)
+public void setup() {
+snapshotRegistry = new SnapshotRegistry(new LogContext());
+map = new TimelineHashMap<>(snapshotRegistry, keys.size());
+
+for (Integer key : keys) {
+map.put(key, String.valueOf(key));
+}
+
+int count = 0;
+for (Integer key : keys) {
+if (count % 1_000 == 0) {
+snapshotRegistry.deleteSnapshotsUpTo(count - 10_000);
+snapshotRegistry.createSnapshot(count);
+}
+map.put(key, String.valueOf(key));
+count++;
+}
+
+Collections.shuffle(keys);
+}
+}
+
+
 @Benchmark
 public Map<Integer, String> testAddEntriesInHashMap() {
-HashMap<Integer, String> map = new HashMap<>(NUM_ENTRIES);
+HashMap<Integer, String> map = new HashMap<>();
 for (int i = 0; i < NUM_ENTRIES; i++) {
 int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
 map.put(key, String.valueOf(key));
 }
+
+return map;
+}
+
+@Benchmark
+public scala.collection.immutable.HashMap<Integer, String> testAddEntriesInImmutableMap() {
+scala.collection.immutable.HashMap<Integer, String> map = new scala.collection.immutable.HashMap<>();
+for (int i = 0; i < NUM_ENTRIES; i++) {
+int key = (int) (0xffffffff & ((i * 2862933555777941757L) + 3037000493L));
+map = map.updated(key, String.valueOf(key));
Review comment:
   We don't want to be converting from int to string in the benchmark code.





This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org