mjsax commented on code in PR #13252:
URL: https://github.com/apache/kafka/pull/13252#discussion_r1116343074


##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    private static final Serde<ValueAndTimestamp<String>> 
VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    private static final Bytes RAW_KEY = new 
Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = 
VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = 
mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, 
"test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp()
+        verify(inner).init((StateStoreContext) context, store);
+    }
+
+    @Test
+    public void shouldPassChangelogTopicNameToStateStoreSerde() {
+        final String changelogTopicName = "changelog-topic";
+        when(context.changelogFor(STORE_NAME)).thenReturn(changelogTopicName);
+        doShouldPassChangelogTopicNameToStateStoreSerde(changelogTopicName);
+    }
+
+    @Test
+    public void 
shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
+        final String defaultChangelogTopicName = 
ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, 
TASK_ID.topologyName());
+        when(context.changelogFor(STORE_NAME)).thenReturn(null);
+        
doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void doShouldPassChangelogTopicNameToStateStoreSerde(final String 
changelogTopicName) {
+        // recreate store with mock serdes
+        final Serde<String> keySerde = mock(Serde.class);
+        final Serializer<String> keySerializer = mock(Serializer.class);
+        final Serde<String> valueSerde = mock(Serde.class);
+        final Serializer<String> valueSerializer = mock(Serializer.class);
+        final Deserializer<String> valueDeserializer = 
mock(Deserializer.class);
+        when(keySerde.serializer()).thenReturn(keySerializer);
+        when(valueSerde.serializer()).thenReturn(valueSerializer);
+        when(valueSerde.deserializer()).thenReturn(valueDeserializer);
+
+        store.close();
+        store = new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            keySerde,
+            new NullableValueAndTimestampSerde<>(valueSerde)
+        );
+        store.init((StateStoreContext) context, store);
+
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(keySerializer).serialize(changelogTopicName, KEY);
+        verify(valueSerializer).serialize(changelogTopicName, VALUE);
+    }
+
+    @Test
+    public void shouldRecordMetricsOnInit() {
+        // init is called in setUp(). it suffices to verify one restore metric 
since all restore
+        // metrics are recorded by the same sensor, and the sensor is tested 
elsewhere.
+        assertThat((Double) getMetric("restore-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnPut() {
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(inner).put(RAW_KEY, RAW_VALUE_AND_TIMESTAMP);
+        assertThat((Double) getMetric("put-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnDelete() {
+        store.delete(KEY, TIMESTAMP);
+
+        verify(inner).delete(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("delete-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGet() {
+        store.get(KEY);

Review Comment:
   I think we should also mock `inner.get()` to return something useful, and 
verify that the expected result is returned here.



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    private static final Serde<ValueAndTimestamp<String>> 
VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    private static final Bytes RAW_KEY = new 
Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = 
VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = 
mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, 
"test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp()
+        verify(inner).init((StateStoreContext) context, store);
+    }
+
+    @Test
+    public void shouldPassChangelogTopicNameToStateStoreSerde() {
+        final String changelogTopicName = "changelog-topic";
+        when(context.changelogFor(STORE_NAME)).thenReturn(changelogTopicName);
+        doShouldPassChangelogTopicNameToStateStoreSerde(changelogTopicName);
+    }
+
+    @Test
+    public void 
shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
+        final String defaultChangelogTopicName = 
ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, 
TASK_ID.topologyName());
+        when(context.changelogFor(STORE_NAME)).thenReturn(null);
+        
doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void doShouldPassChangelogTopicNameToStateStoreSerde(final String 
changelogTopicName) {
+        // recreate store with mock serdes
+        final Serde<String> keySerde = mock(Serde.class);
+        final Serializer<String> keySerializer = mock(Serializer.class);
+        final Serde<String> valueSerde = mock(Serde.class);
+        final Serializer<String> valueSerializer = mock(Serializer.class);
+        final Deserializer<String> valueDeserializer = 
mock(Deserializer.class);
+        when(keySerde.serializer()).thenReturn(keySerializer);
+        when(valueSerde.serializer()).thenReturn(valueSerializer);
+        when(valueSerde.deserializer()).thenReturn(valueDeserializer);
+
+        store.close();
+        store = new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            keySerde,
+            new NullableValueAndTimestampSerde<>(valueSerde)
+        );
+        store.init((StateStoreContext) context, store);
+
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(keySerializer).serialize(changelogTopicName, KEY);
+        verify(valueSerializer).serialize(changelogTopicName, VALUE);
+    }
+
+    @Test
+    public void shouldRecordMetricsOnInit() {
+        // init is called in setUp(). it suffices to verify one restore metric 
since all restore
+        // metrics are recorded by the same sensor, and the sensor is tested 
elsewhere.
+        assertThat((Double) getMetric("restore-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnPut() {
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(inner).put(RAW_KEY, RAW_VALUE_AND_TIMESTAMP);
+        assertThat((Double) getMetric("put-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnDelete() {
+        store.delete(KEY, TIMESTAMP);
+
+        verify(inner).delete(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("delete-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGet() {
+        store.get(KEY);
+
+        verify(inner).get(RAW_KEY);
+        assertThat((Double) getMetric("get-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGetWithTimestamp() {
+        store.get(KEY, TIMESTAMP);

Review Comment:
   As above.



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    private static final Serde<ValueAndTimestamp<String>> 
VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    private static final Bytes RAW_KEY = new 
Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = 
VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = 
mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, 
"test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp()
+        verify(inner).init((StateStoreContext) context, store);
+    }
+
+    @Test
+    public void shouldPassChangelogTopicNameToStateStoreSerde() {
+        final String changelogTopicName = "changelog-topic";
+        when(context.changelogFor(STORE_NAME)).thenReturn(changelogTopicName);
+        doShouldPassChangelogTopicNameToStateStoreSerde(changelogTopicName);
+    }
+
+    @Test
+    public void 
shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
+        final String defaultChangelogTopicName = 
ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, 
TASK_ID.topologyName());
+        when(context.changelogFor(STORE_NAME)).thenReturn(null);
+        
doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void doShouldPassChangelogTopicNameToStateStoreSerde(final String 
changelogTopicName) {
+        // recreate store with mock serdes
+        final Serde<String> keySerde = mock(Serde.class);
+        final Serializer<String> keySerializer = mock(Serializer.class);
+        final Serde<String> valueSerde = mock(Serde.class);
+        final Serializer<String> valueSerializer = mock(Serializer.class);
+        final Deserializer<String> valueDeserializer = 
mock(Deserializer.class);
+        when(keySerde.serializer()).thenReturn(keySerializer);
+        when(valueSerde.serializer()).thenReturn(valueSerializer);
+        when(valueSerde.deserializer()).thenReturn(valueDeserializer);
+
+        store.close();
+        store = new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            keySerde,
+            new NullableValueAndTimestampSerde<>(valueSerde)
+        );
+        store.init((StateStoreContext) context, store);
+
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(keySerializer).serialize(changelogTopicName, KEY);
+        verify(valueSerializer).serialize(changelogTopicName, VALUE);
+    }
+
+    @Test
+    public void shouldRecordMetricsOnInit() {
+        // init is called in setUp(). it suffices to verify one restore metric 
since all restore
+        // metrics are recorded by the same sensor, and the sensor is tested 
elsewhere.
+        assertThat((Double) getMetric("restore-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnPut() {
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(inner).put(RAW_KEY, RAW_VALUE_AND_TIMESTAMP);
+        assertThat((Double) getMetric("put-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnDelete() {
+        store.delete(KEY, TIMESTAMP);

Review Comment:
   As below.



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    private static final Serde<ValueAndTimestamp<String>> 
VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    private static final Bytes RAW_KEY = new 
Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = 
VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = 
mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, 
"test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp()
+        verify(inner).init((StateStoreContext) context, store);
+    }
+
+    @Test
+    public void shouldPassChangelogTopicNameToStateStoreSerde() {
+        final String changelogTopicName = "changelog-topic";
+        when(context.changelogFor(STORE_NAME)).thenReturn(changelogTopicName);
+        doShouldPassChangelogTopicNameToStateStoreSerde(changelogTopicName);
+    }
+
+    @Test
+    public void 
shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
+        final String defaultChangelogTopicName = 
ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, 
TASK_ID.topologyName());
+        when(context.changelogFor(STORE_NAME)).thenReturn(null);
+        
doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void doShouldPassChangelogTopicNameToStateStoreSerde(final String 
changelogTopicName) {
+        // recreate store with mock serdes
+        final Serde<String> keySerde = mock(Serde.class);
+        final Serializer<String> keySerializer = mock(Serializer.class);
+        final Serde<String> valueSerde = mock(Serde.class);
+        final Serializer<String> valueSerializer = mock(Serializer.class);
+        final Deserializer<String> valueDeserializer = 
mock(Deserializer.class);
+        when(keySerde.serializer()).thenReturn(keySerializer);
+        when(valueSerde.serializer()).thenReturn(valueSerializer);
+        when(valueSerde.deserializer()).thenReturn(valueDeserializer);
+
+        store.close();
+        store = new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            keySerde,
+            new NullableValueAndTimestampSerde<>(valueSerde)
+        );
+        store.init((StateStoreContext) context, store);
+
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(keySerializer).serialize(changelogTopicName, KEY);
+        verify(valueSerializer).serialize(changelogTopicName, VALUE);
+    }
+
+    @Test
+    public void shouldRecordMetricsOnInit() {
+        // init is called in setUp(). it suffices to verify one restore metric 
since all restore
+        // metrics are recorded by the same sensor, and the sensor is tested 
elsewhere.
+        assertThat((Double) getMetric("restore-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnPut() {
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(inner).put(RAW_KEY, RAW_VALUE_AND_TIMESTAMP);
+        assertThat((Double) getMetric("put-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnDelete() {
+        store.delete(KEY, TIMESTAMP);
+
+        verify(inner).delete(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("delete-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGet() {
+        store.get(KEY);
+
+        verify(inner).get(RAW_KEY);
+        assertThat((Double) getMetric("get-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGetWithTimestamp() {
+        store.get(KEY, TIMESTAMP);
+
+        verify(inner).get(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("get-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnFlush() {
+        store.flush();
+
+        verify(inner).flush();
+        assertThat((Double) getMetric("flush-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRemoveMetricsOnClose() {
+        assertThat(storeMetrics(), not(empty()));
+
+        store.close();
+
+        verify(inner).close();
+        assertThat(storeMetrics(), empty());
+    }
+
+    @Test
+    public void shouldRemoveMetricsOnCloseEvenIfInnerThrows() {

Review Comment:
   as above



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    private static final Serde<ValueAndTimestamp<String>> 
VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    private static final Bytes RAW_KEY = new 
Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = 
VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = 
mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, 
"test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp()
+        verify(inner).init((StateStoreContext) context, store);
+    }
+
+    @Test
+    public void shouldPassChangelogTopicNameToStateStoreSerde() {
+        final String changelogTopicName = "changelog-topic";
+        when(context.changelogFor(STORE_NAME)).thenReturn(changelogTopicName);
+        doShouldPassChangelogTopicNameToStateStoreSerde(changelogTopicName);
+    }
+
+    @Test
+    public void 
shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
+        final String defaultChangelogTopicName = 
ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, 
TASK_ID.topologyName());
+        when(context.changelogFor(STORE_NAME)).thenReturn(null);
+        
doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void doShouldPassChangelogTopicNameToStateStoreSerde(final String 
changelogTopicName) {
+        // recreate store with mock serdes
+        final Serde<String> keySerde = mock(Serde.class);
+        final Serializer<String> keySerializer = mock(Serializer.class);
+        final Serde<String> valueSerde = mock(Serde.class);
+        final Serializer<String> valueSerializer = mock(Serializer.class);
+        final Deserializer<String> valueDeserializer = 
mock(Deserializer.class);
+        when(keySerde.serializer()).thenReturn(keySerializer);
+        when(valueSerde.serializer()).thenReturn(valueSerializer);
+        when(valueSerde.deserializer()).thenReturn(valueDeserializer);
+
+        store.close();
+        store = new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            keySerde,
+            new NullableValueAndTimestampSerde<>(valueSerde)
+        );
+        store.init((StateStoreContext) context, store);
+
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(keySerializer).serialize(changelogTopicName, KEY);
+        verify(valueSerializer).serialize(changelogTopicName, VALUE);
+    }
+
+    @Test
+    public void shouldRecordMetricsOnInit() {
+        // init is called in setUp(). it suffices to verify one restore metric 
since all restore
+        // metrics are recorded by the same sensor, and the sensor is tested 
elsewhere.
+        assertThat((Double) getMetric("restore-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnPut() {
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(inner).put(RAW_KEY, RAW_VALUE_AND_TIMESTAMP);
+        assertThat((Double) getMetric("put-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnDelete() {
+        store.delete(KEY, TIMESTAMP);
+
+        verify(inner).delete(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("delete-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGet() {
+        store.get(KEY);
+
+        verify(inner).get(RAW_KEY);
+        assertThat((Double) getMetric("get-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGetWithTimestamp() {
+        store.get(KEY, TIMESTAMP);
+
+        verify(inner).get(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("get-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnFlush() {
+        store.flush();
+
+        verify(inner).flush();
+        assertThat((Double) getMetric("flush-rate").metricValue(), 
greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRemoveMetricsOnClose() {

Review Comment:
   Do we need this test? It seems it's rather testing the wrapped store?



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    private static final Serde<ValueAndTimestamp<String>> 
VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    private static final Bytes RAW_KEY = new 
Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = 
VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = 
mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, 
"test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp()
+        verify(inner).init((StateStoreContext) context, store);

Review Comment:
   Should we also verify the delegation of `persistent()`, `isOpen()`, 
`getPosition()`, and `name()`? (I hope I did not forget any method.)



##########
streams/src/main/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStore.java:
##########
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static 
org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.maybeMeasureLatency;
+
+import java.util.Objects;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.errors.ProcessorStateException;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStore;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.internals.SerdeGetter;
+import org.apache.kafka.streams.query.Position;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.state.KeyValueStore;
+import org.apache.kafka.streams.state.TimestampedKeyValueStore;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.apache.kafka.streams.state.VersionedKeyValueStore;
+import org.apache.kafka.streams.state.VersionedRecord;
+
+/**
+ * A metered {@link VersionedKeyValueStore} wrapper that is used for recording 
operation
+ * metrics, and hence its inner {@link VersionedBytesStore} implementation 
does not need to provide
+ * its own metrics collecting functionality. The inner {@code 
VersionedBytesStore} of this class
+ * is a {@link KeyValueStore} of type &lt;Bytes,byte[]&gt;, so we use {@link 
Serde}s
+ * to convert from &lt;K,ValueAndTimestamp&lt;V&gt&gt; to 
&lt;Bytes,byte[]&gt;. In particular,
+ * {@link NullableValueAndTimestampSerde} is used since putting a tombstone to 
a versioned key-value
+ * store requires putting a null value associated with a timestamp.
+ *
+ * @param <K> The key type
+ * @param <V> The (raw) value type
+ */
+public class MeteredVersionedKeyValueStore<K, V>
+    extends WrappedStateStore<VersionedBytesStore, K, V>
+    implements VersionedKeyValueStore<K, V> {
+
+    private final MeteredVersionedKeyValueStoreInternal internal;
+
+    MeteredVersionedKeyValueStore(final VersionedBytesStore inner,
+                                  final String metricScope,
+                                  final Time time,
+                                  final Serde<K> keySerde,
+                                  final Serde<ValueAndTimestamp<V>> 
valueSerde) {
+        super(inner);
+        internal = new MeteredVersionedKeyValueStoreInternal(inner, 
metricScope, time, keySerde, valueSerde);
+    }
+
+    /**
+     * Conceptually, {@link MeteredVersionedKeyValueStore} should {@code extend}
+     * {@link MeteredKeyValueStore}, but due to type conflicts, we cannot do this. (Specifically,
+     * the first needs to be {@link VersionedKeyValueStore} while the second is {@link KeyValueStore}
+     * and the two interfaces conflict.) Thus, we use an internal <i>instance</i> of
+     * {@code MeteredKeyValueStore} to mimic inheritance instead.
+     * <p>
+     * It's not ideal because it requires an extra step to translate between the APIs of
+     * {@link VersionedKeyValueStore} in {@link MeteredVersionedKeyValueStore} and
+     * the APIs of {@link TimestampedKeyValueStore} in {@link MeteredVersionedKeyValueStoreInternal}.
+     * This extra step is all that the methods of {@code MeteredVersionedKeyValueStoreInternal} do.
+     * <p>
+     * Note that the addition of {@link #get(Object, long)} and {@link #delete(Object, long)} in
+     * this class are to match the interface of {@link VersionedKeyValueStore}.
+     */
+    private class MeteredVersionedKeyValueStoreInternal
+        extends MeteredKeyValueStore<K, ValueAndTimestamp<V>> {
+
+        // Same store instance the superclass wraps, but typed as VersionedBytesStore so the
+        // timestamped get()/delete() overloads (absent from KeyValueStore) are reachable.
+        private final VersionedBytesStore inner;
+
+        MeteredVersionedKeyValueStoreInternal(final VersionedBytesStore inner,
+                                              final String metricScope,
+                                              final Time time,
+                                              final Serde<K> keySerde,
+                                              final Serde<ValueAndTimestamp<V>> valueSerde) {
+            super(inner, metricScope, time, keySerde, valueSerde);
+            this.inner = inner;
+        }
+
+        /**
+         * Puts a value (or tombstone) for the key. Rejects a null wrapper outright: every
+         * write to a versioned store, including a delete, must carry an explicit timestamp
+         * inside the {@link ValueAndTimestamp}.
+         */
+        @Override
+        public void put(final K key, final ValueAndTimestamp<V> value) {
+            if (value == null) {
+                throw new IllegalStateException("Versioned store requires timestamp associated with all puts, including tombstones/deletes");
+            }
+            super.put(key, value);
+        }
+
+        /**
+         * Timestamped point lookup, measured with the same {@code get} sensor as the
+         * latest-value lookup. Deliberately not an override — this overload exists only to
+         * mirror {@link VersionedKeyValueStore#get(Object, long)}.
+         */
+        public ValueAndTimestamp<V> get(final K key, final long asOfTimestamp) {
+            Objects.requireNonNull(key, "key cannot be null");
+            try {
+                return maybeMeasureLatency(() -> outerValue(inner.get(keyBytes(key), asOfTimestamp)), time, getSensor);
+            } catch (final ProcessorStateException e) {
+                // the inner store's message is used as a format string; presumably it contains
+                // a placeholder for the key — NOTE(review): confirm against the inner store
+                final String message = String.format(e.getMessage(), key);
+                throw new ProcessorStateException(message, e);
+            }
+        }
+
+        /**
+         * Timestamped delete, measured with the {@code delete} sensor. Deliberately not an
+         * override — mirrors {@link VersionedKeyValueStore#delete(Object, long)}.
+         */
+        public ValueAndTimestamp<V> delete(final K key, final long timestamp) {
+            Objects.requireNonNull(key, "key cannot be null");
+            try {
+                return maybeMeasureLatency(() -> outerValue(inner.delete(keyBytes(key), timestamp)), time, deleteSensor);
+            } catch (final ProcessorStateException e) {
+                // same format-string convention as get() above
+                final String message = String.format(e.getMessage(), key);
+                throw new ProcessorStateException(message, e);
+            }
+        }
+
+        @Override
+        protected <R> QueryResult<R> runRangeQuery(final Query<R> query,
+                                                   final PositionBound positionBound,
+                                                   final QueryConfig config) {
+            // throw exception for now to reserve the ability to implement this in the future
+            // without clashing with users' custom implementations in the meantime
+            throw new UnsupportedOperationException("Versioned stores do not support RangeQuery queries at this time.");
+        }
+
+        @Override
+        protected <R> QueryResult<R> runKeyQuery(final Query<R> query,
+                                                 final PositionBound positionBound,
+                                                 final QueryConfig config) {
+            // throw exception for now to reserve the ability to implement this in the future
+            // without clashing with users' custom implementations in the meantime
+            throw new UnsupportedOperationException("Versioned stores do not support KeyQuery queries at this time.");
+        }
+
+        /**
+         * When no explicit value serde is provided, wraps the context's default value serde in
+         * a {@link NullableValueAndTimestampSerde} so null (tombstone) values can be serialized;
+         * otherwise defers to the superclass behavior.
+         */
+        @SuppressWarnings("unchecked")
+        @Override
+        protected Serde<ValueAndTimestamp<V>> prepareValueSerdeForStore(
+            final Serde<ValueAndTimestamp<V>> valueSerde,
+            final SerdeGetter getter
+        ) {
+            if (valueSerde == null) {
+                return new NullableValueAndTimestampSerde<>((Serde<V>) getter.valueSerde());
+            } else {
+                return super.prepareValueSerdeForStore(valueSerde, getter);
+            }
+        }
+    }
+
+    /**
+     * Puts a value (or, when {@code value} is null, a tombstone) for the key at the given
+     * timestamp. makeAllowNullable() permits the null value that a plain make() would reject.
+     */
+    @Override
+    public void put(final K key, final V value, final long timestamp) {
+        internal.put(key, ValueAndTimestamp.makeAllowNullable(value, timestamp));
+    }
+
+    /**
+     * Deletes the key as of the given timestamp and returns the record that was current
+     * at that timestamp, or null if there was none.
+     */
+    @Override
+    public VersionedRecord<V> delete(final K key, final long timestamp) {
+        return toVersionedRecord(internal.delete(key, timestamp));
+    }
+
+    /**
+     * Returns the latest record for the key, or null if the key is unknown or deleted.
+     */
+    @Override
+    public VersionedRecord<V> get(final K key) {
+        return toVersionedRecord(internal.get(key));
+    }
+
+    /**
+     * Returns the record current as of the given timestamp, or null if there was none.
+     */
+    @Override
+    public VersionedRecord<V> get(final K key, final long asOfTimestamp) {
+        return toVersionedRecord(internal.get(key, asOfTimestamp));
+    }
+
+    /**
+     * Translates the internal store's {@link ValueAndTimestamp} representation into the
+     * {@link VersionedRecord} type exposed by {@link VersionedKeyValueStore}, preserving null.
+     * Extracted to avoid repeating the identical null-check-and-wrap in delete() and both get()s.
+     */
+    private static <T> VersionedRecord<T> toVersionedRecord(final ValueAndTimestamp<T> valueAndTimestamp) {
+        return valueAndTimestamp == null
+            ? null
+            : new VersionedRecord<>(valueAndTimestamp.value(), valueAndTimestamp.timestamp());
+    }
+
+    @Override
+    public String name() {

Review Comment:
   It seems we override some methods to delegate to `internal.x()` while others 
delegate to `inner.x()`, but I am not sure what the pattern is for deciding 
between the two. Can you elaborate?



##########
streams/src/test/java/org/apache/kafka/streams/state/internals/MeteredVersionedKeyValueStoreTest.java:
##########
@@ -0,0 +1,312 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.apache.kafka.common.utils.Utils.mkEntry;
+import static org.apache.kafka.common.utils.Utils.mkMap;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.junit.Assert.assertThrows;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.MetricName;
+import org.apache.kafka.common.metrics.KafkaMetric;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.metrics.Sensor;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serde;
+import org.apache.kafka.common.serialization.Serdes.StringSerde;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.MockTime;
+import org.apache.kafka.common.utils.Time;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.processor.TaskId;
+import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
+import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
+import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
+import org.apache.kafka.streams.query.KeyQuery;
+import org.apache.kafka.streams.query.PositionBound;
+import org.apache.kafka.streams.query.Query;
+import org.apache.kafka.streams.query.QueryConfig;
+import org.apache.kafka.streams.query.QueryResult;
+import org.apache.kafka.streams.query.RangeQuery;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.apache.kafka.streams.state.VersionedBytesStore;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.StrictStubs.class)
+public class MeteredVersionedKeyValueStoreTest {
+
+    private static final String STORE_NAME = "versioned_store";
+    private static final Serde<String> STRING_SERDE = new StringSerde();
+    // value serde must tolerate nulls so tombstone puts can round-trip
+    private static final Serde<ValueAndTimestamp<String>> VALUE_AND_TIMESTAMP_SERDE = new NullableValueAndTimestampSerde<>(STRING_SERDE);
+    private static final String METRICS_SCOPE = "scope";
+    private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
+    private static final String APPLICATION_ID = "test-app";
+    private static final TaskId TASK_ID = new TaskId(0, 0, "My-Topology");
+
+    private static final String KEY = "k";
+    private static final String VALUE = "v";
+    private static final long TIMESTAMP = 10L;
+    // pre-serialized forms of KEY and (VALUE, TIMESTAMP), used to verify the exact
+    // bytes the metered store hands to the inner store
+    private static final Bytes RAW_KEY = new Bytes(STRING_SERDE.serializer().serialize(null, KEY));
+    private static final byte[] RAW_VALUE_AND_TIMESTAMP = VALUE_AND_TIMESTAMP_SERDE.serializer()
+        .serialize(null, ValueAndTimestamp.make(VALUE, TIMESTAMP));
+
+    private final VersionedBytesStore inner = mock(VersionedBytesStore.class);
+    private final Metrics metrics = new Metrics();
+    private final Time mockTime = new MockTime();
+    private final String threadId = Thread.currentThread().getName();
+    private InternalProcessorContext context = mock(InternalProcessorContext.class);
+    private Map<String, String> tags;
+
+    // store under test; created fresh in setUp() and recreated by tests that need to re-init
+    private MeteredVersionedKeyValueStore<String, String> store;
+
+    @Before
+    public void setUp() {
+        // stub only the context/inner-store interactions that init() and metric
+        // registration require (strict stubs reject anything unused)
+        when(inner.name()).thenReturn(STORE_NAME);
+        when(context.metrics()).thenReturn(new StreamsMetricsImpl(metrics, "test", StreamsConfig.METRICS_LATEST, mockTime));
+        when(context.applicationId()).thenReturn(APPLICATION_ID);
+        when(context.taskId()).thenReturn(TASK_ID);
+
+        // record debug-level metrics so store-level sensors actually produce values
+        metrics.config().recordLevel(Sensor.RecordingLevel.DEBUG);
+        tags = mkMap(
+            mkEntry("thread-id", threadId),
+            mkEntry("task-id", TASK_ID.toString()),
+            mkEntry(METRICS_SCOPE + "-state-id", STORE_NAME)
+        );
+
+        store = newMeteredStore(inner);
+        store.init((StateStoreContext) context, store);
+    }
+
+    private MeteredVersionedKeyValueStore<String, String> 
newMeteredStore(final VersionedBytesStore inner) {
+        return new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            STRING_SERDE,
+            VALUE_AND_TIMESTAMP_SERDE
+        );
+    }
+
+    @SuppressWarnings("deprecation")
+    @Test
+    public void shouldDelegateDeprecatedInit() {
+        // recreate store in order to re-init, since setUp() already initialized the shared one
+        store.close();
+        final VersionedBytesStore mockInner = mock(VersionedBytesStore.class);
+        store = newMeteredStore(mockInner);
+
+        store.init((ProcessorContext) context, store);
+
+        // the deprecated ProcessorContext overload must be forwarded to the inner store unchanged
+        verify(mockInner).init((ProcessorContext) context, store);
+    }
+
+    @Test
+    public void shouldDelegateInit() {
+        // init is already called in setUp(); only the delegation needs verifying here
+        verify(inner).init((StateStoreContext) context, store);
+    }
+
+    @Test
+    public void shouldPassChangelogTopicNameToStateStoreSerde() {
+        // when the context reports a changelog topic, serdes must serialize against it
+        final String changelogTopicName = "changelog-topic";
+        when(context.changelogFor(STORE_NAME)).thenReturn(changelogTopicName);
+        doShouldPassChangelogTopicNameToStateStoreSerde(changelogTopicName);
+    }
+
+    @Test
+    public void shouldPassDefaultChangelogTopicNameToStateStoreSerdeIfLoggingDisabled() {
+        // with no changelog registered (context returns null), the store should fall back
+        // to the conventionally-derived default changelog topic name
+        final String defaultChangelogTopicName = ProcessorStateManager.storeChangelogTopic(APPLICATION_ID, STORE_NAME, TASK_ID.topologyName());
+        when(context.changelogFor(STORE_NAME)).thenReturn(null);
+        doShouldPassChangelogTopicNameToStateStoreSerde(defaultChangelogTopicName);
+    }
+
+    /**
+     * Re-creates the store with mock serdes, performs a put, and verifies both key and
+     * value serializers were invoked with the expected topic name.
+     */
+    @SuppressWarnings("unchecked")
+    private void doShouldPassChangelogTopicNameToStateStoreSerde(final String changelogTopicName) {
+        // recreate store with mock serdes so serializer calls can be verified
+        final Serde<String> keySerde = mock(Serde.class);
+        final Serializer<String> keySerializer = mock(Serializer.class);
+        final Serde<String> valueSerde = mock(Serde.class);
+        final Serializer<String> valueSerializer = mock(Serializer.class);
+        final Deserializer<String> valueDeserializer = mock(Deserializer.class);
+        when(keySerde.serializer()).thenReturn(keySerializer);
+        when(valueSerde.serializer()).thenReturn(valueSerializer);
+        when(valueSerde.deserializer()).thenReturn(valueDeserializer);
+
+        store.close();
+        store = new MeteredVersionedKeyValueStore<>(
+            inner,
+            METRICS_SCOPE,
+            mockTime,
+            keySerde,
+            new NullableValueAndTimestampSerde<>(valueSerde)
+        );
+        store.init((StateStoreContext) context, store);
+
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        verify(keySerializer).serialize(changelogTopicName, KEY);
+        verify(valueSerializer).serialize(changelogTopicName, VALUE);
+    }
+
+    @Test
+    public void shouldRecordMetricsOnInit() {
+        // init is called in setUp(). it suffices to verify one restore metric since all restore
+        // metrics are recorded by the same sensor, and the sensor is tested elsewhere.
+        assertThat((Double) getMetric("restore-rate").metricValue(), greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnPut() {
+        store.put(KEY, VALUE, TIMESTAMP);
+
+        // inner store receives the serialized key and (value, timestamp) bytes
+        verify(inner).put(RAW_KEY, RAW_VALUE_AND_TIMESTAMP);
+        assertThat((Double) getMetric("put-rate").metricValue(), greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnDelete() {
+        store.delete(KEY, TIMESTAMP);
+
+        verify(inner).delete(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("delete-rate").metricValue(), greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGet() {
+        store.get(KEY);
+
+        verify(inner).get(RAW_KEY);
+        assertThat((Double) getMetric("get-rate").metricValue(), greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnGetWithTimestamp() {
+        store.get(KEY, TIMESTAMP);
+
+        // the timestamped lookup shares the "get" sensor with the plain lookup
+        verify(inner).get(RAW_KEY, TIMESTAMP);
+        assertThat((Double) getMetric("get-rate").metricValue(), greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRecordMetricsOnFlush() {
+        store.flush();
+
+        verify(inner).flush();
+        assertThat((Double) getMetric("flush-rate").metricValue(), greaterThan(0.0));
+    }
+
+    @Test
+    public void shouldDelegateAndRemoveMetricsOnClose() {
+        // metrics exist after init()...
+        assertThat(storeMetrics(), not(empty()));
+
+        store.close();
+
+        // ...and must be deregistered once the store is closed
+        verify(inner).close();
+        assertThat(storeMetrics(), empty());
+    }
+
+    @Test
+    public void shouldRemoveMetricsOnCloseEvenIfInnerThrows() {
+        // metrics cleanup must happen even when the inner store's close() fails
+        doThrow(new RuntimeException("uh oh")).when(inner).close();
+        assertThat(storeMetrics(), not(empty()));
+
+        assertThrows(RuntimeException.class, () -> store.close());
+
+        assertThat(storeMetrics(), empty());
+    }
+
+    @Test
+    public void shouldNotSetFlushListenerIfInnerIsNotCaching() {

Review Comment:
   as above



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: jira-unsubscribe@kafka.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

Reply via email to