[GitHub] [kafka] vcrfxia commented on a diff in pull request #13143: KAFKA-14491: [3/N] Add logical key value segments

2023-02-03 Thread via GitHub


vcrfxia commented on code in PR #13143:
URL: https://github.com/apache/kafka/pull/13143#discussion_r1096168089


##
streams/src/test/java/org/apache/kafka/streams/state/internals/LogicalKeyValueSegmentTest.java:
##
@@ -0,0 +1,270 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.Utils;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.errors.InvalidStateStoreException;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.internals.metrics.RocksDBMetricsRecorder;
+import org.apache.kafka.test.InternalMockProcessorContext;
+import org.apache.kafka.test.StreamsTestUtils;
+import org.apache.kafka.test.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class LogicalKeyValueSegmentTest {
+
+    private static final String STORE_NAME = "physical-rocks";
+    private static final String METRICS_SCOPE = "metrics-scope";
+    private static final String DB_FILE_DIR = "rocksdb";
+    private static final Serializer<String> STRING_SERIALIZER = new StringSerializer();
+    private static final Deserializer<String> STRING_DESERIALIZER = new StringDeserializer();
+
+    private RocksDBStore physicalStore;
+
+    private LogicalKeyValueSegment segment1;
+    private LogicalKeyValueSegment segment2;
+
+    @Before
+    public void setUp() {
+        physicalStore = new RocksDBStore(STORE_NAME, DB_FILE_DIR, new RocksDBMetricsRecorder(METRICS_SCOPE, STORE_NAME), false);
+        physicalStore.init((StateStoreContext) new InternalMockProcessorContext<>(
+            TestUtils.tempDirectory(),
+            Serdes.String(),
+            Serdes.String(),
+            new StreamsConfig(StreamsTestUtils.getStreamsConfig())
+        ), physicalStore);
+
+        segment1 = new LogicalKeyValueSegment(1, "segment-1", physicalStore);
+        segment2 = new LogicalKeyValueSegment(2, "segment-2", physicalStore);
+    }
+
+    @After
+    public void tearDown() {
+        segment1.close();
+        segment2.close();
+        physicalStore.close();
+    }
+
+    @Test
+    public void shouldPut() {
+        final KeyValue<String, String> kv0 = new KeyValue<>("1", "a");
+        final KeyValue<String, String> kv1 = new KeyValue<>("2", "b");
+
+        segment1.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
+        segment1.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
+        segment2.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
+        segment2.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
+
+        assertEquals("a", getAndDeserialize(segment1, "1"));

Review Comment:
   Ah, good point. That's definitely a gap in `shouldPut()` and `shouldPutAll()`. All of the other tests are already set up so that they fail if segments are not properly isolated from each other. Just pushed a fix to the two tests that didn't ensure this, along with some minor cleanup to a few of the other tests.
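
   To illustrate, here's a minimal sketch of the kind of isolation check being described, reusing only identifiers from the test class above (`getAndDeserialize` is the test's own helper); the exact assertions in the pushed commit may differ:

   ```java
   // Same key, different value per segment: if the segment prefixes were not
   // applied correctly, one segment's write would clobber the other's.
   segment1.put(new Bytes("shared".getBytes(UTF_8)), "value-1".getBytes(UTF_8));
   segment2.put(new Bytes("shared".getBytes(UTF_8)), "value-2".getBytes(UTF_8));

   assertEquals("value-1", getAndDeserialize(segment1, "shared"));
   assertEquals("value-2", getAndDeserialize(segment2, "shared"));
   ```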






[GitHub] [kafka] vcrfxia commented on a diff in pull request #13143: KAFKA-14491: [3/N] Add logical key value segments

2023-02-01 Thread via GitHub


vcrfxia commented on code in PR #13143:
URL: https://github.com/apache/kafka/pull/13143#discussion_r1094000371


##
streams/src/test/java/org/apache/kafka/streams/state/internals/LogicalKeyValueSegmentTest.java:
##
@@ -0,0 +1,270 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.Utils;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.errors.InvalidStateStoreException;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.internals.metrics.RocksDBMetricsRecorder;
+import org.apache.kafka.test.InternalMockProcessorContext;
+import org.apache.kafka.test.StreamsTestUtils;
+import org.apache.kafka.test.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class LogicalKeyValueSegmentTest {
+
+    private static final String STORE_NAME = "physical-rocks";
+    private static final String METRICS_SCOPE = "metrics-scope";
+    private static final String DB_FILE_DIR = "rocksdb";
+    private static final Serializer<String> STRING_SERIALIZER = new StringSerializer();
+    private static final Deserializer<String> STRING_DESERIALIZER = new StringDeserializer();
+
+    private RocksDBStore physicalStore;
+
+    private LogicalKeyValueSegment segment1;
+    private LogicalKeyValueSegment segment2;
+
+    @Before
+    public void setUp() {
+        physicalStore = new RocksDBStore(STORE_NAME, DB_FILE_DIR, new RocksDBMetricsRecorder(METRICS_SCOPE, STORE_NAME), false);
+        physicalStore.init((StateStoreContext) new InternalMockProcessorContext<>(
+            TestUtils.tempDirectory(),
+            Serdes.String(),
+            Serdes.String(),
+            new StreamsConfig(StreamsTestUtils.getStreamsConfig())
+        ), physicalStore);
+
+        segment1 = new LogicalKeyValueSegment(1, "segment-1", physicalStore);
+        segment2 = new LogicalKeyValueSegment(2, "segment-2", physicalStore);
+    }
+
+    @After
+    public void tearDown() {
+        segment1.close();
+        segment2.close();
+        physicalStore.close();
+    }
+
+    @Test
+    public void shouldPut() {
+        final KeyValue<String, String> kv0 = new KeyValue<>("1", "a");
+        final KeyValue<String, String> kv1 = new KeyValue<>("2", "b");
+
+        segment1.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
+        segment1.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
+        segment2.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
+        segment2.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
+
+        assertEquals("a", getAndDeserialize(segment1, "1"));
+        assertEquals("b", getAndDeserialize(segment1, "2"));
+        assertEquals("a", getAndDeserialize(segment2, "1"));
+        assertEquals("b", getAndDeserialize(segment2, "2"));
+    }
+
+    @Test
+    public void shouldPutAll() {
+        final List<KeyValue<Bytes, byte[]>> entries = new ArrayList<>();
+        entries.add(new KeyValue<>(
+            new Bytes(STRING_SERIALIZER.serialize(null, "1")),
+            STRING_SERIALIZER.serialize(null, "a")));
+        entries.add(new KeyValue<>(
+            new Bytes(STRING_SERIALIZER.serialize(null, "2")),
+            STRING_SERIALIZER.serialize(null, "b")));
+        entries.add(new KeyValue<>(
+            new Bytes(STRING_SERIALIZ

[GitHub] [kafka] vcrfxia commented on a diff in pull request #13143: KAFKA-14491: [3/N] Add logical key value segments

2023-02-01 Thread via GitHub


vcrfxia commented on code in PR #13143:
URL: https://github.com/apache/kafka/pull/13143#discussion_r1093929163


##
streams/src/test/java/org/apache/kafka/streams/state/internals/LogicalKeyValueSegmentsTest.java:
##
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.util.List;
+import org.apache.kafka.common.metrics.Metrics;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.common.utils.LogContext;
+import org.apache.kafka.streams.processor.internals.MockStreamsMetrics;
+import org.apache.kafka.streams.state.internals.metrics.RocksDBMetricsRecorder;
+import org.apache.kafka.test.InternalMockProcessorContext;
+import org.apache.kafka.test.MockRecordCollector;
+import org.apache.kafka.test.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class LogicalKeyValueSegmentsTest {
+
+    private static final long SEGMENT_INTERVAL = 100L;
+    private static final long RETENTION_PERIOD = 4 * SEGMENT_INTERVAL;
+    private static final String STORE_NAME = "logical-segments";
+    private static final String METRICS_SCOPE = "metrics-scope";
+    private static final String DB_FILE_DIR = "rocksdb";
+
+    private InternalMockProcessorContext context;
+
+    private LogicalKeyValueSegments segments;
+
+    @Before
+    public void setUp() {
+        context = new InternalMockProcessorContext<>(
+            TestUtils.tempDirectory(),
+            Serdes.String(),
+            Serdes.Long(),
+            new MockRecordCollector(),
+            new ThreadCache(new LogContext("testCache "), 0, new MockStreamsMetrics(new Metrics()))
+        );
+        segments = new LogicalKeyValueSegments(
+            STORE_NAME,
+            DB_FILE_DIR,
+            RETENTION_PERIOD,
+            SEGMENT_INTERVAL,
+            new RocksDBMetricsRecorder(METRICS_SCOPE, STORE_NAME)
+        );
+        segments.openExisting(context, -1L);
+    }
+
+    @After
+    public void tearDown() {
+        segments.close();
+    }
+
+    @Test
+    public void shouldGetSegmentIdsFromTimestamp() {
+        assertEquals(0, segments.segmentId(0));
+        assertEquals(1, segments.segmentId(SEGMENT_INTERVAL));
+        assertEquals(2, segments.segmentId(2 * SEGMENT_INTERVAL));
+        assertEquals(3, segments.segmentId(3 * SEGMENT_INTERVAL));
+    }
+
+    @Test
+    public void shouldCreateSegments() {
+        final LogicalKeyValueSegment segment1 = segments.getOrCreateSegmentIfLive(0, context, -1L);

Review Comment:
   See 
[above](https://github.com/apache/kafka/pull/13143#discussion_r1093927849).






[GitHub] [kafka] vcrfxia commented on a diff in pull request #13143: KAFKA-14491: [3/N] Add logical key value segments

2023-02-01 Thread via GitHub


vcrfxia commented on code in PR #13143:
URL: https://github.com/apache/kafka/pull/13143#discussion_r1093924959


##
streams/src/test/java/org/apache/kafka/streams/state/internals/LogicalKeyValueSegmentTest.java:
##
@@ -0,0 +1,270 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serdes;
+import org.apache.kafka.common.serialization.Serializer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+import org.apache.kafka.common.serialization.StringSerializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.common.utils.Utils;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.StreamsConfig;
+import org.apache.kafka.streams.errors.InvalidStateStoreException;
+import org.apache.kafka.streams.processor.StateStoreContext;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.internals.metrics.RocksDBMetricsRecorder;
+import org.apache.kafka.test.InternalMockProcessorContext;
+import org.apache.kafka.test.StreamsTestUtils;
+import org.apache.kafka.test.TestUtils;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class LogicalKeyValueSegmentTest {
+
+    private static final String STORE_NAME = "physical-rocks";
+    private static final String METRICS_SCOPE = "metrics-scope";
+    private static final String DB_FILE_DIR = "rocksdb";
+    private static final Serializer<String> STRING_SERIALIZER = new StringSerializer();
+    private static final Deserializer<String> STRING_DESERIALIZER = new StringDeserializer();
+
+    private RocksDBStore physicalStore;
+
+    private LogicalKeyValueSegment segment1;
+    private LogicalKeyValueSegment segment2;
+
+    @Before
+    public void setUp() {
+        physicalStore = new RocksDBStore(STORE_NAME, DB_FILE_DIR, new RocksDBMetricsRecorder(METRICS_SCOPE, STORE_NAME), false);
+        physicalStore.init((StateStoreContext) new InternalMockProcessorContext<>(
+            TestUtils.tempDirectory(),
+            Serdes.String(),
+            Serdes.String(),
+            new StreamsConfig(StreamsTestUtils.getStreamsConfig())
+        ), physicalStore);
+
+        segment1 = new LogicalKeyValueSegment(1, "segment-1", physicalStore);
+        segment2 = new LogicalKeyValueSegment(2, "segment-2", physicalStore);
+    }
+
+    @After
+    public void tearDown() {
+        segment1.close();
+        segment2.close();
+        physicalStore.close();
+    }
+
+    @Test
+    public void shouldPut() {
+        final KeyValue<String, String> kv0 = new KeyValue<>("1", "a");
+        final KeyValue<String, String> kv1 = new KeyValue<>("2", "b");
+
+        segment1.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
+        segment1.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
+        segment2.put(new Bytes(kv0.key.getBytes(UTF_8)), kv0.value.getBytes(UTF_8));
+        segment2.put(new Bytes(kv1.key.getBytes(UTF_8)), kv1.value.getBytes(UTF_8));
+
+        assertEquals("a", getAndDeserialize(segment1, "1"));

Review Comment:
   I was on the fence about this because it requires testing the internals of 
the class (i.e., specifically how the segment prefixes are serialized) rather 
than just the public-facing methods. In the end I opted to test indirectly 
instead, by inserting the same keys into different segments and checking that 
their values do not collide.
   
   If you prefer checking the contents of the physical store itself, I can make 
the update. 
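
   For concreteness, here's a rough sketch of what that direct check might look like (hypothetical test code, not from the PR; it assumes a `java.nio.ByteBuffer` import and that the segment prefix is the segment ID serialized as an 8-byte big-endian long, which is an assumption about `serializeLongToBytes` in `LogicalKeyValueSegment`):

   ```java
   // Build the physical key by hand: [8-byte segment ID][logical key bytes].
   final Bytes physicalKey = Bytes.wrap(ByteBuffer.allocate(Long.BYTES + 1)
       .putLong(1L)                      // segment1 was created with ID 1
       .put("1".getBytes(UTF_8))         // the logical key
       .array());

   // Bypass the logical segment and read straight from the shared RocksDB store.
   assertEquals("a", STRING_DESERIALIZER.deserialize(null, physicalStore.get(physicalKey)));
   ```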



##
streams/src/test/java/org/apache/kafka/streams/state/internals/LogicalKeyValueSegmentTest.java:
##
@@ -0,0 +1,270 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * co

[GitHub] [kafka] vcrfxia commented on a diff in pull request #13143: KAFKA-14491: [3/N] Add logical key value segments

2023-01-24 Thread via GitHub


vcrfxia commented on code in PR #13143:
URL: https://github.com/apache/kafka/pull/13143#discussion_r1085990051


##
streams/src/main/java/org/apache/kafka/streams/state/internals/LogicalKeyValueSegment.java:
##
@@ -0,0 +1,297 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kafka.streams.state.internals;
+
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import org.apache.kafka.common.serialization.BytesSerializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.apache.kafka.streams.KeyValue;
+import org.apache.kafka.streams.processor.ProcessorContext;
+import org.apache.kafka.streams.processor.StateStore;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.rocksdb.RocksDBException;
+import org.rocksdb.WriteBatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This "logical segment" is a segment which shares its underlying physical 
store with other
+ * logical segments. Each segment uses a unique, fixed-length key prefix 
derived from the
+ * segment ID when writing to the shared physical store.
+ */
+class LogicalKeyValueSegment implements Comparable, 
Segment {
+private static final Logger log = 
LoggerFactory.getLogger(LogicalKeyValueSegment.class);
+
+public final long id;
+private final String name;
+private final RocksDBStore physicalStore;
+private final PrefixKeyFormatter prefixKeyFormatter;
+
+private volatile boolean open = false;
+final Set> openIterators = 
Collections.synchronizedSet(new HashSet<>());
+
+LogicalKeyValueSegment(final long id,
+   final String name,
+   final RocksDBStore physicalStore) {
+this.id = id;
+this.name = name;
+this.physicalStore = Objects.requireNonNull(physicalStore);
+
+this.prefixKeyFormatter = new 
PrefixKeyFormatter(serializeLongToBytes(id));
+}
+
+void openDB() {
+open = true;
+}
+
+@Override
+public int compareTo(final LogicalKeyValueSegment segment) {
+return Long.compare(id, segment.id);
+}
+
+@Override
+public synchronized void destroy() {
+final Bytes keyPrefix = prefixKeyFormatter.getPrefix();
+
+// this is a prefix deletion, because the deleteRange() implementation
+// calls Bytes.increment() in order to make keyTo inclusive
+physicalStore.deleteRange(keyPrefix, keyPrefix);
+}
+
+@Override
+public synchronized void deleteRange(final Bytes keyFrom, final Bytes 
keyTo) {
+physicalStore.deleteRange(
+prefixKeyFormatter.forPhysicalStore(keyFrom),
+prefixKeyFormatter.forPhysicalStore(keyTo));
+}
+
+@Override
+public synchronized void put(final Bytes key, final byte[] value) {
+physicalStore.put(
+prefixKeyFormatter.forPhysicalStore(key),
+value);
+}
+
+@Override
+public synchronized byte[] putIfAbsent(final Bytes key, final byte[] 
value) {
+return physicalStore.putIfAbsent(
+prefixKeyFormatter.forPhysicalStore(key),
+value);
+}
+
+@Override
+public synchronized void putAll(final List> 
entries) {
+physicalStore.putAll(entries.stream()
+.map(kv -> new KeyValue<>(
+prefixKeyFormatter.forPhysicalStore(kv.key),
+kv.value))
+.collect(Collectors.toList()));
+}
+
+@Override
+public synchronized byte[] delete(final Bytes key) {
+return physicalStore.delete(prefixKeyFormatter.forPhysicalStore(key));
+}
+
+@Override
+public String name() {
+return name;
+}
+
+@Deprecated
+@Override
+public void init(final ProcessorContext context, final StateStore root) {
+throw new UnsupportedOperationException("cannot initialize a logical 
segment");
+}
+
+@Override
+public void flush() {
+throw new UnsupportedOperationException("nothing to flush for logical 
segment");
+}
+
+
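
The quoted hunk is cut off before the `PrefixKeyFormatter` implementation. For context, here is a minimal sketch of how such a fixed-length prefix scheme can work. The method names `forPhysicalStore` and `getPrefix` come from the call sites above; everything else (field layout, constructor shape) is an assumption for illustration, not the committed code:

```java
import java.nio.ByteBuffer;
import org.apache.kafka.common.utils.Bytes;

// Hypothetical sketch: prepends a fixed-length prefix (the serialized segment
// ID) to every logical key before it reaches the shared physical store.
class PrefixKeyFormatter {
    private final byte[] prefix;

    PrefixKeyFormatter(final Bytes prefix) {
        this.prefix = prefix.get();
    }

    // [prefix bytes][logical key bytes] -- a fixed prefix length keeps range
    // scans and the prefix-based destroy() above well-defined.
    Bytes forPhysicalStore(final Bytes key) {
        final byte[] rawKey = key.get();
        return Bytes.wrap(ByteBuffer.allocate(prefix.length + rawKey.length)
            .put(prefix)
            .put(rawKey)
            .array());
    }

    Bytes getPrefix() {
        return Bytes.wrap(prefix);
    }
}
```

Because the prefix has a fixed length, identical logical keys written to different segments map to distinct physical keys, which is exactly the isolation property the tests above exercise.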