This is an automated email from the ASF dual-hosted git repository.

bbejeck pushed a commit to branch 1.1
in repository https://gitbox.apache.org/repos/asf/kafka.git
The following commit(s) were added to refs/heads/1.1 by this push:
     new c1b3846  Fixing merge conflicts from cherry pick

c1b3846 is described below

commit c1b38463122a83606bcb955bfd6f12bc8963bae0
Author: Bill Bejeck <bbej...@gmail.com>
AuthorDate: Thu Feb 28 18:54:25 2019 -0500

    Fixing merge conflicts from cherry pick
---
 .../apache/kafka/streams/integration/RegexSourceIntegrationTest.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/RegexSourceIntegrationTest.java b/streams/src/test/java/org/apache/kafka/streams/integration/RegexSourceIntegrationTest.java
index 1da4c58..5f0a107 100644
--- a/streams/src/test/java/org/apache/kafka/streams/integration/RegexSourceIntegrationTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/integration/RegexSourceIntegrationTest.java
@@ -57,6 +57,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.regex.Pattern;

 import static org.hamcrest.CoreMatchers.equalTo;
@@ -144,7 +145,7 @@ public class RegexSourceIntegrationTest {
         final KStream<String, String> pattern1Stream = builder.stream(Pattern.compile("TEST-TOPIC-\\d"));
         pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

-        final List<String> assignedTopics = new ArrayList<>();
+        final List<String> assignedTopics = new CopyOnWriteArrayList<>();
         streams = new KafkaStreams(builder.build(), streamsConfig, new DefaultKafkaClientSupplier() {
             @Override
             public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {
@@ -195,7 +196,7 @@ public class RegexSourceIntegrationTest {
         pattern1Stream.to(stringSerde, stringSerde, DEFAULT_OUTPUT_TOPIC);

-        final List<String> assignedTopics = new ArrayList<>();
+        final List<String> assignedTopics = new CopyOnWriteArrayList<>();
         streams = new KafkaStreams(builder.build(), streamsConfig, new
DefaultKafkaClientSupplier() {
             @Override
             public Consumer<byte[], byte[]> getConsumer(final Map<String, Object> config) {