Build: https://jenkins.thetaphi.de/job/Lucene-Solr-7.3-Linux/60/ Java: 64bit/jdk-11-ea+5 -XX:-UseCompressedOops -XX:+UseG1GC
1 tests failed.
FAILED:  org.apache.solr.cloud.hdfs.HDFSCollectionsAPITest.testDataDirIsNotReused

Error Message:
null
Live Nodes: [127.0.0.1:38585_solr, 127.0.0.1:42421_solr]
Last available state: DocCollection(test//collections/test/state.json/5)={
  "pullReplicas":"0",
  "replicationFactor":"1",
  "shards":{"shard1":{
      "range":"80000000-7fffffff",
      "state":"active",
      "replicas":{"core_node2":{
          "dataDir":"hdfs://localhost.localdomain:43061/data/test/core_node2/data/",
          "base_url":"https://127.0.0.1:38585/solr",
          "node_name":"127.0.0.1:38585_solr",
          "type":"NRT",
          "ulogDir":"hdfs://localhost.localdomain:43061/data/test/core_node2/data/tlog",
          "core":"test_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false",
  "nrtReplicas":"1",
  "tlogReplicas":"0"}

Stack Trace:
java.lang.AssertionError: null
Live Nodes: [127.0.0.1:38585_solr, 127.0.0.1:42421_solr]
Last available state: DocCollection(test//collections/test/state.json/5)={
  "pullReplicas":"0",
  "replicationFactor":"1",
  "shards":{"shard1":{
      "range":"80000000-7fffffff",
      "state":"active",
      "replicas":{"core_node2":{
          "dataDir":"hdfs://localhost.localdomain:43061/data/test/core_node2/data/",
          "base_url":"https://127.0.0.1:38585/solr",
          "node_name":"127.0.0.1:38585_solr",
          "type":"NRT",
          "ulogDir":"hdfs://localhost.localdomain:43061/data/test/core_node2/data/tlog",
          "core":"test_shard1_replica_n1",
          "shared_storage":"true",
          "state":"down",
          "leader":"true"}}}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"1",
  "autoAddReplicas":"false",
  "nrtReplicas":"1",
  "tlogReplicas":"0"}
	at __randomizedtesting.SeedInfo.seed([35FB936DF3887624:2DE4D119E0D1D483]:0)
	at org.junit.Assert.fail(Assert.java:93)
	at org.apache.solr.cloud.SolrCloudTestCase.waitForState(SolrCloudTestCase.java:269)
	at org.apache.solr.cloud.hdfs.HDFSCollectionsAPITest.testDataDirIsNotReused(HDFSCollectionsAPITest.java:90)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:564)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1737)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:934)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:970)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:984)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
	at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:943)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:829)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:879)
	at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:890)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
	at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
	at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
	at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
	at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
	at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
	at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
	at java.base/java.lang.Thread.run(Thread.java:841)

Build Log:
[...truncated 13073 lines...]
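Before the log excerpt, for context on the assertion above: SolrCloudTestCase.waitForState polls the published state of a collection until a predicate matches or a timeout expires, and on timeout it fails the test with the last state it observed, which is why the error message carries the full DocCollection dump with core_node2 still "down". The following is only a hedged sketch of that usage, assuming the standard CollectionStatePredicate API from the Solr test framework; the class name, cluster size, and predicate are illustrative assumptions and not the actual code at HDFSCollectionsAPITest.java:90.

    // Hedged sketch of SolrCloudTestCase.waitForState usage; not the code from
    // HDFSCollectionsAPITest. Class name, cluster size, and predicate are assumptions.
    import java.util.Set;

    import org.apache.solr.cloud.SolrCloudTestCase;
    import org.apache.solr.common.cloud.DocCollection;
    import org.apache.solr.common.cloud.Replica;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class WaitForStateSketchTest extends SolrCloudTestCase {

      @BeforeClass
      public static void setupCluster() throws Exception {
        // Two Jetty nodes, mirroring the 2-node MiniSolrCloudCluster started in the log below.
        configureCluster(2).configure();
      }

      @Test
      public void testReplicasBecomeActive() throws Exception {
        // waitForState(message, collection, predicate) blocks until the predicate matches
        // the collection's cluster state, or fails with the last observed DocCollection --
        // the kind of dump shown in the error message above.
        waitForState("Expected every replica of 'test' to be ACTIVE on a live node", "test",
            (Set<String> liveNodes, DocCollection collectionState) ->
                collectionState != null
                    && collectionState.getReplicas().stream().allMatch(replica ->
                        replica.getState() == Replica.State.ACTIVE
                            && liveNodes.contains(replica.getNodeName())));
      }
    }

Whatever the real predicate in testDataDirIsNotReused waits for, the dump above shows it never matched before the wait gave up: shard1 is "active" and core_node2 is marked leader, but its replica state is still "down".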
[junit4] Suite: org.apache.solr.cloud.hdfs.HDFSCollectionsAPITest [junit4] 2> 704381 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom [junit4] 2> Creating dataDir: /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/init-core-data-001 [junit4] 2> 704382 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=19 numCloses=19 [junit4] 2> 704382 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) w/NUMERIC_DOCVALUES_SYSPROP=true [junit4] 2> 704382 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: @org.apache.solr.util.RandomizeSSL(reason="", ssl=0.0/0.0, value=0.0/0.0, clientAuth=0.0/0.0) [junit4] 2> 704383 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.MiniSolrCloudCluster Starting cluster of 2 servers in /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-001 [junit4] 2> 704383 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.ZkTestServer STARTING ZK TEST SERVER [junit4] 2> 704383 INFO (Thread-4922) [ ] o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0 [junit4] 2> 704383 INFO (Thread-4922) [ ] o.a.s.c.ZkTestServer Starting server [junit4] 2> 704401 ERROR (Thread-4922) [ ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes [junit4] 2> 704483 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.ZkTestServer start zk server on port:34003 [junit4] 2> 704536 INFO (zkConnectionManagerCallback-1660-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704547 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.Server jetty-9.4.8.v20171121, build timestamp: 2017-11-21T22:27:37+01:00, git hash: 82b8fb23f757335bb3329d540ce37a2a2615f0a8 [junit4] 2> 704550 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.Server jetty-9.4.8.v20171121, build timestamp: 2017-11-21T22:27:37+01:00, git hash: 82b8fb23f757335bb3329d540ce37a2a2615f0a8 [junit4] 2> 704582 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0 [junit4] 2> 704582 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.session No SessionScavenger set, using defaults [junit4] 2> 704582 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.session Scavenging every 600000ms [junit4] 2> 704587 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@2b985410{/solr,null,AVAILABLE} [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.AbstractConnector Started ServerConnector@1c283058{SSL,[ssl, http/1.1]}{127.0.0.1:42421} [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.e.j.s.Server Started @706682ms [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=42421} [junit4] 2> 704589 ERROR (jetty-launcher-1657-thread-1) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. 
Logging may be missing or incomplete. [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 7.3.0 [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null [junit4] 2> 704589 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2018-03-24T18:37:23.184807Z [junit4] 2> 704590 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0 [junit4] 2> 704590 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.session No SessionScavenger set, using defaults [junit4] 2> 704590 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.session Scavenging every 600000ms [junit4] 2> 704596 INFO (zkConnectionManagerCallback-1662-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@34023232{/solr,null,AVAILABLE} [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading... [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.AbstractConnector Started ServerConnector@64f5baeb{SSL,[ssl, http/1.1]}{127.0.0.1:38585} [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.e.j.s.Server Started @706690ms [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=38585} [junit4] 2> 704597 ERROR (jetty-launcher-1657-thread-2) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete. [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? version 7.3.0 [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null [junit4] 2> 704597 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2018-03-24T18:37:23.192941Z [junit4] 2> 704599 INFO (zkConnectionManagerCallback-1664-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704600 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading... 
[junit4] 2> 704603 INFO (jetty-launcher-1657-thread-1) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34003/solr [junit4] 2> 704614 INFO (jetty-launcher-1657-thread-2) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34003/solr [junit4] 2> 704614 INFO (zkConnectionManagerCallback-1668-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704634 INFO (zkConnectionManagerCallback-1674-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704638 INFO (zkConnectionManagerCallback-1672-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704663 INFO (zkConnectionManagerCallback-1677-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704666 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.c.Overseer Overseer (id=null) closing [junit4] 2> 704667 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:38585_solr [junit4] 2> 704667 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.c.Overseer Overseer (id=72192221371695110-127.0.0.1:38585_solr-n_0000000000) starting [junit4] 2> 704671 INFO (zkConnectionManagerCallback-1682-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704672 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34003/solr ready [junit4] 2> 704673 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:38585_solr [junit4] 2> 704674 INFO (zkCallback-1676-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1) [junit4] 2> 704675 INFO (zkCallback-1681-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1) [junit4] 2> 704692 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1) [junit4] 2> 704693 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.c.Overseer Overseer (id=null) closing [junit4] 2> 704694 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:42421_solr [junit4] 2> 704695 INFO (zkCallback-1681-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2) [junit4] 2> 704695 INFO (zkCallback-1676-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2) [junit4] 2> 704699 INFO (zkCallback-1673-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(1) -> (2) [junit4] 2> 704703 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 704708 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 704708 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 704709 INFO (jetty-launcher-1657-thread-2) [n:127.0.0.1:38585_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-001/node2/. [junit4] 2> 704721 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_42421.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 704726 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_42421.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 704726 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_42421.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 704726 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.c.CorePropertiesLocator Found 0 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-001/node1/. [junit4] 2> 704728 INFO (zkConnectionManagerCallback-1686-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 704729 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(0) -> (2) [junit4] 2> 704729 INFO (jetty-launcher-1657-thread-1) [n:127.0.0.1:42421_solr ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34003/solr ready [junit4] 2> 704742 INFO (zkConnectionManagerCallback-1688-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 1> Formatting using clusterid: testClusterID [junit4] 2> 704855 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.h.m.i.MetricsConfig Cannot locate configuration: tried hadoop-metrics2-namenode.properties,hadoop-metrics2.properties [junit4] 2> 704860 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j [junit4] 2> 704861 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log jetty-6.1.26 [junit4] 2> 704865 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Extract jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/hdfs to ./temp/Jetty_localhost_localdomain_42731_hdfs____fujlv5/webapp [junit4] 2> 704931 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost.localdomain:42731 [junit4] 2> 704979 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j [junit4] 2> 704980 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log jetty-6.1.26 [junit4] 2> 704982 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Extract jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/datanode to ./temp/Jetty_localhost_43395_datanode____y4jjpz/webapp [junit4] 2> 705047 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:43395 [junit4] 2> 705068 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j [junit4] 2> 705068 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log jetty-6.1.26 [junit4] 2> 705071 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Extract jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-2.7.4-tests.jar!/webapps/datanode to ./temp/Jetty_localhost_41941_datanode____.max9s0/webapp [junit4] 2> 705141 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Started HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:41941 [junit4] 2> 705172 ERROR (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data1/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data2/]] heartbeating to localhost.localdomain/127.0.0.1:43061) [ ] o.a.h.h.s.d.DirectoryScanner dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value below 1 ms/sec. 
Assuming default value of 1000 [junit4] 2> 705176 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x74c548d136efc: from storage DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc node DatanodeRegistration(127.0.0.1:41499, datanodeUuid=41560b0a-3d3c-49bd-a9a4-ba3d2704e2a1, infoPort=34097, infoSecurePort=0, ipcPort=41221, storageInfo=lv=-56;cid=testClusterID;nsid=580539182;c=0), blocks: 0, hasStaleStorage: true, processing time: 0 msecs [junit4] 2> 705176 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x74c548d136efc: from storage DS-1db7c095-0031-4d58-9f0a-b9871faacd02 node DatanodeRegistration(127.0.0.1:41499, datanodeUuid=41560b0a-3d3c-49bd-a9a4-ba3d2704e2a1, infoPort=34097, infoSecurePort=0, ipcPort=41221, storageInfo=lv=-56;cid=testClusterID;nsid=580539182;c=0), blocks: 0, hasStaleStorage: false, processing time: 0 msecs [junit4] 2> 705238 ERROR (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data3/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data4/]] heartbeating to localhost.localdomain/127.0.0.1:43061) [ ] o.a.h.h.s.d.DirectoryScanner dfs.datanode.directoryscan.throttle.limit.ms.per.sec set to value below 1 ms/sec. Assuming default value of 1000 [junit4] 2> 705242 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x74c54910b216d: from storage DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3 node DatanodeRegistration(127.0.0.1:38913, datanodeUuid=50d65d5a-cf3d-4ba6-823c-0fc48640c627, infoPort=42149, infoSecurePort=0, ipcPort=34233, storageInfo=lv=-56;cid=testClusterID;nsid=580539182;c=0), blocks: 0, hasStaleStorage: true, processing time: 0 msecs [junit4] 2> 705242 INFO (Block report processor) [ ] BlockStateChange BLOCK* processReport 0x74c54910b216d: from storage DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458 node DatanodeRegistration(127.0.0.1:38913, datanodeUuid=50d65d5a-cf3d-4ba6-823c-0fc48640c627, infoPort=42149, infoSecurePort=0, ipcPort=34233, storageInfo=lv=-56;cid=testClusterID;nsid=580539182;c=0), blocks: 0, hasStaleStorage: false, processing time: 0 msecs [junit4] 2> 705254 INFO (zkConnectionManagerCallback-1692-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 705255 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(0) -> (2) [junit4] 2> 705257 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34003/solr ready [junit4] 2> 705278 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.SolrTestCaseJ4 ###Starting testDataDirIsNotReused [junit4] 2> 705289 INFO (qtp317328429-10759) [n:127.0.0.1:38585_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params replicationFactor=1&collection.configName=conf1&name=test&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:38585_solr&wt=javabin&version=2 and sendToOCPQueue=true [junit4] 2> 705291 INFO (OverseerThreadFactory-3494-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.a.c.CreateCollectionCmd Create collection test [junit4] 2> 705397 INFO (OverseerStateUpdate-72192221371695110-127.0.0.1:38585_solr-n_0000000000) [n:127.0.0.1:38585_solr ] o.a.s.c.o.SliceMutator createReplica() { [junit4] 2> "operation":"ADDREPLICA", [junit4] 2> "collection":"test", [junit4] 2> "shard":"shard1", [junit4] 2> "core":"test_shard1_replica_n1", [junit4] 2> "state":"down", [junit4] 2> "base_url":"https://127.0.0.1:38585/solr", [junit4] 2> "type":"NRT", [junit4] 2> "waitForFinalState":"false"} [junit4] 2> 705602 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=test_shard1_replica_n1&action=CREATE&numShards=1&collection=test&shard=shard1&wt=javabin&version=2&replicaType=NRT [junit4] 2> 705602 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores [junit4] 2> 705707 INFO (zkCallback-1676-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [2]) [junit4] 2> 705707 INFO (zkCallback-1676-thread-2-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... 
(live nodes size: [2]) [junit4] 2> 706611 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.3.0 [junit4] 2> 706615 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.IndexSchema [test_shard1_replica_n1] Schema name=minimal [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.IndexSchema Loaded schema minimal/1.1 with uniqueid field id [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.CoreContainer Creating SolrCore 'test_shard1_replica_n1' using configuration from collection test, trusted=true [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.core.test.shard1.replica_n1' (registry 'solr.core.test.shard1.replica_n1') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory solr.hdfs.home=hdfs://localhost.localdomain:43061/data [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Solr Kerberos Authentication disabled [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore solr.RecoveryStrategy.Builder [junit4] 2> 706617 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore [[test_shard1_replica_n1] ] Opening new SolrCore at [/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-001/node2/test_shard1_replica_n1], dataDir=[hdfs://localhost.localdomain:43061/data/test/core_node2/data/] [junit4] 2> 706618 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:43061/data/test/core_node2/data/snapshot_metadata [junit4] 2> 706624 WARN (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.h.HdfsDirectory The NameNode is in SafeMode - Solr will wait 5 seconds and try again. 
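As a side note on the CollectionsHandler entry above (action=CREATE&name=test&collection.configName=conf1&numShards=1&nrtReplicas=1&createNodeSet=127.0.0.1:38585_solr), here is a hedged SolrJ sketch of an equivalent Collections API request. It is not taken from the test; the class, method name, and error handling are invented for illustration, and the createNodeSet pinning is only mentioned in a comment to stay within core SolrJ calls.

    // Hedged sketch: a SolrJ request roughly equivalent to the CREATE call logged above.
    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.request.CollectionAdminRequest;
    import org.apache.solr.client.solrj.response.CollectionAdminResponse;

    public class CreateCollectionSketch {

      static void createTestCollection(SolrClient client) throws Exception {
        // name=test, configset=conf1, numShards=1, nrtReplicas=1, as in the logged params.
        // The logged request additionally pins placement with createNodeSet=127.0.0.1:38585_solr.
        CollectionAdminRequest.Create create =
            CollectionAdminRequest.createCollection("test", "conf1", 1, 1);
        CollectionAdminResponse rsp = create.process(client);
        if (!rsp.isSuccess()) {
          throw new IllegalStateException("CREATE of collection 'test' failed: " + rsp);
        }
      }
    }

In a MiniSolrCloudCluster-based test this would typically be invoked with the cluster's CloudSolrClient, after which the test indexes a few documents, as the /update entries further down in the log show.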
[junit4] 2> 711627 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:43061/data/test/core_node2/data [junit4] 2> 711638 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory creating directory factory for path hdfs://localhost.localdomain:43061/data/test/core_node2/data/index [junit4] 2> 711655 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741825_1001{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 711656 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741825_1001 size 69 [junit4] 2> 711686 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.UpdateHandler Using UpdateLog implementation: org.apache.solr.update.HdfsUpdateLog [junit4] 2> 711686 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.UpdateLog Initializing UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 maxNumLogsToKeep=10 numVersionBuckets=65536 [junit4] 2> 711687 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.HdfsUpdateLog Initializing HdfsUpdateLog: tlogDfsReplication=3 [junit4] 2> 711693 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.CommitTracker Hard AutoCommit: disabled [junit4] 2> 711693 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.CommitTracker Soft AutoCommit: disabled [junit4] 2> 711698 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@4c064c37[test_shard1_replica_n1] main] [junit4] 2> 711699 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: /configs/conf1 [junit4] 2> 711699 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using ZooKeeperStorageIO:path=/configs/conf1 [junit4] 2> 711700 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.h.ReplicationHandler Commits will be reserved for 10000ms. 
[junit4] 2> 711700 INFO (searcherExecutor-3499-thread-1-processing-n:127.0.0.1:38585_solr x:test_shard1_replica_n1 s:shard1 c:test r:core_node2) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore [test_shard1_replica_n1] Registered new searcher Searcher@4c064c37[test_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader())} [junit4] 2> 711700 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.UpdateLog Could not find max version in index or recent updates, using new clock 1595845273499729920 [junit4] 2> 711703 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/test/terms/shard1 to Terms{values={core_node2=0}, version=0} [junit4] 2> 711704 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue. [junit4] 2> 711704 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync [junit4] 2> 711704 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SyncStrategy Sync replicas to https://127.0.0.1:38585/solr/test_shard1_replica_n1/ [junit4] 2> 711704 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me [junit4] 2> 711704 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SyncStrategy https://127.0.0.1:38585/solr/test_shard1_replica_n1/ has no replicas [junit4] 2> 711704 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext Found all replicas participating in election, clear LIR [junit4] 2> 711705 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ShardLeaderElectionContext I am the new leader: https://127.0.0.1:38585/solr/test_shard1_replica_n1/ shard1 [junit4] 2> 711706 INFO (zkCallback-1676-thread-2-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [2]) [junit4] 2> 711706 INFO (zkCallback-1676-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... 
(live nodes size: [2]) [junit4] 2> 711756 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ZkController I am the leader, no recovery necessary [junit4] 2> 711758 INFO (qtp317328429-10758) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores params={qt=/admin/cores&coreNodeName=core_node2&collection.configName=conf1&newCollection=true&name=test_shard1_replica_n1&action=CREATE&numShards=1&collection=test&shard=shard1&wt=javabin&version=2&replicaType=NRT} status=0 QTime=6155 [junit4] 2> 711760 INFO (qtp317328429-10759) [n:127.0.0.1:38585_solr ] o.a.s.h.a.CollectionsHandler Wait for new collection to be active for at most 30 seconds. Check all shard replicas [junit4] 2> 711859 INFO (zkCallback-1676-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [2]) [junit4] 2> 711859 INFO (zkCallback-1676-thread-3-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [2]) [junit4] 2> 712760 INFO (qtp317328429-10759) [n:127.0.0.1:38585_solr ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={replicationFactor=1&collection.configName=conf1&name=test&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:38585_solr&wt=javabin&version=2} status=0 QTime=7471 [junit4] 2> 712766 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ZkShardTerms Successful update of terms at /collections/test/terms/shard1 to Terms{values={core_node2=1}, version=1} [junit4] 2> 712771 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [test_shard1_replica_n1] webapp=/solr path=/update params={wt=javabin&version=2}{add=[1 (1595845274614366208)]} 0 8 [junit4] 2> 712772 INFO (qtp317328429-10759) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [test_shard1_replica_n1] webapp=/solr path=/update params={wt=javabin&version=2}{add=[2 (1595845274622754816)]} 0 1 [junit4] 2> 712773 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.DirectUpdateHandler2 start commit{_version_=1595845274624851968,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false} [junit4] 2> 712773 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@638fe5dc commitCommandVersion:1595845274624851968 [junit4] 2> 712784 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], 
ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712784 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741827_1003{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712793 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741828_1004{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712793 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741828_1004{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712798 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741829_1005{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712798 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741829_1005 size 59 [junit4] 2> 712818 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741830_1006{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712819 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741830_1006{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712821 WARN (DataStreamer for file /data/test/core_node2/data/index/_0.fdx) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.h.h.DFSClient Caught exception [junit4] 2> java.lang.InterruptedException [junit4] 2> at java.base/java.lang.Object.wait(Native Method) [junit4] 2> at java.base/java.lang.Thread.join(Thread.java:1352) [junit4] 2> at java.base/java.lang.Thread.join(Thread.java:1426) [junit4] 2> at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.closeResponder(DFSOutputStream.java:716) [junit4] 2> at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.endBlock(DFSOutputStream.java:476) [junit4] 2> at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:652) 
[junit4] 2> 712830 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741831_1007{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712831 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741831_1007{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712836 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741832_1008{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW]]} size 0 [junit4] 2> 712848 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741832_1008{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW]]} size 0 [junit4] 2> 712852 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741833_1009{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712852 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741833_1009{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712856 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741834_1010{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712856 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741834_1010{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712867 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741835_1011{UCState=UNDER_CONSTRUCTION, 
truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712868 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741835_1011 size 96 [junit4] 2> 712875 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741836_1012{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712875 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741836_1012{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712879 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741837_1013{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712880 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741837_1013 size 106 [junit4] 2> 712887 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741838_1014{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712887 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741838_1014{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712891 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741839_1015{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712891 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741839_1015{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712899 INFO (Block report processor) 
[ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741840_1016{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712899 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741840_1016 size 242 [junit4] 2> 712919 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.SolrIndexSearcher Opening [Searcher@6ee3fa46[test_shard1_replica_n1] main] [junit4] 2> 712921 INFO (searcherExecutor-3499-thread-1-processing-n:127.0.0.1:38585_solr x:test_shard1_replica_n1 s:shard1 c:test r:core_node2) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore [test_shard1_replica_n1] Registered new searcher Searcher@6ee3fa46[test_shard1_replica_n1] main{ExitableDirectoryReader(UninvertingDirectoryReader(Uninverting(_0(7.3.0):C2)))} [junit4] 2> 712921 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW]]} size 75 [junit4] 2> 712921 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741826_1002{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW]]} size 75 [junit4] 2> 712922 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.DirectUpdateHandler2 end_commit_flush [junit4] 2> 712922 INFO (qtp317328429-10756) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [test_shard1_replica_n1] webapp=/solr path=/update params={_stateVer_=test:4&waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=} 0 148 [junit4] 2> 712931 INFO (qtp317328429-10759) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.p.LogUpdateProcessorFactory [test_shard1_replica_n1] webapp=/solr path=/update params={wt=javabin&version=2}{add=[3 (1595845274782138368)]} 0 8 [junit4] 2> 712932 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.AbstractConnector Stopped ServerConnector@64f5baeb{SSL,[ssl, http/1.1]}{127.0.0.1:0} [junit4] 2> 712932 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.c.CoreContainer Shutting down CoreContainer instance=1016935407 [junit4] 2> 712933 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.node, tag=null [junit4] 2> 712933 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@2ba4864e: rootName = solr_38585, 
domain = solr.node, service url = null, agent id = null] for registry solr.node / com.codahale.metrics.MetricRegistry@7173b983 [junit4] 2> 712937 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jvm, tag=null [junit4] 2> 712937 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@4f21ccc1: rootName = solr_38585, domain = solr.jvm, service url = null, agent id = null] for registry solr.jvm / com.codahale.metrics.MetricRegistry@7489f72c [junit4] 2> 712940 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.jetty, tag=null [junit4] 2> 712940 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@71d4accf: rootName = solr_38585, domain = solr.jetty, service url = null, agent id = null] for registry solr.jetty / com.codahale.metrics.MetricRegistry@63fdb7e4 [junit4] 2> 712940 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.c.ZkController Remove node as live in ZooKeeper:/live_nodes/127.0.0.1:38585_solr [junit4] 2> 712941 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.cluster, tag=null [junit4] 2> 712941 INFO (zkCallback-1691-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1) [junit4] 2> 712941 INFO (zkCallback-1673-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1) [junit4] 2> 712941 INFO (zkCallback-1685-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1) [junit4] 2> 712941 INFO (zkCallback-1681-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (1) [junit4] 2> 712941 INFO (zkCallback-1676-thread-3-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(2) -> (1) [junit4] 2> 712941 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.SolrCore [test_shard1_replica_n1] CLOSING SolrCore org.apache.solr.core.SolrCore@53c4aea2 [junit4] 2> 712941 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.core.test.shard1.replica_n1, tag=1405398690 [junit4] 2> 712941 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.m.r.SolrJmxReporter Closing reporter [org.apache.solr.metrics.reporters.SolrJmxReporter@15398af3: rootName = solr_38585, domain = solr.core.test.shard1.replica_n1, service url = null, agent id = null] for registry solr.core.test.shard1.replica_n1 / com.codahale.metrics.MetricRegistry@3a34091 [junit4] 2> 712950 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.m.SolrMetricManager Closing metric reporters for registry=solr.collection.test.shard1.leader, tag=1405398690 [junit4] 2> 712951 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.DirectUpdateHandler2 Committing on IndexWriter close. [junit4] 2> 712951 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.u.SolrIndexWriter Calling setCommitData with IW:org.apache.solr.update.SolrIndexWriter@638fe5dc commitCommandVersion:0 [junit4] 2> 712956 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741842_1018{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712956 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741842_1018{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712960 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741843_1019{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712961 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741843_1019{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712965 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741844_1020{UCState=UNDER_CONSTRUCTION, truncateBlock=null, 
primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712965 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741844_1020{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712968 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741845_1021{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712969 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741845_1021 size 158 [junit4] 2> 712972 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741846_1022{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 712972 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741846_1022{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712976 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741847_1023{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW]]} size 0 [junit4] 2> 712976 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741847_1023{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712980 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741848_1024{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712980 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741848_1024{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712983 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741849_1025{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712983 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741849_1025{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712987 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741850_1026{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712987 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741850_1026{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|FINALIZED], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712990 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741851_1027{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED]]} size 0 [junit4] 2> 712990 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741851_1027{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|FINALIZED], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 712994 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741852_1028{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW]]} size 0 [junit4] 2> 712994 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741852_1028 size 69 [junit4] 2> 712999 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741853_1029{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, 
replicas=[ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|FINALIZED]]} size 0 [junit4] 2> 713000 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741853_1029 size 75 [junit4] 2> 713004 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741854_1030{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 713004 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741854_1030{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-1db7c095-0031-4d58-9f0a-b9871faacd02:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-ed8e29e0-1ee0-4f96-9e66-e0997615d458:NORMAL:127.0.0.1:38913|RBW]]} size 0 [junit4] 2> 713008 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741855_1031{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW]]} size 0 [junit4] 2> 713008 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741855_1031{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW], ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW]]} size 0 [junit4] 2> 713009 INFO (IPC Server handler 4 on 43061) [ ] BlockStateChange BLOCK* addToInvalidates: blk_1073741825_1001 127.0.0.1:38913 127.0.0.1:41499 [junit4] 2> 713013 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:41499 is added to blk_1073741841_1017{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 75 [junit4] 2> 713013 INFO (Block report processor) [ ] BlockStateChange BLOCK* addStoredBlock: blockMap updated: 127.0.0.1:38913 is added to blk_1073741841_1017{UCState=UNDER_CONSTRUCTION, truncateBlock=null, primaryNodeIndex=-1, replicas=[ReplicaUC[[DISK]DS-3a5d0ab8-5403-4d61-90ef-adf53ceb3ebc:NORMAL:127.0.0.1:41499|RBW], ReplicaUC[[DISK]DS-be6c7fb3-19c2-49b8-9574-0adc897ca3a3:NORMAL:127.0.0.1:38913|RBW]]} size 75 [junit4] 2> 713016 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:43061/data/test/core_node2/data [junit4] 2> 713016 INFO (coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:43061/data/test/core_node2/data/snapshot_metadata [junit4] 2> 713016 INFO 
(coreCloseExecutor-3504-thread-1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.s.h.HdfsDirectory Closing hdfs directory hdfs://localhost.localdomain:43061/data/test/core_node2/data/index [junit4] 2> 713016 ERROR (OldIndexDirectoryCleanupThreadForCore-test_shard1_replica_n1) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.HdfsDirectoryFactory Error checking for old index directories to clean-up. [junit4] 2> java.io.IOException: Filesystem closed [junit4] 2> at org.apache.hadoop.hdfs.DFSClient.checkOpen(DFSClient.java:808) [junit4] 2> at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2083) [junit4] 2> at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:2069) [junit4] 2> at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:791) [junit4] 2> at org.apache.hadoop.hdfs.DistributedFileSystem.access$700(DistributedFileSystem.java:106) [junit4] 2> at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:853) [junit4] 2> at org.apache.hadoop.hdfs.DistributedFileSystem$18.doCall(DistributedFileSystem.java:849) [junit4] 2> at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) [junit4] 2> at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:860) [junit4] 2> at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1517) [junit4] 2> at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1557) [junit4] 2> at org.apache.solr.core.HdfsDirectoryFactory.cleanupOldIndexDirectories(HdfsDirectoryFactory.java:529) [junit4] 2> at org.apache.solr.core.SolrCore.lambda$cleanupOldIndexDirectories$21(SolrCore.java:3037) [junit4] 2> at java.base/java.lang.Thread.run(Thread.java:841) [junit4] 2> 713017 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.c.Overseer Overseer (id=72192221371695110-127.0.0.1:38585_solr-n_0000000000) closing [junit4] 2> 713017 INFO (OverseerStateUpdate-72192221371695110-127.0.0.1:38585_solr-n_0000000000) [n:127.0.0.1:38585_solr ] o.a.s.c.Overseer Overseer Loop exiting : 127.0.0.1:38585_solr [junit4] 2> 713017 WARN (OverseerAutoScalingTriggerThread-72192221371695110-127.0.0.1:38585_solr-n_0000000000) [n:127.0.0.1:38585_solr ] o.a.s.c.a.OverseerTriggerThread OverseerTriggerThread woken up but we are closed, exiting. [junit4] 2> 713019 INFO (zkCallback-1673-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.OverseerElectionContext I am going to be the leader 127.0.0.1:42421_solr [junit4] 2> 713019 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@34023232{/solr,null,UNAVAILABLE} [junit4] 2> 713019 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.session Stopped scavenging [junit4] 2> 713019 INFO (zkCallback-1673-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.Overseer Overseer (id=72192221371695109-127.0.0.1:42421_solr-n_0000000001) starting [junit4] 2> 713022 INFO (zkCallback-1691-thread-1) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... 
(live nodes size: [1]) [junit4] 2> 713022 INFO (zkCallback-1691-thread-2) [ ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [1]) [junit4] 2> 713038 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :delete with params name=test&action=DELETE&wt=javabin&version=2 and sendToOCPQueue=true [junit4] 2> 713044 INFO (OverseerThreadFactory-3509-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.a.c.OverseerCollectionMessageHandler Executing Collection Cmd=action=UNLOAD&deleteInstanceDir=true&deleteDataDir=true, asyncId=null [junit4] 2> 713754 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/collections params={name=test&action=DELETE&wt=javabin&version=2} status=0 QTime=715 [junit4] 2> 713755 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.Server jetty-9.4.8.v20171121, build timestamp: 2017-11-21T22:27:37+01:00, git hash: 82b8fb23f757335bb3329d540ce37a2a2615f0a8 [junit4] 2> 713756 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.session DefaultSessionIdManager workerName=node0 [junit4] 2> 713756 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.session No SessionScavenger set, using defaults [junit4] 2> 713756 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.session Scavenging every 600000ms [junit4] 2> 713756 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.h.ContextHandler Started o.e.j.s.ServletContextHandler@6df1361d{/solr,null,AVAILABLE} [junit4] 2> 713756 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.AbstractConnector Started ServerConnector@254b60fe{SSL,[ssl, http/1.1]}{127.0.0.1:38585} [junit4] 2> 713756 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.e.j.s.Server Started @715850ms [junit4] 2> 713757 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, hostPort=38585} [junit4] 2> 713757 ERROR (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be missing or incomplete. [junit4] 2> 713757 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.s.SolrDispatchFilter ___ _ Welcome to Apache Solr? 
version 7.3.0 [junit4] 2> 713757 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.s.SolrDispatchFilter / __| ___| |_ _ Starting in cloud mode on port null [junit4] 2> 713757 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_| Install dir: null [junit4] 2> 713757 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.s.SolrDispatchFilter |___/\___/_|_| Start time: 2018-03-24T18:37:32.352626Z [junit4] 2> 713758 INFO (zkConnectionManagerCallback-1694-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 713759 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading... [junit4] 2> 713763 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [ ] o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:34003/solr [junit4] 2> 713764 INFO (zkConnectionManagerCallback-1698-thread-1) [ ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 713765 INFO (zkConnectionManagerCallback-1700-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 713768 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1) [junit4] 2> 713769 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.Overseer Overseer (id=null) closing [junit4] 2> 713769 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.ZkController Register node as live in ZooKeeper:/live_nodes/127.0.0.1:38585_solr [junit4] 2> 713770 INFO (zkCallback-1691-thread-1) [ ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2) [junit4] 2> 713770 INFO (zkCallback-1673-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2) [junit4] 2> 713770 INFO (zkCallback-1685-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2) [junit4] 2> 713770 INFO (zkCallback-1699-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... 
(1) -> (2) [junit4] 2> 713798 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.node' (registry 'solr.node') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 713803 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.jvm' (registry 'solr.jvm') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 713804 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 'solr_38585.solr.jetty' (registry 'solr.jetty') enabled at server: com.sun.jmx.mbeanserver.JmxMBeanServer@7e48ea42 [junit4] 2> 713805 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-001/node2/. [junit4] 2> 713805 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.CorePropertiesLocator Cores are: [test_shard1_replica_n1] [junit4] 2> 713805 INFO (coreLoadExecutor-3515-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores [junit4] 2> 713807 INFO (zkConnectionManagerCallback-1705-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ConnectionManager zkClient has connected [junit4] 2> 713807 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2) [junit4] 2> 713808 INFO (TEST-HDFSCollectionsAPITest.testDataDirIsNotReused-seed#[35FB936DF3887624]) [n:127.0.0.1:38585_solr ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:34003/solr ready [junit4] 2> 713969 INFO (org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$ReplicationMonitor@305d7d92) [ ] BlockStateChange BLOCK* BlockManager: ask 127.0.0.1:41499 to delete [blk_1073741825_1001] [junit4] 2> 715050 INFO (OverseerCollectionConfigSetProcessor-72192221371695109-127.0.0.1:42421_solr-n_0000000001) [n:127.0.0.1:42421_solr ] o.a.s.c.OverseerTaskQueue Response ZK path: /overseer/collection-queue-work/qnr-0000000002 doesn't exist. 
Requestor may have disconnected from ZooKeeper [junit4] 2> 716969 INFO (org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$ReplicationMonitor@305d7d92) [ ] BlockStateChange BLOCK* BlockManager: ask 127.0.0.1:38913 to delete [blk_1073741825_1001] [junit4] 2> 723806 ERROR (coreLoadExecutor-3515-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr c:test s:shard1 r:core_node2 x:test_shard1_replica_n1] o.a.s.c.ZkController [junit4] 2> org.apache.solr.common.SolrException: Replica core_node2 is not present in cluster state: null [junit4] 2> at org.apache.solr.cloud.ZkController.checkStateInZk(ZkController.java:1733) [junit4] 2> at org.apache.solr.cloud.ZkController.preRegister(ZkController.java:1636) [junit4] 2> at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1033) [junit4] 2> at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:640) [junit4] 2> at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:197) [junit4] 2> at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) [junit4] 2> at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:188) [junit4] 2> at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) [junit4] 2> at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) [junit4] 2> at java.base/java.lang.Thread.run(Thread.java:841) [junit4] 2> 723807 ERROR (coreContainerWorkExecutor-3511-thread-1-processing-n:127.0.0.1:38585_solr) [n:127.0.0.1:38585_solr ] o.a.s.c.CoreContainer Error waiting for SolrCore to be created [junit4] 2> java.util.concurrent.ExecutionException: org.apache.solr.common.SolrException: Unable to create core [test_shard1_replica_n1] [junit4] 2> at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) [junit4] 2> at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) [junit4] 2> at org.apache.solr.core.CoreContainer.lambda$load$14(CoreContainer.java:669) [junit4] 2> at com.codahale.metrics.InstrumentedExecutorService$InstrumentedRunnable.run(InstrumentedExecutorService.java:176) [junit4] 2> at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515) [junit4] 2> at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) [junit4] 2> at org.apache.solr.common.util.ExecutorUtil$MDCAwareThreadPoolExecutor.lambda$execute$0(ExecutorUtil.java:188) [junit4] 2> at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) [junit4] 2> at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) [junit4] 2> at java.base/java.lang.Thread.run(Thread.java:841) [junit4] 2> Caused by: org.apache.solr.common.SolrException: Unable to create core [test_shard1_replica_n1] [junit4] 2> at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1056) [junit4] 2> at org.apache.solr.core.CoreContainer.lambda$load$13(CoreContainer.java:640) [junit4] 2> at com.codahale.metrics.InstrumentedExecutorService$InstrumentedCallable.call(InstrumentedExecutorService.java:197) [junit4] 2> ... 5 more [junit4] 2> Caused by: org.apache.solr.common.SolrException: [junit4] 2> at org.apache.solr.cloud.ZkController.preRegister(ZkController.java:1665) [junit4] 2> at org.apache.solr.core.CoreContainer.createFromDescriptor(CoreContainer.java:1033) [junit4] 2> ... 
7 more [junit4] 2> Caused by: org.apache.solr.common.SolrException: Replica core_node2 is not present in cluster state: null [junit4] 2> at org.apache.solr.cloud.ZkController.checkStateInZk(ZkController.java:1733) [junit4] 2> at org.apache.solr.cloud.ZkController.preRegister(ZkController.java:1636) [junit4] 2> ... 8 more [junit4] 2> 723821 INFO (qtp31584075-11221) [n:127.0.0.1:38585_solr ] o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params replicationFactor=1&collection.configName=conf1&name=test&nrtReplicas=1&action=CREATE&numShards=1&createNodeSet=127.0.0.1:42421_solr&wt=javabin&version=2 and sendToOCPQueue=true [junit4] 2> 723823 INFO (OverseerThreadFactory-3509-thread-2-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.a.c.CreateCollectionCmd Create collection test [junit4] 2> 723926 INFO (OverseerStateUpdate-72192221371695109-127.0.0.1:42421_solr-n_0000000001) [n:127.0.0.1:42421_solr ] o.a.s.c.o.SliceMutator createReplica() { [junit4] 2> "operation":"ADDREPLICA", [junit4] 2> "collection":"test", [junit4] 2> "shard":"shard1", [junit4] 2> "core":"test_shard1_replica_n3", [junit4] 2> "state":"down", [junit4] 2> "base_url":"https://127.0.0.1:42421/solr", [junit4] 2> "type":"NRT", [junit4] 2> "waitForFinalState":"false"} [junit4] 2> 724129 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr ] o.a.s.h.a.CoreAdminOperation core create command qt=/admin/cores&coreNodeName=core_node4&collection.configName=conf1&newCollection=true&name=test_shard1_replica_n3&action=CREATE&numShards=1&collection=test&shard=shard1&wt=javabin&version=2&replicaType=NRT [junit4] 2> 724130 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr ] o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 transient cores [junit4] 2> 724232 INFO (zkCallback-1673-thread-1-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [2]) [junit4] 2> 724232 INFO (zkCallback-1673-thread-2-processing-n:127.0.0.1:42421_solr) [n:127.0.0.1:42421_solr ] o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncConnected type:NodeDataChanged path:/collections/test/state.json] for collection [test] has occurred - updating... (live nodes size: [2]) [junit4] 2> 725137 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr c:test s:shard1 r:core_node4 x:test_shard1_replica_n3] o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.3.0 [junit4] 2> 725141 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr c:test s:shard1 r:core_node4 x:test_shard1_replica_n3] o.a.s.s.IndexSchema [test_shard1_replica_n3] Schema name=minimal [junit4] 2> 725142 INFO (qtp693490159-10749) [n:127.0.0.1:42421_solr c:test s:shard1 r:core_node4 x:test_shard1_replica_n3] o.a.s.s.IndexSchem [...truncated too long message...] 0000000001) [n:127.0.0.1:42421_solr ] o.a.s.c.a.OverseerTriggerThread OverseerTriggerThread woken up but we are closed, exiting. 
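The "Invoked Collection Action :delete" and ":create" entries above are ordinary Collections API calls against the test cluster. As a minimal SolrJ sketch only (not the test's actual code): the ZooKeeper address 127.0.0.1:34003/solr, the collection name "test", the configset "conf1" and the createNodeSet value are taken from the log; the class name and everything else below is an illustrative assumption.

// Sketch of the two Collections API calls visible in the log above:
// DELETE of collection "test", then CREATE with numShards=1, nrtReplicas=1,
// configset "conf1", pinned to one node via createNodeSet.
import java.util.Collections;
import java.util.Optional;

import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;

public class CollectionsApiSketch {
  public static void main(String[] args) throws Exception {
    // CloudSolrClient pointed at the test ZooKeeper (address from the log).
    try (CloudSolrClient client = new CloudSolrClient.Builder(
        Collections.singletonList("127.0.0.1:34003"), Optional.of("/solr")).build()) {
      // Matches: "Invoked Collection Action :delete with params name=test&action=DELETE"
      CollectionAdminRequest.deleteCollection("test").process(client);
      // Matches: "Invoked Collection Action :create with params replicationFactor=1&
      //           collection.configName=conf1&name=test&nrtReplicas=1&numShards=1&
      //           createNodeSet=127.0.0.1:42421_solr"
      CollectionAdminRequest.createCollection("test", "conf1", 1, 1)
          .setCreateNodeSet("127.0.0.1:42421_solr")
          .process(client);
    }
  }
}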
[junit4] 2> 816421 INFO (jetty-closer-1658-thread-2) [ ] o.e.j.s.h.ContextHandler Stopped o.e.j.s.ServletContextHandler@2b985410{/solr,null,UNAVAILABLE} [junit4] 2> 816421 INFO (jetty-closer-1658-thread-2) [ ] o.e.j.s.session Stopped scavenging [junit4] 2> 816421 ERROR (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.z.s.ZooKeeperServer ZKShutdownHandler is not registered, so ZooKeeper server won't take any action on ERROR or SHUTDOWN server state changes [junit4] 2> 816421 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:34003 34003 [junit4] 2> 817203 INFO (Thread-4922) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:34003 34003 [junit4] 2> 817204 WARN (Thread-4922) [ ] o.a.s.c.ZkTestServer Watch limit violations: [junit4] 2> Maximum concurrent create/delete watches above limit: [junit4] 2> [junit4] 2> 7 /solr/aliases.json [junit4] 2> 5 /solr/collections/test/terms/shard1 [junit4] 2> 3 /solr/security.json [junit4] 2> 2 /solr/configs/conf1 [junit4] 2> [junit4] 2> Maximum concurrent data watches above limit: [junit4] 2> [junit4] 2> 15 /solr/collections/test/state.json [junit4] 2> 7 /solr/clusterstate.json [junit4] 2> 7 /solr/clusterprops.json [junit4] 2> 2 /solr/autoscaling.json [junit4] 2> [junit4] 2> Maximum concurrent children watches above limit: [junit4] 2> [junit4] 2> 7 /solr/live_nodes [junit4] 2> 7 /solr/collections [junit4] 2> 2 /solr/overseer/queue [junit4] 2> 2 /solr/autoscaling/events/.auto_add_replicas [junit4] 2> 2 /solr/overseer/queue-work [junit4] 2> 2 /solr/overseer/collection-queue-work [junit4] 2> [junit4] 2> 817204 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called [junit4] 2> 817220 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Stopped HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:0 [junit4] 2> 817320 WARN (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data3/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data4/]] heartbeating to localhost.localdomain/127.0.0.1:43061) [ ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted [junit4] 2> 817320 WARN (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data3/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data4/]] heartbeating to localhost.localdomain/127.0.0.1:43061) [ ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-1131975572-88.99.242.108-1521916643351 (Datanode Uuid 50d65d5a-cf3d-4ba6-823c-0fc48640c627) service to localhost.localdomain/127.0.0.1:43061 [junit4] 2> 817321 WARN (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.h.h.s.d.DirectoryScanner DirectoryScanner: shutdown has been called [junit4] 2> 817324 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Stopped HttpServer2$SelectChannelConnectorWithSafeStartup@localhost:0 [junit4] 2> 817425 WARN 
(DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data1/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data2/]] heartbeating to localhost.localdomain/127.0.0.1:43061) [ ] o.a.h.h.s.d.IncrementalBlockReportManager IncrementalBlockReportManager interrupted [junit4] 2> 817425 WARN (DataNode: [[[DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data1/, [DISK]file:/home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001/tempDir-002/hdfsBaseDir/data/data2/]] heartbeating to localhost.localdomain/127.0.0.1:43061) [ ] o.a.h.h.s.d.DataNode Ending block pool service for: Block pool BP-1131975572-88.99.242.108-1521916643351 (Datanode Uuid 41560b0a-3d3c-49bd-a9a4-ba3d2704e2a1) service to localhost.localdomain/127.0.0.1:43061 [junit4] 2> 817429 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.m.log Stopped HttpServer2$SelectChannelConnectorWithSafeStartup@localhost.localdomain:0 [junit4] 2> 817547 INFO (SUITE-HDFSCollectionsAPITest-seed#[35FB936DF3887624]-worker) [ ] o.a.s.c.ZkTestServer connecting to 127.0.0.1:34003 34003 [junit4] 2> NOTE: leaving temporary files on disk at: /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.hdfs.HDFSCollectionsAPITest_35FB936DF3887624-001 [junit4] 2> Mar 24, 2018 6:39:16 PM com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks [junit4] 2> WARNING: Will linger awaiting termination of 66 leaked thread(s). 
[junit4] 2> NOTE: test params are: codec=Asserting(Lucene70): {id=FST50}, docValues:{_version_=DocValuesFormat(name=Memory)}, maxPointsInLeafNode=1496, maxMBSortInHeap=7.465834936329097, sim=RandomSimilarity(queryNorm=false): {}, locale=gsw-LI, timezone=Etc/GMT-11 [junit4] 2> NOTE: Linux 4.13.0-36-generic amd64/Oracle Corporation 11-ea (64-bit)/cpus=8,threads=1,free=267780456,total=536870912 [junit4] 2> NOTE: All tests run in this JVM: [RegexBoostProcessorTest, TestCopyFieldCollectionResource, TestHttpShardHandlerFactory, TestNumericTerms32, ChangedSchemaMergeTest, TestFieldTypeResource, TestInPlaceUpdatesDistrib, TestXmlQParserPlugin, DeleteLastCustomShardedReplicaTest, TestBackupRepositoryFactory, MinimalSchemaTest, SubstringBytesRefFilterTest, TestExecutePlanAction, CreateCollectionCleanupTest, HdfsUnloadDistributedZkTest, TestCollapseQParserPlugin, TestAnalyzedSuggestions, TestDFRSimilarityFactory, TestAnalyzeInfixSuggestions, NotRequiredUniqueKeyTest, MultiTermTest, UnloadDistributedZkTest, TestSha256AuthenticationProvider, SystemLogListenerTest, CoreAdminRequestStatusTest, TestCloudSearcherWarming, TestSchemaSimilarityResource, TestCustomSort, TestTolerantSearch, TestRandomRequestDistribution, TestRandomDVFaceting, TestUpdate, TestRecoveryHdfs, StatelessScriptUpdateProcessorFactoryTest, DocValuesMultiTest, TestManagedSchema, BlobRepositoryCloudTest, TestImplicitCoreProperties, CoreAdminHandlerTest, ConcurrentCreateRoutedAliasTest, ConvertedLegacyTest, NodeMutatorTest, TestCloudDeleteByQuery, TestManagedSynonymGraphFilterFactory, CopyFieldTest, TestSubQueryTransformerCrossCore, ImplicitSnitchTest, SolrCmdDistributorTest, SegmentsInfoRequestHandlerTest, HdfsSyncSliceTest, TestStressReorder, TestSizeLimitedDistributedMap, LIRRollingUpdatesTest, TestValueSourceCache, DeleteShardTest, TestSlowCompositeReaderWrapper, TestRawResponseWriter, TestLazyCores, TestSSLRandomization, RollingRestartTest, TestPullReplica, HLLUtilTest, CollectionPropsTest, DistributedSpellCheckComponentTest, TestSolrCoreProperties, ReturnFieldsTest, TestCustomStream, TestAuthorizationFramework, TestTestInjection, CdcrBidirectionalTest, BlockJoinFacetDistribTest, SharedFSAutoReplicaFailoverTest, CloudMLTQParserTest, TestEmbeddedSolrServerAdminHandler, MoveReplicaHDFSTest, HDFSCollectionsAPITest] [junit4] Completed [284/783 (1!)] on J2 in 122.26s, 1 test, 1 failure <<< FAILURES! [...truncated 1815 lines...] [junit4] JVM J0: stdout was not empty, see: /home/jenkins/workspace/Lucene-Solr-7.3-Linux/solr/build/solr-core/test/temp/junit4-J0-20180324_182536_46416930611148556009282.sysout [junit4] >>> JVM J0 emitted unexpected output (verbatim) ---- [junit4] java.lang.OutOfMemoryError: Java heap space [junit4] Dumping heap to /home/jenkins/workspace/Lucene-Solr-7.3-Linux/heapdumps/java_pid25014.hprof ... [junit4] Heap dump file created [306503862 bytes in 0.401 secs] [junit4] <<< JVM J0: EOF ---- [...truncated 9275 lines...] BUILD FAILED /home/jenkins/workspace/Lucene-Solr-7.3-Linux/build.xml:618: The following error occurred while executing this line: /home/jenkins/workspace/Lucene-Solr-7.3-Linux/build.xml:570: Some of the tests produced a heap dump, but did not fail. Maybe a suppressed OutOfMemoryError? 
Dumps created: * java_pid25014.hprof Total time: 73 minutes 4 seconds Build step 'Invoke Ant' marked build as failure Archiving artifacts Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2 [WARNINGS] Skipping publisher since build result is FAILURE Recording test results Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2 Email was triggered for: Failure - Any Sending email for trigger: Failure - Any Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2 Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2 Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2 Setting ANT_1_8_2_HOME=/var/lib/jenkins/tools/hudson.tasks.Ant_AntInstallation/ANT_1.8.2