Build: https://builds.apache.org/job/Lucene-Solr-Tests-8.x/492/

2 tests failed.
FAILED:  
org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore.testRestoreFailure

Error Message:
Failed collection is still in the clusterstate: 
DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={
   "pullReplicas":0,   "replicationFactor":2,   "shards":{     "shard2":{       
"range":"0-7fffffff",       "state":"construction",       
"replicas":{"core_node2":{           
"core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",           
"base_url":"https://127.0.0.1:37581/solr",           
"node_name":"127.0.0.1:37581_solr",           "state":"down",           
"type":"NRT",           "force_set_state":"false"}},       
"stateTimestamp":"1567075412159408232"},     "shard1":{       
"range":"80000000-ffffffff",       "state":"construction",       "replicas":{}, 
      "stateTimestamp":"1567075412159421642"}},   
"router":{"name":"compositeId"},   "maxShardsPerNode":"2",   
"autoAddReplicas":"false",   "nrtReplicas":2,   "tlogReplicas":0} Expected: not 
a collection containing "hdfsbackuprestore_testfailure_restored"      but: was 
<[hdfsbackuprestore_testok, hdfsbackuprestore_testfailure_restored, 
hdfsbackuprestore_testfailure, hdfsbackuprestore_testok_restored]>

Stack Trace:
java.lang.AssertionError: Failed collection is still in the clusterstate: 
DocCollection(hdfsbackuprestore_testfailure_restored//collections/hdfsbackuprestore_testfailure_restored/state.json/2)={
  "pullReplicas":0,
  "replicationFactor":2,
  "shards":{
    "shard2":{
      "range":"0-7fffffff",
      "state":"construction",
      "replicas":{"core_node2":{
          "core":"hdfsbackuprestore_testfailure_restored_shard2_replica_n1",
          "base_url":"https://127.0.0.1:37581/solr",
          "node_name":"127.0.0.1:37581_solr",
          "state":"down",
          "type":"NRT",
          "force_set_state":"false"}},
      "stateTimestamp":"1567075412159408232"},
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"construction",
      "replicas":{},
      "stateTimestamp":"1567075412159421642"}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"false",
  "nrtReplicas":2,
  "tlogReplicas":0}
Expected: not a collection containing "hdfsbackuprestore_testfailure_restored"
     but: was <[hdfsbackuprestore_testok, 
hdfsbackuprestore_testfailure_restored, hdfsbackuprestore_testfailure, 
hdfsbackuprestore_testok_restored]>
        at 
__randomizedtesting.SeedInfo.seed([4E0D9701184A2A9E:67710924301329B3]:0)
        at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
        at org.junit.Assert.assertThat(Assert.java:956)
        at 
org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)


FAILED:  
org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore.testRestoreFailure

Error Message:
Failed collection is still in the clusterstate: 
DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={
   "pullReplicas":0,   "replicationFactor":2,   "shards":{     "shard2":{       
"range":"0-7fffffff",       "state":"construction",       
"replicas":{"core_node2":{           
"core":"backuprestore_testfailure_restored_shard2_replica_n1",           
"base_url":"http://127.0.0.1:33234/solr",           
"node_name":"127.0.0.1:33234_solr",           "state":"down",           
"type":"NRT",           "force_set_state":"false"}},       
"stateTimestamp":"1567078270679883280"},     "shard1":{       
"range":"80000000-ffffffff",       "state":"construction",       "replicas":{}, 
      "stateTimestamp":"1567078270679892465"}},   
"router":{"name":"compositeId"},   "maxShardsPerNode":"2",   
"autoAddReplicas":"false",   "nrtReplicas":2,   "tlogReplicas":0} Expected: not 
a collection containing "backuprestore_testfailure_restored"      but: was 
<[backuprestore_testok, backuprestore_testfailure, 
backuprestore_testfailure_restored, backuprestore_testok_restored]>

Stack Trace:
java.lang.AssertionError: Failed collection is still in the clusterstate: 
DocCollection(backuprestore_testfailure_restored//collections/backuprestore_testfailure_restored/state.json/2)={
  "pullReplicas":0,
  "replicationFactor":2,
  "shards":{
    "shard2":{
      "range":"0-7fffffff",
      "state":"construction",
      "replicas":{"core_node2":{
          "core":"backuprestore_testfailure_restored_shard2_replica_n1",
          "base_url":"http://127.0.0.1:33234/solr",
          "node_name":"127.0.0.1:33234_solr",
          "state":"down",
          "type":"NRT",
          "force_set_state":"false"}},
      "stateTimestamp":"1567078270679883280"},
    "shard1":{
      "range":"80000000-ffffffff",
      "state":"construction",
      "replicas":{},
      "stateTimestamp":"1567078270679892465"}},
  "router":{"name":"compositeId"},
  "maxShardsPerNode":"2",
  "autoAddReplicas":"false",
  "nrtReplicas":2,
  "tlogReplicas":0}
Expected: not a collection containing "backuprestore_testfailure_restored"
     but: was <[backuprestore_testok, backuprestore_testfailure, 
backuprestore_testfailure_restored, backuprestore_testok_restored]>
        at 
__randomizedtesting.SeedInfo.seed([4E0D9701184A2A9E:67710924301329B3]:0)
        at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
        at org.junit.Assert.assertThat(Assert.java:956)
        at 
org.apache.solr.cloud.api.collections.AbstractCloudBackupRestoreTestCase.testRestoreFailure(AbstractCloudBackupRestoreTestCase.java:211)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1750)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:938)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:974)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:988)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:947)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:832)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:883)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:894)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:748)




Build Log:
[...truncated 13394 lines...]
   [junit4] Suite: 
org.apache.solr.cloud.api.collections.TestHdfsCloudBackupRestore
   [junit4]   2> 338934 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.SolrTestCaseJ4 SecureRandom sanity checks: 
test.solr.allowed.securerandom=null & java.security.egd=file:/dev/./urandom
   [junit4]   2> 338951 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.SolrTestCaseJ4 Created dataDir: 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/data-dir-9-001
   [junit4]   2> 338962 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.SolrTestCaseJ4 startTrackingSearchers: numOpens=10 numCloses=10
   [junit4]   2> 338962 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.SolrTestCaseJ4 Using PointFields (NUMERIC_POINTS_SYSPROP=true) 
w/NUMERIC_DOCVALUES_SYSPROP=true
   [junit4]   2> 338989 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (true) via: 
@org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN)
   [junit4]   2> 342594 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.h.u.NativeCodeLoader Unable to load native-hadoop library for your 
platform... using builtin-java classes where applicable
   [junit4]   1> Formatting using clusterid: testClusterID
   [junit4]   2> 350226 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.h.m.i.MetricsConfig Cannot locate configuration: tried 
hadoop-metrics2-namenode.properties,hadoop-metrics2.properties
   [junit4]   2> 351833 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 351978 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: 
afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 351994 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 351994 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 351994 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 351996 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@54fb9ee{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 353850 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@5cec5af1{hdfs,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-lucene2-us-west.apache.org-43747-hdfs-_-any-4815867477447280271.dir/webapp/,AVAILABLE}{/hdfs}
   [junit4]   2> 353905 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@4da13787{HTTP/1.1,[http/1.1]}{lucene2-us-west.apache.org:43747}
   [junit4]   2> 353905 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.Server Started @353949ms
   [junit4]   2> 362077 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 362109 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: 
afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 362122 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 362122 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 362122 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 362122 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4b2afa75{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 362976 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@72547b5{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-38032-datanode-_-any-4527236998548102363.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 362980 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@39082416{HTTP/1.1,[http/1.1]}{localhost:38032}
   [junit4]   2> 362980 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.Server Started @363023ms
   [junit4]   2> 366254 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.h.h.HttpRequestLog Jetty request log can only be enabled using Log4j
   [junit4]   2> 366255 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: 
afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 366376 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 366376 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 366376 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 366384 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@7dab02f2{static,/static,jar:file:/home/jenkins/.ivy2/cache/org.apache.hadoop/hadoop-hdfs/tests/hadoop-hdfs-3.2.0-tests.jar!/webapps/static,AVAILABLE}
   [junit4]   2> 367841 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.w.WebAppContext@4aea4952{datanode,/,file:///home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/jetty-localhost-40687-datanode-_-any-2656067372604467379.dir/webapp/,AVAILABLE}{/datanode}
   [junit4]   2> 367842 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.AbstractConnector Started 
ServerConnector@6ac5e7a4{HTTP/1.1,[http/1.1]}{localhost:40687}
   [junit4]   2> 367842 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.e.j.s.Server Started @367885ms
   [junit4]   2> 372676 WARN  (Thread-265) [     ] 
o.a.h.h.s.d.f.i.FsDatasetImpl Lock held time above threshold: lock identifier: 
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl 
lockHeldTimeMs=312 ms. Suppressed 0 lock warnings. The stack trace is: 
java.lang.Thread.getStackTrace(Thread.java:1559)
   [junit4]   2> 
org.apache.hadoop.util.StringUtils.getStackTrace(StringUtils.java:1032)
   [junit4]   2> 
org.apache.hadoop.util.InstrumentedLock.logWarning(InstrumentedLock.java:148)
   [junit4]   2> 
org.apache.hadoop.util.InstrumentedLock.check(InstrumentedLock.java:186)
   [junit4]   2> 
org.apache.hadoop.util.InstrumentedLock.unlock(InstrumentedLock.java:133)
   [junit4]   2> 
org.apache.hadoop.util.AutoCloseableLock.release(AutoCloseableLock.java:84)
   [junit4]   2> 
org.apache.hadoop.util.AutoCloseableLock.close(AutoCloseableLock.java:96)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.activateVolume(FsDatasetImpl.java:429)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.addVolume(FsDatasetImpl.java:449)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl.<init>(FsDatasetImpl.java:334)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory.newInstance(FsDatasetFactory.java:34)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetFactory.newInstance(FsDatasetFactory.java:30)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.DataNode.initStorage(DataNode.java:1732)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.DataNode.initBlockPool(DataNode.java:1678)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.BPOfferService.verifyAndSetNamespaceInfo(BPOfferService.java:390)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.BPServiceActor.connectToNNAndHandshake(BPServiceActor.java:280)
   [junit4]   2> 
org.apache.hadoop.hdfs.server.datanode.BPServiceActor.run(BPServiceActor.java:819)
   [junit4]   2> java.lang.Thread.run(Thread.java:748)
   [junit4]   2> 
   [junit4]   2> 375742 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x804f02bdacb9a407: Processing first storage report for 
DS-58cdbaf2-df89-4ecb-98c5-7ccac28bd359 from datanode 
8f4b546c-4238-4abe-a0dc-0eb8566cc48b
   [junit4]   2> 375796 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x804f02bdacb9a407: from storage 
DS-58cdbaf2-df89-4ecb-98c5-7ccac28bd359 node 
DatanodeRegistration(127.0.0.1:41676, 
datanodeUuid=8f4b546c-4238-4abe-a0dc-0eb8566cc48b, infoPort=42413, 
infoSecurePort=0, ipcPort=41430, 
storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 
0, hasStaleStorage: true, processing time: 3 msecs, invalidatedBlocks: 0
   [junit4]   2> 375815 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x629aaa2f76954a6d: Processing first storage report for 
DS-24037231-2a50-48b4-8406-05829731388c from datanode 
88de6798-6e70-4069-ba3e-b40c9b9b5a74
   [junit4]   2> 375816 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x629aaa2f76954a6d: from storage 
DS-24037231-2a50-48b4-8406-05829731388c node 
DatanodeRegistration(127.0.0.1:42762, 
datanodeUuid=88de6798-6e70-4069-ba3e-b40c9b9b5a74, infoPort=43879, 
infoSecurePort=0, ipcPort=39957, 
storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 
0, hasStaleStorage: true, processing time: 1 msecs, invalidatedBlocks: 0
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x629aaa2f76954a6d: Processing first storage report for 
DS-ad624b20-3299-49b9-8c24-ae7dc641703e from datanode 
88de6798-6e70-4069-ba3e-b40c9b9b5a74
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x629aaa2f76954a6d: from storage 
DS-ad624b20-3299-49b9-8c24-ae7dc641703e node 
DatanodeRegistration(127.0.0.1:42762, 
datanodeUuid=88de6798-6e70-4069-ba3e-b40c9b9b5a74, infoPort=43879, 
infoSecurePort=0, ipcPort=39957, 
storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 
0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x804f02bdacb9a407: Processing first storage report for 
DS-70f57e38-5a07-407d-a93c-52587d38a2e8 from datanode 
8f4b546c-4238-4abe-a0dc-0eb8566cc48b
   [junit4]   2> 375844 INFO  (Block report processor) [     ] BlockStateChange 
BLOCK* processReport 0x804f02bdacb9a407: from storage 
DS-70f57e38-5a07-407d-a93c-52587d38a2e8 node 
DatanodeRegistration(127.0.0.1:41676, 
datanodeUuid=8f4b546c-4238-4abe-a0dc-0eb8566cc48b, infoPort=42413, 
infoSecurePort=0, ipcPort=41430, 
storageInfo=lv=-57;cid=testClusterID;nsid=714383687;c=1567075296700), blocks: 
0, hasStaleStorage: false, processing time: 0 msecs, invalidatedBlocks: 0
   [junit4]   2> 376286 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.a.c.TestHdfsCloudBackupRestore The NameNode is in SafeMode - Solr will 
wait 5 seconds and try again.
   [junit4]   2> 381292 WARN  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.a.c.TestHdfsCloudBackupRestore The NameNode is in SafeMode - Solr will 
wait 5 seconds and try again.
   [junit4]   2> 386829 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.MiniSolrCloudCluster Starting cluster of 2 servers in 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002
   [junit4]   2> 386829 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 386856 INFO  (ZkTestServer Run Thread) [     ] 
o.a.s.c.ZkTestServer client port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 386856 INFO  (ZkTestServer Run Thread) [     ] 
o.a.s.c.ZkTestServer Starting server
   [junit4]   2> 386956 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.ZkTestServer start zk server on port:33641
   [junit4]   2> 386956 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.ZkTestServer waitForServerUp: 127.0.0.1:33641
   [junit4]   2> 386956 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.ZkTestServer parse host and port list: 127.0.0.1:33641
   [junit4]   2> 386956 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1 33641
   [junit4]   2> 386975 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387013 INFO  (zkConnectionManagerCallback-578-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387022 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387025 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387054 INFO  (zkConnectionManagerCallback-580-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387055 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387056 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387057 INFO  (zkConnectionManagerCallback-582-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387057 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387174 WARN  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 387174 WARN  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.AbstractConnector Ignoring deprecated socket close linger time
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.s.e.JettySolrRunner Start Jetty (original configured port=0)
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.s.e.JettySolrRunner Trying to start Jetty on port 0 try number 1 ...
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: 
afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 387174 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.Server jetty-9.4.19.v20190610; built: 2019-06-10T16:30:51.723Z; git: 
afcf563148970e98786327af5e07c261fda175d3; jvm 1.8.0_191-b12
   [junit4]   2> 387188 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 387188 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 387188 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 387189 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4fd41b22{/solr,null,AVAILABLE}
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.AbstractConnector Started ServerConnector@7fc0d56c{SSL,[ssl, 
http/1.1]}{127.0.0.1:37581}
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.e.j.s.Server Started @387233ms
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=37581}
   [junit4]   2> 387190 ERROR (jetty-launcher-583-thread-2) [     ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.3.0
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 387190 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-08-29T10:42:16.597Z
   [junit4]   2> 387208 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.session DefaultSessionIdManager workerName=node0
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.session No SessionScavenger set, using defaults
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.session node0 Scavenging every 600000ms
   [junit4]   2> 387244 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@548f922b{/solr,null,AVAILABLE}
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.AbstractConnector Started ServerConnector@7c62ff31{SSL,[ssl, 
http/1.1]}{127.0.0.1:41788}
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.e.j.s.Server Started @387292ms
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: {hostContext=/solr, 
hostPort=41788}
   [junit4]   2> 387249 ERROR (jetty-launcher-583-thread-1) [     ] 
o.a.s.u.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.s.SolrDispatchFilter Using logger factory 
org.apache.logging.slf4j.Log4jLoggerFactory
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
8.3.0
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 387249 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2019-08-29T10:42:16.656Z
   [junit4]   2> 387251 INFO  (zkConnectionManagerCallback-585-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387252 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387261 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 387281 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 387297 INFO  (zkConnectionManagerCallback-587-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 387297 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 387298 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.s.SolrDispatchFilter solr.xml found in ZooKeeper. Loading...
   [junit4]   2> 387318 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 387320 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.SolrXmlConfig MBean server found: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50, but no JMX reporters were 
configured - adding default JMX reporter.
   [junit4]   2> 388528 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 388529 WARN  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport 
since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388635 WARN  (jetty-launcher-583-thread-1) [     ] 
o.e.j.u.s.S.config Trusting all certificates configured for 
Client@4324d32b[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388635 WARN  (jetty-launcher-583-thread-1) [     ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
Client@4324d32b[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388672 WARN  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport 
since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388700 WARN  (jetty-launcher-583-thread-1) [     ] 
o.e.j.u.s.S.config Trusting all certificates configured for 
Client@5ad2dc8c[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388700 WARN  (jetty-launcher-583-thread-1) [     ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
Client@5ad2dc8c[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388701 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33641/solr
   [junit4]   2> 388715 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 388772 INFO  (zkConnectionManagerCallback-595-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 388772 INFO  (jetty-launcher-583-thread-1) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 388930 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.h.c.HttpShardHandlerFactory Host whitelist initialized: 
WhitelistHostChecker [whitelistHosts=null, whitelistHostCheckingEnabled=true]
   [junit4]   2> 388947 WARN  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport 
since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388948 WARN  (jetty-launcher-583-thread-2) [     ] 
o.e.j.u.s.S.config Trusting all certificates configured for 
Client@1b735181[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388948 WARN  (jetty-launcher-583-thread-2) [     ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
Client@1b735181[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388960 WARN  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.s.i.Http2SolrClient Create Http2SolrClient with HTTP/1.1 transport 
since Java 8 or lower versions does not support SSL + HTTP/2
   [junit4]   2> 388961 WARN  (jetty-launcher-583-thread-2) [     ] 
o.e.j.u.s.S.config Trusting all certificates configured for 
Client@6e3c817e[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388961 WARN  (jetty-launcher-583-thread-2) [     ] 
o.e.j.u.s.S.config No Client EndPointIdentificationAlgorithm configured for 
Client@6e3c817e[provider=null,keyStore=null,trustStore=null]
   [junit4]   2> 388971 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33641/solr
   [junit4]   2> 388991 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to 
connect to ZooKeeper
   [junit4]   2> 389007 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 389008 INFO  (zkConnectionManagerCallback-599-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389008 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Client is connected 
to ZooKeeper
   [junit4]   2> 389028 INFO  (zkConnectionManagerCallback-603-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389028 INFO  (jetty-launcher-583-thread-2) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 389246 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to 
connect to ZooKeeper
   [junit4]   2> 389295 INFO  (zkConnectionManagerCallback-607-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389299 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Client is connected 
to ZooKeeper
   [junit4]   2> 389464 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.OverseerElectionContext I am going to be 
the leader 127.0.0.1:37581_solr
   [junit4]   2> 389465 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.Overseer Overseer 
(id=75295907184115720-127.0.0.1:37581_solr-n_0000000000) starting
   [junit4]   2> 389854 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to 
connect to ZooKeeper
   [junit4]   2> 389951 INFO  (zkConnectionManagerCallback-614-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 389963 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.c.ConnectionManager Client is connected 
to ZooKeeper
   [junit4]   2> 389982 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:33641/solr ready
   [junit4]   2> 390000 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.Overseer Starting to work on the main 
queue : 127.0.0.1:37581_solr
   [junit4]   2> 390001 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:37581_solr
   [junit4]   2> 390016 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.PackageManager clusterprops.json changed 
, version 0
   [junit4]   2> 390017 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup 
repository with configuration params {type = repository,name = hdfs,class = 
org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = 
{name=hdfs, 
class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = 
{location=/backup,solr.hdfs.home=hdfs://lucene2-us-west.apache.org:37840/solr,solr.hdfs.confdir=}}
   [junit4]   2> 390017 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup 
repository with configuration params {type = repository,name = poisioned,class 
= 
org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes
 = {default=true, name=poisioned, 
class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 390017 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default 
configuration for backup repository is with configuration params {type = 
repository,name = poisioned,class = 
org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes
 = {default=true, name=poisioned, 
class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 390040 INFO  (zkCallback-613-thread-1) [     ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 390081 INFO  (zkCallback-606-thread-1) [     ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 390227 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 390336 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 390391 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 390391 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 390393 INFO  (jetty-launcher-583-thread-2) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/.
   [junit4]   2> 391023 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (1)
   [junit4]   2> 391451 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.ZkController Publish 
node=127.0.0.1:41788_solr as DOWN
   [junit4]   2> 391452 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.TransientSolrCoreCacheDefault Allocating 
transient cache for 2147483647 transient cores
   [junit4]   2> 391452 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:41788_solr
   [junit4]   2> 391454 INFO  (zkCallback-613-thread-1) [     ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 391473 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Waiting for client to 
connect to ZooKeeper
   [junit4]   2> 391488 INFO  (zkCallback-606-thread-2) [     ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 391489 INFO  (zkConnectionManagerCallback-620-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 391489 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.c.ConnectionManager Client is connected 
to ZooKeeper
   [junit4]   2> 391490 INFO  (zkCallback-598-thread-1) [     ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 391524 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.c.ZkStateReader Updated live nodes from 
ZooKeeper... (0) -> (2)
   [junit4]   2> 391572 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.s.i.ZkClientClusterStateProvider Cluster 
at 127.0.0.1:33641/solr ready
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.PackageManager clusterprops.json changed 
, version 0
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup 
repository with configuration params {type = repository,name = hdfs,class = 
org.apache.solr.core.backup.repository.HdfsBackupRepository,attributes = 
{name=hdfs, 
class=org.apache.solr.core.backup.repository.HdfsBackupRepository},args = 
{location=/backup,solr.hdfs.home=hdfs://lucene2-us-west.apache.org:37840/solr,solr.hdfs.confdir=}}
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Added backup 
repository with configuration params {type = repository,name = poisioned,class 
= 
org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes
 = {default=true, name=poisioned, 
class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 391605 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.b.r.BackupRepositoryFactory Default 
configuration for backup repository is with configuration params {type = 
repository,name = poisioned,class = 
org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository,attributes
 = {default=true, name=poisioned, 
class=org.apache.solr.cloud.api.collections.TestLocalFSCloudBackupRestore$PoinsionedRepository},}
   [junit4]   2> 391846 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.h.a.MetricsHistoryHandler No .system 
collection, keeping metrics history in memory.
   [junit4]   2> 392024 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.node' (registry 'solr.node') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 392170 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jvm' (registry 'solr.jvm') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 392170 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.m.r.SolrJmxReporter JMX monitoring for 
'solr.jetty' (registry 'solr.jetty') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 392171 INFO  (jetty-launcher-583-thread-1) 
[n:127.0.0.1:41788_solr     ] o.a.s.c.CorePropertiesLocator Found 0 core 
definitions underneath 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/.
   [junit4]   2> 392642 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.MiniSolrCloudCluster waitForAllNodes: numServers=2
   [junit4]   2> 392644 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Waiting for client to connect to ZooKeeper
   [junit4]   2> 392674 INFO  (zkConnectionManagerCallback-626-thread-1) [     
] o.a.s.c.c.ConnectionManager zkClient has connected
   [junit4]   2> 392674 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ConnectionManager Client is connected to ZooKeeper
   [junit4]   2> 392675 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 392676 INFO  
(SUITE-TestHdfsCloudBackupRestore-seed#[4E0D9701184A2A9E]-worker) [     ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33641/solr ready
   [junit4]   2> 394167 INFO  
(TEST-TestHdfsCloudBackupRestore.test-seed#[4E0D9701184A2A9E]) [     ] 
o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 394365 INFO  (qtp865602825-2264) [n:127.0.0.1:41788_solr     ] 
o.a.s.h.a.CollectionsHandler Invoked Collection Action :create with params 
collection.configName=conf1&router.name=implicit&version=2&pullReplicas=1&shards=shard1,shard2&property.customKey=customValue&maxShardsPerNode=-1&router.field=shard_s&name=hdfsbackuprestore_testok&nrtReplicas=1&action=CREATE&tlogReplicas=1&wt=javabin
 and sendToOCPQueue=true
   [junit4]   2> 394470 INFO  
(OverseerThreadFactory-530-thread-1-processing-n:127.0.0.1:37581_solr) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.a.c.CreateCollectionCmd Create collection 
hdfsbackuprestore_testok
   [junit4]   2> 394632 WARN  
(OverseerThreadFactory-530-thread-1-processing-n:127.0.0.1:37581_solr) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.a.c.CreateCollectionCmd Specified number 
of replicas of 3 on collection hdfsbackuprestore_testok is higher than the 
number of Solr instances currently live or live and part of your 
createNodeSet(2). It's unusual to run two replica of the same slice on the same 
Solr-instance.
   [junit4]   2> 394669 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_n1",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:41788/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394708 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_t2",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:37581/solr";,
   [junit4]   2>   "type":"TLOG",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394758 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard1",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard1_replica_p4",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:41788/solr";,
   [junit4]   2>   "type":"PULL",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394794 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_n6",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:37581/solr";,
   [junit4]   2>   "type":"NRT",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394815 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_t8",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:41788/solr";,
   [junit4]   2>   "type":"TLOG",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 394863 INFO  
(OverseerStateUpdate-75295907184115720-127.0.0.1:37581_solr-n_0000000000) 
[n:127.0.0.1:37581_solr     ] o.a.s.c.o.SliceMutator createReplica() {
   [junit4]   2>   "operation":"ADDREPLICA",
   [junit4]   2>   "collection":"hdfsbackuprestore_testok",
   [junit4]   2>   "shard":"shard2",
   [junit4]   2>   "core":"hdfsbackuprestore_testok_shard2_replica_p10",
   [junit4]   2>   "state":"down",
   [junit4]   2>   "base_url":"https://127.0.0.1:37581/solr";,
   [junit4]   2>   "type":"PULL",
   [junit4]   2>   "waitForFinalState":"false"} 
   [junit4]   2> 395122 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr    
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.a.CoreAdminOperation 
core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node3&name=hdfsbackuprestore_testok_shard1_replica_n1&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 395122 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr    
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.a.CoreAdminOperation 
core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=PULL&property.customKey=customValue&coreNodeName=core_node7&name=hdfsbackuprestore_testok_shard1_replica_p4&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 395124 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr    
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.h.a.CoreAdminOperation 
core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node11&name=hdfsbackuprestore_testok_shard2_replica_t8&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 395576 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr    
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.h.a.CoreAdminOperation 
core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=PULL&property.customKey=customValue&coreNodeName=core_node12&name=hdfsbackuprestore_testok_shard2_replica_p10&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 395577 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr    
x:hdfsbackuprestore_testok_shard2_replica_p10 ] 
o.a.s.c.TransientSolrCoreCacheDefault Allocating transient cache for 2147483647 
transient cores
   [junit4]   2> 395621 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr    
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.h.a.CoreAdminOperation 
core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=NRT&property.customKey=customValue&coreNodeName=core_node9&name=hdfsbackuprestore_testok_shard2_replica_n6&action=CREATE&numShards=2&shard=shard2&wt=javabin
   [junit4]   2> 395629 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr    
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.h.a.CoreAdminOperation 
core create command 
qt=/admin/cores&collection.configName=conf1&newCollection=true&collection=hdfsbackuprestore_testok&version=2&replicaType=TLOG&property.customKey=customValue&coreNodeName=core_node5&name=hdfsbackuprestore_testok_shard1_replica_t2&action=CREATE&numShards=2&shard=shard1&wt=javabin
   [junit4]   2> 396182 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrConfig Using Lucene 
MatchVersion: 8.3.0
   [junit4]   2> 396291 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema 
[hdfsbackuprestore_testok_shard1_replica_n1] Schema name=minimal
   [junit4]   2> 396293 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 396294 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.CoreContainer Creating 
SolrCore 'hdfsbackuprestore_testok_shard1_replica_n1' using configuration from 
collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 396294 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.m.r.SolrJmxReporter JMX 
monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_n1' (registry 
'solr.core.hdfsbackuprestore_testok.shard1.replica_n1') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 396311 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.SolrCore 
[[hdfsbackuprestore_testok_shard1_replica_n1] ] Opening new SolrCore at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_n1],
 
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_n1/data/]
   [junit4]   2> 396408 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.SolrConfig Using Lucene 
MatchVersion: 8.3.0
   [junit4]   2> 396431 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.SolrConfig Using Lucene 
MatchVersion: 8.3.0
   [junit4]   2> 396446 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.s.IndexSchema 
[hdfsbackuprestore_testok_shard2_replica_t8] Schema name=minimal
   [junit4]   2> 396461 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 396461 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.CoreContainer Creating 
SolrCore 'hdfsbackuprestore_testok_shard2_replica_t8' using configuration from 
collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 396462 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.m.r.SolrJmxReporter JMX 
monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_t8' (registry 
'solr.core.hdfsbackuprestore_testok.shard2.replica_t8') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 396462 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.SolrCore 
[[hdfsbackuprestore_testok_shard2_replica_t8] ] Opening new SolrCore at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/hdfsbackuprestore_testok_shard2_replica_t8],
 
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard2_replica_t8/data/]
   [junit4]   2> 396542 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.s.IndexSchema 
[hdfsbackuprestore_testok_shard1_replica_p4] Schema name=minimal
   [junit4]   2> 396562 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 396562 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.CoreContainer Creating 
SolrCore 'hdfsbackuprestore_testok_shard1_replica_p4' using configuration from 
collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 396584 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.m.r.SolrJmxReporter JMX 
monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_p4' (registry 
'solr.core.hdfsbackuprestore_testok.shard1.replica_p4') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 396585 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.SolrCore 
[[hdfsbackuprestore_testok_shard1_replica_p4] ] Opening new SolrCore at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/hdfsbackuprestore_testok_shard1_replica_p4],
 
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node1/./hdfsbackuprestore_testok_shard1_replica_p4/data/]
   [junit4]   2> 396986 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 396986 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Initializing 
UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 397053 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 397053 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 397064 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@6f3941f9[hdfsbackuprestore_testok_shard1_replica_n1] main]
   [junit4]   2> 397102 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 397102 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.UpdateLog Initializing 
UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 397103 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 397103 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 397132 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage 
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 397148 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@a444bac[hdfsbackuprestore_testok_shard2_replica_t8] main]
   [junit4]   2> 397152 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.r.ManagedResourceStorage 
Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 397155 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 397155 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.u.UpdateLog Could not find 
max version in index or recent updates, using new clock 1643197598596595712
   [junit4]   2> 397190 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.SolrConfig Using Lucene 
MatchVersion: 8.3.0
   [junit4]   2> 397225 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 397225 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 397227 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.s.IndexSchema 
[hdfsbackuprestore_testok_shard2_replica_n6] Schema name=minimal
   [junit4]   2> 397230 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 397230 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.CoreContainer Creating 
SolrCore 'hdfsbackuprestore_testok_shard2_replica_n6' using configuration from 
collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 397230 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.m.r.SolrJmxReporter JMX 
monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_n6' (registry 
'solr.core.hdfsbackuprestore_testok.shard2.replica_n6') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 397231 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.c.SolrCore 
[[hdfsbackuprestore_testok_shard2_replica_n6] ] Opening new SolrCore at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_n6],
 
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_n6/data/]
   [junit4]   2> 397261 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@7bfbbe13[hdfsbackuprestore_testok_shard1_replica_p4] main]
   [junit4]   2> 397262 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.r.ManagedResourceStorage 
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 397327 INFO  
(searcherExecutor-539-thread-1-processing-n:127.0.0.1:41788_solr 
x:hdfsbackuprestore_testok_shard1_replica_n1 c:hdfsbackuprestore_testok 
s:shard1 r:core_node3) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok 
s:shard1 r:core_node3 x:hdfsbackuprestore_testok_shard1_replica_n1 ] 
o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_n1] Registered new 
searcher Searcher@6f3941f9[hdfsbackuprestore_testok_shard1_replica_n1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 397328 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.c.SolrConfig Using Lucene 
MatchVersion: 8.3.0
   [junit4]   2> 397359 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.r.ManagedResourceStorage 
Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 397359 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 397359 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.u.UpdateLog Could not find 
max version in index or recent updates, using new clock 1643197598810505216
   [junit4]   2> 397362 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.SolrConfig Using Lucene 
MatchVersion: 8.3.0
   [junit4]   2> 397483 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.s.IndexSchema 
[hdfsbackuprestore_testok_shard1_replica_t2] Schema name=minimal
   [junit4]   2> 397485 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 397485 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.c.CoreContainer Creating 
SolrCore 'hdfsbackuprestore_testok_shard1_replica_t2' using configuration from 
collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 397485 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.m.r.SolrJmxReporter JMX 
monitoring for 'solr.core.hdfsbackuprestore_testok.shard1.replica_t2' (registry 
'solr.core.hdfsbackuprestore_testok.shard1.replica_t2') enabled at server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 397486 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.c.SolrCore 
[[hdfsbackuprestore_testok_shard1_replica_t2] ] Opening new SolrCore at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/hdfsbackuprestore_testok_shard1_replica_t2],
 
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard1_replica_t2/data/]
   [junit4]   2> 397542 INFO  
(searcherExecutor-540-thread-1-processing-n:127.0.0.1:41788_solr 
x:hdfsbackuprestore_testok_shard2_replica_t8 c:hdfsbackuprestore_testok 
s:shard2 r:core_node11) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok 
s:shard2 r:core_node11 x:hdfsbackuprestore_testok_shard2_replica_t8 ] 
o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_t8] Registered new 
searcher Searcher@a444bac[hdfsbackuprestore_testok_shard2_replica_t8] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 397543 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.s.IndexSchema 
[hdfsbackuprestore_testok_shard2_replica_p10] Schema name=minimal
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.s.IndexSchema Loaded 
schema minimal/1.1 with uniqueid field id
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.CoreContainer Creating 
SolrCore 'hdfsbackuprestore_testok_shard2_replica_p10' using configuration from 
collection hdfsbackuprestore_testok, trusted=true
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.m.r.SolrJmxReporter JMX 
monitoring for 'solr.core.hdfsbackuprestore_testok.shard2.replica_p10' 
(registry 'solr.core.hdfsbackuprestore_testok.shard2.replica_p10') enabled at 
server: com.sun.jmx.mbeanserver.JmxMBeanServer@a5c0e50
   [junit4]   2> 397570 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.c.SolrCore 
[[hdfsbackuprestore_testok_shard2_replica_p10] ] Opening new SolrCore at 
[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/hdfsbackuprestore_testok_shard2_replica_p10],
 
dataDir=[/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/build/solr-core/test/J0/temp/solr.cloud.api.collections.TestHdfsCloudBackupRestore_4E0D9701184A2A9E-001/tempDir-002/node2/./hdfsbackuprestore_testok_shard2_replica_p10/data/]
   [junit4]   2> 397577 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.r.ManagedResourceStorage 
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 397589 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.r.ManagedResourceStorage 
Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 397590 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 397799 INFO  
(searcherExecutor-541-thread-1-processing-n:127.0.0.1:41788_solr 
x:hdfsbackuprestore_testok_shard1_replica_p4 c:hdfsbackuprestore_testok 
s:shard1 r:core_node7) [n:127.0.0.1:41788_solr c:hdfsbackuprestore_testok 
s:shard1 r:core_node7 x:hdfsbackuprestore_testok_shard1_replica_p4 ] 
o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard1_replica_p4] Registered new 
searcher Searcher@7bfbbe13[hdfsbackuprestore_testok_shard1_replica_p4] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 397946 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.ZkController 
hdfsbackuprestore_testok_shard1_replica_p4 starting background replication from 
leader
   [junit4]   2> 397948 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.c.ReplicateFromLeader Will 
start replication from leader with poll interval: 00:00:01
   [junit4]   2> 397998 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] o.a.s.c.ZkShardTerms Successful 
update of terms at /collections/hdfsbackuprestore_testok/terms/shard1 to 
Terms{values={core_node3=0}, version=0}
   [junit4]   2> 397998 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/hdfsbackuprestore_testok/leaders/shard1
   [junit4]   2> 398006 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.ReplicationHandler Poll 
scheduled at an interval of 1000ms
   [junit4]   2> 398006 INFO  (qtp865602825-2262) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node7 
x:hdfsbackuprestore_testok_shard1_replica_p4 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 398176 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] o.a.s.c.ZkShardTerms Successful 
update of terms at /collections/hdfsbackuprestore_testok/terms/shard2 to 
Terms{values={core_node11=0}, version=0}
   [junit4]   2> 398200 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 398200 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.UpdateLog Initializing 
UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 398202 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 398202 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 398217 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 398217 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.UpdateLog Initializing 
UpdateLog: dataDir=null defaultSyncLevel=FLUSH numRecordsToKeep=100 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 398218 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 398218 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 398254 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] 
o.a.s.c.ShardLeaderElectionContextBase make sure parent is created 
/collections/hdfsbackuprestore_testok/leaders/shard2
   [junit4]   2> 398334 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@24dc1267[hdfsbackuprestore_testok_shard1_replica_t2] main]
   [junit4]   2> 398335 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@480c3d9f[hdfsbackuprestore_testok_shard2_replica_n6] main]
   [junit4]   2> 398340 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 398340 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 398340 INFO  (qtp865602825-2263) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node3 
x:hdfsbackuprestore_testok_shard1_replica_n1 ] 
o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for 
shard shard1: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 398356 INFO  (qtp865602825-2265) [n:127.0.0.1:41788_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node11 
x:hdfsbackuprestore_testok_shard2_replica_t8 ] 
o.a.s.c.ShardLeaderElectionContext Waiting until we see more replicas up for 
shard shard2: total=2 found=1 timeoutin=9999ms
   [junit4]   2> 398368 INFO  (zkCallback-598-thread-2) [     ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/hdfsbackuprestore_testok/state.json] for collection 
[hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398368 INFO  (zkCallback-598-thread-1) [     ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/hdfsbackuprestore_testok/state.json] for collection 
[hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398383 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@11479cea[hdfsbackuprestore_testok_shard2_replica_p10] main]
   [junit4]   2> 398407 INFO  (zkCallback-598-thread-3) [     ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/hdfsbackuprestore_testok/state.json] for collection 
[hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398407 INFO  (zkCallback-598-thread-4) [     ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent 
state:SyncConnected type:NodeDataChanged 
path:/collections/hdfsbackuprestore_testok/state.json] for collection 
[hdfsbackuprestore_testok] has occurred - updating... (live nodes size: [2])
   [junit4]   2> 398415 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.r.ManagedResourceStorage 
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 398420 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.r.ManagedResourceStorage 
Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 398420 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.r.ManagedResourceStorage 
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 398420 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 398420 INFO  (qtp809121568-2254) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node9 
x:hdfsbackuprestore_testok_shard2_replica_n6 ] o.a.s.u.UpdateLog Could not find 
max version in index or recent updates, using new clock 1643197599923044352
   [junit4]   2> 398422 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.r.ManagedResourceStorage 
Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 398423 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 398423 INFO  (qtp809121568-2257) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard1 r:core_node5 
x:hdfsbackuprestore_testok_shard1_replica_t2 ] o.a.s.u.UpdateLog Could not find 
max version in index or recent updates, using new clock 1643197599926190080
   [junit4]   2> 398431 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.r.ManagedResourceStorage 
Configured ZooKeeperStorageIO with znodeBase: /configs/conf1
   [junit4]   2> 398432 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.r.ManagedResourceStorage 
Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 398432 INFO  (qtp809121568-2255) [n:127.0.0.1:37581_solr 
c:hdfsbackuprestore_testok s:shard2 r:core_node12 
x:hdfsbackuprestore_testok_shard2_replica_p10 ] o.a.s.h.ReplicationHandler 
Commits will be reserved for 10000ms.
   [junit4]   2> 398453 INFO  
(searcherExecutor-553-thread-1-processing-n:127.0.0.1:37581_solr 
x:hdfsbackuprestore_testok_shard2_replica_n6 c:hdfsbackuprestore_testok 
s:shard2 r:core_node9) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok 
s:shard2 r:core_node9 x:hdfsbackuprestore_testok_shard2_replica_n6 ] 
o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_n6] Registered new 
searcher Searcher@480c3d9f[hdfsbackuprestore_testok_shard2_replica_n6] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 398477 INFO  
(searcherExecutor-555-thread-1-processing-n:127.0.0.1:37581_solr 
x:hdfsbackuprestore_testok_shard2_replica_p10 c:hdfsbackuprestore_testok 
s:shard2 r:core_node12) [n:127.0.0.1:37581_solr c:hdfsbackuprestore_testok 
s:shard2 r:core_node12 x:hdfsbackuprestore_testok_shard2_replica_p10 ] 
o.a.s.c.SolrCore [hdfsbackuprestore_testok_shard2_replica_p10] Registered new 
searcher Searcher@11479cea[hdfsbackuprestore_testok_shard2_replica_p10] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 398478 INFO  (zkCallback-598-thread-4) [     ] 
o.a.s.c.c.ZkStateReader A cluster state change: [WatchedEvent state:SyncCon

[...truncated too long message...]

gs :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

ivy-availability-check:
[loadresource] Do not set property disallowed.ivy.jars.list as its length is 0.

-ivy-fail-disallowed-ivy-version:

ivy-fail:

ivy-fail:

ivy-configure:
[ivy:configure] :: loading settings :: file = 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/lucene/top-level-ivy-settings.xml

resolve:

jar-checksums:
    [mkdir] Created dir: 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null2090723009
     [copy] Copying 249 files to 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null2090723009
   [delete] Deleting directory 
/home/jenkins/jenkins-slave/workspace/Lucene-Solr-Tests-8.x/solr/null2090723009

check-working-copy:
[ivy:cachepath] :: resolving dependencies :: 
#;work...@lucene2-us-west.apache.org
[ivy:cachepath]         confs: [default]
[ivy:cachepath]         found 
org.eclipse.jgit#org.eclipse.jgit;5.3.0.201903130848-r in public
[ivy:cachepath]         found com.jcraft#jsch;0.1.54 in public
[ivy:cachepath]         found com.jcraft#jzlib;1.1.1 in public
[ivy:cachepath]         found com.googlecode.javaewah#JavaEWAH;1.1.6 in public
[ivy:cachepath]         found org.slf4j#slf4j-api;1.7.2 in public
[ivy:cachepath]         found org.bouncycastle#bcpg-jdk15on;1.60 in public
[ivy:cachepath]         found org.bouncycastle#bcprov-jdk15on;1.60 in public
[ivy:cachepath]         found org.bouncycastle#bcpkix-jdk15on;1.60 in public
[ivy:cachepath]         found org.slf4j#slf4j-nop;1.7.2 in public
[ivy:cachepath] :: resolution report :: resolve 34ms :: artifacts dl 4ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   9   |   0   |   0   |   0   ||   9   |   0   |
        ---------------------------------------------------------------------
[wc-checker] Initializing working copy...
[wc-checker] Checking working copy status...

-jenkins-base:

BUILD SUCCESSFUL
Total time: 199 minutes 22 seconds
Archiving artifacts
java.lang.InterruptedException: no matches found within 10000
        at hudson.FilePath$ValidateAntFileMask.hasMatch(FilePath.java:2847)
        at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2726)
        at hudson.FilePath$ValidateAntFileMask.invoke(FilePath.java:2707)
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3086)
Also:   hudson.remoting.Channel$CallSiteStackTrace: Remote call to lucene2
                at 
hudson.remoting.Channel.attachCallSiteStackTrace(Channel.java:1741)
                at 
hudson.remoting.UserRequest$ExceptionResponse.retrieve(UserRequest.java:357)
                at hudson.remoting.Channel.call(Channel.java:955)
                at hudson.FilePath.act(FilePath.java:1072)
                at hudson.FilePath.act(FilePath.java:1061)
                at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
                at 
hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
                at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
                at 
hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
                at hudson.model.Build$BuildExecution.post2(Build.java:186)
                at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
                at hudson.model.Run.execute(Run.java:1835)
                at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
                at 
hudson.model.ResourceController.execute(ResourceController.java:97)
                at hudson.model.Executor.run(Executor.java:429)
Caused: hudson.FilePath$TunneledInterruptedException
        at hudson.FilePath$FileCallableWrapper.call(FilePath.java:3088)
        at hudson.remoting.UserRequest.perform(UserRequest.java:212)
        at hudson.remoting.UserRequest.perform(UserRequest.java:54)
        at hudson.remoting.Request$2.run(Request.java:369)
        at 
hudson.remoting.InterceptingExecutorService$1.call(InterceptingExecutorService.java:72)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:748)
Caused: java.lang.InterruptedException: java.lang.InterruptedException: no 
matches found within 10000
        at hudson.FilePath.act(FilePath.java:1074)
        at hudson.FilePath.act(FilePath.java:1061)
        at hudson.FilePath.validateAntFileMask(FilePath.java:2705)
        at hudson.tasks.ArtifactArchiver.perform(ArtifactArchiver.java:243)
        at 
hudson.tasks.BuildStepCompatibilityLayer.perform(BuildStepCompatibilityLayer.java:81)
        at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:744)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:690)
        at hudson.model.Build$BuildExecution.post2(Build.java:186)
        at 
hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:635)
        at hudson.model.Run.execute(Run.java:1835)
        at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
        at hudson.model.ResourceController.execute(ResourceController.java:97)
        at hudson.model.Executor.run(Executor.java:429)
No artifacts found that match the file pattern 
"**/*.events,heapdumps/**,**/hs_err_pid*". Configuration error?
Recording test results
Build step 'Publish JUnit test result report' changed build result to UNSTABLE
Email was triggered for: Unstable (Test Failures)
Sending email for trigger: Unstable (Test Failures)
---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscribe@lucene.apache.org
For additional commands, e-mail: dev-help@lucene.apache.org

Reply via email to