Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-MacOSX/3800/
Java: 64bit/jdk1.8.0 -XX:-UseCompressedOops -XX:+UseParallelGC

1 test failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
timeout waiting to see all nodes active

Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
        at __randomizedtesting.SeedInfo.seed([A0E5E2CB84D8E147:28B1DD112A248CBF]:0)
        at org.junit.Assert.fail(Assert.java:93)
        at org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
        at org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
        at org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
        at org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
        at org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:745)
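
For context, the assertion above comes from the test's polling helper (waitTillNodesActive), which repeatedly re-reads the cluster state and fails once a timeout elapses while some node is still not active. The following is only a minimal sketch of that kind of wait loop, written against a hypothetical NodeStateSource lookup; it is not Solr's actual API or the test's real code.

    import java.util.List;
    import java.util.concurrent.TimeUnit;

    // Hypothetical stand-in for the cluster-state lookup the real test performs
    // against ZooKeeper; the interface and names are illustrative only.
    interface NodeStateSource {
        List<String> nodesNotYetActive();
    }

    final class WaitForActiveNodes {
        // Poll until every node reports active, or fail after timeoutSeconds.
        static void waitTillNodesActive(NodeStateSource source, int timeoutSeconds)
                throws InterruptedException {
            long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(timeoutSeconds);
            while (true) {
                List<String> pending = source.nodesNotYetActive();
                if (pending.isEmpty()) {
                    return; // all nodes active, the test can proceed
                }
                if (System.nanoTime() > deadline) {
                    throw new AssertionError("timeout waiting to see all nodes active: " + pending);
                }
                Thread.sleep(500); // back off before re-reading cluster state
            }
        }
    }

A timeout of this kind usually indicates that the restarted node never re-published an active state within the allotted time, rather than a problem with the assertion itself.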




Build Log:
[...truncated 12286 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/init-core-data-001
   [junit4]   2> 2641237 INFO  
(SUITE-PeerSyncReplicationTest-seed#[A0E5E2CB84D8E147]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using PointFields
   [junit4]   2> 2641241 INFO  
(SUITE-PeerSyncReplicationTest-seed#[A0E5E2CB84D8E147]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (false) via: 
@org.apache.solr.util.RandomizeSSL(reason=, value=NaN, ssl=NaN, clientAuth=NaN) 
w/ MAC_OS_X suppressed clientAuth
   [junit4]   2> 2641242 INFO  
(SUITE-PeerSyncReplicationTest-seed#[A0E5E2CB84D8E147]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /
   [junit4]   2> 2641244 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 2641244 INFO  (Thread-2880) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 2641244 INFO  (Thread-2880) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 2641347 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkTestServer start zk server on port:52443
   [junit4]   2> 2641380 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 2641448 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 2641561 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 2641650 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 2641687 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 2641712 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 2641744 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 2641794 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 2641813 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 2641830 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 2641855 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 2645831 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/control-001/cores/collection1
   [junit4]   2> 2645859 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2645864 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@7a9f2e46{/,null,AVAILABLE}
   [junit4]   2> 2645879 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@8cd1f39{SSL,[ssl, 
http/1.1]}{127.0.0.1:52473}
   [junit4]   2> 2645880 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server Started @2651354ms
   [junit4]   2> 2645880 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/tempDir-001/control/data,
 hostContext=/, hostPort=52473, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/control-001/cores}
   [junit4]   2> 2645880 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2645881 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 2645881 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2645881 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2645881 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-26T19:57:03.824Z
   [junit4]   2> 2645888 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2645888 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/control-001/solr.xml
   [junit4]   2> 2645905 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52443/solr
   [junit4]   2> 2645969 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52473_ 
   ] o.a.s.c.OverseerElectionContext I am going to be the leader 
127.0.0.1:52473_
   [junit4]   2> 2645971 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52473_ 
   ] o.a.s.c.Overseer Overseer 
(id=97351147139235845-127.0.0.1:52473_-n_0000000000) starting
   [junit4]   2> 2645990 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52473_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:52473_
   [junit4]   2> 2645995 INFO  
(zkCallback-2962-thread-1-processing-n:127.0.0.1:52473_) [n:127.0.0.1:52473_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2646098 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52473_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/control-001/cores
   [junit4]   2> 2646099 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52473_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2646108 INFO  
(OverseerStateUpdate-97351147139235845-127.0.0.1:52473_-n_0000000000) 
[n:127.0.0.1:52473_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 2647138 WARN  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 2647139 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 2647169 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 2647306 WARN  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 2647308 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 2647340 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection 
control_collection
   [junit4]   2> 2647340 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/control-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/control-001/cores/collection1/data/]
   [junit4]   2> 2647341 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@4d92275b
   [junit4]   2> 2647346 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.AlcoholicMergePolicy: [AlcoholicMergePolicy: 
minMergeSize=0, mergeFactor=10, maxMergeSize=1672653504, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.1]
   [junit4]   2> 2647368 WARN  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 2647388 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.u.UpdateHandler 
Using UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2647388 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2647390 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Hard AutoCommit: disabled
   [junit4]   2> 2647390 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.u.CommitTracker 
Soft AutoCommit: disabled
   [junit4]   2> 2647390 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.TieredMergePolicy: [TieredMergePolicy: 
maxMergeAtOnce=41, maxMergeAtOnceExplicit=23, maxMergedSegmentMB=55.8642578125, 
floorSegmentMB=0.7197265625, forceMergeDeletesPctAllowed=2.102139642005424, 
segmentsPerTier=16.0, maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2647391 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@692f0eb4[collection1] main]
   [junit4]   2> 2647393 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2647394 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2647394 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 2647396 INFO  
(searcherExecutor-6484-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@692f0eb4[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2647396 INFO  
(coreLoadExecutor-6483-thread-1-processing-n:127.0.0.1:52473_) 
[n:127.0.0.1:52473_ c:control_collection   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1557618360675467264
   [junit4]   2> 2647411 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas 
found to continue.
   [junit4]   2> 2647411 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new 
leader - try and sync
   [junit4]   2> 2647411 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
https://127.0.0.1:52473/collection1/
   [junit4]   2> 2647412 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync 
replicas to me
   [junit4]   2> 2647412 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
https://127.0.0.1:52473/collection1/ has no replicas
   [junit4]   2> 2647422 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new 
leader: https://127.0.0.1:52473/collection1/ shard1
   [junit4]   2> 2647595 INFO  
(coreZkRegister-6476-thread-1-processing-n:127.0.0.1:52473_ x:collection1 
c:control_collection) [n:127.0.0.1:52473_ c:control_collection s:shard1 
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery 
necessary
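   [junit4]   2> The sequence just above is the shard-leader election for the control collection: the candidate sees enough replicas to continue, attempts a peer sync, finds the shard has no other replicas, and becomes leader with no recovery needed. As a rough illustration of that decision flow only (the type and method names below are hypothetical, not Solr's SyncStrategy/ShardLeaderElectionContext API):

    import java.util.List;

    // Hypothetical collaborator used by the sketch below; illustrative only.
    interface PeerSyncer {
        boolean syncFrom(String candidateUrl, List<String> replicaUrls);
        void requestSyncTo(String leaderUrl, List<String> replicaUrls);
    }

    final class LeaderElectionSketch {
        // Mirrors the logged steps: try to sync, then (if successful) lead.
        static boolean tryBecomeLeader(String myCoreUrl, List<String> otherReplicaUrls,
                                       PeerSyncer syncer) {
            // "I may be the new leader - try and sync"
            if (!syncer.syncFrom(myCoreUrl, otherReplicaUrls)) {
                return false; // another replica appears more up to date
            }
            // "Sync Success - now sync replicas to me"
            if (otherReplicaUrls.isEmpty()) {
                return true; // "<url> has no replicas": nothing further to do
            }
            syncer.requestSyncTo(myCoreUrl, otherReplicaUrls);
            return true;
        }
    }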
   [junit4]   2> 2647714 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2647717 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:52443/solr ready
   [junit4]   2> 2647717 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection 
loss:false
   [junit4]   2> 2648135 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001/cores/collection1
   [junit4]   2> 2648137 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001
   [junit4]   2> 2648142 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2648150 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@2c2437e0{/,null,AVAILABLE}
   [junit4]   2> 2648152 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@4f40effb{SSL,[ssl, 
http/1.1]}{127.0.0.1:52478}
   [junit4]   2> 2648152 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server Started @2653627ms
   [junit4]   2> 2648152 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/tempDir-001/jetty1,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=52478, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001/cores}
   [junit4]   2> 2648152 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2648153 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 2648153 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2648153 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2648153 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-26T19:57:06.096Z
   [junit4]   2> 2648158 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2648158 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001/solr.xml
   [junit4]   2> 2648173 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52443/solr
   [junit4]   2> 2648195 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52478_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 2648219 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52478_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:52478_
   [junit4]   2> 2648224 INFO  
(zkCallback-2962-thread-1-processing-n:127.0.0.1:52473_) [n:127.0.0.1:52473_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2648224 INFO  (zkCallback-2966-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2648225 INFO  
(zkCallback-2971-thread-1-processing-n:127.0.0.1:52478_) [n:127.0.0.1:52478_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 2648401 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52478_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001/cores
   [junit4]   2> 2648401 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52478_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2648405 INFO  
(OverseerStateUpdate-97351147139235845-127.0.0.1:52473_-n_0000000000) 
[n:127.0.0.1:52473_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 2649426 WARN  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 2649427 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 2649444 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 2649550 WARN  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 2649560 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 2649581 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 2649581 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 2649581 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@4d92275b
   [junit4]   2> 2649586 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: 
[AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=1672653504, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.1]
   [junit4]   2> 2649595 WARN  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2649613 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2649613 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2649614 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 2649614 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 2649615 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=41, maxMergeAtOnceExplicit=23, 
maxMergedSegmentMB=55.8642578125, floorSegmentMB=0.7197265625, 
forceMergeDeletesPctAllowed=2.102139642005424, segmentsPerTier=16.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2649616 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@30ea41ba[collection1] main]
   [junit4]   2> 2649618 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2649619 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2649619 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 2649620 INFO  
(searcherExecutor-6495-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@30ea41ba[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2649622 INFO  
(coreLoadExecutor-6494-thread-1-processing-n:127.0.0.1:52478_) 
[n:127.0.0.1:52478_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1557618363009597440
   [junit4]   2> 2649637 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas found to 
continue.
   [junit4]   2> 2649637 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new leader - try 
and sync
   [junit4]   2> 2649637 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
https://127.0.0.1:52478/collection1/
   [junit4]   2> 2649637 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync replicas to me
   [junit4]   2> 2649638 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.SyncStrategy https://127.0.0.1:52478/collection1/ has no 
replicas
   [junit4]   2> 2649652 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new leader: 
https://127.0.0.1:52478/collection1/ shard1
   [junit4]   2> 2649816 INFO  
(coreZkRegister-6489-thread-1-processing-n:127.0.0.1:52478_ x:collection1 
c:collection1) [n:127.0.0.1:52478_ c:collection1 s:shard1 r:core_node1 
x:collection1] o.a.s.c.ZkController I am the leader, no recovery necessary
   [junit4]   2> 2650264 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001/cores/collection1
   [junit4]   2> 2650265 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001
   [junit4]   2> 2650267 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2650270 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@6819ca0{/,null,AVAILABLE}
   [junit4]   2> 2650272 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@c974e59{SSL,[ssl, 
http/1.1]}{127.0.0.1:52496}
   [junit4]   2> 2650272 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server Started @2655747ms
   [junit4]   2> 2650273 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/tempDir-001/jetty2,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=52496, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001/cores}
   [junit4]   2> 2650274 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2650274 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 2650274 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2650274 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2650274 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-26T19:57:08.217Z
   [junit4]   2> 2650278 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2650279 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001/solr.xml
   [junit4]   2> 2650291 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52443/solr
   [junit4]   2> 2650309 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52496_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (2)
   [junit4]   2> 2650319 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52496_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:52496_
   [junit4]   2> 2650322 INFO  
(zkCallback-2962-thread-3-processing-n:127.0.0.1:52473_) [n:127.0.0.1:52473_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2650322 INFO  (zkCallback-2966-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2650322 INFO  
(zkCallback-2971-thread-1-processing-n:127.0.0.1:52478_) [n:127.0.0.1:52478_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2650323 INFO  
(zkCallback-2977-thread-1-processing-n:127.0.0.1:52496_) [n:127.0.0.1:52496_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 2650459 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52496_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001/cores
   [junit4]   2> 2650459 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52496_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2650463 INFO  
(OverseerStateUpdate-97351147139235845-127.0.0.1:52473_-n_0000000000) 
[n:127.0.0.1:52473_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 2651484 WARN  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 2651485 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 2651498 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 2651607 WARN  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 2651610 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 2651629 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 2651630 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/../../../../../../../../../Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 2651630 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@4d92275b
   [junit4]   2> 2651634 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: 
[AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=1672653504, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.1]
   [junit4]   2> 2651672 WARN  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2651716 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2651716 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2651718 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 2651718 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 2651719 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=41, maxMergeAtOnceExplicit=23, 
maxMergedSegmentMB=55.8642578125, floorSegmentMB=0.7197265625, 
forceMergeDeletesPctAllowed=2.102139642005424, segmentsPerTier=16.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0
   [junit4]   2> 2651721 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@33ffcef2[collection1] main]
   [junit4]   2> 2651723 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2651724 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2651724 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 2651726 INFO  
(searcherExecutor-6506-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
c:collection1) [n:127.0.0.1:52496_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@33ffcef2[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2651727 INFO  
(coreLoadExecutor-6505-thread-1-processing-n:127.0.0.1:52496_) 
[n:127.0.0.1:52496_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1557618365216849920
   [junit4]   2> 2651734 INFO  
(coreZkRegister-6500-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
c:collection1) [n:127.0.0.1:52496_ c:collection1 s:shard1 r:core_node2 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 2651734 INFO  
(updateExecutor-2974-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 2651735 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 2651736 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 2651736 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 2651736 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 2651736 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [https://127.0.0.1:52478/collection1/] 
and I am [https://127.0.0.1:52496/collection1/]
   [junit4]   2> 2651740 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [https://127.0.0.1:52478]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:52496_&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 2651788 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 2651789 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 2651789 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:52496_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:52496","node_name":"127.0.0.1:52496_","state":"down"}
   [junit4]   2> 2652471 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001/cores/collection1
   [junit4]   2> 2652472 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001
   [junit4]   2> 2652475 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 2652478 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@7985df93{/,null,AVAILABLE}
   [junit4]   2> 2652478 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@29d19d84{SSL,[ssl, 
http/1.1]}{127.0.0.1:52506}
   [junit4]   2> 2652478 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.e.j.s.Server Started @2657953ms
   [junit4]   2> 2652480 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/tempDir-001/jetty3,
 solrconfig=solrconfig.xml, hostContext=/, hostPort=52506, 
coreRootDirectory=/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001/cores}
   [junit4]   2> 2652481 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 2652481 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 2652481 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 2652481 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 2652481 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-01-26T19:57:10.424Z
   [junit4]   2> 2652486 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 2652486 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001/solr.xml
   [junit4]   2> 2652514 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:52443/solr
   [junit4]   2> 2652533 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52506_ 
   ] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (3)
   [junit4]   2> 2652543 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52506_ 
   ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:52506_
   [junit4]   2> 2652546 INFO  
(zkCallback-2971-thread-1-processing-n:127.0.0.1:52478_) [n:127.0.0.1:52478_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2652548 INFO  
(zkCallback-2984-thread-1-processing-n:127.0.0.1:52506_) [n:127.0.0.1:52506_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2652548 INFO  
(zkCallback-2977-thread-1-processing-n:127.0.0.1:52496_) [n:127.0.0.1:52496_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2652548 INFO  
(zkCallback-2962-thread-3-processing-n:127.0.0.1:52473_) [n:127.0.0.1:52473_    
] o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2652548 INFO  (zkCallback-2966-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 2652668 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52506_ 
   ] o.a.s.c.CorePropertiesLocator Found 1 core definitions underneath 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001/cores
   [junit4]   2> 2652668 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [n:127.0.0.1:52506_ 
   ] o.a.s.c.CorePropertiesLocator Cores are: [collection1]
   [junit4]   2> 2652673 INFO  
(OverseerStateUpdate-97351147139235845-127.0.0.1:52473_-n_0000000000) 
[n:127.0.0.1:52473_    ] o.a.s.c.o.ReplicaMutator Assigning new node to shard 
shard=shard1
   [junit4]   2> 2652790 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:52496_, coreNodeName=core_node2, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:52496","node_name":"127.0.0.1:52496_","state":"recovering"}
   [junit4]   2> 2652790 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 2652790 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:52496_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1002
   [junit4]   2> 2653695 WARN  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.c.Config Beginning 
with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> instead.
   [junit4]   2> 2653696 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.c.SolrConfig Using 
Lucene MatchVersion: 7.0.0
   [junit4]   2> 2653716 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 2653853 WARN  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 2653855 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.s.IndexSchema Loaded 
schema test/1.0 with uniqueid field id
   [junit4]   2> 2653884 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.c.CoreContainer 
Creating SolrCore 'collection1' using configuration from collection collection1
   [junit4]   2> 2653884 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001/cores/collection1],
 
dataDir=[/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 2653885 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.c.JmxMonitoredMap JMX 
monitoring is enabled. Adding Solr mbeans to JMX Server: 
com.sun.jmx.mbeanserver.JmxMBeanServer@4d92275b
   [junit4]   2> 2653888 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.AlcoholicMergePolicy: 
[AlcoholicMergePolicy: minMergeSize=0, mergeFactor=10, maxMergeSize=1672653504, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.1]
   [junit4]   2> 2653903 WARN  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.c.RequestHandlers 
INVALID paramSet a in requestHandler {type = requestHandler,name = /dump,class 
= DumpRequestHandler,attributes = {initParams=a, name=/dump, 
class=DumpRequestHandler},args = {defaults={a=A,b=B}}}
   [junit4]   2> 2653928 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.UpdateHandler Using 
UpdateLog implementation: org.apache.solr.update.UpdateLog
   [junit4]   2> 2653928 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 2653929 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.CommitTracker Hard 
AutoCommit: disabled
   [junit4]   2> 2653929 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.CommitTracker Soft 
AutoCommit: disabled
   [junit4]   2> 2653931 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.RandomMergePolicy 
RandomMergePolicy wrapping class org.apache.lucene.index.TieredMergePolicy: 
[TieredMergePolicy: maxMergeAtOnce=41, maxMergeAtOnceExplicit=23, 
maxMergedSegmentMB=55.8642578125, floorSegmentMB=0.7197265625, 
forceMergeDeletesPctAllowed=2.102139642005424, segmentsPerTier=16.0, 
maxCFSSegmentSizeMB=8.796093022207999E12, noCFSRatio=0.0]
   [junit4]   2> 2653932 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.s.SolrIndexSearcher 
Opening [Searcher@721e0b05[collection1] main]
   [junit4]   2> 2653934 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 2653935 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 2653935 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.h.ReplicationHandler 
Commits will be reserved for  10000
   [junit4]   2> 2653936 INFO  
(searcherExecutor-6517-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
c:collection1) [n:127.0.0.1:52506_ c:collection1   x:collection1] 
o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@721e0b05[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 2653937 INFO  
(coreLoadExecutor-6516-thread-1-processing-n:127.0.0.1:52506_) 
[n:127.0.0.1:52506_ c:collection1   x:collection1] o.a.s.u.UpdateLog Could not 
find max version in index or recent updates, using new clock 1557618367534202880
   [junit4]   2> 2653946 INFO  
(coreZkRegister-6511-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
c:collection1) [n:127.0.0.1:52506_ c:collection1 s:shard1 r:core_node3 
x:collection1] o.a.s.c.ZkController Core needs to recover:collection1
   [junit4]   2> 2653947 INFO  
(updateExecutor-2981-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState Running recovery
   [junit4]   2> 2653948 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Starting recovery process. 
recoveringAfterStartup=true
   [junit4]   2> 2653948 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy ###### startupVersions=[[]]
   [junit4]   2> 2653949 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Begin buffering updates. 
core=[collection1]
   [junit4]   2> 2653949 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting to buffer updates. 
FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 2653949 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Publishing state of core 
[collection1] as recovering, leader is [https://127.0.0.1:52478/collection1/] 
and I am [https://127.0.0.1:52506/collection1/]
   [junit4]   2> 2653952 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Sending prep recovery 
command to [https://127.0.0.1:52478]; [WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:52506_&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
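The prep-recovery command logged just above is an ordinary CoreAdmin request against the shard1 leader. Purely as an illustration (not the test's code), the same call could be reissued with SolrJ's generic request API; every parameter value below is copied from the logged WaitForState line, and the https URL assumes the test JVM's SSL setup.

    // Sketch: hand-issue the PREPRECOVERY call shown in the log above.
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.request.GenericSolrRequest;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class PrepRecoveryProbe {
      public static void main(String[] args) throws Exception {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("action", "PREPRECOVERY");     // all values copied from the logged request
        params.set("core", "collection1");
        params.set("nodeName", "127.0.0.1:52506_");
        params.set("coreNodeName", "core_node3");
        params.set("state", "recovering");
        params.set("checkLive", "true");
        params.set("onlyIfLeader", "true");
        params.set("onlyIfLeaderActive", "true");
        try (HttpSolrClient leader = new HttpSolrClient.Builder("https://127.0.0.1:52478").build()) {
          GenericSolrRequest req =
              new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/cores", params);
          System.out.println(req.process(leader).getResponse());
        }
      }
    }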
   [junit4]   2> 2653962 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, state: 
recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 2653963 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 2653963 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=down, localState=active, 
nodeName=127.0.0.1:52506_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:52506","node_name":"127.0.0.1:52506_","state":"down"}
   [junit4]   2> 2654101 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 2654101 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 
for each attempt
   [junit4]   2> 2654101 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: 
collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 2654971 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): collection=collection1, 
shard=shard1, thisCore=collection1, leaderDoesNotNeedRecovery=false, isLeader? 
true, live=true, checkLive=true, currentState=recovering, localState=active, 
nodeName=127.0.0.1:52506_, coreNodeName=core_node3, 
onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:52506","node_name":"127.0.0.1:52506_","state":"recovering"}
   [junit4]   2> 2654972 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_    ] 
o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: recovering, 
checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 2654972 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_    ] 
o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:52506_&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1009
   [junit4]   2> 2659793 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [https://127.0.0.1:52478/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 2659793 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=https://127.0.0.1:52496 START 
replicas=[https://127.0.0.1:52478/collection1/] nUpdates=1000
   [junit4]   2> 2659805 INFO  (qtp339668053-15688) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:1.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 2659805 INFO  (qtp339668053-15688) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=1
   [junit4]   2> 2659807 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 2659807 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
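The "already in sync" decision above falls out of comparing the two IndexFingerprint results a few lines up (identical maxInHash/versionsHash/numVersions on leader and replica). The leader-side fingerprint comes from the logged /get request; as a sketch only, it can be fetched by hand with SolrJ like this (URL and params copied from the log; the whole response NamedList is printed rather than assuming a key name):

    // Sketch: fetch the leader's index fingerprint the same way the logged
    // PeerSync /get request does.
    import org.apache.solr.client.solrj.impl.HttpSolrClient;
    import org.apache.solr.client.solrj.response.QueryResponse;
    import org.apache.solr.common.params.ModifiableSolrParams;

    public class FingerprintProbe {
      public static void main(String[] args) throws Exception {
        ModifiableSolrParams params = new ModifiableSolrParams();
        params.set("qt", "/get");
        params.set("distrib", "false");
        params.set("getFingerprint", Long.toString(Long.MAX_VALUE)); // 9223372036854775807, as logged
        try (HttpSolrClient core =
                 new HttpSolrClient.Builder("https://127.0.0.1:52478/collection1").build()) {
          QueryResponse rsp = core.query(params);
          System.out.println(rsp.getResponse()); // fingerprint is included in the response
        }
      }
    }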
   [junit4]   2> 2659807 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2659807 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 2659808 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2659808 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 2659808 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 2659808 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 2659808 INFO  
(recoveryExecutor-2975-thread-1-processing-n:127.0.0.1:52496_ x:collection1 
s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:52496_ c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 2661973 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Attempting to PeerSync 
from [https://127.0.0.1:52478/collection1/] - recoveringAfterStartup=[true]
   [junit4]   2> 2661973 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: core=collection1 
url=https://127.0.0.1:52506 START 
replicas=[https://127.0.0.1:52478/collection1/] nUpdates=1000
   [junit4]   2> 2661980 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 2661980 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 2661982 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.IndexFingerprint IndexFingerprint 
millis:1.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 2661982 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.PeerSync We are already in sync. No need to 
do a PeerSync 
   [junit4]   2> 2661982 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2661982 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted 
changes. Skipping IW.commit.
   [junit4]   2> 2661983 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2661983 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy PeerSync stage of recovery 
was successful.
   [junit4]   2> 2661983 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Replaying updates buffered 
during PeerSync.
   [junit4]   2> 2661983 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No replay needed.
   [junit4]   2> 2661983 INFO  
(recoveryExecutor-2982-thread-1-processing-n:127.0.0.1:52506_ x:collection1 
s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:52506_ c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy Registering as Active 
after recovery.
   [junit4]   2> 2662115 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 2662144 INFO  (qtp846221081-15648) [n:127.0.0.1:52473_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2662144 INFO  (qtp846221081-15648) [n:127.0.0.1:52473_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2662145 INFO  (qtp846221081-15648) [n:127.0.0.1:52473_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 2662145 INFO  (qtp846221081-15648) [n:127.0.0.1:52473_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 1
   [junit4]   2> 2662163 INFO  (qtp339668053-15687) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2662163 INFO  (qtp339668053-15687) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2662164 INFO  (qtp339668053-15687) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 2662164 INFO  (qtp339668053-15687) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:52478/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 2662185 INFO  (qtp463464142-15713) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2662185 INFO  (qtp463464142-15713) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2662185 INFO  (qtp878465336-15748) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 2662186 INFO  (qtp878465336-15748) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 2662186 INFO  (qtp463464142-15713) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 2662186 INFO  (qtp878465336-15748) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 2662186 INFO  (qtp878465336-15748) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:52478/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 2662186 INFO  (qtp463464142-15713) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:52478/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 2662186 INFO  (qtp339668053-15686) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 35
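The commit above fans out from the shard1 leader to both replicas (the FROMLEADER / commit_end_point=true entries). Client-side it is just an explicit hard commit; a minimal SolrJ equivalent, assuming the 6.x/7.x-era CloudSolrClient.Builder().withZkHost(...) API, would be:

    // Sketch: the explicit commit the harness issues above, expressed in SolrJ.
    // waitFlush/waitSearcher/softCommit match the logged parameters.
    import org.apache.solr.client.solrj.impl.CloudSolrClient;

    public class CommitProbe {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient cloud =
                 new CloudSolrClient.Builder().withZkHost("127.0.0.1:52443/solr").build()) {
          cloud.commit("collection1", true, true, false);
        }
      }
    }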
   [junit4]   2> 2662194 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 2662201 INFO  (qtp463464142-15716) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 2662210 INFO  (qtp878465336-15751) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp= path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
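The three rows=0 queries above are the harness's shard-consistency check: each core is queried directly with distrib=false and the hit counts are compared (all 0 here, since nothing has been indexed yet). A standalone sketch of the same probe, with the core URLs taken from the log and the test's SSL setup assumed:

    // Sketch: query each core directly (distrib=false) and compare hit counts,
    // as the logged consistency check does. Helper names are made up here.
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.impl.HttpSolrClient;

    public class ShardConsistencyProbe {
      static long directCount(String coreUrl) throws Exception {
        try (HttpSolrClient client = new HttpSolrClient.Builder(coreUrl).build()) {
          SolrQuery q = new SolrQuery("*:*");
          q.set("distrib", "false");
          q.setRows(0);
          return client.query(q).getResults().getNumFound();
        }
      }

      public static void main(String[] args) throws Exception {
        String[] cores = {
            "https://127.0.0.1:52478/collection1",
            "https://127.0.0.1:52496/collection1",
            "https://127.0.0.1:52506/collection1"};
        for (String core : cores) {
          System.out.println(core + " -> " + directCount(core));
        }
      }
    }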
   [junit4]   2> 2664218 INFO  (qtp846221081-15649) [n:127.0.0.1:52473_ 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1557618378312515584)} 0 3
   [junit4]   2> 2664227 INFO  (qtp463464142-15709) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&_version_=-1557618378316709888&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1557618378316709888)} 0 2
   [junit4]   2> 2664227 INFO  (qtp878465336-15744) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&_version_=-1557618378316709888&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1557618378316709888)} 0 2
   [junit4]   2> 2664227 INFO  (qtp339668053-15684) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1557618378316709888)} 0 7
   [junit4]   2> 2664238 INFO  (qtp878465336-15746) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[0
 (1557618378330341376)]} 0 2
   [junit4]   2> 2664238 INFO  (qtp463464142-15711) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[0
 (1557618378330341376)]} 0 3
   [junit4]   2> 2664239 INFO  (qtp339668053-15688) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[0 (1557618378330341376)]} 0 6
   [junit4]   2> 2664241 INFO  (qtp463464142-15712) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[1
 (1557618378337681408)]} 0 0
   [junit4]   2> 2664241 INFO  (qtp878465336-15747) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[1
 (1557618378337681408)]} 0 0
   [junit4]   2> 2664242 INFO  (qtp339668053-15687) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[1 (1557618378337681408)]} 0 2
   [junit4]   2> 2664243 INFO  (qtp463464142-15713) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[2
 (1557618378339778560)]} 0 0
   [junit4]   2> 2664243 INFO  (qtp878465336-15748) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[2
 (1557618378339778560)]} 0 0
   [junit4]   2> 2664244 INFO  (qtp339668053-15686) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[2 (1557618378339778560)]} 0 1
   [junit4]   2> 2664253 INFO  (qtp878465336-15750) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[3
 (1557618378342924288)]} 0 0
   [junit4]   2> 2664253 INFO  (qtp463464142-15715) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[3
 (1557618378342924288)]} 0 2
   [junit4]   2> 2664254 INFO  (qtp339668053-15681) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[3 (1557618378342924288)]} 0 8
   [junit4]   2> 2664256 INFO  (qtp463464142-15714) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[4
 (1557618378353410048)]} 0 0
   [junit4]   2> 2664256 INFO  (qtp878465336-15750) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[4
 (1557618378353410048)]} 0 0
   [junit4]   2> 2664258 INFO  (qtp339668053-15683) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[4 (1557618378353410048)]} 0 3
   [junit4]   2> 2664260 INFO  (qtp463464142-15716) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[5
 (1557618378357604352)]} 0 0
   [junit4]   2> 2664262 INFO  (qtp878465336-15751) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[5
 (1557618378357604352)]} 0 0
   [junit4]   2> 2664263 INFO  (qtp339668053-15685) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[5 (1557618378357604352)]} 0 4
   [junit4]   2> 2664265 INFO  (qtp878465336-15751) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[6
 (1557618378362847232)]} 0 0
   [junit4]   2> 2664266 INFO  (qtp463464142-15716) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[6
 (1557618378362847232)]} 0 0
   [junit4]   2> 2664266 INFO  (qtp339668053-15684) [n:127.0.0.1:52478_ 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={wt=javabin&version=2}{add=[6 (1557618378362847232)]} 0 2
   [junit4]   2> 2664269 INFO  (qtp463464142-15711) [n:127.0.0.1:52496_ 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:52478/collection1/&wt=javabin&version=2}{add=[7
 (1557618378365992960)]} 0 0
   [junit4]   2> 2664270 INFO  (qtp878465336-15751) [n:127.0.0.1:52506_ 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp= pa

[...truncated too long message...]
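Before the truncation the log shows the test seeding the collection: a deleteByQuery *:* followed by adds of ids 0, 1, 2, ... routed through the shard1 leader and forwarded FROMLEADER to both replicas. In SolrJ terms the seeding amounts to roughly the following (a sketch, not the test's code; the document count and the withZkHost builder call are assumptions):

    // Sketch: clear the collection and index a handful of seed documents the
    // way the logged adds do, via the cluster-aware client.
    import org.apache.solr.client.solrj.impl.CloudSolrClient;
    import org.apache.solr.common.SolrInputDocument;

    public class SeedDocs {
      public static void main(String[] args) throws Exception {
        try (CloudSolrClient cloud =
                 new CloudSolrClient.Builder().withZkHost("127.0.0.1:52443/solr").build()) {
          cloud.setDefaultCollection("collection1");
          cloud.deleteByQuery("*:*");                  // mirrors the logged deleteByQuery=*:*
          for (int i = 0; i < 8; i++) {                // ids 0..7 appear before the truncation
            SolrInputDocument doc = new SolrInputDocument();
            doc.addField("id", Integer.toString(i));
            cloud.add(doc);
          }
          cloud.commit();
        }
      }
    }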

2855786 INFO  (TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    
] o.a.s.c.ChaosMonkey monkey: stop shard! 52506
   [junit4]   2> 2855788 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[A0E5E2CB84D8E147]) [    ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1:52443 52443
   [junit4]   2> 2855805 INFO  (Thread-2880) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1:52443 52443
   [junit4]   2> 2860650 WARN  (Thread-2880) [    ] o.a.s.c.ZkTestServer Watch 
limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/aliases.json
   [junit4]   2>        5       /solr/security.json
   [junit4]   2>        5       /solr/configs/conf1
   [junit4]   2>        4       /solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/clusterstate.json
   [junit4]   2>        6       /solr/clusterprops.json
   [junit4]   2>        2       
/solr/overseer_elect/election/97351147139235849-127.0.0.1:52478_-n_0000000001
   [junit4]   2>        2       
/solr/collections/collection1/leader_elect/shard1/election/97351147139235849-core_node1-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2>        209     /solr/overseer/collection-queue-work
   [junit4]   2>        46      /solr/overseer/queue
   [junit4]   2>        6       /solr/collections
   [junit4]   2>        6       /solr/overseer/queue-work
   [junit4]   2>        5       /solr/live_nodes
   [junit4]   2> 
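The watch-limit report above is ZkTestServer listing, per znode path, the peak number of simultaneous ZooKeeper watches that went over its configured limit during the run. For orientation only, this is what registering one such children watch looks like with the plain ZooKeeper client (ZK address and path are from the log; none of this is Solr code):

    // Sketch: register a single children watch of the kind being counted above.
    // The watch stays "concurrent" until it fires or the session ends.
    import java.util.concurrent.CountDownLatch;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooKeeper;

    public class WatchDemo {
      public static void main(String[] args) throws Exception {
        CountDownLatch connected = new CountDownLatch(1);
        ZooKeeper zk = new ZooKeeper("127.0.0.1:52443", 10000, event -> {
          if (event.getState() == Watcher.Event.KeeperState.SyncConnected) connected.countDown();
        });
        connected.await();
        // One live children watch on the overseer work queue until it fires.
        zk.getChildren("/solr/overseer/collection-queue-work",
            event -> System.out.println("children changed: " + event));
        Thread.sleep(5000);
        zk.close();
      }
    }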
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test 
-Dtests.seed=A0E5E2CB84D8E147 -Dtests.slow=true -Dtests.locale=nn-NO 
-Dtests.timezone=Australia/Broken_Hill -Dtests.asserts=true 
-Dtests.file.encoding=US-ASCII
   [junit4] FAILURE  219s J1 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: timeout waiting to see 
all nodes active
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([A0E5E2CB84D8E147:28B1DD112A248CBF]:0)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    >        at java.lang.Thread.run(Thread.java:745)
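The assertion itself comes from waitTillNodesActive() (PeerSyncReplicationTest.java:326), which keeps polling the cluster state for every shard1 replica of collection1 to be ACTIVE on a live node and gives up when its timeout expires; the restarted node evidently never got there. For anyone poking at the cluster outside the test, an equivalent poll looks roughly like the sketch below (the 60-second budget, class name and 500 ms interval are made up; only the collection/shard names and ZK address come from the log):

    // Sketch: poll cluster state until all shard1 replicas of collection1 are
    // ACTIVE and live, the condition the failing waitTillNodesActive() checks.
    import java.util.Set;
    import java.util.concurrent.TimeUnit;
    import org.apache.solr.common.cloud.ClusterState;
    import org.apache.solr.common.cloud.Replica;
    import org.apache.solr.common.cloud.ZkStateReader;

    public class WaitForActive {
      public static void main(String[] args) throws Exception {
        ZkStateReader reader = new ZkStateReader("127.0.0.1:52443/solr", 10000, 10000);
        try {
          reader.createClusterStateWatchersAndUpdate();
          long deadline = System.nanoTime() + TimeUnit.SECONDS.toNanos(60); // budget is arbitrary
          while (System.nanoTime() < deadline) {
            ClusterState state = reader.getClusterState();
            Set<String> live = state.getLiveNodes();
            boolean allActive = state.getCollection("collection1")
                .getSlice("shard1").getReplicas().stream()
                .allMatch(r -> r.getState() == Replica.State.ACTIVE
                            && live.contains(r.getNodeName()));
            if (allActive) {
              System.out.println("all replicas active");
              return;
            }
            Thread.sleep(500);
          }
          System.out.println("timeout waiting to see all nodes active");
        } finally {
          reader.close();
        }
      }
    }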
   [junit4]   2> 2860694 INFO  
(SUITE-PeerSyncReplicationTest-seed#[A0E5E2CB84D8E147]-worker) [    ] 
o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: 
/Users/jenkins/workspace/Lucene-Solr-master-MacOSX/solr/build/solr-core/test/J1/temp/solr.cloud.PeerSyncReplicationTest_A0E5E2CB84D8E147-001
   [junit4]   2> NOTE: test params are: codec=Asserting(Lucene70): 
{other_tl1=PostingsFormat(name=Memory), range_facet_l_dv=FSTOrd50, 
rnd_s=PostingsFormat(name=LuceneVarGapDocFreqInterval), 
multiDefault=PostingsFormat(name=LuceneVarGapDocFreqInterval), 
a_t=PostingsFormat(name=Memory), intDefault=PostingsFormat(name=Memory), 
a_i1=PostingsFormat(name=Memory), id=FSTOrd50, 
range_facet_i_dv=PostingsFormat(name=Memory), text=Lucene50(blocksize=128), 
range_facet_l=PostingsFormat(name=Memory), 
timestamp=PostingsFormat(name=Memory)}, 
docValues:{range_facet_l_dv=DocValuesFormat(name=Lucene70), 
_version_=DocValuesFormat(name=Memory), 
range_facet_i_dv=DocValuesFormat(name=Memory), 
intDvoDefault=DocValuesFormat(name=Direct), 
timestamp=DocValuesFormat(name=Memory)}, maxPointsInLeafNode=1706, 
maxMBSortInHeap=5.210213964200542, sim=RandomSimilarity(queryNorm=false): {}, 
locale=nn-NO, timezone=Australia/Broken_Hill
   [junit4]   2> NOTE: Mac OS X 10.11.6 x86_64/Oracle Corporation 1.8.0_121 
(64-bit)/cpus=3,threads=1,free=224744368,total=530579456
   [junit4]   2> NOTE: All tests run in this JVM: [TestHdfsUpdateLog, 
TemplateUpdateProcessorTest, TestOverriddenPrefixQueryForCustomFieldType, 
TestSolr4Spatial, ExternalFileFieldSortTest, ReturnFieldsTest, 
SuggesterWFSTTest, ResponseLogComponentTest, TestManagedResource, 
InfoHandlerTest, PreAnalyzedUpdateProcessorTest, 
TestOnReconnectListenerSupport, IgnoreCommitOptimizeUpdateProcessorFactoryTest, 
TestPHPSerializedResponseWriter, DateRangeFieldTest, BlockJoinFacetDistribTest, 
TestDistributedMissingSort, TestDocBasedVersionConstraints, BooleanFieldTest, 
TestMinMaxOnMultiValuedField, TestLuceneMatchVersion, TestBinaryResponseWriter, 
TestFoldingMultitermQuery, TestJsonFacetRefinement, 
TestSolrCloudWithKerberosAlt, HdfsUnloadDistributedZkTest, 
TestDistributedGrouping, MissingSegmentRecoveryTest, 
TestExclusionRuleCollectionAccess, TestOmitPositions, 
LeaderInitiatedRecoveryOnShardRestartTest, TestStressUserVersions, 
HighlighterConfigTest, SSLMigrationTest, StatsReloadRaceTest, 
TestSolrQueryParser, SortSpecParsingTest, MinimalSchemaTest, 
RequestHandlersTest, TestReversedWildcardFilterFactory, 
TestSortByMinMaxFunction, TestDelegationWithHadoopAuth, TestCrossCoreJoin, 
ConfigSetsAPITest, SolrJmxReporterTest, TestSuggestSpellingConverter, 
SolrCoreMetricManagerTest, TestRestManager, RegexBoostProcessorTest, 
LukeRequestHandlerTest, QueryResultKeyTest, TolerantUpdateProcessorTest, 
SignatureUpdateProcessorFactoryTest, TestEmbeddedSolrServerConstructors, 
OverseerTaskQueueTest, TestExportWriter, TestWriterPerf, TestFieldCacheSort, 
TestDefaultStatsCache, TestSizeLimitedDistributedMap, 
DistributedFacetPivotLargeTest, ShowFileRequestHandlerTest, 
CloneFieldUpdateProcessorFactoryTest, TestCodecSupport, CollectionReloadTest, 
TestCorePropertiesReload, TestHashQParserPlugin, TestCollectionAPI, 
TermVectorComponentDistributedTest, TestFiltering, TestGraphMLResponseWriter, 
TestRuleBasedAuthorizationPlugin, MergeStrategyTest, TestFilteredDocIdSet, 
TestFieldCacheVsDocValues, TestSimpleQParserPlugin, TestSegmentSorting, 
ClassificationUpdateProcessorIntegrationTest, TestRTimerTree, 
DebugComponentTest, BasicAuthStandaloneTest, AutoCommitTest, QueryParsingTest, 
TestReplicaProperties, AddBlockUpdateTest, TestConfigSetsAPIExclusivity, 
DistributedQueryComponentOptimizationTest, SolrXmlInZkTest, TestSearchPerf, 
JavabinLoaderTest, URLClassifyProcessorTest, RulesTest, 
ChaosMonkeySafeLeaderTest, TestSolrDynamicMBean, BlockDirectoryTest, 
TriLevelCompositeIdRoutingTest, TestReloadDeadlock, JsonLoaderTest, 
TestCustomSort, TestJsonRequest, TestPushWriter, TestInPlaceUpdatesStandalone, 
TestRestoreCore, TestPointFields, UUIDFieldTest, HdfsBasicDistributedZk2Test, 
TestExactSharedStatsCache, TestFuzzyAnalyzedSuggestions, 
SpellPossibilityIteratorTest, FullHLLTest, TestFieldCacheReopen, 
TestLegacyNumericRangeQueryBuilder, DistributedFacetPivotSmallAdvancedTest, 
NotRequiredUniqueKeyTest, TestCryptoKeys, HdfsRecoverLeaseTest, 
TestStressLiveNodes, HdfsThreadLeakTest, 
SuggestComponentContextFilterQueryTest, UpdateParamsTest, JSONWriterTest, 
ScriptEngineTest, DistribJoinFromCollectionTest, 
TestSimpleTrackingShardHandler, TestLMJelinekMercerSimilarityFactory, 
TestRandomCollapseQParserPlugin, HLLUtilTest, RecoveryAfterSoftCommitTest, 
TestAuthorizationFramework, DirectUpdateHandlerOptimizeTest, 
TestSchemaVersionResource, OpenCloseCoreStressTest, TestJmxMonitoredMap, 
ForceLeaderTest, TestCloudDeleteByQuery, CollectionTooManyReplicasTest, 
TestStressCloudBlindAtomicUpdates, DeleteNodeTest, IndexSchemaTest, 
TestSolrFieldCacheMBean, TestStressRecovery, FileBasedSpellCheckerTest, 
TestShardHandlerFactory, TestDistribDocBasedVersion, TestConfigSetProperties, 
SimpleMLTQParserTest, AliasIntegrationTest, StressHdfsTest, 
TestSubQueryTransformerCrossCore, FieldAnalysisRequestHandlerTest, 
TestAnalyzedSuggestions, TestSolrCoreProperties, DistribCursorPagingTest, 
SpellCheckCollatorTest, TestSolrDeletionPolicy2, BadIndexSchemaTest, 
TestHighFrequencyDictionaryFactory, TestTestInjection, ResourceLoaderTest, 
CircularListTest, CdcrReplicationHandlerTest, CdcrVersionReplicationTest, 
BigEndianAscendingWordDeserializerTest, TestSolrConfigHandlerConcurrent, 
BasicDistributedZk2Test, CollectionsAPIDistributedZkTest, 
LeaderElectionIntegrationTest, ClusterStateUpdateTest, TestRandomFaceting, 
ShardRoutingCustomTest, TestDistributedSearch, ZkControllerTest, 
TestStressReorder, TestJoin, TestStressVersions, HardAutoCommitTest, 
TestRangeQuery, SuggesterTSTTest, SuggesterTest, TestTrie, NoCacheHeaderTest, 
WordBreakSolrSpellCheckerTest, SolrCoreCheckLockOnStartupTest, 
FieldMutatingUpdateProcessorTest, TestAtomicUpdateErrorCases, 
DefaultValueUpdateProcessorTest, SortByFunctionTest, TestRemoteStreaming, 
DocValuesMultiTest, DistanceFunctionTest, SolrInfoMBeanTest, TestQueryUtils, 
DirectSolrSpellCheckerTest, PrimitiveFieldTypeTest, 
XmlUpdateRequestHandlerTest, PathHierarchyTokenizerFactoryTest, 
TestIndexingPerformance, FastVectorHighlighterTest, LoggingHandlerTest, 
TestCollationField, TestCSVResponseWriter, BinaryUpdateRequestHandlerTest, 
PingRequestHandlerTest, TestPhraseSuggestions, TestXIncludeConfig, 
TestSweetSpotSimilarityFactory, TestLMDirichletSimilarityFactory, 
TestFastWriter, TestFastLRUCache, PreAnalyzedFieldTest, DateFieldTest, 
ClusterStateTest, TestUtils, TestRTGBase, CursorPagingTest, 
CdcrRequestHandlerTest, CleanupOldIndexTest, CloudExitableDirectoryReaderTest, 
CollectionStateFormat2Test, CollectionsAPISolrJTest, DeleteInactiveReplicaTest, 
DistributedQueueTest, DocValuesNotIndexedTest, HttpPartitionTest, 
OverseerCollectionConfigSetProcessorTest, OverseerRolesTest, 
PeerSyncReplicationTest]
   [junit4] Completed [506/684 (1!)] on J1 in 219.49s, 1 test, 1 failure <<< 
FAILURES!

[...truncated 63041 lines...]

---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscr...@lucene.apache.org
For additional commands, e-mail: dev-h...@lucene.apache.org
