Build: https://jenkins.thetaphi.de/job/Lucene-Solr-master-Linux/18920/
Java: 64bit/jdk1.8.0_121 -XX:-UseCompressedOops -XX:+UseParallelGC

1 tests failed.
FAILED:  org.apache.solr.cloud.PeerSyncReplicationTest.test

Error Message:
timeout waiting to see all nodes active

Stack Trace:
java.lang.AssertionError: timeout waiting to see all nodes active
        at 
__randomizedtesting.SeedInfo.seed([8C195BFE53CE2548:44D6424FD3248B0]:0)
        at org.junit.Assert.fail(Assert.java:93)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1713)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$8.evaluate(RandomizedRunner.java:907)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:943)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:957)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:49)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:48)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:817)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:468)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:916)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:802)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:852)
        at 
com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:863)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule$1.evaluate(SystemPropertiesRestoreRule.java:57)
        at 
org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:45)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:41)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:40)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:53)
        at 
org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:47)
        at 
org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:64)
        at 
org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:54)
        at 
com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
        at 
com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:368)
        at java.lang.Thread.run(Thread.java:745)




Build Log:
[...truncated 12407 lines...]
   [junit4] Suite: org.apache.solr.cloud.PeerSyncReplicationTest
   [junit4]   2> Creating dataDir: 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/init-core-data-001
   [junit4]   2> 1247993 INFO  
(SUITE-PeerSyncReplicationTest-seed#[8C195BFE53CE2548]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Using TrieFields
   [junit4]   2> 1247993 INFO  
(SUITE-PeerSyncReplicationTest-seed#[8C195BFE53CE2548]-worker) [    ] 
o.a.s.SolrTestCaseJ4 Randomized ssl (true) and clientAuth (false) via: 
@org.apache.solr.util.RandomizeSSL(reason=, ssl=NaN, value=NaN, clientAuth=NaN)
   [junit4]   2> 1247993 INFO  
(SUITE-PeerSyncReplicationTest-seed#[8C195BFE53CE2548]-worker) [    ] 
o.a.s.BaseDistributedSearchTestCase Setting hostContext system property: /fsx/gp
   [junit4]   2> 1247995 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkTestServer STARTING ZK TEST SERVER
   [junit4]   2> 1247995 INFO  (Thread-2095) [    ] o.a.s.c.ZkTestServer client 
port:0.0.0.0/0.0.0.0:0
   [junit4]   2> 1247995 INFO  (Thread-2095) [    ] o.a.s.c.ZkTestServer 
Starting server
   [junit4]   2> 1248095 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkTestServer start zk server on port:33809
   [junit4]   2> 1248100 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig-tlog.xml
 to /configs/conf1/solrconfig.xml
   [junit4]   2> 1248101 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/schema.xml
 to /configs/conf1/schema.xml
   [junit4]   2> 1248102 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/solrconfig.snippet.randomindexconfig.xml
 to /configs/conf1/solrconfig.snippet.randomindexconfig.xml
   [junit4]   2> 1248103 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/stopwords.txt
 to /configs/conf1/stopwords.txt
   [junit4]   2> 1248104 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/protwords.txt
 to /configs/conf1/protwords.txt
   [junit4]   2> 1248104 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/currency.xml
 to /configs/conf1/currency.xml
   [junit4]   2> 1248105 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml
 to /configs/conf1/enumsConfig.xml
   [junit4]   2> 1248105 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/open-exchange-rates.json
 to /configs/conf1/open-exchange-rates.json
   [junit4]   2> 1248106 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/mapping-ISOLatin1Accent.txt
 to /configs/conf1/mapping-ISOLatin1Accent.txt
   [junit4]   2> 1248106 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/old_synonyms.txt
 to /configs/conf1/old_synonyms.txt
   [junit4]   2> 1248107 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractZkTestCase put 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/core/src/test-files/solr/collection1/conf/synonyms.txt
 to /configs/conf1/synonyms.txt
   [junit4]   2> 1248214 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/control-001/cores/collection1
   [junit4]   2> 1248228 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1248228 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@3dbc09c5{/fsx/gp,null,AVAILABLE}
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@7f18c151{SSL,[ssl, 
http/1.1]}{127.0.0.1:33170}
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server Started @1250260ms
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/tempDir-001/control/data,
 hostContext=/fsx/gp, hostPort=33170, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/control-001/cores}
   [junit4]   2> 1248231 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1248231 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-02-07T17:17:21.924Z
   [junit4]   2> 1248233 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1248233 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/control-001/solr.xml
   [junit4]   2> 1248239 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33809/solr
   [junit4]   2> 1248247 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.OverseerElectionContext I am going to 
be the leader 127.0.0.1:33170_fsx%2Fgp
   [junit4]   2> 1248248 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.Overseer Overseer 
(id=97418467192930309-127.0.0.1:33170_fsx%2Fgp-n_0000000000) starting
   [junit4]   2> 1248251 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:33170_fsx%2Fgp
   [junit4]   2> 1248252 INFO  
(zkCallback-3260-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (1)
   [junit4]   2> 1248382 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Found 1 core 
definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/control-001/cores
   [junit4]   2> 1248382 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Cores are: 
[collection1]
   [junit4]   2> 1248384 INFO  
(OverseerStateUpdate-97418467192930309-127.0.0.1:33170_fsx%2Fgp-n_0000000000) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.o.ReplicaMutator Assigning new node to 
shard shard=shard1
   [junit4]   2> 1249393 WARN  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.c.Config Beginning with Solr 5.5, <mergePolicy> is deprecated, use 
<mergePolicyFactory> instead.
   [junit4]   2> 1249394 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.c.SolrConfig Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1249410 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.s.IndexSchema [collection1] Schema name=test
   [junit4]   2> 1249509 WARN  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.s.IndexSchema [collection1] default search field in schema is text. 
WARNING: Deprecated, please use 'df' on request instead.
   [junit4]   2> 1249511 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.s.IndexSchema Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1249517 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection control_collection
   [junit4]   2> 1249517 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.c.SolrCore [[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/control-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/control-001/cores/collection1/data/]
   [junit4]   2> 1249517 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@1e0f8973
   [junit4]   2> 1249519 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=30, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1252673 WARN  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 1252706 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.UpdateLog
   [junit4]   2> 1252706 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.UpdateLog Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH 
numRecordsToKeep=1000 maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1252707 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1252707 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1252707 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: 
minMergeSize=1000, mergeFactor=50, maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.5283481440429358]
   [junit4]   2> 1252708 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@413be660[collection1] main]
   [junit4]   2> 1252709 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1252709 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1252709 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1252710 INFO  
(searcherExecutor-5163-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection   x:collection1] o.a.s.c.SolrCore [collection1] Registered 
new searcher Searcher@413be660[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1252711 INFO  
(coreLoadExecutor-5162-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection   x:collection1] 
o.a.s.u.UpdateLog Could not find max version in index or recent updates, using 
new clock 1558695480030920704
   [junit4]   2> 1252715 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext Enough replicas found to continue.
   [junit4]   2> 1252715 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext I may be the new leader - try and sync
   [junit4]   2> 1252715 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
Sync replicas to https://127.0.0.1:33170/fsx/gp/collection1/
   [junit4]   2> 1252715 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
Sync Success - now sync replicas to me
   [junit4]   2> 1252715 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
https://127.0.0.1:33170/fsx/gp/collection1/ has no replicas
   [junit4]   2> 1252716 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] 
o.a.s.c.ShardLeaderElectionContext I am the new leader: 
https://127.0.0.1:33170/fsx/gp/collection1/ shard1
   [junit4]   2> 1252808 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (0) -> (1)
   [junit4]   2> 1252809 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.s.i.ZkClientClusterStateProvider Cluster at 127.0.0.1:33809/solr ready
   [junit4]   2> 1252809 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ChaosMonkey monkey: init - expire sessions:false cause connection 
loss:false
   [junit4]   2> 1252867 INFO  
(coreZkRegister-5155-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp 
x:collection1 c:control_collection) [n:127.0.0.1:33170_fsx%2Fgp 
c:control_collection s:shard1 r:core_node1 x:collection1] o.a.s.c.ZkController 
I am the leader, no recovery necessary
   [junit4]   2> 1252873 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001/cores/collection1
   [junit4]   2> 1252873 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 1 in directory 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001
   [junit4]   2> 1252875 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1252876 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@1c6198d1{/fsx/gp,null,AVAILABLE}
   [junit4]   2> 1252876 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@38aeec77{SSL,[ssl, 
http/1.1]}{127.0.0.1:41176}
   [junit4]   2> 1252876 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server Started @1254906ms
   [junit4]   2> 1252876 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/tempDir-001/jetty1,
 solrconfig=solrconfig.xml, hostContext=/fsx/gp, hostPort=41176, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001/cores}
   [junit4]   2> 1252877 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1252877 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 1252877 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1252877 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1252877 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-02-07T17:17:26.570Z
   [junit4]   2> 1252880 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1252880 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001/solr.xml
   [junit4]   2> 1252898 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33809/solr
   [junit4]   2> 1252902 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (1)
   [junit4]   2> 1252904 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:41176_fsx%2Fgp
   [junit4]   2> 1252905 INFO  (zkCallback-3264-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (1) -> (2)
   [junit4]   2> 1252905 INFO  
(zkCallback-3260-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (1) -> (2)
   [junit4]   2> 1252906 INFO  
(zkCallback-3269-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (1) -> (2)
   [junit4]   2> 1252953 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Found 1 core 
definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001/cores
   [junit4]   2> 1252953 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Cores are: 
[collection1]
   [junit4]   2> 1252955 INFO  
(OverseerStateUpdate-97418467192930309-127.0.0.1:33170_fsx%2Fgp-n_0000000000) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.o.ReplicaMutator Assigning new node to 
shard shard=shard1
   [junit4]   2> 1253962 WARN  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 1253963 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1253975 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1254066 WARN  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1254068 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1254074 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection collection1
   [junit4]   2> 1254074 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-1-001/cores/collection1/data/]
   [junit4]   2> 1254074 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@1e0f8973
   [junit4]   2> 1254075 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=30, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1254106 WARN  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 1254132 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.UpdateLog
   [junit4]   2> 1254132 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1254132 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1254132 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1254133 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: 
minMergeSize=1000, mergeFactor=50, maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.5283481440429358]
   [junit4]   2> 1254133 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@581ec6c9[collection1] main]
   [junit4]   2> 1254134 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1254134 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1254134 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1254135 INFO  
(searcherExecutor-5174-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1   
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@581ec6c9[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1254135 INFO  
(coreLoadExecutor-5173-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1558695481524092928
   [junit4]   2> 1254140 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext Enough replicas 
found to continue.
   [junit4]   2> 1254140 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I may be the new 
leader - try and sync
   [junit4]   2> 1254140 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync replicas to 
https://127.0.0.1:41176/fsx/gp/collection1/
   [junit4]   2> 1254140 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy Sync Success - now sync 
replicas to me
   [junit4]   2> 1254140 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.SyncStrategy 
https://127.0.0.1:41176/fsx/gp/collection1/ has no replicas
   [junit4]   2> 1254141 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.ShardLeaderElectionContext I am the new 
leader: https://127.0.0.1:41176/fsx/gp/collection1/ shard1
   [junit4]   2> 1254292 INFO  
(coreZkRegister-5168-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:41176_fsx%2Fgp c:collection1 s:shard1 
r:core_node1 x:collection1] o.a.s.c.ZkController I am the leader, no recovery 
necessary
   [junit4]   2> 1254434 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001/cores/collection1
   [junit4]   2> 1254434 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 2 in directory 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001
   [junit4]   2> 1254436 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1254436 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@7617665e{/fsx/gp,null,AVAILABLE}
   [junit4]   2> 1254437 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@52429fc6{SSL,[ssl, 
http/1.1]}{127.0.0.1:42058}
   [junit4]   2> 1254437 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server Started @1256466ms
   [junit4]   2> 1254437 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/tempDir-001/jetty2,
 solrconfig=solrconfig.xml, hostContext=/fsx/gp, hostPort=42058, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001/cores}
   [junit4]   2> 1254437 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1254437 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 1254437 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1254438 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1254438 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-02-07T17:17:28.131Z
   [junit4]   2> 1254440 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1254440 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001/solr.xml
   [junit4]   2> 1254445 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33809/solr
   [junit4]   2> 1254449 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (2)
   [junit4]   2> 1254452 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:42058_fsx%2Fgp
   [junit4]   2> 1254453 INFO  
(zkCallback-3275-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (3)
   [junit4]   2> 1254453 INFO  
(zkCallback-3260-thread-1-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (3)
   [junit4]   2> 1254453 INFO  (zkCallback-3264-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (2) -> (3)
   [junit4]   2> 1254453 INFO  
(zkCallback-3269-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (2) -> (3)
   [junit4]   2> 1254526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Found 1 core 
definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001/cores
   [junit4]   2> 1254526 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Cores are: 
[collection1]
   [junit4]   2> 1254527 INFO  
(OverseerStateUpdate-97418467192930309-127.0.0.1:33170_fsx%2Fgp-n_0000000000) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.o.ReplicaMutator Assigning new node to 
shard shard=shard1
   [junit4]   2> 1255539 WARN  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 1255541 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1255561 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1255701 WARN  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1255703 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1255711 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection collection1
   [junit4]   2> 1255712 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-2-001/cores/collection1/data/]
   [junit4]   2> 1255712 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@1e0f8973
   [junit4]   2> 1255714 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=30, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1255766 WARN  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 1255803 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.UpdateLog
   [junit4]   2> 1255803 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1255804 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1255804 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1255804 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: 
minMergeSize=1000, mergeFactor=50, maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.5283481440429358]
   [junit4]   2> 1255805 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@3ffde539[collection1] main]
   [junit4]   2> 1255806 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1255807 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1255807 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1255809 INFO  
(searcherExecutor-5185-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:42058_fsx%2Fgp c:collection1   
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@3ffde539[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1255809 INFO  
(coreLoadExecutor-5184-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1558695483279409152
   [junit4]   2> 1255813 INFO  
(coreZkRegister-5179-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:42058_fsx%2Fgp c:collection1 s:shard1 
r:core_node2 x:collection1] o.a.s.c.ZkController Core needs to 
recover:collection1
   [junit4]   2> 1255814 INFO  
(updateExecutor-3272-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DefaultSolrCoreState 
Running recovery
   [junit4]   2> 1255814 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 1255814 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
###### startupVersions=[[]]
   [junit4]   2> 1255814 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Begin buffering updates. core=[collection1]
   [junit4]   2> 1255814 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.UpdateLog Starting 
to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 1255814 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Publishing state of core [collection1] as recovering, leader is 
[https://127.0.0.1:41176/fsx/gp/collection1/] and I am 
[https://127.0.0.1:42058/fsx/gp/collection1/]
   [junit4]   2> 1255816 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Sending prep recovery command to [https://127.0.0.1:41176/fsx/gp]; 
[WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:42058_fsx%252Fgp&coreNodeName=core_node2&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 1255842 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node2, 
state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 1255844 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 1255844 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): 
collection=collection1, shard=shard1, thisCore=collection1, 
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, 
currentState=down, localState=active, nodeName=127.0.0.1:42058_fsx%2Fgp, 
coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:42058/fsx/gp","node_name":"127.0.0.1:42058_fsx%2Fgp","state":"down"}
   [junit4]   2> 1256089 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.SolrTestCaseJ4 Writing core.properties file to 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001/cores/collection1
   [junit4]   2> 1256090 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase create jetty 3 in directory 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001
   [junit4]   2> 1256092 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server jetty-9.3.14.v20161028
   [junit4]   2> 1256093 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.h.ContextHandler Started 
o.e.j.s.ServletContextHandler@4ac1270a{/fsx/gp,null,AVAILABLE}
   [junit4]   2> 1256094 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.AbstractConnector Started ServerConnector@2d2baf1a{SSL,[ssl, 
http/1.1]}{127.0.0.1:39028}
   [junit4]   2> 1256094 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.Server Started @1258124ms
   [junit4]   2> 1256096 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.s.e.JettySolrRunner Jetty properties: 
{solr.data.dir=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/tempDir-001/jetty3,
 solrconfig=solrconfig.xml, hostContext=/fsx/gp, hostPort=39028, 
coreRootDirectory=/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001/cores}
   [junit4]   2> 1256096 ERROR 
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.StartupLoggingUtils Missing Java Option solr.log.dir. Logging may be 
missing or incomplete.
   [junit4]   2> 1256097 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter  ___      _       Welcome to Apache Solr™ version 
7.0.0
   [junit4]   2> 1256097 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter / __| ___| |_ _   Starting in cloud mode on port null
   [junit4]   2> 1256097 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter \__ \/ _ \ | '_|  Install dir: null
   [junit4]   2> 1256097 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter |___/\___/_|_|    Start time: 
2017-02-07T17:17:29.790Z
   [junit4]   2> 1256101 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.s.SolrDispatchFilter Loading solr.xml from SolrHome (not found in 
ZooKeeper)
   [junit4]   2> 1256101 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.SolrXmlConfig Loading container configuration from 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001/solr.xml
   [junit4]   2> 1256127 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkContainer Zookeeper client=127.0.0.1:33809/solr
   [junit4]   2> 1256136 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:39028_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (0) -> (3)
   [junit4]   2> 1256141 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:39028_fsx%2Fgp    ] o.a.s.c.ZkController Register node as live in 
ZooKeeper:/live_nodes/127.0.0.1:39028_fsx%2Fgp
   [junit4]   2> 1256142 INFO  (zkCallback-3264-thread-1) [    ] 
o.a.s.c.c.ZkStateReader Updated live nodes from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256142 INFO  
(zkCallback-3275-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256142 INFO  
(zkCallback-3269-thread-1-processing-n:127.0.0.1:41176_fsx%2Fgp) 
[n:127.0.0.1:41176_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256142 INFO  
(zkCallback-3260-thread-3-processing-n:127.0.0.1:33170_fsx%2Fgp) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256144 INFO  
(zkCallback-3282-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader Updated live nodes 
from ZooKeeper... (3) -> (4)
   [junit4]   2> 1256327 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:39028_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Found 1 core 
definitions underneath 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001/cores
   [junit4]   2> 1256327 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) 
[n:127.0.0.1:39028_fsx%2Fgp    ] o.a.s.c.CorePropertiesLocator Cores are: 
[collection1]
   [junit4]   2> 1256329 INFO  
(OverseerStateUpdate-97418467192930309-127.0.0.1:33170_fsx%2Fgp-n_0000000000) 
[n:127.0.0.1:33170_fsx%2Fgp    ] o.a.s.c.o.ReplicaMutator Assigning new node to 
shard shard=shard1
   [junit4]   2> 1256844 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): 
collection=collection1, shard=shard1, thisCore=collection1, 
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, 
currentState=recovering, localState=active, nodeName=127.0.0.1:42058_fsx%2Fgp, 
coreNodeName=core_node2, onlyIfActiveCheckResult=false, nodeProps: 
core_node2:{"core":"collection1","base_url":"https://127.0.0.1:42058/fsx/gp","node_name":"127.0.0.1:42058_fsx%2Fgp","state":"recovering"}
   [junit4]   2> 1256844 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node2, state: 
recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 1256844 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:42058_fsx%252Fgp&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node2&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1001
   [junit4]   2> 1257336 WARN  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.Config 
Beginning with Solr 5.5, <mergePolicy> is deprecated, use <mergePolicyFactory> 
instead.
   [junit4]   2> 1257337 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.SolrConfig 
Using Lucene MatchVersion: 7.0.0
   [junit4]   2> 1257349 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] Schema name=test
   [junit4]   2> 1257459 WARN  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
[collection1] default search field in schema is text. WARNING: Deprecated, 
please use 'df' on request instead.
   [junit4]   2> 1257461 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.s.IndexSchema 
Loaded schema test/1.0 with uniqueid field id
   [junit4]   2> 1257468 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.CoreContainer Creating SolrCore 'collection1' using configuration from 
collection collection1
   [junit4]   2> 1257469 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.c.SolrCore 
[[collection1] ] Opening new SolrCore at 
[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001/cores/collection1],
 
dataDir=[/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/../../../../../../../../../home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001/shard-3-001/cores/collection1/data/]
   [junit4]   2> 1257469 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.JmxMonitoredMap JMX monitoring is enabled. Adding Solr mbeans to JMX 
Server: com.sun.jmx.mbeanserver.JmxMBeanServer@1e0f8973
   [junit4]   2> 1257471 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogByteSizeMergePolicy: [LogByteSizeMergePolicy: 
minMergeSize=1677721, mergeFactor=30, maxMergeSize=2147483648, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=1.0]
   [junit4]   2> 1257509 WARN  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.c.RequestHandlers INVALID paramSet a in requestHandler {type = 
requestHandler,name = /dump,class = DumpRequestHandler,attributes = 
{initParams=a, name=/dump, class=DumpRequestHandler},args = 
{defaults={a=A,b=B}}}
   [junit4]   2> 1257536 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.UpdateHandler Using UpdateLog implementation: 
org.apache.solr.update.UpdateLog
   [junit4]   2> 1257536 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Initializing UpdateLog: dataDir= defaultSyncLevel=FLUSH numRecordsToKeep=1000 
maxNumLogsToKeep=10 numVersionBuckets=65536
   [junit4]   2> 1257536 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Hard AutoCommit: disabled
   [junit4]   2> 1257537 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.CommitTracker Soft AutoCommit: disabled
   [junit4]   2> 1257537 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.u.RandomMergePolicy RandomMergePolicy wrapping class 
org.apache.lucene.index.LogDocMergePolicy: [LogDocMergePolicy: 
minMergeSize=1000, mergeFactor=50, maxMergeSize=9223372036854775807, 
maxMergeSizeForForcedMerge=9223372036854775807, calibrateSizeByDeletes=true, 
maxMergeDocs=2147483647, maxCFSSegmentSizeMB=8.796093022207999E12, 
noCFSRatio=0.5283481440429358]
   [junit4]   2> 1257537 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.s.SolrIndexSearcher Opening [Searcher@428a7647[collection1] main]
   [junit4]   2> 1257538 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Configured ZooKeeperStorageIO with znodeBase: 
/configs/conf1
   [junit4]   2> 1257538 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.r.ManagedResourceStorage Loaded null at path _rest_managed.json using 
ZooKeeperStorageIO:path=/configs/conf1
   [junit4]   2> 1257538 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] 
o.a.s.h.ReplicationHandler Commits will be reserved for  10000
   [junit4]   2> 1257539 INFO  
(searcherExecutor-5196-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:39028_fsx%2Fgp c:collection1   
x:collection1] o.a.s.c.SolrCore [collection1] Registered new searcher 
Searcher@428a7647[collection1] 
main{ExitableDirectoryReader(UninvertingDirectoryReader())}
   [junit4]   2> 1257540 INFO  
(coreLoadExecutor-5195-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp) 
[n:127.0.0.1:39028_fsx%2Fgp c:collection1   x:collection1] o.a.s.u.UpdateLog 
Could not find max version in index or recent updates, using new clock 
1558695485094494208
   [junit4]   2> 1257543 INFO  
(coreZkRegister-5190-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 c:collection1) [n:127.0.0.1:39028_fsx%2Fgp c:collection1 s:shard1 
r:core_node3 x:collection1] o.a.s.c.ZkController Core needs to 
recover:collection1
   [junit4]   2> 1257543 INFO  
(updateExecutor-3279-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DefaultSolrCoreState 
Running recovery
   [junit4]   2> 1257543 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Starting recovery process. recoveringAfterStartup=true
   [junit4]   2> 1257543 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
###### startupVersions=[[]]
   [junit4]   2> 1257543 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Begin buffering updates. core=[collection1]
   [junit4]   2> 1257543 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.UpdateLog Starting 
to buffer updates. FSUpdateLog{state=ACTIVE, tlog=null}
   [junit4]   2> 1257543 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Publishing state of core [collection1] as recovering, leader is 
[https://127.0.0.1:41176/fsx/gp/collection1/] and I am 
[https://127.0.0.1:39028/fsx/gp/collection1/]
   [junit4]   2> 1257545 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Sending prep recovery command to [https://127.0.0.1:41176/fsx/gp]; 
[WaitForState: 
action=PREPRECOVERY&core=collection1&nodeName=127.0.0.1:39028_fsx%252Fgp&coreNodeName=core_node3&state=recovering&checkLive=true&onlyIfLeader=true&onlyIfLeaderActive=true]
   [junit4]   2> 1257549 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp Going to wait for coreNodeName: core_node3, 
state: recovering, checkLive: true, onlyIfLeader: true, onlyIfLeaderActive: true
   [junit4]   2> 1257549 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp Will wait a max of 183 seconds to see collection1 
(shard1 of collection1) have state: recovering
   [junit4]   2> 1257549 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): 
collection=collection1, shard=shard1, thisCore=collection1, 
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, 
currentState=down, localState=active, nodeName=127.0.0.1:39028_fsx%2Fgp, 
coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:39028/fsx/gp","node_name":"127.0.0.1:39028_fsx%2Fgp","state":"down"}
   [junit4]   2> 1257752 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.SolrTestCaseJ4 ###Starting test
   [junit4]   2> 1257752 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractFullDistribZkTestBase Wait for recoveries to finish - wait 30 
for each attempt
   [junit4]   2> 1257752 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Wait for recoveries to finish - collection: 
collection1 failOnTimeout:true timeout (sec):30
   [junit4]   2> 1258549 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp In WaitForState(recovering): 
collection=collection1, shard=shard1, thisCore=collection1, 
leaderDoesNotNeedRecovery=false, isLeader? true, live=true, checkLive=true, 
currentState=recovering, localState=active, nodeName=127.0.0.1:39028_fsx%2Fgp, 
coreNodeName=core_node3, onlyIfActiveCheckResult=false, nodeProps: 
core_node3:{"core":"collection1","base_url":"https://127.0.0.1:39028/fsx/gp","node_name":"127.0.0.1:39028_fsx%2Fgp","state":"recovering"}
   [junit4]   2> 1258549 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.h.a.PrepRecoveryOp Waited coreNodeName: core_node3, state: 
recovering, checkLive: true, onlyIfLeader: true for: 1 seconds.
   [junit4]   2> 1258549 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
   ] o.a.s.s.HttpSolrCall [admin] webapp=null path=/admin/cores 
params={nodeName=127.0.0.1:39028_fsx%252Fgp&onlyIfLeaderActive=true&core=collection1&coreNodeName=core_node3&action=PREPRECOVERY&checkLive=true&state=recovering&onlyIfLeader=true&wt=javabin&version=2}
 status=0 QTime=1000
   [junit4]   2> 1263844 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Attempting to PeerSync from [https://127.0.0.1:41176/fsx/gp/collection1/] - 
recoveringAfterStartup=[true]
   [junit4]   2> 1263845 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync PeerSync: 
core=collection1 url=https://127.0.0.1:42058/fsx/gp START 
replicas=[https://127.0.0.1:41176/fsx/gp/collection1/] nUpdates=1000
   [junit4]   2> 1263847 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1263848 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp=/fsx/gp path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.PeerSync We are 
already in sync. No need to do a PeerSync 
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
PeerSync stage of recovery was successful.
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Replaying updates buffered during PeerSync.
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy No 
replay needed.
   [junit4]   2> 1263848 INFO  
(recoveryExecutor-3273-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Registering as Active after recovery.
   [junit4]   2> 1265552 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Attempting to PeerSync from [https://127.0.0.1:41176/fsx/gp/collection1/] - 
recoveringAfterStartup=[true]
   [junit4]   2> 1265552 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync PeerSync: 
core=collection1 url=https://127.0.0.1:39028/fsx/gp START 
replicas=[https://127.0.0.1:41176/fsx/gp/collection1/] nUpdates=1000
   [junit4]   2> 1265554 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1265554 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp=/fsx/gp path=/get 
params={distrib=false&qt=/get&getFingerprint=9223372036854775807&wt=javabin&version=2}
 status=0 QTime=0
   [junit4]   2> 1265555 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.IndexFingerprint 
IndexFingerprint millis:0.0 result:{maxVersionSpecified=9223372036854775807, 
maxVersionEncountered=0, maxInHash=0, versionsHash=0, numVersions=0, numDocs=0, 
maxDoc=0}
   [junit4]   2> 1265555 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.PeerSync We are 
already in sync. No need to do a PeerSync 
   [junit4]   2> 1265555 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1265555 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1265556 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1265556 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
PeerSync stage of recovery was successful.
   [junit4]   2> 1265556 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Replaying updates buffered during PeerSync.
   [junit4]   2> 1265556 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy No 
replay needed.
   [junit4]   2> 1265556 INFO  
(recoveryExecutor-3280-thread-1-processing-n:127.0.0.1:39028_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node3) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.RecoveryStrategy 
Registering as Active after recovery.
   [junit4]   2> 1265753 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.AbstractDistribZkTestBase Recoveries finished - collection: collection1
   [junit4]   2> 1265773 INFO  (qtp1701339311-15769) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection s:shard1 r:core_node1 
x:collection1] o.a.s.u.DirectUpdateHandler2 start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1265773 INFO  (qtp1701339311-15769) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection s:shard1 r:core_node1 
x:collection1] o.a.s.u.DirectUpdateHandler2 No uncommitted changes. Skipping 
IW.commit.
   [junit4]   2> 1265773 INFO  (qtp1701339311-15769) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection s:shard1 r:core_node1 
x:collection1] o.a.s.u.DirectUpdateHandler2 end_commit_flush
   [junit4]   2> 1265773 INFO  (qtp1701339311-15769) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection s:shard1 r:core_node1 
x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  
webapp=/fsx/gp path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 0
   [junit4]   2> 1265778 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1265779 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1265780 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1265780 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 1
   [junit4]   2> 1265798 INFO  (qtp720271657-15874) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1265798 INFO  (qtp720271657-15874) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1265798 INFO  (qtp720271657-15874) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1265798 INFO  (qtp720271657-15874) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 1265800 INFO  (qtp688148828-15841) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
start 
commit{,optimize=false,openSearcher=true,waitSearcher=true,expungeDeletes=false,softCommit=false,prepareCommit=false}
   [junit4]   2> 1265800 INFO  (qtp688148828-15841) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
No uncommitted changes. Skipping IW.commit.
   [junit4]   2> 1265801 INFO  (qtp688148828-15841) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.u.DirectUpdateHandler2 
end_commit_flush
   [junit4]   2> 1265801 INFO  (qtp688148828-15841) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&waitSearcher=true&openSearcher=true&commit=true&softCommit=false&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&commit_end_point=true&wt=javabin&version=2&expungeDeletes=false}{commit=}
 0 0
   [junit4]   2> 1265801 INFO  (qtp166156530-15806) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={waitSearcher=true&commit=true&softCommit=false&wt=javabin&version=2}{commit=}
 0 25
   [junit4]   2> 1265805 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp=/fsx/gp path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 1265807 INFO  (qtp688148828-15841) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp=/fsx/gp path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 1265809 INFO  (qtp720271657-15869) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] o.a.s.c.S.Request 
[collection1]  webapp=/fsx/gp path=/select 
params={q=*:*&distrib=false&tests=checkShardConsistency&rows=0&wt=javabin&version=2}
 hits=0 status=0 QTime=0
   [junit4]   2> 1267813 INFO  (qtp1701339311-15774) 
[n:127.0.0.1:33170_fsx%2Fgp c:control_collection s:shard1 r:core_node1 
x:collection1] o.a.s.u.p.LogUpdateProcessorFactory [collection1]  
webapp=/fsx/gp path=/update params={wt=javabin&version=2}{deleteByQuery=*:* 
(-1558695495864418304)} 0 2
   [junit4]   2> 1267824 INFO  (qtp720271657-15875) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&_version_=-1558695495872806912&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1558695495872806912)} 0 2
   [junit4]   2> 1267826 INFO  (qtp688148828-15834) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&_version_=-1558695495872806912&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{deleteByQuery=*:*
 (-1558695495872806912)} 0 2
   [junit4]   2> 1267826 INFO  (qtp166156530-15812) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={wt=javabin&version=2}{deleteByQuery=*:* (-1558695495872806912)} 0 6
   [junit4]   2> 1267832 INFO  (qtp720271657-15870) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[0
 (1558695495882244096)]} 0 1
   [junit4]   2> 1267832 INFO  (qtp688148828-15839) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[0
 (1558695495882244096)]} 0 1
   [junit4]   2> 1267832 INFO  (qtp166156530-15805) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={wt=javabin&version=2}{add=[0 (1558695495882244096)]} 0 4
   [junit4]   2> 1267834 INFO  (qtp688148828-15840) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[1
 (1558695495887486976)]} 0 0
   [junit4]   2> 1267834 INFO  (qtp720271657-15876) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[1
 (1558695495887486976)]} 0 0
   [junit4]   2> 1267835 INFO  (qtp166156530-15810) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={wt=javabin&version=2}{add=[1 (1558695495887486976)]} 0 1
   [junit4]   2> 1267836 INFO  (qtp720271657-15869) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:shard1 r:core_node3 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[2
 (1558695495889584128)]} 0 0
   [junit4]   2> 1267836 INFO  (qtp688148828-15840) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[2
 (1558695495889584128)]} 0 0
   [junit4]   2> 1267836 INFO  (qtp166156530-15811) [n:127.0.0.1:41176_fsx%2Fgp 
c:collection1 s:shard1 r:core_node1 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={wt=javabin&version=2}{add=[2 (1558695495889584128)]} 0 1
   [junit4]   2> 1267837 INFO  (qtp688148828-15834) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.u.p.LogUpdateProcessorFactory [collection1]  webapp=/fsx/gp path=/update 
params={update.distrib=FROMLEADER&distrib.from=https://127.0.0.1:41176/fsx/gp/collection1/&wt=javabin&version=2}{add=[3
 (1558695495891681280)]} 0 0
   [junit4]   2> 1267838 INFO  (qtp720271657-15875) [n:127.0.0.1:39028_fsx%2Fgp 
c:collection1 s:s

[...truncated too long message...]

.1:42058_fsx%2Fgp x:collection1 s:shard1 c:collection1 r:core_node2) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.c.RecoveryStrategy RecoveryStrategy has been closed
   [junit4]   2> 1456474 INFO  
(recoveryExecutor-3287-thread-1-processing-n:127.0.0.1:42058_fsx%2Fgp 
x:collection1 s:shard1 c:collection1 r:core_node2) [n:127.0.0.1:42058_fsx%2Fgp 
c:collection1 s:shard1 r:core_node2 x:collection1] o.a.s.c.RecoveryStrategy 
Finished recovery process, successful=[false]
   [junit4]   2> 1456474 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.Overseer Overseer 
(id=97418467192930322-127.0.0.1:42058_fsx%2Fgp-n_0000000004) closing
   [junit4]   2> 1456474 INFO  
(OverseerStateUpdate-97418467192930322-127.0.0.1:42058_fsx%2Fgp-n_0000000004) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.Overseer Overseer Loop exiting : 
127.0.0.1:42058_fsx%2Fgp
   [junit4]   2> 1456482 INFO  
(zkCallback-3289-thread-4-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.c.SolrCore [collection1]  CLOSING SolrCore 
org.apache.solr.core.SolrCore@1750957c
   [junit4]   2> 1456482 WARN  
(zkCallback-3289-thread-2-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp    ] o.a.s.c.c.ZkStateReader ZooKeeper watch 
triggered, but Solr cannot talk to ZK: [KeeperErrorCode = Session expired for 
/live_nodes]
   [junit4]   2> 1456483 WARN  
(zkCallback-3289-thread-4-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.c.RecoveryStrategy Stopping recovery for core=[collection1] 
coreNodeName=[core_node2]
   [junit4]   2> 1456509 INFO  
(zkCallback-3289-thread-4-processing-n:127.0.0.1:42058_fsx%2Fgp) 
[n:127.0.0.1:42058_fsx%2Fgp c:collection1 s:shard1 r:core_node2 x:collection1] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.core.collection1
   [junit4]   2> 1456509 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.m.SolrMetricManager Closing metric reporters for: solr.node
   [junit4]   2> 1456510 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.AbstractConnector Stopped ServerConnector@4db327f7{SSL,[ssl, 
http/1.1]}{127.0.0.1:42058}
   [junit4]   2> 1456510 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.e.j.s.h.ContextHandler Stopped 
o.e.j.s.ServletContextHandler@732fa284{/fsx/gp,null,UNAVAILABLE}
   [junit4]   2> 1456510 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ChaosMonkey monkey: stop shard! 39028
   [junit4]   2> 1456511 INFO  
(TEST-PeerSyncReplicationTest.test-seed#[8C195BFE53CE2548]) [    ] 
o.a.s.c.ZkTestServer connecting to 127.0.0.1:33809 33809
   [junit4]   2> 1456585 INFO  (Thread-2095) [    ] o.a.s.c.ZkTestServer 
connecting to 127.0.0.1:33809 33809
   [junit4]   2> 1456586 WARN  (Thread-2095) [    ] o.a.s.c.ZkTestServer Watch 
limit violations: 
   [junit4]   2> Maximum concurrent create/delete watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/aliases.json
   [junit4]   2>        5       /solr/security.json
   [junit4]   2>        5       /solr/configs/conf1
   [junit4]   2>        4       /solr/collections/collection1/state.json
   [junit4]   2> 
   [junit4]   2> Maximum concurrent data watches above limit:
   [junit4]   2> 
   [junit4]   2>        6       /solr/clusterstate.json
   [junit4]   2>        6       /solr/clusterprops.json
   [junit4]   2>        2       
/solr/overseer_elect/election/97418467192930313-127.0.0.1:41176_fsx%2Fgp-n_0000000001
   [junit4]   2>        2       
/solr/collections/collection1/leader_elect/shard1/election/97418467192930313-core_node1-n_0000000000
   [junit4]   2> 
   [junit4]   2> Maximum concurrent children watches above limit:
   [junit4]   2> 
   [junit4]   2>        209     /solr/overseer/collection-queue-work
   [junit4]   2>        25      /solr/overseer/queue
   [junit4]   2>        6       /solr/collections
   [junit4]   2>        6       /solr/overseer/queue-work
   [junit4]   2>        5       /solr/live_nodes
   [junit4]   2> 
   [junit4]   2> NOTE: reproduce with: ant test  
-Dtestcase=PeerSyncReplicationTest -Dtests.method=test 
-Dtests.seed=8C195BFE53CE2548 -Dtests.multiplier=3 -Dtests.slow=true 
-Dtests.locale=ms-MY -Dtests.timezone=Pacific/Tahiti -Dtests.asserts=true 
-Dtests.file.encoding=US-ASCII
   [junit4] FAILURE  209s J2 | PeerSyncReplicationTest.test <<<
   [junit4]    > Throwable #1: java.lang.AssertionError: timeout waiting to see 
all nodes active
   [junit4]    >        at 
__randomizedtesting.SeedInfo.seed([8C195BFE53CE2548:44D6424FD3248B0]:0)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.waitTillNodesActive(PeerSyncReplicationTest.java:326)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.bringUpDeadNodeAndEnsureNoReplication(PeerSyncReplicationTest.java:277)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.forceNodeFailureAndDoPeerSync(PeerSyncReplicationTest.java:259)
   [junit4]    >        at 
org.apache.solr.cloud.PeerSyncReplicationTest.test(PeerSyncReplicationTest.java:138)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsFixedStatement.callStatement(BaseDistributedSearchTestCase.java:985)
   [junit4]    >        at 
org.apache.solr.BaseDistributedSearchTestCase$ShardsRepeatRule$ShardsStatement.evaluate(BaseDistributedSearchTestCase.java:960)
   [junit4]    >        at java.lang.Thread.run(Thread.java:745)
   [junit4]   2> 1456589 INFO  
(SUITE-PeerSyncReplicationTest-seed#[8C195BFE53CE2548]-worker) [    ] 
o.a.s.SolrTestCaseJ4 ###deleteCore
   [junit4]   2> NOTE: leaving temporary files on disk at: 
/home/jenkins/workspace/Lucene-Solr-master-Linux/solr/build/solr-core/test/J2/temp/solr.cloud.PeerSyncReplicationTest_8C195BFE53CE2548-001
   [junit4]   2> Feb 07, 2017 5:20:50 PM 
com.carrotsearch.randomizedtesting.ThreadLeakControl checkThreadLeaks
   [junit4]   2> WARNING: Will linger awaiting termination of 1 leaked 
thread(s).
   [junit4]   2> NOTE: test params are: codec=Lucene70, 
sim=RandomSimilarity(queryNorm=false): {}, locale=ms-MY, timezone=Pacific/Tahiti
   [junit4]   2> NOTE: Linux 4.4.0-53-generic amd64/Oracle Corporation 
1.8.0_121 (64-bit)/cpus=12,threads=1,free=199813008,total=530055168
   [junit4]   2> NOTE: All tests run in this JVM: [ExternalFileFieldSortTest, 
DistanceUnitsTest, TestPhraseSuggestions, RAMDirectoryFactoryTest, TestConfig, 
XmlUpdateRequestHandlerTest, JvmMetricsTest, TestComponentsName, 
TestPushWriter, TestSizeLimitedDistributedMap, 
LeaderInitiatedRecoveryOnCommitTest, TestStressLiveNodes, RecoveryZkTest, 
BadIndexSchemaTest, TestInitParams, CoreAdminHandlerTest, 
SimpleCollectionCreateDeleteTest, BitVectorTest, 
FieldMutatingUpdateProcessorTest, TestLuceneMatchVersion, 
LukeRequestHandlerTest, SolrPluginUtilsTest, DistribJoinFromCollectionTest, 
ResourceLoaderTest, TestSolr4Spatial2, StatsComponentTest, 
TestFileDictionaryLookup, TestInitQParser, 
DistributedFacetPivotSmallAdvancedTest, BlobRepositoryCloudTest, 
SolrCoreCheckLockOnStartupTest, CurrencyFieldXmlFileTest, 
TestSolrDeletionPolicy1, RequestHandlersTest, CoreMergeIndexesAdminHandlerTest, 
OverriddenZkACLAndCredentialsProvidersTest, SolrIndexConfigTest, 
MetricUtilsTest, CustomCollectionTest, DateFieldTest, TestFieldCacheSort, 
TestShardHandlerFactory, SolrTestCaseJ4Test, SolrXmlInZkTest, TestSort, 
TestApiFramework, PreAnalyzedUpdateProcessorTest, CoreSorterTest, 
TestFieldResource, TestManagedSynonymFilterFactory, SynonymTokenizerTest, 
TestDFISimilarityFactory, DocValuesNotIndexedTest, TestFieldCacheWithThreads, 
TestPointFields, URLClassifyProcessorTest, NoCacheHeaderTest, 
HLLSerializationTest, HdfsBasicDistributedZk2Test, 
TestSubQueryTransformerCrossCore, TestTolerantSearch, BasicAuthIntegrationTest, 
TestMacros, OutputWriterTest, DebugComponentTest, TestSolrQueryParser, 
PrimUtilsTest, TestDocSet, TestComplexPhraseQParserPlugin, BlockCacheTest, 
TestQuerySenderNoQuery, TestConfigOverlay, RankQueryTest, 
MissingSegmentRecoveryTest, TestReload, TestInPlaceUpdatesStandalone, 
TestMissingGroups, BlockJoinFacetRandomTest, 
PeerSyncWithIndexFingerprintCachingTest, UnloadDistributedZkTest, 
TestPivotHelperCode, TestExtendedDismaxParser, TestXmlQParser, 
TestMergePolicyConfig, TestDistributedMissingSort, AutoCommitTest, 
RollingRestartTest, OverseerRolesTest, TlogReplayBufferedWhileIndexingTest, 
DistributedFacetExistsSmallTest, TestUtils, TestCoreDiscovery, 
ShowFileRequestHandlerTest, TestIndexSearcher, TestCloudRecovery, 
TestSolrCloudWithSecureImpersonation, DirectUpdateHandlerTest, 
TestUseDocValuesAsStored, TestIBSimilarityFactory, TestQueryWrapperFilter, 
UtilsToolTest, TestDynamicFieldCollectionResource, ConfigSetsAPITest, 
TestBinaryResponseWriter, TermVectorComponentDistributedTest, TestRecovery, 
TestBinaryField, HdfsSyncSliceTest, LeaderElectionContextKeyTest, 
TestRandomFlRTGCloud, TestGeoJSONResponseWriter, BasicDistributedZkTest, 
ChaosMonkeySafeLeaderTest, BasicDistributedZk2Test, SyncSliceTest, 
LeaderElectionIntegrationTest, TestZkChroot, TestRandomDVFaceting, 
TestFaceting, DistributedSpellCheckComponentTest, TestJoin, 
AnalysisAfterCoreReloadTest, SuggesterTSTTest, TestTrie, TestUpdate, 
DirectUpdateHandlerOptimizeTest, StatelessScriptUpdateProcessorFactoryTest, 
IndexBasedSpellCheckerTest, TestReversedWildcardFilterFactory, 
TermsComponentTest, TermVectorComponentTest, IndexSchemaRuntimeFieldTest, 
RegexBoostProcessorTest, TestCSVResponseWriter, JsonLoaderTest, 
TestPHPSerializedResponseWriter, SearchHandlerTest, TestFastOutputStream, 
TestSystemIdResolver, TestLRUCache, SliceStateTest, UUIDFieldTest, 
DistributedMLTComponentTest, AssignTest, AsyncCallRequestStatusResponseTest, 
CloudExitableDirectoryReaderTest, CollectionsAPIAsyncDistributedZkTest, 
DeleteStatusTest, LeaderFailoverAfterPartitionTest, 
OutOfBoxZkACLAndCredentialsProvidersTest, OverseerStatusTest, 
OverseerTaskQueueTest, PeerSyncReplicationTest]
   [junit4] Completed [517/693 (1!)] on J2 in 209.36s, 1 test, 1 failure <<< 
FAILURES!

[...truncated 63006 lines...]

---------------------------------------------------------------------
To unsubscribe, e-mail: dev-unsubscribe@lucene.apache.org
For additional commands, e-mail: dev-help@lucene.apache.org

Reply via email to