[ https://issues.apache.org/jira/browse/PHOENIX-4466?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16298652#comment-16298652 ]

Toshihiro Suzuki edited comment on PHOENIX-4466 at 12/20/17 3:44 PM:
---------------------------------------------------------------------

[~elserj] I'm sorry for the confusion.

Let me explain it chronologically.

1. We were facing the protocol mismatch error as explained in the description.

2. To avoid the protocol mismatch error, we tried to run spark-shell with 
spark.{driver,executor}.userClassPathFirst, but it failed with the following 
error, which you mentioned:
{code}
# spark-shell --jars /usr/hdp/current/phoenix-client/phoenix-thin-client.jar \
    --conf spark.driver.userClassPathFirst=true \
    --conf spark.executor.userClassPathFirst=true
...
scala> val query = sqlContext.read.format("jdbc")
  .option("driver", "org.apache.phoenix.queryserver.client.Driver")
  .option("url", "jdbc:phoenix:thin:url=http://<phoenix query server hostname>:8765;serialization=PROTOBUF")
  .option("dbtable", "<table name>")
  .load
...
Exception in thread "main" java.lang.RuntimeException: java.lang.RuntimeException: class org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback not org.apache.hadoop.security.GroupMappingServiceProvider
...
{code}

JniBasedUnixGroupsMappingWithFallback and GroupMappingServiceProvider are present in 
both the spark-assembly jar and the phoenix-thin-client jar, and the versions are 
identical in HDP-2.6.3, yet the error above occurred. A "class X not Y" error like 
this usually means the two classes were loaded by different classloaders, in which 
case the JVM does not consider them compatible even when the bytecode is identical. 
So we suspected a classloader problem when specifying 
spark.{driver,executor}.userClassPathFirst.
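
For reference, a quick way to check which jar a class is actually loaded from in 
spark-shell (and thus to see whether two classloaders are involved) is a sketch 
like this:
{code}
scala> // prints the jar the interface was loaded from; run the same for the
scala> // implementation class and compare the two locations
scala> classOf[org.apache.hadoop.security.GroupMappingServiceProvider].getProtectionDomain.getCodeSource.getLocation
{code}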

3. We built a dev jar (phoenix-4.7.0.2.6.1.0-SNAPSHOT-thin-client.jar) that 
relocates hadoop-common and tried it. The classloader error disappeared, but the 
first error recurred for some reason:

{code}
# spark-shell --jars phoenix-4.7.0.2.6.1.0-SNAPSHOT-thin-client.jar \
    --conf spark.driver.userClassPathFirst=true \
    --conf spark.executor.userClassPathFirst=true
...
scala> val query = sqlContext.read.format("jdbc")
  .option("driver", "org.apache.phoenix.queryserver.client.Driver")
  .option("url", "jdbc:phoenix:thin:url=http://<phoenix query server hostname>:8765;serialization=PROTOBUF")
  .option("dbtable", "<table name>")
  .load
...
java.sql.SQLException: While closing connection
        at org.apache.calcite.avatica.Helper.createException(Helper.java:39)
        at org.apache.calcite.avatica.AvaticaConnection.close(AvaticaConnection.java:156)
        at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:153)
        at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:91)
        at org.apache.spark.sql.execution.datasources.jdbc.DefaultSource.createRelation(DefaultSource.scala:57)
        at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:158)
        at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:25)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
        at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
        at $iwC$$iwC$$iwC.<init>(<console>:38)
        at $iwC$$iwC.<init>(<console>:40)
        at $iwC.<init>(<console>:42)
        at <init>(<console>:44)
        at .<init>(<console>:48)
        at .<clinit>(<console>)
        at .<init>(<console>:7)
        at .<clinit>(<console>)
        at $print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
        at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:750)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.RuntimeException: response code 500
        at org.apache.calcite.avatica.remote.RemoteService.apply(RemoteService.java:45)
        at org.apache.calcite.avatica.remote.JsonService.apply(JsonService.java:227)
        at org.apache.calcite.avatica.remote.RemoteMeta.closeConnection(RemoteMeta.java:78)
        at org.apache.calcite.avatica.AvaticaConnection.close(AvaticaConnection.java:153)
        ... 51 more
{code}

So I concluded that userClassPathFirst was either insufficient or broken, and we 
could not rely on it. Instead, we needed to relocate Avatica so that the Avatica 
classes are always taken from the phoenix-thin-client jar. Also, once we stop using 
userClassPathFirst, the hadoop-common relocation is no longer needed.

4. Finally, I built a new dev jar with the following relocation config (only 
Avatica is relocated), and all the errors disappeared.

{code}
<relocation>
  <pattern>org.apache.calcite.avatica</pattern>
  <shadedPattern>${shaded.package}.org.apache.calcite.avatica</shadedPattern>
  <!-- The protobuf messages can't be relocated due to a limitation
       in the Avatica protocol. -->
  <excludes>
    <exclude>org.apache.calcite.avatica.proto.*</exclude>
  </excludes>
</relocation>
{code}

Note that we need to exclude the proto package: the Avatica wire protocol refers to 
these protobuf message classes by their original fully qualified names, so 
relocating them would break compatibility with the server.
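
For anyone double-checking a shaded jar, a quick sanity check is to list the jar 
entries; the paths below are illustrative, and the shaded prefix depends on 
${shaded.package}:
{code}
# only the protobuf message classes should remain at the original location
# (the leading space matches top-level entry names in unzip -l output)
unzip -l phoenix-4.7.0.2.6.1.0-SNAPSHOT-thin-client.jar | grep ' org/apache/calcite/avatica'
# the rest of Avatica should have moved under the shaded prefix
unzip -l phoenix-4.7.0.2.6.1.0-SNAPSHOT-thin-client.jar | grep 'avatica' | grep 'shaded'
{code}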

By the way, [~pbhardwaj] tested the dev jar (with only Avatica relocated) on a 
secure cluster, and it worked with a small change to the URL (adding 
authentication=SPNEGO, the principal name, and the keytab path).
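
For reference, the URL change was roughly as follows (authentication, principal, 
and keytab are standard Avatica client properties; the values in angle brackets 
are placeholders):
{code}
scala> val query = sqlContext.read.format("jdbc")
  .option("driver", "org.apache.phoenix.queryserver.client.Driver")
  .option("url", "jdbc:phoenix:thin:url=http://<phoenix query server hostname>:8765;serialization=PROTOBUF;authentication=SPNEGO;principal=<principal>;keytab=<keytab path>")
  .option("dbtable", "<table name>")
  .load
{code}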

Thanks,


> java.lang.RuntimeException: response code 500 - Executing a spark job to connect to phoenix query server and load data
> ----------------------------------------------------------------------------------------------------------------------
>
>                 Key: PHOENIX-4466
>                 URL: https://issues.apache.org/jira/browse/PHOENIX-4466
>             Project: Phoenix
>          Issue Type: Bug
>         Environment: HDP-2.6.3
>            Reporter: Toshihiro Suzuki
>            Assignee: Toshihiro Suzuki
>            Priority: Minor
>         Attachments: PHOENIX-4466.patch
>
>
> Steps to reproduce are as follows:
> 1. Start spark shell with 
> {code}
> spark-shell --jars /usr/hdp/current/phoenix-client/phoenix-thin-client.jar 
> {code}
> 2. Run the following to load data
> {code}
> scala> val query = sqlContext.read.format("jdbc")
>   .option("driver", "org.apache.phoenix.queryserver.client.Driver")
>   .option("url", "jdbc:phoenix:thin:url=http://<phoenix query server hostname>:8765;serialization=PROTOBUF")
>   .option("dbtable", "<table name>")
>   .load
> {code}
> This failed with the following exception:
> {code:java}
> java.sql.SQLException: While closing connection
>       at org.apache.calcite.avatica.Helper.createException(Helper.java:39)
>       at org.apache.calcite.avatica.AvaticaConnection.close(AvaticaConnection.java:156)
>       at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:153)
>       at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation.<init>(JDBCRelation.scala:91)
>       at org.apache.spark.sql.execution.datasources.jdbc.DefaultSource.createRelation(DefaultSource.scala:57)
>       at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:158)
>       at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:25)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:30)
>       at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:32)
>       at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:34)
>       at $iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
>       at $iwC$$iwC$$iwC.<init>(<console>:38)
>       at $iwC$$iwC.<init>(<console>:40)
>       at $iwC.<init>(<console>:42)
>       at <init>(<console>:44)
>       at .<init>(<console>:48)
>       at .<clinit>(<console>)
>       at .<init>(<console>:7)
>       at .<clinit>(<console>)
>       at $print(<console>)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
>       at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
>       at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
>       at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
>       at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
>       at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
>       at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
>       at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
>       at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
>       at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
>       at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
>       at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
>       at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>       at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
>       at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
>       at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
>       at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
>       at org.apache.spark.repl.Main$.main(Main.scala:31)
>       at org.apache.spark.repl.Main.main(Main.scala)
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:750)
>       at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
>       at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
>       at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
>       at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> Caused by: java.lang.RuntimeException: response code 500
>       at org.apache.calcite.avatica.remote.RemoteService.apply(RemoteService.java:45)
>       at org.apache.calcite.avatica.remote.JsonService.apply(JsonService.java:227)
>       at org.apache.calcite.avatica.remote.RemoteMeta.closeConnection(RemoteMeta.java:78)
>       at org.apache.calcite.avatica.AvaticaConnection.close(AvaticaConnection.java:153)
>       ... 51 more
> {code}


