IGNITE-8534: Upgrade to spark-2.3.0 - Fixes #4033.

Signed-off-by: Nikolay Izhikov <nizhi...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/f7901651
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/f7901651
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/f7901651

Branch: refs/heads/ignite-8446
Commit: f790165128a926da11dc7da276955106c8ba0fbb
Parents: 56975c2
Author: Ray <ray...@cisco.com>
Authored: Tue Jun 26 16:30:40 2018 +0300
Committer: Nikolay Izhikov <nizhi...@apache.org>
Committed: Tue Jun 26 16:33:48 2018 +0300

----------------------------------------------------------------------
 modules/spark-2.10/README.txt                   |   4 -
 modules/spark-2.10/licenses/apache-2.0.txt      | 202 ----------------
 modules/spark-2.10/pom.xml                      | 237 -------------------
 .../impl/optimization/StringExpressions.scala   |  30 ++-
 .../spark/impl/optimization/package.scala       |   2 +-
 .../sql/ignite/IgniteExternalCatalog.scala      |  17 +-
 .../spark/sql/ignite/IgniteOptimization.scala   |  14 +-
 .../spark/sql/ignite/IgniteSparkSession.scala   |  10 +-
 .../ignite/spark/AbstractDataFrameSpec.scala    |   2 +-
 .../ignite/spark/IgniteDataFrameSuite.scala     |   2 +-
 .../IgniteOptimizationAggregationFuncSpec.scala |   4 +-
 .../spark/IgniteOptimizationJoinSpec.scala      |   6 +-
 .../spark/IgniteOptimizationMathFuncSpec.scala  |  46 ++--
 .../ignite/spark/IgniteOptimizationSpec.scala   |   8 +-
 .../IgniteOptimizationStringFuncSpec.scala      | 101 ++++++--
 parent/pom.xml                                  |   2 +-
 pom.xml                                         |   2 -
 17 files changed, 159 insertions(+), 530 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark-2.10/README.txt
----------------------------------------------------------------------
diff --git a/modules/spark-2.10/README.txt b/modules/spark-2.10/README.txt
deleted file mode 100644
index 29d3930..0000000
--- a/modules/spark-2.10/README.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-Apache Ignite Spark Module
----------------------------
-
-Apache Ignite Spark module to be build with Scala 2.10.

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark-2.10/licenses/apache-2.0.txt
----------------------------------------------------------------------
diff --git a/modules/spark-2.10/licenses/apache-2.0.txt b/modules/spark-2.10/licenses/apache-2.0.txt
deleted file mode 100644
index d645695..0000000
--- a/modules/spark-2.10/licenses/apache-2.0.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark-2.10/pom.xml
----------------------------------------------------------------------
diff --git a/modules/spark-2.10/pom.xml b/modules/spark-2.10/pom.xml
deleted file mode 100644
index 2cbcd79..0000000
--- a/modules/spark-2.10/pom.xml
+++ /dev/null
@@ -1,237 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-
-<!--
-    POM file.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>org.apache.ignite</groupId>
-        <artifactId>ignite-parent</artifactId>
-        <version>1</version>
-        <relativePath>../../parent</relativePath>
-    </parent>
-
-    <artifactId>ignite-spark_2.10</artifactId>
-    <version>2.6.0-SNAPSHOT</version>
-    <url>http://ignite.apache.org</url>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-core</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-            <version>${scala210.library.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-reflect</artifactId>
-            <version>${scala210.library.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-core_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-catalyst_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-network-common_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-network-shuffle_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${spark.hadoop.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>commons-beanutils</groupId>
-                    <artifactId>commons-beanutils</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>commons-beanutils</groupId>
-                    <artifactId>commons-beanutils-core</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <version>${jackson2.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-core_2.10</artifactId>
-            <version>3.5.0</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-indexing</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-spring</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.ignite</groupId>
-            <artifactId>ignite-log4j</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-
-        <!-- Test dependencies -->
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_2.10</artifactId>
-            <version>2.2.6</version>
-            <scope>test</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.scala-lang</groupId>
-                    <artifactId>scala-library</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-unsafe_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-tags_2.10</artifactId>
-            <version>${spark.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.parquet</groupId>
-            <artifactId>parquet-hadoop</artifactId>
-            <version>1.9.0</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-            <version>${guava14.version}</version>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <sourceDirectory>../spark/src/main/scala</sourceDirectory>
-        <testSourceDirectory>../spark/src/test/java</testSourceDirectory>
-
-        <resources>
-            <resource>
-                <directory>../spark/src/main/resources</directory>
-            </resource>
-        </resources>
-
-        <testResources>
-            <testResource>
-                <directory>../spark/src/test/resources</directory>
-            </testResource>
-        </testResources>
-
-        <plugins>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-
-    <profiles>
-        <profile>
-            <id>scala-test</id>
-
-            <build>
-                <plugins>
-                    <plugin>
-                        <groupId>org.scalatest</groupId>
-                        <artifactId>scalatest-maven-plugin</artifactId>
-                        <version>2.0.0</version>
-                        <configuration>
-                            <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-                            <junitxml>.</junitxml>
-                            <filereports>WDF IgniteScalaTestSuites.txt</filereports>
-                        </configuration>
-                        <executions>
-                            <execution>
-                                <id>test</id>
-                                <goals>
-                                    <goal>test</goal>
-                                </goals>
-                            </execution>
-                        </executions>
-                    </plugin>
-                </plugins>
-            </build>
-        </profile>
-    </profiles>
-</project>

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/StringExpressions.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/StringExpressions.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/StringExpressions.scala
index 1ecab2c..6588280 100644
--- a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/StringExpressions.scala
+++ b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/StringExpressions.scala
@@ -55,15 +55,24 @@ private[optimization] object StringExpressions extends SupportedExpressions {
         case StringRPad(str, len, pad) ⇒
             checkChild(str) && checkChild(len) && checkChild(pad)
 
-        case StringTrimLeft(child) ⇒
+        case StringTrimLeft(child, None) ⇒
             checkChild(child)
 
-        case StringTrimRight(child) ⇒
+        case StringTrimRight(child, None) ⇒
             checkChild(child)
 
-        case StringTrim(child) ⇒
+        case StringTrim(child, None) ⇒
             checkChild(child)
 
+        case StringTrimLeft(child, Some(trimStr)) ⇒
+            checkChild(child) && checkChild(trimStr)
+
+        case StringTrimRight(child, Some(trimStr)) ⇒
+            checkChild(child) && checkChild(trimStr)
+
+        case StringTrim(child, Some(trimStr)) ⇒
+            checkChild(child) && checkChild(trimStr)
+
         case RegExpReplace(subject, regexp, rep) ⇒
             checkChild(subject) && checkChild(regexp) && checkChild(rep)
 
@@ -121,15 +130,24 @@ private[optimization] object StringExpressions extends SupportedExpressions {
         case StringRPad(str, len, pad) ⇒
             Some(s"RPAD(${childToString(str)}, ${childToString(len)}, ${childToString(pad)})")
 
-        case StringTrimLeft(child) ⇒
+        case StringTrimLeft(child, None) ⇒
             Some(s"LTRIM(${childToString(child)})")
 
-        case StringTrimRight(child) ⇒
+        case StringTrimRight(child, None) ⇒
             Some(s"RTRIM(${childToString(child)})")
 
-        case StringTrim(child) ⇒
+        case StringTrim(child, None) ⇒
             Some(s"TRIM(${childToString(child)})")
 
+        case StringTrimLeft(child, Some(trimStr)) ⇒
+            Some(s"LTRIM(${childToString(child)}, ${childToString(trimStr)})")
+
+        case StringTrimRight(child, Some(trimStr)) ⇒
+            Some(s"RTRIM(${childToString(child)}, ${childToString(trimStr)})")
+
+        case StringTrim(child, Some(trimStr)) ⇒
+            Some(s"TRIM(${childToString(child)}, ${childToString(trimStr)})")
+
         case RegExpReplace(subject, regexp, rep) ⇒
             Some(s"REGEXP_REPLACE(${childToString(subject)}, 
${childToString(regexp)}, ${childToString(rep)})")
 

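Note: Spark 2.3 reworked the trim expressions matched above: StringTrim, StringTrimLeft and StringTrimRight now carry an optional trim string as a second constructor argument. A minimal sketch of matching both forms, assuming a childToString/checkChild-style helper as in the patch:

    import org.apache.spark.sql.catalyst.expressions.{Expression, StringTrim}

    // Hedged sketch: TRIM with and without an explicit trim string (Spark 2.3).
    def trimToSql(expr: Expression, childToString: Expression => String): Option[String] =
        expr match {
            case StringTrim(child, None) =>          // one-argument TRIM(str)
                Some(s"TRIM(${childToString(child)})")
            case StringTrim(child, Some(trimStr)) => // two-argument TRIM(str, trimStr)
                Some(s"TRIM(${childToString(child)}, ${childToString(trimStr)})")
            case _ => None
        }
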
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/package.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/package.scala b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/package.scala
index 4e168f4..4f3220f 100644
--- a/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/package.scala
+++ b/modules/spark/src/main/scala/org/apache/ignite/spark/impl/optimization/package.scala
@@ -118,7 +118,7 @@ package object optimization {
                     metadata = alias
                        .map(new MetadataBuilder().withMetadata(toCopy.metadata).putString(ALIAS, _).build())
                        .getOrElse(toCopy.metadata)
-                )(exprId = exprId.getOrElse(toCopy.exprId), qualifier = toCopy.qualifier, isGenerated = toCopy.isGenerated)
+                )(exprId = exprId.getOrElse(toCopy.exprId), qualifier = toCopy.qualifier)
 
             case a: Alias ⇒
                 toAttributeReference(a.child, existingOutput, Some(a.exprId), Some(alias.getOrElse(a.name)))

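Note: Spark 2.3 dropped the isGenerated flag from AttributeReference's second (curried) parameter list, which is why only exprId and qualifier remain above. A minimal sketch of the 2.3-era copy call; the attribute names are illustrative only:

    import org.apache.spark.sql.catalyst.expressions.AttributeReference
    import org.apache.spark.sql.types.LongType

    // Hedged sketch: the second parameter list now carries only exprId and qualifier.
    val attr = AttributeReference("id", LongType, nullable = false)()
    val renamed = AttributeReference("id2", attr.dataType, attr.nullable, attr.metadata)(
        exprId = attr.exprId, qualifier = attr.qualifier)
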
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteExternalCatalog.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteExternalCatalog.scala b/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteExternalCatalog.scala
index 6876a3e..f632d2a 100644
--- a/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteExternalCatalog.scala
+++ b/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteExternalCatalog.scala
@@ -94,8 +94,7 @@ private[ignite] class IgniteExternalCatalog(defaultIgniteContext: IgniteContext)
     /** @inheritdoc */
     override def getTable(db: String, table: String): CatalogTable = getTableOption(db, table).get
 
-    /** @inheritdoc */
-    override def getTableOption(db: String, tabName: String): Option[CatalogTable] = {
+    override def getTableOption(db: String, tabName: String): Option[CatalogTable] = {
         val ignite = igniteOrDefault(db, default)
 
         val gridName = igniteName(ignite)
@@ -234,17 +233,25 @@ private[ignite] class IgniteExternalCatalog(defaultIgniteContext: IgniteContext)
     override def listFunctions(db: String, pattern: String): Seq[String] = Seq.empty[String]
 
     /** @inheritdoc */
-    override def alterDatabase(dbDefinition: CatalogDatabase): Unit =
+    override def doAlterDatabase(dbDefinition: CatalogDatabase): Unit =
         throw new UnsupportedOperationException("unsupported")
 
     /** @inheritdoc */
-    override def alterTable(tableDefinition: CatalogTable): Unit =
+    override def doAlterFunction(db: String, funcDefinition: CatalogFunction): Unit =
         throw new UnsupportedOperationException("unsupported")
 
     /** @inheritdoc */
-    override def alterTableSchema(db: String, table: String, schema: StructType): Unit =
+    override def doAlterTableStats(db: String, table: String, stats: Option[CatalogStatistics]): Unit =
         throw new UnsupportedOperationException("unsupported")
 
+    /** @inheritdoc */
+    override def doAlterTable(tableDefinition: CatalogTable): Unit =
+        throw new UnsupportedOperationException("unsupported")
+
+    /** @inheritdoc */
+    override def doAlterTableDataSchema(db: String, table: String, schema: StructType): Unit =
+        throw new UnsupportedOperationException("unsupported")
+
     /** @inheritdoc */
     override protected def doCreateFunction(db: String, funcDefinition: CatalogFunction): Unit = { /* no-op */ }
 

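Note: Spark 2.3 turned several public ExternalCatalog mutators into do*-style template methods (doAlterDatabase, doAlterTable, doAlterTableDataSchema, doAlterTableStats, doAlterFunction), hence the renamed overrides above. A minimal sketch of a read-only catalog rejecting them; ReadOnlyCatalog is a hypothetical name and only a few members are shown, so the class stays abstract:

    import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable, ExternalCatalog}
    import org.apache.spark.sql.types.StructType

    // Hedged sketch: reject mutation in the Spark 2.3 template methods.
    abstract class ReadOnlyCatalog extends ExternalCatalog {
        override def doAlterDatabase(dbDefinition: CatalogDatabase): Unit =
            throw new UnsupportedOperationException("unsupported")

        override def doAlterTable(tableDefinition: CatalogTable): Unit =
            throw new UnsupportedOperationException("unsupported")

        override def doAlterTableDataSchema(db: String, table: String, schema: StructType): Unit =
            throw new UnsupportedOperationException("unsupported")
    }
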
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteOptimization.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteOptimization.scala b/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteOptimization.scala
index b23cd6f..4a0f791 100644
--- a/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteOptimization.scala
+++ b/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteOptimization.scala
@@ -63,7 +63,7 @@ object IgniteOptimization extends Rule[LogicalPlan] with Logging {
         plan.transformUp {
             //We found basic node to transform.
             //We create new accumulator and going to the upper layers.
-            case LogicalRelation(igniteSqlRelation: IgniteSQLRelation[_, _], output, _catalogTable) ⇒
+            case LogicalRelation(igniteSqlRelation: IgniteSQLRelation[_, _], output, _catalogTable, _) ⇒
                 //Clear flag to optimize each statement separately
                 stepSkipped = false
 
@@ -352,7 +352,8 @@ object IgniteOptimization extends Rule[LogicalPlan] with Logging {
                 new LogicalRelation (
                     relation = IgniteSQLAccumulatorRelation(acc),
                     output = acc.outputExpressions.map(toAttributeReference(_, Seq.empty)),
-                    catalogTable = acc.igniteQueryContext.catalogTable)
+                    catalogTable = acc.igniteQueryContext.catalogTable,
+                    false)
         }
 
     /**
@@ -407,19 +408,16 @@ object IgniteOptimization extends Rule[LogicalPlan] with Logging {
                                     nullable = found.nullable,
                                     metadata = found.metadata)(
                                     exprId = found.exprId,
-                                    qualifier = found.qualifier,
-                                    isGenerated = found.isGenerated),
+                                    qualifier = found.qualifier),
                                 alias.name) (
                                 exprId = alias.exprId,
                                 qualifier = alias.qualifier,
-                                explicitMetadata = alias.explicitMetadata,
-                                isGenerated = alias.isGenerated).asInstanceOf[T]
+                                explicitMetadata = alias.explicitMetadata).asInstanceOf[T]
 
                         case attr: AttributeReference ⇒
                             attr.copy(name = found.name)(
                                 exprId = found.exprId,
-                                qualifier = found.qualifier,
-                                isGenerated = found.isGenerated).asInstanceOf[T]
+                                qualifier = found.qualifier).asInstanceOf[T]
 
                         case _ ⇒ ne.asInstanceOf[T]
                     }

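Note: in Spark 2.3 LogicalRelation gained a fourth field, isStreaming, so the extractor patterns and the constructor call above each take one extra argument. A minimal sketch of matching on it:

    import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    import org.apache.spark.sql.execution.datasources.LogicalRelation

    // Hedged sketch: the fourth pattern slot is the new isStreaming flag.
    def isBatchRelation(plan: LogicalPlan): Boolean = plan match {
        case LogicalRelation(_, _, _, isStreaming) => !isStreaming
        case _ => false
    }
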
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteSparkSession.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteSparkSession.scala b/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteSparkSession.scala
index 1fccc3a..1cc63ed 100644
--- a/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteSparkSession.scala
+++ b/modules/spark/src/main/scala/org/apache/spark/sql/ignite/IgniteSparkSession.scala
@@ -167,7 +167,7 @@ class IgniteSparkSession private(ic: IgniteContext, proxy: SparkSession) extends
 
     /** @inheritdoc */
     override private[sql] def applySchemaToPythonRDD(rdd: RDD[Array[Any]], schema: StructType) = {
-        val rowRdd = rdd.map(r => python.EvaluatePython.fromJava(r, schema).asInstanceOf[InternalRow])
+        val rowRdd = rdd.map(r => python.EvaluatePython.makeFromJava(schema)(r).asInstanceOf[InternalRow])
         Dataset.ofRows(self, LogicalRDD(schema.toAttributes, rowRdd)(self))
     }
 
@@ -179,14 +179,6 @@ class IgniteSparkSession private(ic: IgniteContext, proxy: SparkSession) extends
         proxy.extensions
 
     /** @inheritdoc */
-    override private[sql] def internalCreateDataFrame(
-        catalystRows: RDD[InternalRow],
-        schema: StructType) = {
-        val logicalPlan = LogicalRDD(schema.toAttributes, catalystRows)(self)
-        Dataset.ofRows(self, logicalPlan)
-    }
-
-    /** @inheritdoc */
     override private[sql] def createDataFrame(rowRDD: RDD[Row],
         schema: StructType,
         needsConversion: Boolean) = {

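Note: Spark 2.3 replaced EvaluatePython.fromJava(obj, dataType) with makeFromJava(dataType), which returns a reusable Any => Any converter that is then applied per record. A minimal sketch, assuming code living under org.apache.spark.sql as the patch does; toInternalRows is an illustrative name:

    import org.apache.spark.rdd.RDD
    import org.apache.spark.sql.catalyst.InternalRow
    import org.apache.spark.sql.execution.python.EvaluatePython
    import org.apache.spark.sql.types.StructType

    // Hedged sketch: build the converter once, then map it over the RDD.
    def toInternalRows(rdd: RDD[Array[Any]], schema: StructType): RDD[InternalRow] = {
        val fromJava = EvaluatePython.makeFromJava(schema)
        rdd.map(r => fromJava(r).asInstanceOf[InternalRow])
    }
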
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/AbstractDataFrameSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/AbstractDataFrameSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/AbstractDataFrameSpec.scala
index 29a4e6f..b46ede1 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/AbstractDataFrameSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/AbstractDataFrameSpec.scala
@@ -206,7 +206,7 @@ object AbstractDataFrameSpec {
         val plan = df.queryExecution.optimizedPlan
 
         val cnt = plan.collectLeaves.count {
-            case LogicalRelation(relation: IgniteSQLAccumulatorRelation[_, _], _, _) ⇒
+            case LogicalRelation(relation: IgniteSQLAccumulatorRelation[_, _], _, _, _) ⇒
                 if (qry != "")
                     assert(qry.toLowerCase == relation.acc.compileQuery().toLowerCase,
                         s"Generated query should be equal to expected.\nexpected  - $qry\ngenerated - ${relation.acc.compileQuery()}")

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteDataFrameSuite.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteDataFrameSuite.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteDataFrameSuite.scala
index 0ec65a1..e1bb7ff 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteDataFrameSuite.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteDataFrameSuite.scala
@@ -20,7 +20,7 @@ package org.apache.ignite.spark
 import org.scalatest.Suites
 
 /**
-  * Test suite for Spark DataFram API implementation.
+  * Test suite for Spark DataFrame API implementation.
   */
 class IgniteDataFrameSuite extends Suites (
     new IgniteDataFrameSchemaSpec,

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationAggregationFuncSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationAggregationFuncSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationAggregationFuncSpec.scala
index d2527c8..be0e1ba 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationAggregationFuncSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationAggregationFuncSpec.scala
@@ -51,7 +51,7 @@ class IgniteOptimizationAggregationFuncSpec extends AbstractDataFrameSpec {
         it("AVG - DOUBLE") {
             val df = igniteSession.sql("SELECT AVG(val) FROM numbers WHERE id 
<= 3")
 
-            checkOptimizationResult(df, "SELECT AVG(val) FROM numbers WHERE id 
IS NOT NULL and id <= 3")
+            checkOptimizationResult(df, "SELECT AVG(val) FROM numbers WHERE id 
<= 3")
 
             val data = Tuple1(.5)
 
@@ -81,7 +81,7 @@ class IgniteOptimizationAggregationFuncSpec extends AbstractDataFrameSpec {
         it("SUM - DOUBLE") {
             val df = igniteSession.sql("SELECT SUM(val) FROM numbers WHERE id <= 3")
 
-            checkOptimizationResult(df, "SELECT SUM(val) FROM numbers WHERE id IS NOT NULL and id <= 3")
+            checkOptimizationResult(df, "SELECT SUM(val) FROM numbers WHERE id <= 3")
 
             val data = Tuple1(1.5)
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationJoinSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationJoinSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationJoinSpec.scala
index b4b36a8..d4e94c9 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationJoinSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationJoinSpec.scala
@@ -279,7 +279,7 @@ class IgniteOptimizationJoinSpec extends AbstractDataFrameSpec {
 
             checkOptimizationResult(df, "SELECT jt1.id as id1, jt1.val1, 
jt2.id as id2, jt2.val2 " +
                 "FROM jt1 JOIN jt2 ON jt1.val1 = jt2.val2 " +
-                "WHERE jt1.id IS NOT NULL AND jt1.id < 10 AND jt1.val1 IS NOT 
NULL and jt2.val2 IS NOT NULL")
+                "WHERE jt1.id < 10 AND jt1.val1 IS NOT NULL and jt2.val2 IS 
NOT NULL")
 
             val data = (
                 (2, "B", 1, "B"),
@@ -427,9 +427,7 @@ class IgniteOptimizationJoinSpec extends AbstractDataFrameSpec {
                 "SELECT jt1.id + jt2.id as sum_id, jt1.val1, jt2.val2 FROM " +
                     "jt1 JOIN jt2 ON NOT jt1.id + jt2.id = 15 AND jt1.val1 = jt2.val2 " +
                     "WHERE " +
-                    "jt1.id IS NOT NULL AND " +
                     "jt1.val1 IS NOT NULL AND " +
-                    "jt2.id IS NOT NULL AND " +
                     "jt2.val2 IS NOT NULL"
             )
 
@@ -458,7 +456,7 @@ class IgniteOptimizationJoinSpec extends AbstractDataFrameSpec {
             checkOptimizationResult(df,
                 "SELECT CAST(SUM(JT1.ID + JT2.ID) AS BIGINT) AS \"sum(sum_id)\" " +
                     "FROM JT1 JOIN JT2 ON NOT JT1.id + JT2.id = 15 AND JT1.val1 = JT2.val2 " +
-                    "WHERE JT1.id IS NOT NULL AND JT1.val1 IS NOT NULL AND JT2.id IS NOT NULL AND JT2.val2 IS NOT NULL")
+                    "WHERE JT1.val1 IS NOT NULL AND JT2.val2 IS NOT NULL")
 
             val data = Tuple1(8)
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationMathFuncSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationMathFuncSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationMathFuncSpec.scala
index 02793c9..2cb41ae 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationMathFuncSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationMathFuncSpec.scala
@@ -36,7 +36,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("ABS") {
             val df = igniteSession.sql("SELECT ABS(val) FROM numbers WHERE id 
= 6")
 
-            checkOptimizationResult(df, "SELECT ABS(val) FROM numbers WHERE id 
is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT ABS(val) FROM numbers WHERE id 
= 6")
 
             val data = Tuple1(.5)
 
@@ -46,7 +46,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("ACOS") {
             val df = igniteSession.sql("SELECT ACOS(val) FROM numbers WHERE id = 7")
 
-            checkOptimizationResult(df, "SELECT ACOS(val) FROM numbers WHERE id is not null AND id = 7")
+            checkOptimizationResult(df, "SELECT ACOS(val) FROM numbers WHERE id = 7")
 
             val data = Tuple1(Math.PI)
 
@@ -56,7 +56,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("ASIN") {
             val df = igniteSession.sql("SELECT ASIN(val) FROM numbers WHERE id = 7")
 
-            checkOptimizationResult(df, "SELECT ASIN(val) FROM numbers WHERE id is not null AND id = 7")
+            checkOptimizationResult(df, "SELECT ASIN(val) FROM numbers WHERE id = 7")
 
             val data = Tuple1(-Math.PI/2)
 
@@ -66,7 +66,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("ATAN") {
             val df = igniteSession.sql("SELECT ATAN(val) FROM numbers WHERE id = 7")
 
-            checkOptimizationResult(df, "SELECT ATAN(val) FROM numbers WHERE id is not null AND id = 7")
+            checkOptimizationResult(df, "SELECT ATAN(val) FROM numbers WHERE id = 7")
 
             val data = Tuple1(-Math.PI/4)
 
@@ -76,7 +76,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("COS") {
             val df = igniteSession.sql("SELECT COS(val) FROM numbers WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT COS(val) FROM numbers WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT COS(val) FROM numbers WHERE id = 1")
 
             val data = Tuple1(1.0)
 
@@ -86,7 +86,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("SIN") {
             val df = igniteSession.sql("SELECT SIN(val) FROM numbers WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT SIN(val) FROM numbers WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT SIN(val) FROM numbers WHERE id = 1")
 
             val data = Tuple1(.0)
 
@@ -96,7 +96,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("TAN") {
             val df = igniteSession.sql("SELECT TAN(val) FROM numbers WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT TAN(val) FROM numbers WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT TAN(val) FROM numbers WHERE id = 1")
 
             val data = Tuple1(.0)
 
@@ -106,7 +106,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("COSH") {
             val df = igniteSession.sql("SELECT COSH(val) FROM numbers WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT COSH(val) FROM numbers WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT COSH(val) FROM numbers WHERE id = 1")
 
             val data = Tuple1(1.0)
 
@@ -116,7 +116,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("SINH") {
             val df = igniteSession.sql("SELECT SINH(val) FROM numbers WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT SINH(val) FROM numbers WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT SINH(val) FROM numbers WHERE id = 1")
 
             val data = Tuple1(.0)
 
@@ -126,7 +126,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("TANH") {
             val df = igniteSession.sql("SELECT TANH(val) FROM numbers WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT TANH(val) FROM numbers WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT TANH(val) FROM numbers WHERE id = 1")
 
             val data = Tuple1(.0)
 
@@ -137,7 +137,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT ATAN2(val, 0.0) FROM numbers WHERE id = 1")
 
             checkOptimizationResult(df, "SELECT ATAN2(val, 0.0) AS \"ATAN2(val, CAST(0.0 AS DOUBLE))\" " +
-                "FROM numbers WHERE id is not null AND id = 1")
+                "FROM numbers WHERE id = 1")
 
             val data = Tuple1(.0)
 
@@ -148,7 +148,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT val % 9 FROM numbers WHERE id = 8")
 
             checkOptimizationResult(df, "SELECT val % 9.0 as \"(val % CAST(9 AS DOUBLE))\" " +
-                "FROM numbers WHERE id is not null AND id = 8")
+                "FROM numbers WHERE id = 8")
 
             val data = Tuple1(6.0)
 
@@ -159,7 +159,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT CEIL(val) FROM numbers WHERE id = 2")
 
             checkOptimizationResult(df, "SELECT CAST(CEIL(val) AS LONG) as \"CEIL(val)\" " +
-                "FROM numbers WHERE id is not null AND id = 2")
+                "FROM numbers WHERE id = 2")
 
             val data = Tuple1(1)
 
@@ -183,7 +183,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT FLOOR(val) FROM numbers WHERE id = 2")
 
             checkOptimizationResult(df, "SELECT CAST(FLOOR(val) AS LONG) as \"FLOOR(val)\" FROM numbers " +
-                "WHERE id is not null AND id = 2")
+                "WHERE id = 2")
 
             val data = Tuple1(0)
 
@@ -194,7 +194,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT POWER(val, 3) FROM numbers WHERE id = 4")
 
             checkOptimizationResult(df, "SELECT POWER(val, 3.0) as \"POWER(val, CAST(3 AS DOUBLE))\" FROM numbers " +
-                "WHERE id is not null AND id = 4")
+                "WHERE id = 4")
 
             val data = Tuple1(8.0)
 
@@ -217,7 +217,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT LOG(val) FROM numbers WHERE id = 12")
 
             checkOptimizationResult(df, "SELECT LOG(val) as \"LOG(E(), val)\" FROM numbers " +
-                "WHERE id IS NOT NULL AND id = 12")
+                "WHERE id = 12")
 
             val data = Tuple1(2.0)
 
@@ -227,7 +227,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("LOG10") {
             val df = igniteSession.sql("SELECT LOG10(val) FROM numbers WHERE id = 11")
 
-            checkOptimizationResult(df, "SELECT LOG10(val) FROM numbers WHERE id IS NOT NULL AND id = 11")
+            checkOptimizationResult(df, "SELECT LOG10(val) FROM numbers WHERE id = 11")
 
             val data = Tuple1(2.0)
 
@@ -237,7 +237,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("DEGREES") {
            val df = igniteSession.sql("SELECT DEGREES(val) FROM numbers WHERE id = 13")
 
-            checkOptimizationResult(df, "SELECT DEGREES(val) FROM numbers WHERE id IS NOT NULL AND id = 13")
+            checkOptimizationResult(df, "SELECT DEGREES(val) FROM numbers WHERE id = 13")
 
             val data = Tuple1(180.0)
 
@@ -247,7 +247,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("RADIANS") {
             val df = igniteSession.sql("SELECT RADIANS(val) FROM numbers WHERE id = 14")
 
-            checkOptimizationResult(df, "SELECT RADIANS(val) FROM numbers WHERE id IS NOT NULL AND id = 14")
+            checkOptimizationResult(df, "SELECT RADIANS(val) FROM numbers WHERE id = 14")
 
             val data = Tuple1(Math.PI)
 
@@ -258,7 +258,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT int_val&1 FROM numbers WHERE id = 15")
 
             checkOptimizationResult(df, "SELECT BITAND(int_val, 1) as \"(int_val & CAST(1 AS BIGINT))\" FROM numbers " +
-                "WHERE id IS NOT NULL AND id = 15")
+                "WHERE id = 15")
 
             val data = Tuple1(1)
 
@@ -269,7 +269,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT int_val|1 FROM numbers WHERE id = 16")
 
             checkOptimizationResult(df, "SELECT BITOR(int_val, 1) as \"(int_val | CAST(1 AS BIGINt))\" FROM numbers " +
-                "WHERE id IS NOT NULL AND id = 16")
+                "WHERE id = 16")
 
             val data = Tuple1(3)
 
@@ -280,7 +280,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT int_val^1 FROM numbers WHERE id = 17")
 
             checkOptimizationResult(df, "SELECT BITXOR(int_val, 1) AS \"(int_val ^ CAST(1 AS BIGINT))\" FROM numbers " +
-                "WHERE id IS NOT NULL AND id = 17")
+                "WHERE id = 17")
 
             val data = Tuple1(2)
 
@@ -290,7 +290,7 @@ class IgniteOptimizationMathFuncSpec extends AbstractDataFrameSpec {
         it("RAND") {
             val df = igniteSession.sql("SELECT id, RAND(1) FROM numbers WHERE id = 17")
 
-            checkOptimizationResult(df, "SELECT id, RAND(1) FROM numbers WHERE id IS NOT NULL AND id = 17")
+            checkOptimizationResult(df, "SELECT id, RAND(1) FROM numbers WHERE id = 17")
 
             val data = df.rdd.collect
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationSpec.scala
index 128f625..ff367af 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationSpec.scala
@@ -60,7 +60,7 @@ class IgniteOptimizationSpec extends AbstractDataFrameSpec {
         it("SELECT id, name FROM person WHERE id > 3 ORDER BY id") {
             val df = igniteSession.sql("SELECT id, name FROM person WHERE id > 
3 ORDER BY id")
 
-            checkOptimizationResult(df, "SELECT id, name FROM person WHERE id 
IS NOT NULL AND id > 3 ORDER BY id")
+            checkOptimizationResult(df, "SELECT id, name FROM person WHERE id 
> 3 ORDER BY id")
 
             val data = (
                 (4, "Richard Miles"),
@@ -72,7 +72,7 @@ class IgniteOptimizationSpec extends AbstractDataFrameSpec {
         it("SELECT id, name FROM person WHERE id > 3 ORDER BY id DESC") {
             val df = igniteSession.sql("SELECT id, name FROM person WHERE id > 
3 ORDER BY id DESC")
 
-            checkOptimizationResult(df, "SELECT id, name FROM person WHERE id 
IS NOT NULL AND id > 3 ORDER BY id DESC")
+            checkOptimizationResult(df, "SELECT id, name FROM person WHERE id 
> 3 ORDER BY id DESC")
 
             val data = (
                 (5, null),
@@ -111,7 +111,7 @@ class IgniteOptimizationSpec extends AbstractDataFrameSpec {
         it("SELECT id FROM city HAVING id > 1") {
             val df = igniteSession.sql("SELECT id FROM city HAVING id > 1")
 
-            checkOptimizationResult(df, "SELECT id FROM city WHERE id IS NOT 
NULL AND id > 1")
+            checkOptimizationResult(df, "SELECT id FROM city WHERE id > 1")
 
             val data = (2, 3, 4)
 
@@ -145,7 +145,7 @@ class IgniteOptimizationSpec extends AbstractDataFrameSpec {
         it("SELECT id, name FROM city WHERE id > 1 ORDER BY id") {
             val df = igniteSession.sql("SELECT id, name FROM city WHERE id > 1 
ORDER BY id")
 
-            checkOptimizationResult(df, "SELECT id, name FROM city WHERE id IS 
NOT NULL and id > 1 ORDER BY id")
+            checkOptimizationResult(df, "SELECT id, name FROM city WHERE id > 
1 ORDER BY id")
 
             val data = (
                 (2, "Denver"),

http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationStringFuncSpec.scala
----------------------------------------------------------------------
diff --git a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationStringFuncSpec.scala b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationStringFuncSpec.scala
index db106f2..de94de3 100644
--- a/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationStringFuncSpec.scala
+++ b/modules/spark/src/test/scala/org/apache/ignite/spark/IgniteOptimizationStringFuncSpec.scala
@@ -60,7 +60,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT LENGTH(str) FROM strings WHERE 
id <= 3")
 
             checkOptimizationResult(df, "SELECT CAST(LENGTH(str) AS INTEGER) 
as \"length(str)\" FROM strings " +
-                "WHERE id is not null AND id <= 3")
+                "WHERE id <= 3")
 
             val data = (3, 3, 6)
 
@@ -70,37 +70,97 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("RTRIM") {
             val df = igniteSession.sql("SELECT RTRIM(str) FROM strings WHERE id = 3")
 
-            checkOptimizationResult(df, "SELECT RTRIM(str) FROM strings WHERE id is not null AND id = 3")
+            checkOptimizationResult(df, "SELECT RTRIM(str) FROM strings WHERE id = 3")
 
             val data = Tuple1("AAA")
 
             checkQueryData(df, data)
         }
 
+        it("RTRIMWithTrimStr") {
+            val df = igniteSession.sql("SELECT RTRIM('B', str) FROM strings 
WHERE id = 9")
+
+            checkOptimizationResult(df, "SELECT RTRIM(str, 'B') FROM strings 
WHERE id = 9")
+
+            val data = Tuple1("BAAA")
+
+            checkQueryData(df, data)
+        }
+
         it("LTRIM") {
             val df = igniteSession.sql("SELECT LTRIM(str) FROM strings WHERE 
id = 4")
 
-            checkOptimizationResult(df, "SELECT LTRIM(str) FROM strings WHERE 
id is not null AND id = 4")
+            checkOptimizationResult(df, "SELECT LTRIM(str) FROM strings WHERE 
id = 4")
 
             val data = Tuple1("AAA")
 
             checkQueryData(df, data)
         }
 
+        it("LTRIMWithTrimStr") {
+            val df = igniteSession.sql("SELECT LTRIM('B', str) FROM strings 
WHERE id = 9")
+
+            checkOptimizationResult(df, "SELECT LTRIM(str, 'B') FROM strings 
WHERE id = 9")
+
+            val data = Tuple1("AAAB")
+
+            checkQueryData(df, data)
+        }
+
         it("TRIM") {
             val df = igniteSession.sql("SELECT TRIM(str) FROM strings WHERE id 
= 5")
 
-            checkOptimizationResult(df, "SELECT TRIM(str) FROM strings WHERE 
id is not null AND id = 5")
+            checkOptimizationResult(df, "SELECT TRIM(str) FROM strings WHERE 
id = 5")
 
             val data = Tuple1("AAA")
 
             checkQueryData(df, data)
         }
 
+        it("TRIMWithTrimStr") {
+            val df = igniteSession.sql("SELECT TRIM('B', str) FROM strings WHERE id = 9")
+
+            checkOptimizationResult(df, "SELECT TRIM(str, 'B') FROM strings WHERE id = 9")
+
+            val data = Tuple1("AAA")
+
+            checkQueryData(df, data)
+        }
+
+        it("TRIMWithTrimStrBOTH") {
+            val df = igniteSession.sql("SELECT TRIM(BOTH 'B' FROM str) FROM strings WHERE id = 9")
+
+            checkOptimizationResult(df, "SELECT TRIM(str, 'B') FROM strings WHERE id = 9")
+
+            val data = Tuple1("AAA")
+
+            checkQueryData(df, data)
+        }
+
+        it("TRIMWithTrimStrLEADING") {
+            val df = igniteSession.sql("SELECT TRIM(LEADING 'B' FROM str) FROM strings WHERE id = 9")
+
+            checkOptimizationResult(df, "SELECT LTRIM(str, 'B') FROM strings WHERE id = 9")
+
+            val data = Tuple1("AAAB")
+
+            checkQueryData(df, data)
+        }
+
+        it("TRIMWithTrimStrTRAILING") {
+            val df = igniteSession.sql("SELECT TRIM(TRAILING 'B' FROM str) FROM strings WHERE id = 9")
+
+            checkOptimizationResult(df, "SELECT RTRIM(str, 'B') FROM strings WHERE id = 9")
+
+            val data = Tuple1("BAAA")
+
+            checkQueryData(df, data)
+        }
+
         it("LOWER") {
             val df = igniteSession.sql("SELECT LOWER(str) FROM strings WHERE 
id = 2")
 
-            checkOptimizationResult(df, "SELECT LOWER(str) FROM strings WHERE 
id is not null AND id = 2")
+            checkOptimizationResult(df, "SELECT LOWER(str) FROM strings WHERE 
id = 2")
 
             val data = Tuple1("aaa")
 
@@ -110,7 +170,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("UPPER") {
             val df = igniteSession.sql("SELECT UPPER(str) FROM strings WHERE id = 1")
 
-            checkOptimizationResult(df, "SELECT UPPER(str) FROM strings WHERE id is not null AND id = 1")
+            checkOptimizationResult(df, "SELECT UPPER(str) FROM strings WHERE id = 1")
 
             val data = Tuple1("AAA")
 
@@ -120,7 +180,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("LOWER(RTRIM)") {
             val df = igniteSession.sql("SELECT LOWER(RTRIM(str)) FROM strings WHERE id = 3")
 
-            checkOptimizationResult(df, "SELECT LOWER(RTRIM(str)) FROM strings WHERE id is not null AND id = 3")
+            checkOptimizationResult(df, "SELECT LOWER(RTRIM(str)) FROM strings WHERE id = 3")
 
             val data = Tuple1("aaa")
 
@@ -130,7 +190,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("LOCATE") {
             val df = igniteSession.sql("SELECT LOCATE('D', str) FROM strings WHERE id = 6")
 
-            checkOptimizationResult(df, "SELECT LOCATE('D', str, 1) FROM strings WHERE id is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT LOCATE('D', str, 1) FROM strings WHERE id = 6")
 
             val data = Tuple1(4)
 
@@ -140,7 +200,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("LOCATE - 2") {
             val df = igniteSession.sql("SELECT LOCATE('A', str) FROM strings WHERE id = 6")
 
-            checkOptimizationResult(df, "SELECT LOCATE('A', str, 1) FROM strings WHERE id is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT LOCATE('A', str, 1) FROM strings WHERE id = 6")
 
             val data = Tuple1(1)
 
@@ -151,7 +211,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT instr(str, 'BCD') FROM strings WHERE id = 6")
 
             checkOptimizationResult(df, "SELECT POSITION('BCD', str) as \"instr(str, BCD)\" FROM strings " +
-                "WHERE id is not null AND id = 6")
+                "WHERE id = 6")
 
             val data = Tuple1(2)
 
@@ -161,7 +221,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("CONCAT") {
             val df = igniteSession.sql("SELECT concat(str, 'XXX') FROM strings WHERE id = 6")
 
-            checkOptimizationResult(df, "SELECT concat(str, 'XXX') FROM strings WHERE id is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT concat(str, 'XXX') FROM strings WHERE id = 6")
 
             val data = Tuple1("ABCDEFXXX")
 
@@ -171,7 +231,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("RPAD") {
             val df = igniteSession.sql("SELECT RPAD(str, 10, 'X') FROM strings WHERE id = 6")
 
-            checkOptimizationResult(df, "SELECT RPAD(str, 10, 'X') FROM strings WHERE id is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT RPAD(str, 10, 'X') FROM strings WHERE id = 6")
 
             val data = Tuple1("ABCDEFXXXX")
 
@@ -181,7 +241,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("LPAD") {
             val df = igniteSession.sql("SELECT LPAD(str, 10, 'X') FROM strings WHERE id = 6")
 
-            checkOptimizationResult(df, "SELECT LPAD(str, 10, 'X') FROM strings WHERE id is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT LPAD(str, 10, 'X') FROM strings WHERE id = 6")
 
             val data = Tuple1("XXXXABCDEF")
 
@@ -191,7 +251,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("REPEAT") {
             val df = igniteSession.sql("SELECT REPEAT(str, 2) FROM strings WHERE id = 6")
 
-            checkOptimizationResult(df, "SELECT REPEAT(str, 2) FROM strings WHERE id is not null AND id = 6")
+            checkOptimizationResult(df, "SELECT REPEAT(str, 2) FROM strings WHERE id = 6")
 
             val data = Tuple1("ABCDEFABCDEF")
 
@@ -202,7 +262,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT SUBSTRING(str, 4, 3) FROM strings WHERE id = 6")
 
             checkOptimizationResult(df, "SELECT SUBSTR(str, 4, 3) as \"SUBSTRING(str, 4, 3)\" FROM strings " +
-                "WHERE id is not null AND id = 6")
+                "WHERE id = 6")
 
             val data = Tuple1("DEF")
 
@@ -213,7 +273,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT SPACE(LENGTH(str)) FROM strings WHERE id = 1")
 
             checkOptimizationResult(df, "SELECT SPACE(CAST(LENGTH(str) AS INTEGER)) as \"SPACE(LENGTH(str))\" " +
-                "FROM strings WHERE id is not null AND id = 1")
+                "FROM strings WHERE id = 1")
 
             val data = Tuple1("   ")
 
@@ -223,7 +283,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         it("ASCII") {
             val df = igniteSession.sql("SELECT ASCII(str) FROM strings WHERE id = 7")
 
-            checkOptimizationResult(df, "SELECT ASCII(str) FROM strings WHERE id is not null AND id = 7")
+            checkOptimizationResult(df, "SELECT ASCII(str) FROM strings WHERE id = 7")
 
             val data = Tuple1(50)
 
@@ -234,7 +294,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT REGEXP_REPLACE(str, '(\\\\d+)', 'num') FROM strings WHERE id = 7")
 
             checkOptimizationResult(df, "SELECT REGEXP_REPLACE(str, '(\\d+)', 'num') FROM strings " +
-                "WHERE id is not null AND id = 7")
+                "WHERE id = 7")
 
             val data = Tuple1("num")
 
@@ -246,7 +306,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
                 "WHERE id >= 7 AND id <= 8")
 
             checkOptimizationResult(df, "SELECT id, CONCAT_WS(', ', str, 'after') FROM strings " +
-                "WHERE id is not null AND id >= 7 AND id <= 8")
+                "WHERE id >= 7 AND id <= 8")
 
             val data = (
                 (7, "222, after"),
@@ -259,7 +319,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
             val df = igniteSession.sql("SELECT id, TRANSLATE(str, 'DEF', 'ABC') FROM strings WHERE id = 6")
 
             checkOptimizationResult(df, "SELECT id, TRANSLATE(str, 'DEF', 'ABC') FROM strings " +
-                "WHERE id is not null AND id = 6")
+                "WHERE id = 6")
 
             val data = Tuple1((6, "ABCABC"))
 
@@ -288,6 +348,7 @@ class IgniteOptimizationStringFuncSpec extends AbstractDataFrameSpec {
         cache.query(qry.setArgs(6L.asInstanceOf[JLong], "ABCDEF")).getAll
         cache.query(qry.setArgs(7L.asInstanceOf[JLong], "222")).getAll
         cache.query(qry.setArgs(8L.asInstanceOf[JLong], null)).getAll
+        cache.query(qry.setArgs(9L.asInstanceOf[JLong], "BAAAB")).getAll
     }
 
     override protected def beforeAll(): Unit = {

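For reviewers who want to try the new Spark 2.3 trim syntax outside the suite,
here is a minimal, self-contained sketch. It is not part of this patch: it uses
a plain local SparkSession instead of the IgniteSparkSession the tests build,
with an in-memory view standing in for the Ignite-backed strings table and
mirroring the new test row with id = 9. Against Ignite, the tests above expect
these queries to push down as TRIM(str, 'B'), LTRIM(str, 'B') and
RTRIM(str, 'B') in the generated SQL.

    import org.apache.spark.sql.SparkSession

    // Sketch only: the Spark 2.3 trim-string forms exercised by the new tests.
    object TrimSyntaxSketch {
        def main(args: Array[String]): Unit = {
            val spark = SparkSession.builder()
                .appName("trim-syntax-sketch")
                .master("local[*]")
                .getOrCreate()

            import spark.implicits._

            // Stand-in for the Ignite-backed table; same data as test row id = 9.
            Seq((9L, "BAAAB")).toDF("id", "str").createOrReplaceTempView("strings")

            spark.sql("SELECT TRIM('B', str) FROM strings WHERE id = 9").show()              // AAA
            spark.sql("SELECT TRIM(BOTH 'B' FROM str) FROM strings WHERE id = 9").show()     // AAA
            spark.sql("SELECT TRIM(LEADING 'B' FROM str) FROM strings WHERE id = 9").show()  // AAAB
            spark.sql("SELECT TRIM(TRAILING 'B' FROM str) FROM strings WHERE id = 9").show() // BAAA

            spark.stop()
        }
    }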
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/parent/pom.xml
----------------------------------------------------------------------
diff --git a/parent/pom.xml b/parent/pom.xml
index 0e54733..1dea4be 100644
--- a/parent/pom.xml
+++ b/parent/pom.xml
@@ -111,7 +111,7 @@
         <slf4j.version>1.7.7</slf4j.version>
         <slf4j16.version>1.6.4</slf4j16.version>
         <spark.hadoop.version>2.6.5</spark.hadoop.version>
-        <spark.version>2.2.0</spark.version>
+        <spark.version>2.3.0</spark.version>
         <spring.data.version>1.13.11.RELEASE</spring.data.version> <!-- don't forget to update spring version -->
         <spring.version>4.3.7.RELEASE</spring.version><!-- don't forget to update spring-data version -->
         <spring.data-2.0.version>2.0.6.RELEASE</spring.data-2.0.version> <!-- don't forget to update spring-5.0 version -->

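Since the Spark dependency is driven entirely by this property, a short,
hypothetical guard (not in the patch) can catch a stale classpath early in a
test run; SPARK_VERSION is the constant Spark itself publishes in the
org.apache.spark package object:

    import org.apache.spark.SPARK_VERSION

    // Sketch only: fail fast if the classpath disagrees with parent/pom.xml.
    object SparkVersionGuard extends App {
        require(SPARK_VERSION.startsWith("2.3."),
            s"Expected Spark 2.3.x on the classpath, found $SPARK_VERSION")
    }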
http://git-wip-us.apache.org/repos/asf/ignite/blob/f7901651/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 14a0ebe..81410b0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -122,7 +122,6 @@
             <modules><!-- sorted alphabetically -->
                 <module>modules/scalar-2.10</module>
                 <module>modules/scalar</module>
-                <module>modules/spark-2.10</module>
                 <module>modules/spark</module>
                 <module>modules/visor-console-2.10</module>
                 <module>modules/visor-console</module>
@@ -555,7 +554,6 @@
 
             <modules>
                 <module>modules/scalar-2.10</module>
-                <module>modules/spark-2.10</module>
                 <module>modules/visor-console-2.10</module>
                 <module>modules/visor-plugins</module>
             </modules>
