This is an automated email from the ASF dual-hosted git repository.

ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark-connect-go.git


The following commit(s) were added to refs/heads/master by this push:
     new a1b4f12  [SPARK-48777][BUILD] Properly lint, vet and check for license 
headers. (#32)
a1b4f12 is described below

commit a1b4f12ed6b2dedf0d752118b0ed9f96f5a3fa2c
Author: Martin Grund <martin.gr...@databricks.com>
AuthorDate: Tue Jul 9 04:31:36 2024 +0200

    [SPARK-48777][BUILD] Properly lint, vet and check for license headers. (#32)
    
    * [SPARK-48777][BUILD] Making sure that style, format, and license headers 
are present
    
    * add wf
    
    * adding missing files
    
    * comments
---
 .github/workflows/build.yml                       | 14 +++-
 .gitignore                                        |  5 +-
 .gitignore => .golangci.yml                       | 15 +---
 CONTRIBUTING.md                                   | 18 ++++-
 Makefile                                          |  9 ++-
 cmd/spark-connect-example-raw-grpc-client/main.go |  3 +-
 cmd/spark-connect-example-spark-session/main.go   |  6 +-
 dev/.rat-excludes                                 | 15 ++++
 dev/check-license                                 | 86 +++++++++++++++++++++++
 spark/client/channel/channel.go                   |  8 +--
 spark/client/channel/channel_test.go              |  4 +-
 spark/client/channel/compat.go                    | 15 ++++
 spark/mocks/mocks.go                              | 16 +++++
 spark/sparkerrors/errors_test.go                  | 15 ++++
 spark/sql/dataframe.go                            |  3 +-
 spark/sql/dataframe_test.go                       |  6 +-
 spark/sql/dataframereader.go                      | 16 +++++
 spark/sql/dataframereader_test.go                 | 15 ++++
 spark/sql/dataframewriter.go                      | 16 +++++
 spark/sql/dataframewriter_test.go                 | 15 ++++
 spark/sql/executeplanclient.go                    | 16 +++++
 spark/sql/mocks_test.go                           | 16 +++++
 spark/sql/plan_test.go                            | 16 +++++
 spark/sql/row_test.go                             | 16 +++++
 spark/sql/session/sparksession_test.go            | 30 ++++++--
 spark/sql/utils/check.go                          | 23 ++++++
 26 files changed, 380 insertions(+), 37 deletions(-)

diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index dc0eade..877d768 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -31,6 +31,14 @@ on:
     branches:
       - master
 
+permissions:
+  # Required: allow read access to the content for analysis.
+  contents: read
+  # Optional: allow read access to pull request. Use with `only-new-issues` 
option.
+  pull-requests: read
+  # Optional: allow write access to checks to allow the action to annotate 
code in the PR.
+  checks: write
+
 
 jobs:
   build:
@@ -59,4 +67,8 @@ jobs:
           go mod download -x
           make gen
           make
-          make test
\ No newline at end of file
+          make test
+      - name: golangci-lint
+        uses: golangci/golangci-lint-action@v6
+        with:
+          version: v1.59
diff --git a/.gitignore b/.gitignore
index e76d6f0..8381e8d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,4 +26,7 @@ coverage*
 
 # Ignore binaries
 cmd/spark-connect-example-raw-grpc-client/spark-connect-example-raw-grpc-client
-cmd/spark-connect-example-spark-session/spark-connect-example-spark-session
\ No newline at end of file
+cmd/spark-connect-example-spark-session/spark-connect-example-spark-session
+
+target
+lib
\ No newline at end of file
diff --git a/.gitignore b/.golangci.yml
similarity index 73%
copy from .gitignore
copy to .golangci.yml
index e76d6f0..05a64f5 100644
--- a/.gitignore
+++ b/.golangci.yml
@@ -15,15 +15,6 @@
 # limitations under the License.
 #
 
-# All generated files
-internal/generated.out
-
-# Ignore Coverage Files
-coverage*
-
-# Ignore IDE files
-.idea/
-
-# Ignore binaries
-cmd/spark-connect-example-raw-grpc-client/spark-connect-example-raw-grpc-client
-cmd/spark-connect-example-spark-session/spark-connect-example-spark-session
\ No newline at end of file
+linters:
+  enable:
+    - gofumpt
\ No newline at end of file
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 4e5a578..995f799 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -13,4 +13,20 @@ When you contribute code, you affirm that the contribution 
is your original work
 license the work to the project under the project's open source license. 
Whether or not you
 state this explicitly, by submitting any copyrighted material via pull 
request, email, or
 other means you agree to license the material under the project's open source 
license and
-warrant that you have the legal authority to do so.
\ No newline at end of file
+warrant that you have the legal authority to do so.
+
+
+### Code Style and Checks
+
+When submitting code we use a number of checks in our continuous integration 
system to ensure
+a consistent style and adherence to license rules. You can run these checks 
locally by running:
+
+```bash
+make check
+```
+
+This requires the following tools to be present in your PATH:
+
+1. Java for checking license headers
+2. `gofumpt` for formatting Go code
+3. `golangci-lint` for linting Go code
\ No newline at end of file
diff --git a/Makefile b/Makefile
index 01fd6fb..055544e 100644
--- a/Makefile
+++ b/Makefile
@@ -79,7 +79,7 @@ lint: $(BUILD_OUTPUT)
        @golangci-lint run
 
 fmt:
-       @gofumpt -extra -w $(ALLGOFILES)
+       @gofumpt -l -w $(ALLGOFILES)
 
 test: $(BUILD_OUTPUT)
        @echo ">> TEST, \"verbose\""
@@ -97,6 +97,13 @@ fulltest: $(BUILD_OUTPUT)
        @$(GO) tool cover -html=coverage-all.out -o coverage-all.html
 
 
+check:
+       @echo -n ">> CHECK"
+       ./dev/check-license
> glongci">
+       @echo -n ">> golangci-lint: "
+       golangci-lint run
+
+
 clean:
        @echo -n ">> CLEAN"
        @$(GO) clean -i ./...
diff --git a/cmd/spark-connect-example-raw-grpc-client/main.go 
b/cmd/spark-connect-example-raw-grpc-client/main.go
index f85316b..08c22a4 100644
--- a/cmd/spark-connect-example-raw-grpc-client/main.go
+++ b/cmd/spark-connect-example-raw-grpc-client/main.go
@@ -31,12 +31,11 @@ import (
 var remote = flag.String("remote", "localhost:15002", "the remote address of 
Spark Connect server to connect to")
 
 func main() {
-       ctx := context.Background()
        opts := []grpc.DialOption{
                grpc.WithTransportCredentials(insecure.NewCredentials()),
        }
 
-       conn, err := grpc.DialContext(ctx, *remote, opts...)
+       conn, err := grpc.NewClient(*remote, opts...)
        if err != nil {
                log.Fatalf("Failed: %s", err)
        }
diff --git a/cmd/spark-connect-example-spark-session/main.go 
b/cmd/spark-connect-example-spark-session/main.go
index a9c17b0..69934de 100644
--- a/cmd/spark-connect-example-spark-session/main.go
+++ b/cmd/spark-connect-example-spark-session/main.go
@@ -21,9 +21,9 @@ import (
        "flag"
        "log"
 
-       "github.com/apache/spark-connect-go/v35/spark/sql/session"
-
        "github.com/apache/spark-connect-go/v35/spark/sql"
+       "github.com/apache/spark-connect-go/v35/spark/sql/session"
+       "github.com/apache/spark-connect-go/v35/spark/sql/utils"
 )
 
 var remote = flag.String("remote", "sc://localhost:15002",
@@ -36,7 +36,7 @@ func main() {
        if err != nil {
                log.Fatalf("Failed: %s", err)
        }
-       defer spark.Stop()
+       defer utils.WarnOnError(spark.Stop, func(err error) {})
 
        df, err := spark.Sql(ctx, "select 'apple' as word, 123 as count union 
all select 'orange' as word, 456 as count")
        if err != nil {
diff --git a/dev/.rat-excludes b/dev/.rat-excludes
new file mode 100644
index 0000000..2021b52
--- /dev/null
+++ b/dev/.rat-excludes
@@ -0,0 +1,15 @@
+.gitignore
+.gitmodules
+.gitattributes
+.project
+coverage*
+LICENSE
+NOTICE
+TAGS
+RELEASE
+.*md
+.rat-excludes
+sparksrc
+target
+generated.out
+go.sum
\ No newline at end of file
diff --git a/dev/check-license b/dev/check-license
new file mode 100755
index 0000000..bc7f493
--- /dev/null
+++ b/dev/check-license
@@ -0,0 +1,86 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+acquire_rat_jar () {
+
+  
URL="${DEFAULT_ARTIFACT_REPOSITORY:-https://repo1.maven.org/maven2/}org/apache/rat/apache-rat/${RAT_VERSION}/apache-rat-${RAT_VERSION}.jar";
+
+  JAR="$rat_jar"
+
+  # Download rat launch jar if it hasn't been downloaded yet
+  if [ ! -f "$JAR" ]; then
+    # Download
+    printf "Attempting to fetch rat\n"
+    JAR_DL="${JAR}.part"
+    if [ $(command -v curl) ]; then
+      curl -L --silent "${URL}" > "$JAR_DL" && mv "$JAR_DL" "$JAR"
+    elif [ $(command -v wget) ]; then
+      wget --quiet ${URL} -O "$JAR_DL" && mv "$JAR_DL" "$JAR"
+    else
+      printf "You do not have curl or wget installed, please install rat 
manually.\n"
+      exit -1
+    fi
+  fi
+
+  unzip -tq "$JAR" &> /dev/null
+  if [ $? -ne 0 ]; then 
+    # We failed to download
+    rm "$JAR"
+    printf "Our attempt to download rat locally to ${JAR} failed. Please 
install rat manually.\n"
+    exit -1
+  fi
+}
+
+# Go to the Spark project root directory
+FWDIR="$(cd "`dirname "$0"`"/..; pwd)"
+cd "$FWDIR"
+
+if test -x "$JAVA_HOME/bin/java"; then
+    declare java_cmd="$JAVA_HOME/bin/java"
+else
+    declare java_cmd=java
+fi
+
+export RAT_VERSION=0.15
+export rat_jar="$FWDIR"/lib/apache-rat-${RAT_VERSION}.jar
+mkdir -p "$FWDIR"/lib
+
+[[ -f "$rat_jar" ]] || acquire_rat_jar || {
+    echo "Download failed. Obtain the rat jar manually and place it at 
$rat_jar"
+    exit 1
+}
+
+mkdir -p target
+$java_cmd -jar "$rat_jar" -E "$FWDIR"/dev/.rat-excludes -d "$FWDIR" > 
target/rat-results.txt
+
+if [ $? -ne 0 ]; then
+   echo "RAT exited abnormally"
+   exit 1
+fi
+
+ERRORS="$(cat target/rat-results.txt | grep -e "??")"
+
+if test ! -z "$ERRORS"; then 
+    echo "Could not find Apache license headers in the following files:"
+    echo "$ERRORS"
+    exit 1
+else 
+    echo -e "RAT checks passed."
+fi
diff --git a/spark/client/channel/channel.go b/spark/client/channel/channel.go
index 94de111..d0b0394 100644
--- a/spark/client/channel/channel.go
+++ b/spark/client/channel/channel.go
@@ -36,9 +36,6 @@ import (
        "google.golang.org/grpc/credentials/oauth"
 )
 
-// Reserved header parameters that must not be injected as variables.
-var reservedParams = []string{"user_id", "token", "use_ssl"}
-
 // Builder is the interface that is used to implement different patterns that
 // create the GRPC connection.
 //
@@ -113,7 +110,7 @@ func (cb *BaseBuilder) Build(ctx context.Context) 
(*grpc.ClientConn, error) {
        }
 
        remote := fmt.Sprintf("%v:%v", cb.host, cb.port)
-       conn, err := grpc.DialContext(ctx, remote, opts...)
+       conn, err := grpc.NewClient(remote, opts...)
        if err != nil {
                return nil, sparkerrors.WithType(fmt.Errorf("failed to connect 
to remote %s: %w", remote, err), sparkerrors.ConnectionError)
        }
@@ -122,6 +119,9 @@ func (cb *BaseBuilder) Build(ctx context.Context) 
(*grpc.ClientConn, error) {
 
 // NewBuilder creates a new instance of the BaseBuilder. This constructor 
effectively
 // parses the connection string and extracts the relevant parameters directly.
+//
+// The following parameters to the connection string are reserved: user_id, 
session_id, use_ssl,
+// and token. These parameters are not allowed to be injected as headers.
 func NewBuilder(connection string) (Builder, error) {
        u, err := url.Parse(connection)
        if err != nil {
diff --git a/spark/client/channel/channel_test.go 
b/spark/client/channel/channel_test.go
index f60b813..f678f7a 100644
--- a/spark/client/channel/channel_test.go
+++ b/spark/client/channel/channel_test.go
@@ -22,7 +22,6 @@ import (
        "testing"
 
        "github.com/apache/spark-connect-go/v35/spark/client/channel"
-
        "github.com/apache/spark-connect-go/v35/spark/sparkerrors"
        "github.com/stretchr/testify/assert"
 )
@@ -58,6 +57,7 @@ func TestBasicChannelParsing(t *testing.T) {
        assert.ErrorIs(t, err, sparkerrors.InvalidInputError)
 
        cb, err = channel.NewBuilder(goodChannelURL)
+       assert.Nilf(t, err, "Should not have an error for a proper URL")
        assert.Equal(t, "host", cb.Host())
        assert.Equal(t, 15002, cb.Port())
        assert.Len(t, cb.Headers(), 1)
@@ -76,7 +76,7 @@ func TestBasicChannelParsing(t *testing.T) {
 func TestChannelBuildConnect(t *testing.T) {
        ctx := context.Background()
        cb, err := channel.NewBuilder("sc://localhost")
-       assert.Nil(t, err, "Should not have an error for a proper URL.")
+       assert.NoError(t, err, "Should not have an error for a proper URL.")
        conn, err := cb.Build(ctx)
        assert.Nil(t, err, "no error for proper connection")
        assert.NotNil(t, conn)
diff --git a/spark/client/channel/compat.go b/spark/client/channel/compat.go
index 3b4a735..6327108 100644
--- a/spark/client/channel/compat.go
+++ b/spark/client/channel/compat.go
@@ -1,3 +1,18 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package channel
 
 // ChannelBuilder re-exports BaseBuilder as its previous name for 
compatibility.
diff --git a/spark/mocks/mocks.go b/spark/mocks/mocks.go
index 90662aa..aace770 100644
--- a/spark/mocks/mocks.go
+++ b/spark/mocks/mocks.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package mocks
 
 import (
diff --git a/spark/sparkerrors/errors_test.go b/spark/sparkerrors/errors_test.go
index f5857ec..d12a1fb 100644
--- a/spark/sparkerrors/errors_test.go
+++ b/spark/sparkerrors/errors_test.go
@@ -1,3 +1,18 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sparkerrors
 
 import (
diff --git a/spark/sql/dataframe.go b/spark/sql/dataframe.go
index ac7473e..3563d5f 100644
--- a/spark/sql/dataframe.go
+++ b/spark/sql/dataframe.go
@@ -78,8 +78,7 @@ func NewDataFrame(sparkExecutor SparkExecutor, relation 
*proto.Relation) DataFra
        }
 }
 
-type consoleCollector struct {
-}
+type consoleCollector struct{}
 
 func (c consoleCollector) WriteRow(values []any) {
        fmt.Println(values...)
diff --git a/spark/sql/dataframe_test.go b/spark/sql/dataframe_test.go
index cccd703..d68143f 100644
--- a/spark/sql/dataframe_test.go
+++ b/spark/sql/dataframe_test.go
@@ -198,14 +198,16 @@ func TestReadArrowRecord(t *testing.T) {
                float16.New(10000.1), float32(100000.1), 1000000.1,
                decimal128.FromI64(10000000), decimal256.FromI64(100000000),
                "str1", []byte("bytes1"),
-               arrow.Timestamp(1686981953115000), 
arrow.Date64(1686981953117000)},
+               arrow.Timestamp(1686981953115000), 
arrow.Date64(1686981953117000),
+       },
                values[0])
        assert.Equal(t, []any{
                true, int8(2), int16(20), int32(200), int64(2000),
                float16.New(20000.1), float32(200000.1), 2000000.1,
                decimal128.FromI64(20000000), decimal256.FromI64(200000000),
                "str2", []byte("bytes2"),
-               arrow.Timestamp(1686981953116000), 
arrow.Date64(1686981953118000)},
+               arrow.Timestamp(1686981953116000), 
arrow.Date64(1686981953118000),
+       },
                values[1])
 }
 
diff --git a/spark/sql/dataframereader.go b/spark/sql/dataframereader.go
index 17cce98..df73aaf 100644
--- a/spark/sql/dataframereader.go
+++ b/spark/sql/dataframereader.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import proto "github.com/apache/spark-connect-go/v35/internal/generated"
diff --git a/spark/sql/dataframereader_test.go 
b/spark/sql/dataframereader_test.go
index f52ac55..572df1d 100644
--- a/spark/sql/dataframereader_test.go
+++ b/spark/sql/dataframereader_test.go
@@ -1,3 +1,18 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/dataframewriter.go b/spark/sql/dataframewriter.go
index 4c99788..f380cee 100644
--- a/spark/sql/dataframewriter.go
+++ b/spark/sql/dataframewriter.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/dataframewriter_test.go 
b/spark/sql/dataframewriter_test.go
index e886f6d..c9bc6bb 100644
--- a/spark/sql/dataframewriter_test.go
+++ b/spark/sql/dataframewriter_test.go
@@ -1,3 +1,18 @@
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/executeplanclient.go b/spark/sql/executeplanclient.go
index e79b018..81123b0 100644
--- a/spark/sql/executeplanclient.go
+++ b/spark/sql/executeplanclient.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/mocks_test.go b/spark/sql/mocks_test.go
index ded00e0..06f51dd 100644
--- a/spark/sql/mocks_test.go
+++ b/spark/sql/mocks_test.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/plan_test.go b/spark/sql/plan_test.go
index c733862..884df12 100644
--- a/spark/sql/plan_test.go
+++ b/spark/sql/plan_test.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/row_test.go b/spark/sql/row_test.go
index 7ae4f97..5789d07 100644
--- a/spark/sql/row_test.go
+++ b/spark/sql/row_test.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package sql
 
 import (
diff --git a/spark/sql/session/sparksession_test.go 
b/spark/sql/session/sparksession_test.go
index 002c030..7ab57ea 100644
--- a/spark/sql/session/sparksession_test.go
+++ b/spark/sql/session/sparksession_test.go
@@ -1,3 +1,19 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
 package session
 
 import (
@@ -133,13 +149,15 @@ func TestSQLCallsExecutePlanWithSQLOnClient(t *testing.T) 
{
        }
        session := &sparkSessionImpl{
                client: &connectServiceClient{
-                       executePlanClient: 
&sql.ExecutePlanClient{&mocks.ProtoClient{
-                               RecvResponse: &proto.ExecutePlanResponse{
-                                       ResponseType: 
&proto.ExecutePlanResponse_SqlCommandResult_{
-                                               SqlCommandResult: 
&proto.ExecutePlanResponse_SqlCommandResult{},
+                       executePlanClient: &sql.ExecutePlanClient{
+                               SparkConnectService_ExecutePlanClient: 
&mocks.ProtoClient{
+                                       RecvResponse: 
&proto.ExecutePlanResponse{
+                                               ResponseType: 
&proto.ExecutePlanResponse_SqlCommandResult_{
+                                                       SqlCommandResult: 
&proto.ExecutePlanResponse_SqlCommandResult{},
+                                               },
                                        },
                                },
-                       }},
+                       },
                        expectedExecutePlanRequest: request,
                        t:                          t,
                },
@@ -196,7 +214,7 @@ func TestWriteResultStreamsArrowResultToCollector(t 
*testing.T) {
        session := &sparkSessionImpl{
                client: &connectServiceClient{
                        executePlanClient: &sql.ExecutePlanClient{
-                               &mocks.ProtoClient{
+                               SparkConnectService_ExecutePlanClient: 
&mocks.ProtoClient{
                                        RecvResponses: 
[]*proto.ExecutePlanResponse{
                                                {
                                                        ResponseType: 
&proto.ExecutePlanResponse_SqlCommandResult_{
diff --git a/spark/sql/utils/check.go b/spark/sql/utils/check.go
new file mode 100644
index 0000000..3482082
--- /dev/null
+++ b/spark/sql/utils/check.go
@@ -0,0 +1,23 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package utils
+
+func WarnOnError(f func() error, h func(e error)) {
+       if err := f(); err != nil {
+               h(err)
+       }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to