This is an automated email from the ASF dual-hosted git repository.

huaxingao pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/main by this push:
     new 9c3bed6a65 Docs: Fix MERGE INTO example in Getting Started (#14943)
9c3bed6a65 is described below

commit 9c3bed6a651f20fc06b1a4c329e40910bd6eb36a
Author: Varun Lakhyani <[email protected]>
AuthorDate: Wed Dec 31 01:46:19 2025 +0530

    Docs: Fix MERGE INTO example in Getting Started (#14943)
    
    * Docs: Fix MERGE INTO example in Getting Started
    
    * Docs: Fix Getting Started Spark SQL example to be runnable
    
    * Apply same documentation fix to Spark 3.4, 4.0, and 4.1 versions
    
    * Retry CI as earlier gave 429
---
 docs/docs/spark-getting-started.md                                | 8 ++++++--
 .../integration/java/org/apache/iceberg/spark/TestRoundTrip.java  | 2 --
 .../integration/java/org/apache/iceberg/spark/TestRoundTrip.java  | 2 --
 .../integration/java/org/apache/iceberg/spark/TestRoundTrip.java  | 2 --
 .../integration/java/org/apache/iceberg/spark/TestRoundTrip.java  | 2 --
 5 files changed, 6 insertions(+), 10 deletions(-)

diff --git a/docs/docs/spark-getting-started.md b/docs/docs/spark-getting-started.md
index 6813c76937..821bfd022c 100644
--- a/docs/docs/spark-getting-started.md
+++ b/docs/docs/spark-getting-started.md
@@ -61,6 +61,8 @@ To create your first Iceberg table in Spark, use the `spark-sql` shell or `spark
 ```sql
 -- local is the path-based catalog defined above
 CREATE TABLE local.db.table (id bigint, data string) USING iceberg;
+CREATE TABLE source (id bigint, data string) USING parquet;
+CREATE TABLE updates (id bigint, data string) USING parquet;
 ```
 
 Iceberg catalogs support the full range of SQL DDL commands, including:
@@ -76,14 +78,16 @@ Once your table is created, insert data using [`INSERT INTO`](spark-writes.md#in
 
 ```sql
 INSERT INTO local.db.table VALUES (1, 'a'), (2, 'b'), (3, 'c');
+INSERT INTO source VALUES (10, 'd'), (11, 'ee');
+INSERT INTO updates VALUES (1, 'x'), (2, 'x'), (4, 'z');
 INSERT INTO local.db.table SELECT id, data FROM source WHERE length(data) = 1;
 ```
 
 Iceberg also adds row-level SQL updates to Spark, [`MERGE INTO`](spark-writes.md#merge-into) and [`DELETE FROM`](spark-writes.md#delete-from):
 
 ```sql
-MERGE INTO local.db.target t USING (SELECT * FROM updates) u ON t.id = u.id
-WHEN MATCHED THEN UPDATE SET t.count = t.count + u.count
+MERGE INTO local.db.table t USING (SELECT * FROM updates) u ON t.id = u.id
+WHEN MATCHED THEN UPDATE SET t.data = u.data
 WHEN NOT MATCHED THEN INSERT *;
 ```
 
diff --git a/spark/v3.4/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java b/spark/v3.4/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
index 29f725615a..709f626b08 100644
--- a/spark/v3.4/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
+++ b/spark/v3.4/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
@@ -38,8 +38,6 @@ public class TestRoundTrip extends ExtensionsTestBase {
   }
 
   // Run through our Doc's Getting Started Example
-  // TODO Update doc example so that it can actually be run, modifications were required for this
-  // test suite to run
   @TestTemplate
   public void testGettingStarted() throws IOException {
     // Creating a table
diff --git a/spark/v3.5/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java b/spark/v3.5/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
index 29f725615a..709f626b08 100644
--- a/spark/v3.5/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
+++ b/spark/v3.5/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
@@ -38,8 +38,6 @@ public class TestRoundTrip extends ExtensionsTestBase {
   }
 
   // Run through our Doc's Getting Started Example
-  // TODO Update doc example so that it can actually be run, modifications were required for this
-  // test suite to run
   @TestTemplate
   public void testGettingStarted() throws IOException {
     // Creating a table
diff --git a/spark/v4.0/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java b/spark/v4.0/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
index 29f725615a..709f626b08 100644
--- a/spark/v4.0/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
+++ b/spark/v4.0/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
@@ -38,8 +38,6 @@ public class TestRoundTrip extends ExtensionsTestBase {
   }
 
   // Run through our Doc's Getting Started Example
-  // TODO Update doc example so that it can actually be run, modifications were required for this
-  // test suite to run
   @TestTemplate
   public void testGettingStarted() throws IOException {
     // Creating a table
diff --git a/spark/v4.1/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java b/spark/v4.1/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
index 29f725615a..709f626b08 100644
--- a/spark/v4.1/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
+++ b/spark/v4.1/spark-runtime/src/integration/java/org/apache/iceberg/spark/TestRoundTrip.java
@@ -38,8 +38,6 @@ public class TestRoundTrip extends ExtensionsTestBase {
   }
 
   // Run through our Doc's Getting Started Example
-  // TODO Update doc example so that it can actually be run, modifications were required for this
-  // test suite to run
   @TestTemplate
   public void testGettingStarted() throws IOException {
     // Creating a table

Reply via email to