JonasJ-ap commented on code in PR #6449:
URL: https://github.com/apache/iceberg/pull/6449#discussion_r1068877654


##########
build.gradle:
##########
@@ -438,6 +442,76 @@ project(':iceberg-aws') {
   }
 }
 
+project(':iceberg-delta-lake') {
+  // use integration tests since we can take advantage of Spark 3.3 to read
+  // data files of a Delta Lake table and create some tests involving SQL queries.
+  configurations {
+    integrationImplementation.extendsFrom testImplementation
+    integrationRuntime.extendsFrom testRuntimeOnly
+  }
+
+  dependencies {
+    implementation project(path: ':iceberg-bundled-guava', configuration: 
'shadow')
+    api project(':iceberg-api')
+    implementation project(':iceberg-common')
+    implementation project(':iceberg-core')
+    implementation project(':iceberg-parquet')
+    implementation project(':iceberg-orc')
+    implementation "com.fasterxml.jackson.core:jackson-databind"
+
+    compileOnly "io.delta:delta-standalone_${scalaVersion}"
+
+    compileOnly("org.apache.hadoop:hadoop-common") {
+      exclude group: 'org.apache.avro', module: 'avro'
+      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
+      exclude group: 'javax.servlet', module: 'servlet-api'
+      exclude group: 'com.google.code.gson', module: 'gson'
+    }
+
+    // The newest version of delta-core uses Spark 3.3.*. Since it's only for
+    // tests, we do not need to include older versions of delta-core.
+    if (sparkVersions.contains("3.3")) {
+      integrationImplementation "io.delta:delta-core_${scalaVersion}"
+      integrationImplementation project(path: 
":iceberg-spark:iceberg-spark-3.3_${scalaVersion}")
+      integrationImplementation("org.apache.hadoop:hadoop-minicluster") {
+        exclude group: 'org.apache.avro', module: 'avro'
+        // to make sure netty libs only come from project(':iceberg-arrow')
+        exclude group: 'io.netty', module: 'netty-buffer'
+        exclude group: 'io.netty', module: 'netty-common'
+      }
+      integrationImplementation project(path: ':iceberg-hive-metastore')
+      integrationImplementation project(path: ':iceberg-hive-metastore', 
configuration: 'testArtifacts')
+      
integrationImplementation("org.apache.spark:spark-hive_${scalaVersion}:3.3.1") {
+        exclude group: 'org.apache.avro', module: 'avro'
+        exclude group: 'org.apache.arrow'
+        exclude group: 'org.apache.parquet'
+        // to make sure netty libs only come from project(':iceberg-arrow')
+        exclude group: 'io.netty', module: 'netty-buffer'
+        exclude group: 'io.netty', module: 'netty-common'
+        exclude group: 'org.roaringbitmap'
+      }
+    }
+  }
+
+  // The newest version of delta-core uses Spark 3.3.*. The integration tests
+  // should only be built if iceberg-spark-3.3 is available.
+  if (sparkVersions.contains("3.3")) {
+    sourceSets {
+      integration {
+        java.srcDir "$projectDir/src/integration/java"
+        resources.srcDir "$projectDir/src/integration/resources"
+        compileClasspath += main.output + test.output
+        runtimeClasspath += main.output + test.output
+      }
+    }
+
+    task integrationTest(type: Test) {
+      testClassesDirs = sourceSets.integration.output.classesDirs
+      classpath = sourceSets.integration.runtimeClasspath
+    }
+  }

Review Comment:
   Thank you for your suggestions. I also changed the new `delta-conversion-ci`
to run `iceberg-delta-lake:check` instead of `iceberg-delta-lake:integrationTest`.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to