rdblue commented on code in PR #4465:
URL: https://github.com/apache/iceberg/pull/4465#discussion_r841279466


##########
build.gradle:
##########
@@ -606,6 +613,101 @@ project(':iceberg-dell') {
   }
 }
 
+project(':iceberg-azure') {
+  apply plugin: "com.bmuschko.docker-remote-api"
+  apply plugin: "com.github.johnrengelman.shadow"
+
+  dependencies {
+    api project(':iceberg-api')
+    implementation project(path: ':iceberg-bundled-guava', configuration: 'shadow')
+    implementation platform('com.azure:azure-sdk-bom')
+    implementation 'com.azure:azure-storage-blob'
+
+    compileOnly("org.apache.hadoop:hadoop-common") {
+      exclude group: 'org.apache.avro', module: 'avro'
+      exclude group: 'org.slf4j', module: 'slf4j-log4j12'
+    }
+
+    testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts')
+  }
+
+  // The Azure SDK is included in the iceberg-azure jar since the SDK internally uses Netty. The Netty version expected by
+  // the Azure SDK might not be the same as the Netty version present on the classpath.
+  // The Netty and Jackson dependencies are shaded to avoid NoClassDefFoundError and NoSuchMethodError errors that might
+  // arise due to multiple versions of the corresponding jars being available on the classpath.
+  // E.g. Netty 4.1.72.Final imported by the Azure storage SDK and Netty 4.1.75.Final imported by Spark.
+  shadowJar {
+    zip64 true
+    relocate "io.netty", "org.apache.iceberg.azure.shaded.io.netty"
+    relocate "org.codehaus.jackson", 
"org.apache.iceberg.azure.shaded.org.codehaus.jackson"
+    relocate "com.fasterxml.jackson", 
"org.apache.iceberg.azure.shaded.com.fasterxml.jackson"
+    classifier null
+  }
+
+  jar {
+    enabled = false
+  }
+
+  tasks.jar.dependsOn tasks.shadowJar
+
+  sourceSets {
+    integration {
+      java.srcDir "$projectDir/src/integration/java"
+      resources.srcDir "$projectDir/src/integration/resources"
+      compileClasspath += main.output + test.output
+      runtimeClasspath += main.output + test.output
+    }
+  }
+
+  configurations {
+    integrationImplementation.extendsFrom testImplementation
+    integrationRuntime.extendsFrom testRuntimeOnly
+  }
+
+  task pullImage(type: DockerPullImage) {
+    image = 'mcr.microsoft.com/azure-storage/azurite:latest'
+  }
+
+  task createContainer(type: DockerCreateContainer) {
+    dependsOn pullImage
+    targetImageId pullImage.getImage()
+    // Note: The storage account key should be a valid base64 string.
+    withEnvVar("AZURITE_ACCOUNTS", 
"storageAccount1:storageAccount1Key;storageAccount2:storageAccount2Key;storageAccount3:storageAccount3Key")
+    hostConfig.portBindings = ['10000:10000']
+    hostConfig.autoRemove = true
+  }
+
+  task startContainer(type: DockerStartContainer) {
+    dependsOn createContainer
+    targetContainerId createContainer.getContainerId()
+  }
+
+  task stopContainer(type: DockerStopContainer) {
+    targetContainerId createContainer.getContainerId()
+  }
+
+  task integrationTest(type: Test) {

Review Comment:
   Can you make sure that there is a workflow that runs these integration tests?
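
   Editor's note: the body of the integrationTest task is cut off in the diff above. Below is a minimal sketch of how such a task is typically wired to the Azurite container tasks declared earlier; the wiring is an assumption for illustration, not the PR's actual code. A CI workflow would then only need to invoke ./gradlew :iceberg-azure:integrationTest.

     // Hypothetical wiring, for illustration only (not the PR's actual task body):
     // run the integration source set as a Test task, start the Azurite container
     // before the tests, and always stop it afterwards.
     task integrationTest(type: Test) {
       testClassesDirs = sourceSets.integration.output.classesDirs
       classpath = sourceSets.integration.runtimeClasspath
       dependsOn startContainer
       finalizedBy stopContainer
     }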



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: dev-unsubscr...@iceberg.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
