rdblue commented on code in PR #13219:
URL: https://github.com/apache/iceberg/pull/13219#discussion_r2232249812


##########
spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/GenericsHelpers.java:
##########
@@ -296,6 +320,134 @@ private static void assertEqualsUnsafe(Types.MapType map, Map<?, ?> expected, Ma
     }
   }
 
+  static void assertEquals(Variant expected, VariantVal actual) {
+    VariantMetadata actualMetadata =
+        VariantMetadata.from(ByteBuffer.wrap(actual.getMetadata()).order(ByteOrder.LITTLE_ENDIAN));
+    VariantTestUtil.assertEqual(expected.metadata(), actualMetadata);
+
+    org.apache.spark.types.variant.Variant sparkVariant =
+        new org.apache.spark.types.variant.Variant(actual.getValue(), actual.getMetadata());
+    assertEquals(expected.value(), sparkVariant);
+  }
+
+  static void assertEquals(VariantValue expected, org.apache.spark.types.variant.Variant actual) {
+    assertThat(actual).isNotNull();
+    assertThat(expected).isNotNull();
+
+    if (expected.type() == PhysicalType.OBJECT) {
+      assertThat(actual.getType()).isEqualTo(VariantUtil.Type.OBJECT);
+      VariantObject expectedObject = expected.asObject();
+      assertThat(actual.objectSize())
+          .as("Variant object num fields should match")
+          .isEqualTo(expectedObject.numFields());
+
+      for (String fieldName : expectedObject.fieldNames()) {
+        assertEquals(expectedObject.get(fieldName), actual.getFieldByKey(fieldName));
+      }
+
+    } else if (expected.type() == PhysicalType.ARRAY) {
+      assertThat(actual.getType()).isEqualTo(VariantUtil.Type.ARRAY);
+      VariantArray expectedArray = expected.asArray();
+      assertThat(actual.arraySize())
+          .as("Variant array num elements should match")
+          .isEqualTo(expectedArray.numElements());
+
+      for (int i = 0; i < expectedArray.numElements(); i += 1) {
+        assertEquals(expectedArray.get(i), actual.getElementAtIndex(i));
+      }
+
+    } else {
+      // Primitive type and value should match
+      VariantUtil.Type expectedType = null;
+      Object actualValue = null;
+      switch (expected.type()) {
+        case NULL:
+          expectedType = VariantUtil.Type.NULL;
+          actualValue = null;
+          break;
+        case BOOLEAN_TRUE:
+        case BOOLEAN_FALSE:
+          expectedType = VariantUtil.Type.BOOLEAN;
+          actualValue = actual.getBoolean();
+          break;
+        case INT8:
+          expectedType = VariantUtil.Type.LONG;
+          actualValue = (byte) actual.getLong();
+          break;
+        case INT16:
+          expectedType = VariantUtil.Type.LONG;
+          actualValue = (short) actual.getLong();
+          break;
+        case INT32:
+          expectedType = VariantUtil.Type.LONG;
+          actualValue = (int) actual.getLong();
+          break;
+        case INT64:
+          expectedType = VariantUtil.Type.LONG;
+          actualValue = actual.getLong();
+          break;
+        case DOUBLE:
+          expectedType = VariantUtil.Type.DOUBLE;
+          actualValue = actual.getDouble();
+          break;
+        case DECIMAL4:
+        case DECIMAL8:
+        case DECIMAL16:
+          expectedType = VariantUtil.Type.DECIMAL;
+          actualValue = actual.getDecimal();
+          break;
+        case DATE:
+          expectedType = VariantUtil.Type.DATE;
+          actualValue = (int) actual.getLong();
+          break;
+        case TIMESTAMPTZ:
+          expectedType = VariantUtil.Type.TIMESTAMP;
+          actualValue = actual.getLong();
+          break;
+        case TIMESTAMPNTZ:
+          expectedType = VariantUtil.Type.TIMESTAMP_NTZ;
+          actualValue = actual.getLong();
+          break;
+        case FLOAT:
+          expectedType = VariantUtil.Type.FLOAT;
+          actualValue = actual.getFloat();
+          break;
+        case BINARY:
+          expectedType = VariantUtil.Type.BINARY;
+          actualValue = ByteBuffer.wrap(actual.getBinary());
+          break;
+        case STRING:
+          expectedType = VariantUtil.Type.STRING;
+          actualValue = actual.getString();
+          break;
+        case UUID:
+          expectedType = VariantUtil.Type.UUID;
+          actualValue = actual.getUuid();
+          break;
+        case TIME:
+        case TIMESTAMPTZ_NANOS:
+        case TIMESTAMPNTZ_NANOS:
+          // Skip unsupported types in Spark

Review Comment:
   I think that this should result in an error. That's another reason not to 
mix this with the object model assertions. If Spark doesn't support a type, we 
should test what happens.


