JonasJ-ap commented on code in PR #6997:
URL: https://github.com/apache/iceberg/pull/6997#discussion_r1131154963
##########
python/pyiceberg/io/pyarrow.py:
##########
@@ -476,6 +483,202 @@ def expression_to_pyarrow(expr: BooleanExpression) -> pc.Expression:
     return boolean_expression_visit(expr, _ConvertToArrowExpression())
+def pyarrow_to_schema(schema: pa.Schema) -> Schema:
+    visitor = _ConvertToIceberg()
+    struct_results = []
+    for i in range(len(schema.names)):
+        field = schema.field(i)
+        visitor.before_field(field)
+        struct_result = visit_pyarrow(field.type, visitor)
+        visitor.after_field(field)
+        struct_results.append(struct_result)
+    return visitor.schema(schema, struct_results)
+
+
+@singledispatch
+def visit_pyarrow(obj: pa.DataType, visitor: PyarrowSchemaVisitor[T]) -> T:
+    """A generic function for applying a pyarrow schema visitor to any point within a schema
+
+    The function traverses the schema in post-order fashion
+
+    Args:
+        obj (pa.DataType): An instance of a pyarrow DataType
+        visitor (PyarrowSchemaVisitor[T]): An instance of an implementation of the generic PyarrowSchemaVisitor base class
+
+    Raises:
+        NotImplementedError: If attempting to visit an unrecognized object type
+    """
+    raise NotImplementedError("Cannot visit non-type: %s" % obj)
+
+
+@visit_pyarrow.register(pa.StructType)
+def _(obj: pa.StructType, visitor: PyarrowSchemaVisitor[T]) -> T:
+    struct_results = []
+    for field in obj:
+        visitor.before_field(field)
+        struct_result = visit_pyarrow(field.type, visitor)
+        visitor.after_field(field)
+        struct_results.append(struct_result)
+
+    return visitor.struct(obj, struct_results)
+
+
+@visit_pyarrow.register(pa.ListType)
+def _(obj: pa.ListType, visitor: PyarrowSchemaVisitor[T]) -> T:
+    visitor.before_list_element(obj.value_field)
+    list_result = visit_pyarrow(obj.value_field.type, visitor)
+    visitor.after_list_element(obj.value_field)
+    return visitor.list(obj, list_result)
+
+
+@visit_pyarrow.register(pa.MapType)
+def _(obj: pa.MapType, visitor: PyarrowSchemaVisitor[T]) -> T:
+    visitor.before_map_key(obj.key_field)
+    key_result = visit_pyarrow(obj.key_field.type, visitor)
+    visitor.after_map_key(obj.key_field)
+    visitor.before_map_value(obj.item_field)
+    value_result = visit_pyarrow(obj.item_field.type, visitor)
+    visitor.after_map_value(obj.item_field)
+    return visitor.map(obj, key_result, value_result)
+
+
+@visit_pyarrow.register(pa.DataType)
+def _(obj: pa.DataType, visitor: PyarrowSchemaVisitor[T]) -> T:
+    if pa.types.is_nested(obj):
+        raise TypeError(f"Expected primitive type, got {type(obj)}")
+    return visitor.primitive(obj)
+
+
+class PyarrowSchemaVisitor(Generic[T], ABC):
+    def before_field(self, field: pa.Field) -> None:
+        """Override this method to perform an action immediately before visiting a field."""
+
+    def after_field(self, field: pa.Field) -> None:
+        """Override this method to perform an action immediately after visiting a field."""
+
+    def before_list_element(self, element: pa.Field) -> None:
+        """Override this method to perform an action immediately before visiting a list element."""
+
+    def after_list_element(self, element: pa.Field) -> None:
+        """Override this method to perform an action immediately after visiting a list element."""
+
+    def before_map_key(self, key: pa.Field) -> None:
+        """Override this method to perform an action immediately before visiting a map key."""
+
+    def after_map_key(self, key: pa.Field) -> None:
+        """Override this method to perform an action immediately after visiting a map key."""
+
+    def before_map_value(self, value: pa.Field) -> None:
+        """Override this method to perform an action immediately before visiting a map value."""
+
+    def after_map_value(self, value: pa.Field) -> None:
+        """Override this method to perform an action immediately after visiting a map value."""
+
+    @abstractmethod
+    def schema(self, schema: pa.Schema, field_results: List[T]) -> Schema:
+        """visit a schema"""
+
+    @abstractmethod
+    def struct(self, struct: pa.StructType, field_results: List[T]) -> T:
+        """visit a struct"""
+
+    @abstractmethod
+    def list(self, list_type: pa.ListType, element_result: T) -> T:
+        """visit a list"""
+
+    @abstractmethod
+    def map(self, map_type: pa.MapType, key_result: T, value_result: T) -> T:
+        """visit a map"""
+
+    @abstractmethod
+    def primitive(self, primitive: pa.DataType) -> T:
+        """visit a primitive type"""
+
+
+def _get_field_id(field: pa.Field) -> int:
+    if field.metadata is not None:
+        field_metadata = {k.decode(): v.decode() for k, v in field.metadata.items()}
+        if field_id := field_metadata.get("PARQUET:field_id"):
+            return int(field_id)
+    raise ValueError(f"Field {field.name} does not have a field_id")
+
+
+class _ConvertToIceberg(PyarrowSchemaVisitor[IcebergType], ABC):
+    def schema(self, schema: pa.Schema, field_results: List[IcebergType]) -> Schema:
+        fields = []
+        for i in range(len(schema.names)):
+            field = schema.field(i)
+            field_id = _get_field_id(field)
+            field_type = field_results[i]
+            if field_type is not None:
+                fields.append(NestedField(field_id, field.name, field_type, required=not field.nullable))
+        return Schema(*fields)
+
+    def struct(self, struct: pa.StructType, field_results: List[IcebergType]) -> IcebergType:
+        fields = []
+        for i in range(struct.num_fields):
+            field = struct[i]
+            field_id = _get_field_id(field)
+            # may need to check doc strings
Review Comment:
Thank you for your explanation. I added the relevant code to handle the optional doc if it exists.
I did some tests and found that the optional doc does not seem to be stored in the parquet field metadata:
The table is created by:
```python
spark.sql("CREATE TABLE demo.iceberg_ref.schema_doc2 (id bigint COMMENT
'unique id', data string) USING iceberg")
```
Fetch parquet schema:
```python
doc_test_path = "/Users/jonasjiang/.CMVolumes/gluetestjonas/warehouse/iceberg_ref.db/schema_doc2/data/00000-0-49fb8801-9660-47ee-a8c7-b2af7fb703b4-00001.parquet"
with fs.open_input_file(doc_test_path) as f:
    parquet_schema = pq.read_schema(f)
    print("=======parquet schema=======")
    print(parquet_schema)
    print("=======iceberg schema=======")
    print(Schema.parse_raw(parquet_schema.metadata[b'iceberg.schema'].decode()))
```
Result:
```python
=======parquet schema=======
id: int64 not null
  -- field metadata --
  PARQUET:field_id: '1'
data: string not null
  -- field metadata --
  PARQUET:field_id: '2'
-- schema metadata --
iceberg.schema: '{"type":"struct","schema-id":0,"fields":[{"id":1,"name":' + 110
=======iceberg schema=======
table {
  1: id: required long (unique id)
  2: data: required string
}
```
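For reference, a minimal sketch (not part of the PR; it assumes the `pyarrow_to_schema` entry point from this diff and its primitive mapping for `int64`/`string`) that reproduces the same behaviour without reading a file. The arrow fields only carry `PARQUET:field_id` metadata, so the converted schema comes back without any field docs:
```python
# Minimal sketch, not part of the PR: mirrors the parquet schema printed above,
# where only PARQUET:field_id is attached to each field.
import pyarrow as pa

from pyiceberg.io.pyarrow import pyarrow_to_schema

arrow_schema = pa.schema(
    [
        pa.field("id", pa.int64(), nullable=False, metadata={"PARQUET:field_id": "1"}),
        pa.field("data", pa.string(), nullable=False, metadata={"PARQUET:field_id": "2"}),
    ]
)

# The visitor recovers the field ids from the field metadata, but there is no doc to pick up.
print(pyarrow_to_schema(arrow_schema))
# Expected output (no doc on either field):
# table {
#   1: id: required long
#   2: data: required string
# }
```
So it looks like the doc would have to come from the embedded `iceberg.schema` table metadata (as read via `Schema.parse_raw` above) rather than from the per-field parquet metadata.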
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.