This is an automated email from the ASF dual-hosted git repository.

wesm pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/arrow.git


The following commit(s) were added to refs/heads/master by this push:
     new ac789ba  ARROW-232: [Python] Add unit test for writing Parquet file from chunked table
ac789ba is described below

commit ac789bafee598326ca9aeaaaa84468017b9f2598
Author: Wes McKinney <[email protected]>
AuthorDate: Tue Dec 19 09:40:11 2017 -0500

    ARROW-232: [Python] Add unit test for writing Parquet file from chunked table
    
    This requires PARQUET-1092 https://github.com/apache/parquet-cpp/pull/426
    
    Author: Wes McKinney <[email protected]>
    
    Closes #1425 from wesm/ARROW-232 and squashes the following commits:
    
    da8d9998 [Wes McKinney] Add unit test to validate PARQUET-1092
---
 python/pyarrow/tests/test_parquet.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/python/pyarrow/tests/test_parquet.py b/python/pyarrow/tests/test_parquet.py
index fc8c8f0..c2bb31c 100644
--- a/python/pyarrow/tests/test_parquet.py
+++ b/python/pyarrow/tests/test_parquet.py
@@ -119,6 +119,24 @@ def test_pandas_parquet_2_0_rountrip(tmpdir):
 
 
 @parquet
+def test_chunked_table_write(tmpdir):
+    # ARROW-232
+    df = alltypes_sample(size=10)
+
+    # The nanosecond->ms conversion is a nuisance, so we just avoid it here
+    del df['datetime']
+
+    batch = pa.RecordBatch.from_pandas(df)
+    table = pa.Table.from_batches([batch] * 3)
+    _check_roundtrip(table, version='2.0')
+
+    df, _ = dataframe_with_lists()
+    batch = pa.RecordBatch.from_pandas(df)
+    table = pa.Table.from_batches([batch] * 3)
+    _check_roundtrip(table, version='2.0')
+
+
+@parquet
 def test_pandas_parquet_datetime_tz():
     import pyarrow.parquet as pq
 

-- 
To stop receiving notification emails like this one, please contact
"[email protected]" <[email protected]>.

Reply via email to