[
https://issues.apache.org/jira/browse/ARROW-2005?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16332626#comment-16332626
]
ASF GitHub Bot commented on ARROW-2005:
---------------------------------------
wesm closed pull request #1488: ARROW-2005: [Python] Fix incorrect flake8
config path to Cython lint config
URL: https://github.com/apache/arrow/pull/1488
This is a PR merged from a forked repository. As GitHub hides the
original diff on merge, it is displayed below for the sake of provenance:
diff --git a/ci/travis_lint.sh b/ci/travis_lint.sh
index e234b7b01..6a2a0be18 100755
--- a/ci/travis_lint.sh
+++ b/ci/travis_lint.sh
@@ -35,10 +35,10 @@ popd
# Fail fast on style checks
sudo pip install flake8
-PYARROW_DIR=$TRAVIS_BUILD_DIR/python/pyarrow
+PYTHON_DIR=$TRAVIS_BUILD_DIR/python
-flake8 --count $PYARROW_DIR
+flake8 --count $PYTHON_DIR/pyarrow
# Check Cython files with some checks turned off
flake8 --count --config=$PYTHON_DIR/.flake8.cython \
- $PYARROW_DIR
+ $PYTHON_DIR/pyarrow
diff --git a/python/pyarrow/_orc.pxd b/python/pyarrow/_orc.pxd
index 411691510..c07a19442 100644
--- a/python/pyarrow/_orc.pxd
+++ b/python/pyarrow/_orc.pxd
@@ -29,9 +29,10 @@ from pyarrow.includes.libarrow cimport (CArray, CSchema,
CStatus,
TimeUnit)
-cdef extern from "arrow/adapters/orc/adapter.h" namespace
"arrow::adapters::orc" nogil:
- cdef cppclass ORCFileReader:
+cdef extern from "arrow/adapters/orc/adapter.h" \
+ namespace "arrow::adapters::orc" nogil:
+ cdef cppclass ORCFileReader:
@staticmethod
CStatus Open(const shared_ptr[RandomAccessFile]& file,
CMemoryPool* pool,
@@ -40,7 +41,8 @@ cdef extern from "arrow/adapters/orc/adapter.h" namespace
"arrow::adapters::orc"
CStatus ReadSchema(shared_ptr[CSchema]* out)
CStatus ReadStripe(int64_t stripe, shared_ptr[CRecordBatch]* out)
- CStatus ReadStripe(int64_t stripe, std_vector[int],
shared_ptr[CRecordBatch]* out)
+ CStatus ReadStripe(int64_t stripe, std_vector[int],
+ shared_ptr[CRecordBatch]* out)
CStatus Read(shared_ptr[CTable]* out)
CStatus Read(std_vector[int], shared_ptr[CTable]* out)
diff --git a/python/pyarrow/_orc.pyx b/python/pyarrow/_orc.pyx
index 7ff4bac6d..cf04f48a3 100644
--- a/python/pyarrow/_orc.pyx
+++ b/python/pyarrow/_orc.pyx
@@ -50,7 +50,7 @@ cdef class ORCReader:
get_reader(source, &rd_handle)
with nogil:
check_status(ORCFileReader.Open(rd_handle, self.allocator,
- &self.reader))
+ &self.reader))
def schema(self):
"""
@@ -69,10 +69,10 @@ cdef class ORCReader:
return pyarrow_wrap_schema(sp_arrow_schema)
def nrows(self):
- return deref(self.reader).NumberOfRows();
+ return deref(self.reader).NumberOfRows()
def nstripes(self):
- return deref(self.reader).NumberOfStripes();
+ return deref(self.reader).NumberOfStripes()
def read_stripe(self, n, include_indices=None):
cdef:
@@ -85,11 +85,13 @@ cdef class ORCReader:
if include_indices is None:
with nogil:
- check_status(deref(self.reader).ReadStripe(stripe,
&sp_record_batch))
+ (check_status(deref(self.reader)
+ .ReadStripe(stripe, &sp_record_batch)))
else:
indices = include_indices
with nogil:
- check_status(deref(self.reader).ReadStripe(stripe, indices,
&sp_record_batch))
+ (check_status(deref(self.reader)
+ .ReadStripe(stripe, indices, &sp_record_batch)))
batch = RecordBatch()
batch.init(sp_record_batch)
diff --git a/python/pyarrow/plasma.pyx b/python/pyarrow/plasma.pyx
index 29e233b6e..32f6d189d 100644
--- a/python/pyarrow/plasma.pyx
+++ b/python/pyarrow/plasma.pyx
@@ -248,8 +248,8 @@ cdef class PlasmaClient:
check_status(self.client.get().Get(ids.data(), ids.size(),
timeout_ms, result[0].data()))
- cdef _make_plasma_buffer(self, ObjectID object_id, shared_ptr[CBuffer]
buffer,
- int64_t size):
+ cdef _make_plasma_buffer(self, ObjectID object_id,
+ shared_ptr[CBuffer] buffer, int64_t size):
result = PlasmaBuffer(object_id, self)
result.init(buffer)
return result
@@ -302,7 +302,9 @@ cdef class PlasmaClient:
check_status(self.client.get().Create(object_id.data, data_size,
<uint8_t*>(metadata.data()),
metadata.size(), &data))
- return self._make_mutable_plasma_buffer(object_id,
data.get().mutable_data(), data_size)
+ return self._make_mutable_plasma_buffer(object_id,
+ data.get().mutable_data(),
+ data_size)
def get_buffers(self, object_ids, timeout_ms=-1):
"""
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
> [Python] pyflakes warnings on Cython files not failing build
> ------------------------------------------------------------
>
> Key: ARROW-2005
> URL: https://issues.apache.org/jira/browse/ARROW-2005
> Project: Apache Arrow
> Issue Type: Bug
> Components: Python
> Reporter: Wes McKinney
> Priority: Major
> Labels: pull-request-available
> Fix For: 0.9.0
>
>
> I see the following flakes in master:
> {code:java}
> pyarrow/plasma.pyx:251:80: E501 line too long (82 > 79 characters)
> pyarrow/plasma.pyx:305:80: E501 line too long (96 > 79 characters)
> pyarrow/_orc.pyx:53:46: E127 continuation line over-indented for visual indent
> pyarrow/_orc.pyx:72:49: E703 statement ends with a semicolon
> pyarrow/_orc.pyx:75:52: E703 statement ends with a semicolon
> pyarrow/_orc.pyx:88:80: E501 line too long (85 > 79 characters)
> pyarrow/_orc.pyx:92:80: E501 line too long (94 > 79 characters)
> pyarrow/_orc.pxd:32:80: E501 line too long (87 > 79 characters)
> pyarrow/_orc.pxd:43:80: E501 line too long (90 > 79 characters)
> {code}
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)