gengliangwang commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1004034513
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##
@@ -477,7 +477,10 @@ case class Add(
override protected def
gengliangwang commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1004033998
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##
@@ -477,7 +477,10 @@ case class Add(
override protected def
gengliangwang commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1004033863
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##
@@ -477,7 +477,10 @@ case class Add(
override protected def
MaxGekk commented on PR #38350:
URL: https://github.com/apache/spark/pull/38350#issuecomment-1290021018
@panbingkun Please, fix the coding style issue:
```
[error]
MaxGekk commented on PR #38359:
URL: https://github.com/apache/spark/pull/38359#issuecomment-1290020506
@panbingkun Please, fix the coding style issue:
```
[error]
cloud-fan commented on code in PR #38312:
URL: https://github.com/apache/spark/pull/38312#discussion_r1004026112
##
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala:
##
@@ -198,6 +205,31 @@ abstract class ParquetSchemaTest
cloud-fan commented on code in PR #38345:
URL: https://github.com/apache/spark/pull/38345#discussion_r1004023351
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala:
##
@@ -238,13 +238,17 @@ package object dsl {
def join(
cloud-fan commented on code in PR #38345:
URL: https://github.com/apache/spark/pull/38345#discussion_r1004022886
##
connector/connect/src/main/protobuf/spark/connect/relations.proto:
##
@@ -109,6 +109,10 @@ message Join {
Relation right = 2;
Expression join_condition = 3;
zhengruifeng commented on code in PR #38375:
URL: https://github.com/apache/spark/pull/38375#discussion_r1004021114
##
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala:
##
@@ -85,42 +51,144 @@ object FrequentItems extends Logging {
cols:
MaxGekk commented on PR #38360:
URL: https://github.com/apache/spark/pull/38360#issuecomment-1290009670
@LuciferYang @panbingkun @itholic Could you review this PR, please.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and
cloud-fan commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1004001685
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
cloud-fan commented on code in PR #38375:
URL: https://github.com/apache/spark/pull/38375#discussion_r1004000396
##
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala:
##
@@ -85,42 +51,144 @@ object FrequentItems extends Logging {
cols:
itholic commented on code in PR #37955:
URL: https://github.com/apache/spark/pull/37955#discussion_r1003999111
##
python/pyspark/pandas/strings.py:
##
@@ -2316,7 +2316,7 @@ def zfill(self, width: int) -> "ps.Series":
left). 1000 remains unchanged as it is longer than
zhengruifeng commented on code in PR #38375:
URL: https://github.com/apache/spark/pull/38375#discussion_r1003991925
##
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala:
##
@@ -85,42 +51,142 @@ object FrequentItems extends Logging {
cols:
grundprinzip commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003991428
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
itholic commented on PR #38177:
URL: https://github.com/apache/spark/pull/38177#issuecomment-1289968424
Fixed & re-gened the golden files!
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003988774
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003984608
##
connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala:
##
@@ -163,18 +163,22 @@ class ProtobufSerdeSuite extends
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003984608
##
connector/protobuf/src/test/scala/org/apache/spark/sql/protobuf/ProtobufSerdeSuite.scala:
##
@@ -163,18 +163,22 @@ class ProtobufSerdeSuite extends
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003984452
##
connector/protobuf/src/test/resources/protobuf/timestamp.proto:
##
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003983288
##
connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala:
##
@@ -196,27 +194,31 @@ private[sql] object ProtobufUtils extends
zhengruifeng commented on code in PR #38375:
URL: https://github.com/apache/spark/pull/38375#discussion_r1003976467
##
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala:
##
@@ -85,42 +51,142 @@ object FrequentItems extends Logging {
cols:
zhengruifeng commented on code in PR #38375:
URL: https://github.com/apache/spark/pull/38375#discussion_r1003976380
##
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala:
##
@@ -85,42 +51,142 @@ object FrequentItems extends Logging {
cols:
zhengruifeng commented on code in PR #38375:
URL: https://github.com/apache/spark/pull/38375#discussion_r1003975996
##
sql/core/src/main/scala/org/apache/spark/sql/execution/stat/FrequentItems.scala:
##
@@ -85,42 +51,142 @@ object FrequentItems extends Logging {
cols:
zhengruifeng commented on PR #38383:
URL: https://github.com/apache/spark/pull/38383#issuecomment-1289944021
will also send a separate fix for `PandasMode` since it's dedicated to
Pandas
--
This is an automated message from the Apache Git Service.
To respond to the message, please log
zhengruifeng opened a new pull request, #38383:
URL: https://github.com/apache/spark/pull/38383
### What changes were proposed in this pull request?
`Mode` should copy keys before inserting into Map
### Why are the changes needed?
the result maybe incorrect:
```
val df
LuciferYang opened a new pull request, #38382:
URL: https://github.com/apache/spark/pull/38382
### What changes were proposed in this pull request?
This pr aims to upgrade rocksdbjni from 7.6.0 to 7.7.3.
### Why are the changes needed?
This version bring the performance of
itholic commented on code in PR #38177:
URL: https://github.com/apache/spark/pull/38177#discussion_r1003966632
##
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala:
##
@@ -1124,8 +1124,9 @@ class StringExpressionsSuite extends
itholic commented on code in PR #38177:
URL: https://github.com/apache/spark/pull/38177#discussion_r1003965906
##
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala:
##
@@ -2602,31 +2604,46 @@ private[sql] object QueryExecutionErrors extends
chenminghua8 commented on PR #38381:
URL: https://github.com/apache/spark/pull/38381#issuecomment-1289930238
> e.g., `[SPARK-40793][SQL] Fix the LogicalRelation computeStats for
Row-level Runtime Filtering cannot be applied `
@HyukjinKwon I have linked the JIRA ticket into the PR title.
cloud-fan commented on code in PR #38336:
URL: https://github.com/apache/spark/pull/38336#discussion_r1003962035
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/subquery.scala:
##
@@ -561,14 +566,18 @@ object RewriteCorrelatedScalarSubquery extends
cloud-fan commented on code in PR #38336:
URL: https://github.com/apache/spark/pull/38336#discussion_r1003961963
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/subquery.scala:
##
@@ -561,14 +566,18 @@ object RewriteCorrelatedScalarSubquery extends
cloud-fan commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003959231
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala:
##
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache
cloud-fan commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003958927
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
cloud-fan closed pull request #38374: [SPARK-40899] [CONNECT] Make UserContext
extensible.
URL: https://github.com/apache/spark/pull/38374
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
cloud-fan commented on PR #38374:
URL: https://github.com/apache/spark/pull/38374#issuecomment-1289921835
thanks, merging to master!
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003955956
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala:
##
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003955262
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
cloud-fan commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003954707
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala:
##
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache
cloud-fan commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003954578
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
HyukjinKwon commented on PR #38381:
URL: https://github.com/apache/spark/pull/38381#issuecomment-1289915367
e.g., `[SPARK-40793][SQL] Fix the LogicalRelation computeStats for Row-level
Runtime Filtering cannot be applied `
--
This is an automated message from the Apache Git Service.
To
chenminghua8 commented on PR #38381:
URL: https://github.com/apache/spark/pull/38381#issuecomment-1289909307
> @chenminghua8 mind linking the JIRA ticket into the PR title? See also
https://spark.apache.org/contributing.html
@HyukjinKwon Thank you! but I don't know how to get the JIRA
HyukjinKwon commented on PR #38381:
URL: https://github.com/apache/spark/pull/38381#issuecomment-1289904503
@chenminghua8 mind linking the JIRA ticket into the PR title? See also
https://spark.apache.org/contributing.html
--
This is an automated message from the Apache Git Service.
To
dcoliversun commented on PR #19:
URL: https://github.com/apache/spark-docker/pull/19#issuecomment-1289903890
Thanks for your review :) @Yikun @holdenk
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003942285
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala:
##
@@ -26,4 +26,12 @@ private[spark] object Connect {
.intConf
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003941174
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala:
##
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003941174
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala:
##
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache
chenminghua8 commented on PR #38381:
URL: https://github.com/apache/spark/pull/38381#issuecomment-1289898298
[SPARK-40793](https://issues.apache.org/jira/browse/SPARK-40793) is the JIRA
for this PR.
--
This is an automated message from the Apache Git Service.
To respond to the message,
HyukjinKwon commented on code in PR #38320:
URL: https://github.com/apache/spark/pull/38320#discussion_r1003940378
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala:
##
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache
chenminghua8 commented on PR #38213:
URL: https://github.com/apache/spark/pull/38213#issuecomment-1289895440
change to Pull request 38381
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
dongjoon-hyun commented on PR #38380:
URL: https://github.com/apache/spark/pull/38380#issuecomment-1289894414
Thank you, @HyukjinKwon !
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
chenminghua8 opened a new pull request, #38381:
URL: https://github.com/apache/spark/pull/38381
### What changes were proposed in this pull request?
This PR modifies the "computeStats" method of the "LogicalRelation" class:
when the external table does not perform
Yikun commented on PR #19:
URL: https://github.com/apache/spark-docker/pull/19#issuecomment-1289885331
@holdenk @dcoliversun Thanks! Merge to master (3.4.0).
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL
Yikun closed pull request #19: [SPARK-40855] Add CONTRIBUTING.md for
apache/spark-docker
URL: https://github.com/apache/spark-docker/pull/19
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
ulysses-you commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1003919162
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##
@@ -477,7 +477,10 @@ case class Add(
override protected def
cloud-fan commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1003906257
##
sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala:
##
@@ -4518,6 +4518,14 @@ class SQLQuerySuite extends QueryTest with
SharedSparkSession with
warrenzhu25 commented on PR #38183:
URL: https://github.com/apache/spark/pull/38183#issuecomment-1289850864
@gengliangwang and @sarutak FYI @dongjoon-hyun Could you help take a look?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to
cloud-fan commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1003905809
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##
@@ -477,7 +477,10 @@ case class Add(
override protected def
cloud-fan commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1003905297
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala:
##
@@ -477,7 +477,10 @@ case class Add(
override protected def
cloud-fan commented on code in PR #38379:
URL: https://github.com/apache/spark/pull/38379#discussion_r1003905057
##
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala:
##
@@ -242,7 +242,13 @@ abstract class Expression extends
pan3793 commented on PR #38205:
URL: https://github.com/apache/spark/pull/38205#issuecomment-1289847809
@mridulm Yes, https://github.com/apache/spark/pull/38357 is the proposed
version.
I opened this PR mostly for collecting feedback in case the community has
another idea.
--
amaliujia commented on code in PR #38301:
URL: https://github.com/apache/spark/pull/38301#discussion_r1003890195
##
connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/DataTypeProtoConverter.scala:
##
@@ -50,11 +50,27 @@ object DataTypeProtoConverter {
github-actions[bot] closed pull request #37141: [SPARK-39024][CORE][YARN]
Notify External Shuffle Service when Yarn Sends a Node in Decommissioning State
URL: https://github.com/apache/spark/pull/37141
--
This is an automated message from the Apache Git Service.
To respond to the message,
github-actions[bot] closed pull request #37053: [SPARK-39452][GraphX] Extend
EdgePartition1D with Destination based Strategy
URL: https://github.com/apache/spark/pull/37053
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use
dongjoon-hyun commented on PR #38380:
URL: https://github.com/apache/spark/pull/38380#issuecomment-1289812287
Merged to master for Apache Spark 3.4.0.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to
dongjoon-hyun closed pull request #38380: [SPARK-40904][K8S] Support `zsh` in
K8s `entrypoint.sh`
URL: https://github.com/apache/spark/pull/38380
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
dongjoon-hyun commented on PR #38380:
URL: https://github.com/apache/spark/pull/38380#issuecomment-1289808788
Thank you, @viirya . The `else` statement is only supported by `zsh`~
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to
dongjoon-hyun commented on PR #38380:
URL: https://github.com/apache/spark/pull/38380#issuecomment-1289803308
All tests passed.
Could you review this, please, @viirya ?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub
amaliujia commented on code in PR #38345:
URL: https://github.com/apache/spark/pull/38345#discussion_r1003878598
##
connector/connect/src/main/protobuf/spark/connect/relations.proto:
##
@@ -109,6 +109,7 @@ message Join {
Relation right = 2;
Expression join_condition = 3;
amaliujia commented on code in PR #38345:
URL: https://github.com/apache/spark/pull/38345#discussion_r1003878330
##
connector/connect/src/main/protobuf/spark/connect/relations.proto:
##
@@ -109,6 +109,7 @@ message Join {
Relation right = 2;
Expression join_condition = 3;
allisonwang-db commented on PR #38336:
URL: https://github.com/apache/spark/pull/38336#issuecomment-1289772401
cc @cloud-fan
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific
dongjoon-hyun opened a new pull request, #38380:
URL: https://github.com/apache/spark/pull/38380
### What changes were proposed in this pull request?
### Why are the changes needed?
### Does this PR introduce _any_ user-facing change?
###
gengliangwang commented on PR #38379:
URL: https://github.com/apache/spark/pull/38379#issuecomment-1289658114
I confirmed that the regression is caused by the refactoring PR
https://github.com/apache/spark/pull/36698/. Before the refactor, the query
will look like
```
amaliujia commented on code in PR #38374:
URL: https://github.com/apache/spark/pull/38374#discussion_r1003781192
##
connector/connect/src/main/protobuf/spark/connect/base.proto:
##
@@ -51,6 +52,12 @@ message Request {
message UserContext {
string user_id = 1;
sadikovi commented on code in PR #38277:
URL: https://github.com/apache/spark/pull/38277#discussion_r1003781190
##
sql/hive/src/main/java/org/apache/hadoop/hive/ql/io/DelegateSymlinkTextInputFormat.java:
##
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation
maryannxue commented on code in PR #38358:
URL: https://github.com/apache/spark/pull/38358#discussion_r1003780647
##
sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/AdaptiveSparkPlanExec.scala:
##
@@ -221,6 +221,8 @@ case class AdaptiveSparkPlanExec(
awdavidson commented on code in PR #38312:
URL: https://github.com/apache/spark/pull/38312#discussion_r1003756142
##
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala:
##
@@ -198,6 +205,31 @@ abstract class ParquetSchemaTest
gengliangwang commented on PR #38379:
URL: https://github.com/apache/spark/pull/38379#issuecomment-1289600215
cc @peter-toth @ulysses-you
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the
gengliangwang opened a new pull request, #38379:
URL: https://github.com/apache/spark/pull/38379
### What changes were proposed in this pull request?
Avoid reordering Add for canonicalizing if it is decimal type.
Expressions are canonicalized for comparisons and
rangadi commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003729392
##
connector/protobuf/src/main/scala/org/apache/spark/sql/protobuf/utils/ProtobufUtils.scala:
##
@@ -196,27 +194,31 @@ private[sql] object ProtobufUtils extends
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003686011
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
LucaCanali commented on PR #33559:
URL: https://github.com/apache/spark/pull/33559#issuecomment-1289502676
Thank you @cloud-fan !
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific
rangadi commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003664791
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
${protobuf.version}
+
srowen commented on PR #38352:
URL: https://github.com/apache/spark/pull/38352#issuecomment-1289480615
Huh, that also seems unrelated. Let's hold onto this for a day or two and
then rerun if needed
--
This is an automated message from the Apache Git Service.
To respond to the message,
mridulm commented on code in PR #37638:
URL: https://github.com/apache/spark/pull/37638#discussion_r1003658008
##
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java:
##
@@ -1904,4 +1941,42 @@ long getPos() {
return pos;
mridulm commented on code in PR #37638:
URL: https://github.com/apache/spark/pull/37638#discussion_r1003656427
##
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java:
##
@@ -593,6 +607,9 @@ public void onData(String streamId,
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003653232
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
mridulm commented on PR #38205:
URL: https://github.com/apache/spark/pull/38205#issuecomment-1289465582
I tagged @tgravescs on #38357, assuming that is the version that will get
supported - or is this what we are looking at ?
--
This is an automated message from the Apache Git Service.
rangadi commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003651551
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
${protobuf.version}
+
mridulm commented on PR #38357:
URL: https://github.com/apache/spark/pull/38357#issuecomment-1289462750
+CC @tgravescs Since you have more context on this from yarn pov than I do
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to
otterc commented on code in PR #37638:
URL: https://github.com/apache/spark/pull/37638#discussion_r1003646453
##
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java:
##
@@ -1904,4 +1941,42 @@ long getPos() {
return pos;
bjornjorgensen commented on PR #38352:
URL: https://github.com/apache/spark/pull/38352#issuecomment-1289451302
@srowen I did rerun the failed tests now and now they pass.
But there are 2 python tests that don't pass,
-The python3.9 -m black command was not found
and
mridulm commented on code in PR #37638:
URL: https://github.com/apache/spark/pull/37638#discussion_r1003638095
##
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java:
##
@@ -1197,15 +1230,15 @@ public void onData(String streamId,
mridulm commented on code in PR #37638:
URL: https://github.com/apache/spark/pull/37638#discussion_r1003635126
##
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java:
##
@@ -1197,15 +1230,15 @@ public void onData(String streamId,
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003632674
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003632674
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
otterc commented on code in PR #37638:
URL: https://github.com/apache/spark/pull/37638#discussion_r1003631087
##
common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java:
##
@@ -593,6 +607,9 @@ public void onData(String streamId,
rangadi commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003630603
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
${protobuf.version}
+
tgravescs commented on PR #38032:
URL: https://github.com/apache/spark/pull/38032#issuecomment-1289432118
I don't have a strong opinion, there is already the LOCAL_N_FAILURES_REGEX
mode that could just be used. why not just use that?
If we do this I think the default in local mode
SandishKumarHN commented on code in PR #38344:
URL: https://github.com/apache/spark/pull/38344#discussion_r1003623154
##
connector/protobuf/pom.xml:
##
@@ -123,6 +123,7 @@
com.google.protobuf:protoc:${protobuf.version}
1 - 100 of 213 matches
Mail list logo