dtenedor commented on PR #46309:
URL: https://github.com/apache/spark/pull/46309#issuecomment-2089333094
```
[info] *** 1 TEST FAILED ***
[error] Failed: Total 4098, Failed 1, Errors 0, Passed 4097, Ignored 10,
Canceled 2
[error] Failed tests:
[error]
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1586820408
##
core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala:
##
@@ -76,7 +76,8 @@ private[spark] class SortShuffleManager(conf: SparkConf)
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1586810024
##
core/src/main/scala/org/apache/spark/metrics/sink/StatsdReporter.scala:
##
@@ -67,7 +67,8 @@ private[spark] class StatsdReporter(
Try(new DatagramSocket)
gengliangwang commented on PR #46309:
URL: https://github.com/apache/spark/pull/46309#issuecomment-2089082622
@dtenedor the test failure looks relevant
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1586662318
##
core/src/main/scala/org/apache/spark/deploy/master/Master.scala:
##
@@ -1294,7 +1293,7 @@ private[deploy] class Master(
if (worker.state !=
dtenedor commented on PR #46309:
URL: https://github.com/apache/spark/pull/46309#issuecomment-2088811850
@gengliangwang thanks for a thorough review. I followed your instructions
for every comment, and then just resolved them all to clean up the GitHub
conversation history page. Please
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585642399
##
core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala:
##
@@ -182,12 +182,12 @@ private[spark] object StratifiedSamplingUtils extends
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585642163
##
core/src/main/scala/org/apache/spark/util/random/StratifiedSamplingUtils.scala:
##
@@ -182,12 +182,12 @@ private[spark] object StratifiedSamplingUtils extends
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585642022
##
core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala:
##
@@ -53,8 +54,10 @@ private[spark] class TimeBasedRollingPolicy(
import
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585641821
##
core/src/main/scala/org/apache/spark/util/logging/RollingPolicy.scala:
##
@@ -53,8 +54,10 @@ private[spark] class TimeBasedRollingPolicy(
import
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585638170
##
core/src/main/scala/org/apache/spark/util/Utils.scala:
##
@@ -1813,13 +1818,13 @@ private[spark] object Utils
try {
process.destroyForcibly()
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585637901
##
core/src/main/scala/org/apache/spark/util/Utils.scala:
##
@@ -889,17 +893,18 @@ private[spark] object Utils
// because of Inet6Address.toHostName
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585637440
##
core/src/main/scala/org/apache/spark/util/Utils.scala:
##
@@ -889,17 +893,18 @@ private[spark] object Utils
// because of Inet6Address.toHostName
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585636366
##
core/src/main/scala/org/apache/spark/util/Utils.scala:
##
@@ -797,8 +799,10 @@ private[spark] object Utils
}
if (uris.nonEmpty) {
logWarning(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585635909
##
core/src/main/scala/org/apache/spark/util/Utils.scala:
##
@@ -397,16 +398,16 @@ private[spark] object Utils
// Decompress the file if it's a .tar or
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585634253
##
core/src/main/scala/org/apache/spark/util/HadoopFSUtils.scala:
##
@@ -323,8 +326,8 @@ private[spark] object HadoopFSUtils extends Logging {
}
if
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585632772
##
core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala:
##
@@ -412,7 +416,7 @@ private[spark] class MemoryStore(
o.close()
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585630661
##
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##
@@ -1120,7 +1124,7 @@ final class ShuffleBlockFetcherIterator(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585630494
##
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##
@@ -1033,8 +1037,8 @@ final class ShuffleBlockFetcherIterator(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585629698
##
core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala:
##
@@ -846,20 +846,23 @@ final class ShuffleBlockFetcherIterator(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585627091
##
core/src/main/scala/org/apache/spark/storage/PushBasedFetchHelper.scala:
##
@@ -246,8 +246,9 @@ private class PushBasedFetchHelper(
case
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585626163
##
core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala:
##
@@ -298,7 +302,7 @@ class BlockManagerMaster(
if
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585625493
##
core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala:
##
@@ -362,8 +364,8 @@ private[storage] class BlockManagerDecommissioner(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585624924
##
core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala:
##
@@ -319,7 +321,7 @@ private[storage] class BlockManagerDecommissioner(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585624782
##
core/src/main/scala/org/apache/spark/storage/BlockManagerDecommissioner.scala:
##
@@ -224,7 +226,7 @@ private[storage] class BlockManagerDecommissioner(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585623380
##
core/src/main/scala/org/apache/spark/storage/BlockManager.scala:
##
@@ -1214,14 +1214,16 @@ private[spark] class BlockManager(
// Give up trying
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585620988
##
core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala:
##
@@ -76,8 +76,9 @@ private[spark] class SortShuffleManager(conf: SparkConf)
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585620423
##
core/src/main/scala/org/apache/spark/shuffle/ShuffleBlockPusher.scala:
##
@@ -132,7 +133,7 @@ private[spark] class ShuffleBlockPusher(conf: SparkConf)
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585615139
##
core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala:
##
@@ -248,7 +248,7 @@ private[spark] class
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585615292
##
core/src/main/scala/org/apache/spark/serializer/SerializationDebugger.scala:
##
@@ -44,7 +44,7 @@ private[spark] object SerializationDebugger extends Logging
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585614454
##
core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala:
##
@@ -235,7 +235,7 @@ private[spark] class
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585613832
##
core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala:
##
@@ -154,7 +154,7 @@ private[spark] class
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585613712
##
core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala:
##
@@ -122,9 +122,9 @@ private[spark] class
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585611197
##
core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala:
##
@@ -276,9 +277,9 @@ private[spark] class TaskSchedulerImpl(
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585609934
##
core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala:
##
@@ -154,9 +154,11 @@ private[spark] class FairSchedulableBuilder(val rootPool:
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585608660
##
core/src/main/scala/org/apache/spark/scheduler/JobWaiter.scala:
##
@@ -65,7 +65,7 @@ private[spark] class JobWaiter[T](
override def jobFailed(exception:
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585606860
##
core/src/main/scala/org/apache/spark/resource/ResourceUtils.scala:
##
@@ -454,14 +455,21 @@ private[spark] object ResourceUtils extends Logging {
if
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585603072
##
core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala:
##
@@ -254,8 +257,8 @@ class HadoopRDD[K, V](
array
} catch {
case e:
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585601079
##
core/src/main/scala/org/apache/spark/metrics/sink/StatsdReporter.scala:
##
@@ -65,9 +66,11 @@ private[spark] class StatsdReporter(
meters:
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585591097
##
core/src/main/scala/org/apache/spark/executor/Executor.scala:
##
@@ -638,10 +638,12 @@ private[spark] class Executor(
val freedMemory =
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585564218
##
core/src/main/scala/org/apache/spark/deploy/security/HBaseDelegationTokenProvider.scala:
##
@@ -54,7 +54,8 @@ private[security] class
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585558505
##
core/src/main/scala/org/apache/spark/deploy/master/Master.scala:
##
@@ -1280,8 +1293,8 @@ private[deploy] class Master(
for (worker <- toRemove) {
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r158540
##
core/src/main/scala/org/apache/spark/deploy/history/HistoryServerDiskManager.scala:
##
@@ -312,8 +314,9 @@ private class HistoryServerDiskManager(
if
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585543510
##
core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala:
##
@@ -155,10 +155,12 @@ private[r] class RBackendHandler(server: RBackend)
args)
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585539259
##
core/src/main/scala/org/apache/spark/SparkContext.scala:
##
@@ -747,8 +748,9 @@ class SparkContext(config: SparkConf) extends Logging {
case
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585533767
##
core/src/main/scala/org/apache/spark/SparkConf.scala:
##
@@ -772,15 +774,19 @@ private[spark] object SparkConf extends Logging {
def
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585532948
##
core/src/main/scala/org/apache/spark/SparkConf.scala:
##
@@ -507,11 +508,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585528937
##
core/src/main/scala/org/apache/spark/Dependency.scala:
##
@@ -211,10 +212,13 @@ class ShuffleDependency[K: ClassTag, V: ClassTag, C:
ClassTag](
// This
gengliangwang commented on code in PR #46309:
URL: https://github.com/apache/spark/pull/46309#discussion_r1585527897
##
common/utils/src/main/scala/org/apache/spark/internal/LogKey.scala:
##
@@ -304,20 +355,33 @@ object LogKeys {
case object NUM_LEFT_PARTITION_VALUES extends
dtenedor opened a new pull request, #46309:
URL: https://github.com/apache/spark/pull/46309
### What changes were proposed in this pull request?
Migrate logWarning with variables of the Spark Core module to structured
logging framework. This transforms the logWarning entries of the
50 matches
Mail list logo