[ https://issues.apache.org/jira/browse/SPARK-25180?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16588005#comment-16588005 ]

Steve Loughran commented on SPARK-25180:
----------------------------------------
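
The session below copies the Landsat scene list from s3a://landsat-pds to the local filesystem, builds a DataFrame from the gzipped CSV, samples and filters it, then attempts to write it back to S3A as partitioned ORC via the staging committer. The write fails in the executor while it fetches the job jar from the driver: the driver's advertised hostname, hw13176.lan, does not resolve, so the connection dies with java.net.UnknownHostException; the failure-handling path inside Utils.tryWithSafeFinallyAndFailureCallbacks (Utils.scala:1422) then throws a NullPointerException of its own, which is logged as "Suppressing exception in catch: null" and attached to the IOException as a suppressed exception.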

Stack trace:
{code}
scala> text("hello all!")
res10: String =
"
  _              _   _                     _   _   _
 | |__     ___  | | | |   ___       __ _  | | | | | |
 | '_ \   / _ \ | | | |  / _ \     / _` | | | | | | |
 | | | | |  __/ | | | | | (_) |   | (_| | | | | | |_|
 |_| |_|  \___| |_| |_|  \___/     \__,_| |_| |_| (_)

"

scala> val landsatCsvGZOnS3 = path("s3a://landsat-pds/scene_list.gz")
landsatCsvGZOnS3: org.apache.hadoop.fs.Path = s3a://landsat-pds/scene_list.gz

scala>

scala> val landsatCsvGZ = path("file:///tmp/scene_list.gz")
landsatCsvGZ: org.apache.hadoop.fs.Path = file:/tmp/scene_list.gz

scala> copyFile(landsatCsvGZOnS3, landsatCsvGZ, conf, false)
18/08/21 11:00:56 INFO FluentPropertyBeanIntrospector: Error when creating PropertyDescriptor for public final void org.apache.commons.configuration2.AbstractConfiguration.setProperty(java.lang.String,java.lang.Object)! Ignoring this property.
18/08/21 11:00:56 WARN MetricsConfig: Cannot locate configuration: tried hadoop-metrics2-s3a-file-system.properties,hadoop-metrics2.properties
18/08/21 11:00:56 INFO MetricsSystemImpl: Scheduled Metric snapshot period at 10 second(s).
18/08/21 11:00:56 INFO MetricsSystemImpl: s3a-file-system metrics system started
18/08/21 11:00:59 INFO deprecation: fs.s3a.server-side-encryption-key is deprecated. Instead, use fs.s3a.server-side-encryption.key
18/08/21 11:00:59 INFO ObjectStoreOperations: Copying s3a://landsat-pds/scene_list.gz to file:/tmp/scene_list.gz (44534 KB)
18/08/21 11:04:36 INFO ObjectStoreOperations: Copy Duration = 216.47322015 seconds
18/08/21 11:04:36 INFO ObjectStoreOperations: Effective copy bandwidth = 205.72521612207376 KiB/s
res11: Boolean = true
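// (Sanity check on the copy numbers above: 44534 KB over 216.473 s ~= 205.7 KiB/s,
// which matches the reported effective copy bandwidth.)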

scala> val csvSchema = LandsatIO.buildCsvSchema()
csvSchema: org.apache.spark.sql.types.StructType = StructType(StructField(entityId,StringType,true), StructField(acquisitionDate,StringType,true), StructField(cloudCover,StringType,true), StructField(processingLevel,StringType,true), StructField(path,StringType,true), StructField(row,StringType,true), StructField(min_lat,StringType,true), StructField(min_lon,StringType,true), StructField(max_lat,StringType,true), StructField(max_lon,StringType,true), StructField(download_url,StringType,true))

scala> val csvDataFrame = LandsatIO.addLandsatColumns(
     |   spark.read.options(LandsatIO.CsvOptions).
     |     schema(csvSchema).csv(landsatCsvGZ.toUri.toString))
18/08/21 11:34:24 INFO SharedState: Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir ('file:/Users/stevel/Projects/sparkwork/spark/spark-warehouse').
18/08/21 11:34:24 INFO SharedState: Warehouse path is 'file:/Users/stevel/Projects/sparkwork/spark/spark-warehouse'.
18/08/21 11:34:24 INFO StateStoreCoordinatorRef: Registered StateStoreCoordinator endpoint
csvDataFrame: org.apache.spark.sql.DataFrame = [id: string, acquisitionDate: timestamp ... 12 more fields]

scala> val filteredCSV = csvDataFrame.
     |   sample(false, 0.01d).
     |   filter("cloudCover < 0").cache()
18/08/21 11:34:44 INFO FileSourceStrategy: Pruning directories with:
18/08/21 11:34:44 INFO FileSourceStrategy: Post-Scan Filters:
18/08/21 11:34:44 INFO FileSourceStrategy: Output Data Schema: struct<entityId: string, acquisitionDate: string, cloudCover: string, processingLevel: string, path: string ... 9 more fields>
18/08/21 11:34:44 INFO FileSourceScanExec: Pushed Filters:
18/08/21 11:34:45 INFO CodeGenerator: Code generated in 239.359919 ms
18/08/21 11:34:45 INFO MemoryStore: Block broadcast_0 stored as values in memory (estimated size 347.6 KB, free 366.0 MB)
18/08/21 11:34:45 INFO MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 29.3 KB, free 365.9 MB)
18/08/21 11:34:45 INFO BlockManagerInfo: Added broadcast_0_piece0 in memory on hw13176.lan:52731 (size: 29.3 KB, free: 366.3 MB)
18/08/21 11:34:45 INFO SparkContext: Created broadcast 0 from cache at <console>:46
18/08/21 11:34:45 INFO FileSourceScanExec: Planning scan with bin packing, max size: 6224701 bytes, open cost is considered as scanning 4194304 bytes.
filteredCSV: org.apache.spark.sql.Dataset[org.apache.spark.sql.Row] = [id: string, acquisitionDate: timestamp ... 12 more fields]

scala> ireland
<console>:43: error: not found: value ireland
       ireland
       ^

scala> val ireland = new Path("s3a://hwdev-steve-ireland-new/")
ireland: org.apache.hadoop.fs.Path = s3a://hwdev-steve-ireland-new/

scala> val orcData = new Path(ireland, "orcData")
orcData: org.apache.hadoop.fs.Path = s3a://hwdev-steve-ireland-new/orcData

scala> logDuration("write to ORC S3A") {
     |   filteredCSV.write.
     |     partitionBy("year", "month").
     |     mode(SaveMode.Append).format("orc").save(orcData.toString)
     | }
18/08/21 11:35:44 INFO ObjectStoreOperations: Starting write to ORC S3A
18/08/21 11:35:50 INFO FileOutputCommitter: File Output Committer Algorithm version is 1
18/08/21 11:35:50 INFO FileOutputCommitter: FileOutputCommitter skip cleanup _temporary folders under output directory:false, ignore cleanup failures: false
18/08/21 11:35:50 INFO AbstractS3ACommitterFactory: Using Commmitter StagingCommitter{AbstractS3ACommitter{role=Task committer attempt_20180821113550_0000_m_000000_0, namestaging, outputPath=s3a://hwdev-steve-ireland-new/orcData, workPath=file:/tmp/hadoop-stevel/s3a/local-1534872224019/_temporary/0/_temporary/attempt_20180821113550_0000_m_000000_0}, conflictResolution=REPLACE, wrappedCommitter=FileOutputCommitter{PathOutputCommitter{context=TaskAttemptContextImpl{JobContextImpl{jobId=job_20180821113550_0000}; taskId=attempt_20180821113550_0000_m_000000_0, status=''}}; outputPath=file:/Users/stevel/Projects/sparkwork/spark/tmp/staging/stevel/local-1534872224019/staging-uploads, workPath=null, algorithmVersion=1, skipCleanup=false, ignoreCleanupFailures=false}} for s3a://hwdev-steve-ireland-new/orcData
18/08/21 11:35:50 INFO SparkContext: Starting job: save at <console>:50
18/08/21 11:35:50 INFO DAGScheduler: Got job 0 (save at <console>:50) with 1 output partitions
18/08/21 11:35:50 INFO DAGScheduler: Final stage: ResultStage 0 (save at <console>:50)
18/08/21 11:35:50 INFO DAGScheduler: Parents of final stage: List()
18/08/21 11:35:50 INFO DAGScheduler: Missing parents: List()
18/08/21 11:35:50 INFO DAGScheduler: Submitting ResultStage 0 (MapPartitionsRDD[6] at save at <console>:50), which has no missing parents
18/08/21 11:35:50 INFO MemoryStore: Block broadcast_1 stored as values in memory (estimated size 207.7 KB, free 365.7 MB)
18/08/21 11:35:50 INFO MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 75.5 KB, free 365.7 MB)
18/08/21 11:35:50 INFO BlockManagerInfo: Added broadcast_1_piece0 in memory on hw13176.lan:52731 (size: 75.5 KB, free: 366.2 MB)
18/08/21 11:35:50 INFO SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1039
18/08/21 11:35:50 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 0 (MapPartitionsRDD[6] at save at <console>:50) (first 15 tasks are for partitions Vector(0))
18/08/21 11:35:50 INFO TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
18/08/21 11:35:50 INFO TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, PROCESS_LOCAL, 8290 bytes)
18/08/21 11:35:50 INFO Executor: Running task 0.0 in stage 0.0 (TID 0)
18/08/21 11:35:50 INFO Executor: Fetching spark://hw13176.lan:52730/jars/hortonworks-spark-cloud-integration-1.0-SNAPSHOT.jar with timestamp 1534872223691
18/08/21 11:35:51 ERROR Utils: Aborting task
java.io.IOException: Failed to connect to hw13176.lan:52730
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245)
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187)
        at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$downloadClient(NettyRpcEnv.scala:368)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply$mcV$sp(NettyRpcEnv.scala:336)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
        at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:339)
        at org.apache.spark.util.Utils$.doFetchFile(Utils.scala:664)
        at org.apache.spark.util.Utils$.fetchFile(Utils.scala:488)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:755)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:747)
        at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
        at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
        at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
        at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
        at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:747)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:312)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: java.net.UnknownHostException: hw13176.lan
        at java.net.InetAddress.getAllByName0(InetAddress.java:1280)
        at java.net.InetAddress.getAllByName(InetAddress.java:1192)
        at java.net.InetAddress.getAllByName(InetAddress.java:1126)
        at java.net.InetAddress.getByName(InetAddress.java:1076)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:146)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:143)
        at java.security.AccessController.doPrivileged(Native Method)
        at io.netty.util.internal.SocketUtils.addressByName(SocketUtils.java:143)
        at io.netty.resolver.DefaultNameResolver.doResolve(DefaultNameResolver.java:43)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:63)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:55)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:57)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:32)
        at io.netty.resolver.AbstractAddressResolver.resolve(AbstractAddressResolver.java:108)
        at io.netty.bootstrap.Bootstrap.doResolveAndConnect0(Bootstrap.java:208)
        at io.netty.bootstrap.Bootstrap.access$000(Bootstrap.java:49)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:188)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:174)
        at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:507)
        at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:481)
        at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:420)
        at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104)
        at io.netty.channel.DefaultChannelPromise.trySuccess(DefaultChannelPromise.java:82)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetSuccess(AbstractChannel.java:978)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.register0(AbstractChannel.java:512)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.access$200(AbstractChannel.java:423)
        at io.netty.channel.AbstractChannel$AbstractUnsafe$1.run(AbstractChannel.java:482)
        at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
        at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:403)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:463)
        at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
        ... 1 more
18/08/21 11:35:51 WARN Utils: Suppressing exception in catch: null
java.lang.NullPointerException
        at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1422)
        at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:339)
        at org.apache.spark.util.Utils$.doFetchFile(Utils.scala:664)
        at org.apache.spark.util.Utils$.fetchFile(Utils.scala:488)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:755)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:747)
        at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
        at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
        at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
        at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
        at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:747)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:312)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
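// A minimal sketch of the pattern behind the "Suppressing exception in catch: null"
// warning above -- a hypothetical stand-in, not Spark's actual Utils code. The fetch
// fails with the IOException; the catch-callback then throws its own NPE, which is
// logged and attached via addSuppressed, while the original IOException is rethrown.
// That is why both exceptions appear in the executor trace below:
def tryWithCallbacks[T](block: => T)(catchBlock: => Unit): T = {
  try {
    block                                 // here: downloading the jar from the driver
  } catch {
    case cause: Throwable =>
      try {
        catchBlock                        // cleanup callback; itself hits an NPE
      } catch {
        case suppressed: Throwable =>
          cause.addSuppressed(suppressed) // -> "Suppressed: java.lang.NullPointerException"
      }
      throw cause                         // the IOException propagates to the task runner
  }
}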
18/08/21 11:35:51 ERROR Executor: Exception in task 0.0 in stage 0.0 (TID 0)
java.io.IOException: Failed to connect to hw13176.lan:52730
        [... stack identical to the ERROR Utils trace above ...]
        Suppressed: java.lang.NullPointerException
                at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1422)
                ... 17 more
Caused by: java.net.UnknownHostException: hw13176.lan
        [... stack identical to the UnknownHostException trace above ...]
        ... 1 more
18/08/21 11:35:51 WARN TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.io.IOException: Failed to connect to hw13176.lan:52730
        [... stack, suppressed NullPointerException and UnknownHostException cause identical to the trace above ...]

18/08/21 11:35:51 ERROR TaskSetManager: Task 0 in stage 0.0 failed 1 times; aborting job
18/08/21 11:35:51 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
18/08/21 11:35:51 INFO TaskSchedulerImpl: Cancelling stage 0
18/08/21 11:35:51 INFO DAGScheduler: ResultStage 0 (save at <console>:50) failed in 0.343 s due to Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.io.IOException: Failed to connect to hw13176.lan:52730
        [... stack, suppressed NullPointerException and UnknownHostException cause identical to the trace above ...]

Driver stacktrace:
18/08/21 11:35:51 INFO DAGScheduler: Job 0 failed: save at <console>:50, took 0.389757 s
18/08/21 11:35:51 ERROR FileFormatWriter: Aborting job null.
org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.io.IOException: Failed to connect to hw13176.lan:52730
        [... stack, suppressed NullPointerException and UnknownHostException cause identical to the trace above ...]

Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1602)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1590)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1589)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1589)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
        at scala.Option.foreach(Option.scala:257)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1823)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1772)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1761)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
        at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:194)
        at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:154)
        at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104)
        at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102)
        at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
        at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
        at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
        at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
        at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
        at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:654)
        at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:273)
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:267)
        at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:225)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$anonfun$1.apply$mcV$sp(<console>:50)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$anonfun$1.apply(<console>:50)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$anonfun$1.apply(<console>:50)
        at com.hortonworks.spark.cloud.utils.TimeOperations$class.durationOf(TimeOperations.scala:90)
        at com.hortonworks.spark.cloud.ObjectStoreOperations$.durationOf(ObjectStoreOperations.scala:504)
        at com.hortonworks.spark.cloud.utils.TimeOperations$class.logDuration(TimeOperations.scala:62)
        at com.hortonworks.spark.cloud.ObjectStoreOperations$.logDuration(ObjectStoreOperations.scala:504)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:47)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:56)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:58)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:60)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:62)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:64)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:66)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:68)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:70)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:72)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:74)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:76)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw$$iw.<init>(<console>:78)
        at $line44.$read$$iw$$iw$$iw$$iw$$iw.<init>(<console>:80)
        at $line44.$read$$iw$$iw$$iw$$iw.<init>(<console>:82)
        at $line44.$read$$iw$$iw$$iw.<init>(<console>:84)
        at $line44.$read$$iw$$iw.<init>(<console>:86)
        at $line44.$read$$iw.<init>(<console>:88)
        at $line44.$read.<init>(<console>:90)
        at $line44.$read$.<init>(<console>:94)
        at $line44.$read$.<clinit>(<console>)
        at $line44.$eval$.$print$lzycompute(<console>:7)
        at $line44.$eval$.$print(<console>:6)
        at $line44.$eval.$print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
        at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
        at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
        at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
        at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
        at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:825)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:825)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:825)
        at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:825)
        at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
        at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
        at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
        at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
        at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
        at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
        at org.apache.spark.repl.Main$.doMain(Main.scala:76)
        at org.apache.spark.repl.Main$.main(Main.scala:56)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:904)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.IOException: Failed to connect to hw13176.lan:52730
        [... stack and suppressed NullPointerException identical to the trace above ...]
Caused by: java.net.UnknownHostException: hw13176.lan
        [... stack identical to the UnknownHostException trace above ...]
        ... 1 more
18/08/21 11:35:51 INFO AbstractS3ACommitter: Task committer attempt_20180821113550_0000_m_000000_0: aborting job (no job ID) in state FAILED
18/08/21 11:35:51 INFO StagingCommitter: Starting: Task committer attempt_20180821113550_0000_m_000000_0: aborting job in state (no job ID)
18/08/21 11:35:51 INFO AbstractS3ACommitter: Task committer attempt_20180821113550_0000_m_000000_0: no pending commits to abort
18/08/21 11:35:51 INFO StagingCommitter: Task committer attempt_20180821113550_0000_m_000000_0: aborting job in state (no job ID) : duration 0:00.011s
18/08/21 11:35:51 INFO AbstractS3ACommitter: Starting: Cleanup job (no job ID)
18/08/21 11:35:51 INFO AbstractS3ACommitter: Starting: Aborting all pending commits under s3a://hwdev-steve-ireland-new/orcData
18/08/21 11:35:52 INFO AbstractS3ACommitter: Aborting all pending commits under s3a://hwdev-steve-ireland-new/orcData: duration 0:00.262s
18/08/21 11:35:52 INFO AbstractS3ACommitter: Cleanup job (no job ID): duration 0:00.262s
18/08/21 11:35:53 ERROR ObjectStoreOperations: After 8.423744s: org.apache.spark.SparkException: Job aborted.
org.apache.spark.SparkException: Job aborted.
        at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:224)
        at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:154)
        [... remaining query-execution, REPL and interpreter frames identical to the driver stacktrace above ...]
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at 
org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at 
org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:904)
        at 
org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.io.IOException: Failed to connect to hw13176.lan:52730
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245)
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187)
        at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$downloadClient(NettyRpcEnv.scala:368)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply$mcV$sp(NettyRpcEnv.scala:336)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
        at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:339)
        at org.apache.spark.util.Utils$.doFetchFile(Utils.scala:664)
        at org.apache.spark.util.Utils$.fetchFile(Utils.scala:488)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:755)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:747)
        at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
        at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
        at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
        at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
        at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:747)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:312)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
        Suppressed: java.lang.NullPointerException
                at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1422)
                ... 17 more
Caused by: java.net.UnknownHostException: hw13176.lan
        at java.net.InetAddress.getAllByName0(InetAddress.java:1280)
        at java.net.InetAddress.getAllByName(InetAddress.java:1192)
        at java.net.InetAddress.getAllByName(InetAddress.java:1126)
        at java.net.InetAddress.getByName(InetAddress.java:1076)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:146)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:143)
        at java.security.AccessController.doPrivileged(Native Method)
        at io.netty.util.internal.SocketUtils.addressByName(SocketUtils.java:143)
        at io.netty.resolver.DefaultNameResolver.doResolve(DefaultNameResolver.java:43)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:63)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:55)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:57)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:32)
        at io.netty.resolver.AbstractAddressResolver.resolve(AbstractAddressResolver.java:108)
        at io.netty.bootstrap.Bootstrap.doResolveAndConnect0(Bootstrap.java:208)
        at io.netty.bootstrap.Bootstrap.access$000(Bootstrap.java:49)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:188)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:174)
        at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:507)
        at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:481)
        at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:420)
        at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104)
        at io.netty.channel.DefaultChannelPromise.trySuccess(DefaultChannelPromise.java:82)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetSuccess(AbstractChannel.java:978)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.register0(AbstractChannel.java:512)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.access$200(AbstractChannel.java:423)
        at io.netty.channel.AbstractChannel$AbstractUnsafe$1.run(AbstractChannel.java:482)
        at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
        at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:403)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:463)
        at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
        ... 1 more

Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1602)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1590)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1589)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1589)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
        at scala.Option.foreach(Option.scala:257)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1823)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1772)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1761)
        at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
        at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
        at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
        at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:194)
        ... 89 more
Caused by: java.io.IOException: Failed to connect to hw13176.lan:52730
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245)
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187)
        at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$downloadClient(NettyRpcEnv.scala:368)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply$mcV$sp(NettyRpcEnv.scala:336)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
        at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:339)
        at org.apache.spark.util.Utils$.doFetchFile(Utils.scala:664)
        at org.apache.spark.util.Utils$.fetchFile(Utils.scala:488)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:755)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:747)
        at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
        at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
        at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
        at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
        at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:747)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:312)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
        Suppressed: java.lang.NullPointerException
                at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1422)
                ... 17 more
Caused by: java.net.UnknownHostException: hw13176.lan
        at java.net.InetAddress.getAllByName0(InetAddress.java:1280)
        at java.net.InetAddress.getAllByName(InetAddress.java:1192)
        at java.net.InetAddress.getAllByName(InetAddress.java:1126)
        at java.net.InetAddress.getByName(InetAddress.java:1076)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:146)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:143)
        at java.security.AccessController.doPrivileged(Native Method)
        at io.netty.util.internal.SocketUtils.addressByName(SocketUtils.java:143)
        at io.netty.resolver.DefaultNameResolver.doResolve(DefaultNameResolver.java:43)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:63)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:55)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:57)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:32)
        at io.netty.resolver.AbstractAddressResolver.resolve(AbstractAddressResolver.java:108)
        at io.netty.bootstrap.Bootstrap.doResolveAndConnect0(Bootstrap.java:208)
        at io.netty.bootstrap.Bootstrap.access$000(Bootstrap.java:49)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:188)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:174)
        at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:507)
        at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:481)
        at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:420)
        at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104)
        at io.netty.channel.DefaultChannelPromise.trySuccess(DefaultChannelPromise.java:82)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetSuccess(AbstractChannel.java:978)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.register0(AbstractChannel.java:512)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.access$200(AbstractChannel.java:423)
        at io.netty.channel.AbstractChannel$AbstractUnsafe$1.run(AbstractChannel.java:482)
        at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
        at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:403)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:463)
        at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
        ... 1 more
org.apache.spark.SparkException: Job aborted.
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:224)
  at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:154)
  at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104)
  at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102)
  at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
  at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:654)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:77)
  at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:654)
  at org.apache.spark.sql.DataFrameWriter.saveToV1Source(DataFrameWriter.scala:273)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:267)
  at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:225)
  at $anonfun$1.apply$mcV$sp(<console>:50)
  at $anonfun$1.apply(<console>:50)
  at $anonfun$1.apply(<console>:50)
  at com.hortonworks.spark.cloud.utils.TimeOperations$class.durationOf(TimeOperations.scala:90)
  at com.hortonworks.spark.cloud.ObjectStoreOperations$.durationOf(ObjectStoreOperations.scala:504)
  at com.hortonworks.spark.cloud.utils.TimeOperations$class.logDuration(TimeOperations.scala:62)
  at com.hortonworks.spark.cloud.ObjectStoreOperations$.logDuration(ObjectStoreOperations.scala:504)
  ... 63 elided
Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost, executor driver): java.io.IOException: Failed to connect to hw13176.lan:52730
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245)
        at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187)
        at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$downloadClient(NettyRpcEnv.scala:368)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply$mcV$sp(NettyRpcEnv.scala:336)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
        at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
        at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:339)
        at org.apache.spark.util.Utils$.doFetchFile(Utils.scala:664)
        at org.apache.spark.util.Utils$.fetchFile(Utils.scala:488)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:755)
        at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:747)
        at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
        at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
        at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
        at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
        at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
        at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:747)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:312)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
        Suppressed: java.lang.NullPointerException
                at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1422)
                ... 17 more
Caused by: java.net.UnknownHostException: hw13176.lan
        at java.net.InetAddress.getAllByName0(InetAddress.java:1280)
        at java.net.InetAddress.getAllByName(InetAddress.java:1192)
        at java.net.InetAddress.getAllByName(InetAddress.java:1126)
        at java.net.InetAddress.getByName(InetAddress.java:1076)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:146)
        at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:143)
        at java.security.AccessController.doPrivileged(Native Method)
        at io.netty.util.internal.SocketUtils.addressByName(SocketUtils.java:143)
        at io.netty.resolver.DefaultNameResolver.doResolve(DefaultNameResolver.java:43)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:63)
        at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:55)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:57)
        at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:32)
        at io.netty.resolver.AbstractAddressResolver.resolve(AbstractAddressResolver.java:108)
        at io.netty.bootstrap.Bootstrap.doResolveAndConnect0(Bootstrap.java:208)
        at io.netty.bootstrap.Bootstrap.access$000(Bootstrap.java:49)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:188)
        at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:174)
        at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:507)
        at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:481)
        at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:420)
        at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104)
        at io.netty.channel.DefaultChannelPromise.trySuccess(DefaultChannelPromise.java:82)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetSuccess(AbstractChannel.java:978)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.register0(AbstractChannel.java:512)
        at io.netty.channel.AbstractChannel$AbstractUnsafe.access$200(AbstractChannel.java:423)
        at io.netty.channel.AbstractChannel$AbstractUnsafe$1.run(AbstractChannel.java:482)
        at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
        at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:403)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:463)
        at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
        ... 1 more

Driver stacktrace:
  at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1602)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1590)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1589)
  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1589)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
  at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
  at scala.Option.foreach(Option.scala:257)
  at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1823)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1772)
  at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1761)
  at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
  at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
  at org.apache.spark.SparkContext.runJob(SparkContext.scala:2034)
  at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:194)
  ... 89 more
Caused by: java.io.IOException: Failed to connect to hw13176.lan:52730
  at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:245)
  at org.apache.spark.network.client.TransportClientFactory.createClient(TransportClientFactory.java:187)
  at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$downloadClient(NettyRpcEnv.scala:368)
  at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply$mcV$sp(NettyRpcEnv.scala:336)
  at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
  at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$openChannel$1.apply(NettyRpcEnv.scala:335)
  at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1414)
  at org.apache.spark.rpc.netty.NettyRpcEnv.openChannel(NettyRpcEnv.scala:339)
  at org.apache.spark.util.Utils$.doFetchFile(Utils.scala:664)
  at org.apache.spark.util.Utils$.fetchFile(Utils.scala:488)
  at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:755)
  at org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$5.apply(Executor.scala:747)
  at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:733)
  at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
  at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
  at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
  at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
  at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
  at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
  at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:747)
  at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:312)
  at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
  at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
  at java.lang.Thread.run(Thread.java:745)
Caused by: java.net.UnknownHostException: hw13176.lan
  at java.net.InetAddress.getAllByName0(InetAddress.java:1280)
  at java.net.InetAddress.getAllByName(InetAddress.java:1192)
  at java.net.InetAddress.getAllByName(InetAddress.java:1126)
  at java.net.InetAddress.getByName(InetAddress.java:1076)
  at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:146)
  at io.netty.util.internal.SocketUtils$8.run(SocketUtils.java:143)
  at java.security.AccessController.doPrivileged(Native Method)
  at io.netty.util.internal.SocketUtils.addressByName(SocketUtils.java:143)
  at io.netty.resolver.DefaultNameResolver.doResolve(DefaultNameResolver.java:43)
  at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:63)
  at io.netty.resolver.SimpleNameResolver.resolve(SimpleNameResolver.java:55)
  at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:57)
  at io.netty.resolver.InetSocketAddressResolver.doResolve(InetSocketAddressResolver.java:32)
  at io.netty.resolver.AbstractAddressResolver.resolve(AbstractAddressResolver.java:108)
  at io.netty.bootstrap.Bootstrap.doResolveAndConnect0(Bootstrap.java:208)
  at io.netty.bootstrap.Bootstrap.access$000(Bootstrap.java:49)
  at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:188)
  at io.netty.bootstrap.Bootstrap$1.operationComplete(Bootstrap.java:174)
  at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:507)
  at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:481)
  at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:420)
  at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104)
  at io.netty.channel.DefaultChannelPromise.trySuccess(DefaultChannelPromise.java:82)
  at io.netty.channel.AbstractChannel$AbstractUnsafe.safeSetSuccess(AbstractChannel.java:978)
  at io.netty.channel.AbstractChannel$AbstractUnsafe.register0(AbstractChannel.java:512)
  at io.netty.channel.AbstractChannel$AbstractUnsafe.access$200(AbstractChannel.java:423)
  at io.netty.channel.AbstractChannel$AbstractUnsafe$1.run(AbstractChannel.java:482)
  at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:163)
  at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:403)
  at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:463)
  at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
  at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:138)
  ... 1 more
  Suppressed: java.lang.NullPointerException
    at org.apache.spark.util.Utils$.tryWithSafeFinallyAndFailureCallbacks(Utils.scala:1422)
    ... 17 more

{code}
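
The nested traces show the proximate cause: the executor's call into {{Utils.doFetchFile()}} tries to open an RPC channel back to the driver at {{hw13176.lan:52730}}, and that hostname does not resolve on this network, so the whole write aborts. A possible local workaround, sketched below and untested on this setup ({{spark.driver.host}} is a standard Spark property; {{SPARK_LOCAL_HOSTNAME}} is the env var {{Utils}} consults when deriving the local hostname), is to pin the advertised driver address to loopback before any session exists:

{code}
// Hedged workaround sketch, not a verified fix: advertise the driver on
// loopback so executors fetch files from an address that always resolves.
// From spark-shell the equivalent is `--conf spark.driver.host=localhost`,
// or exporting SPARK_LOCAL_HOSTNAME=localhost before launching.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .master("local[*]")                        // illustrative master URL
  .config("spark.driver.host", "localhost")  // advertise loopback, not hw13176.lan
  .getOrCreate()
{code}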

> Spark standalone failure in Utils.doFetchFile() if nslookup of local hostname fails
> -----------------------------------------------------------------------------------
>
>                 Key: SPARK-25180
>                 URL: https://issues.apache.org/jira/browse/SPARK-25180
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Core
>    Affects Versions: 2.3.0
>         Environment: mac laptop running on a corporate guest wifi, presumably 
> a wifi with odd DNS settings.
>            Reporter: Steve Loughran
>            Priority: Minor
>
> Trying to save work on Spark standalone can fail if the netty RPC layer 
> cannot resolve the local hostname. While that is a valid failure on a real 
> cluster, in standalone mode falling back to "localhost" rather than the 
> inferred "hw13176.lan" value may be the better option; a possible shape for 
> that fallback is sketched after this description.
> Note also that the abort* call itself failed with an NPE.
>  
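
The localhost fallback proposed in the description might look something like this; this is purely an illustrative sketch with a hypothetical helper name, not Spark's actual code:

{code}
// Hypothetical helper: keep the inferred hostname only if DNS can resolve it,
// otherwise fall back to loopback for standalone/local deployments.
import java.net.{InetAddress, UnknownHostException}

def advertisedHostname(inferred: String): String =
  try {
    InetAddress.getByName(inferred) // throws if the name cannot be resolved
    inferred
  } catch {
    case _: UnknownHostException => "localhost"
  }
{code}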


