Re: Running spark from Eclipse and then Jar

2016-12-10 Thread Iman Mohtashemi
> at java.lang.ClassLoader.loadClass(Unknown Source)
> at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$5$$anonfun$apply$1.apply(DataSource.scala:132)
> at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$5$$anonfun$apply$1.apply(DataSource.scala:132)
> at scala.util.Try$.apply(Try.scala:192)
> at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$5.apply(DataSource.scala:132)
> at org.apache.spark.sql.execution.datasources.DataSource$$anonfun$5.apply(DataSource.scala:132)
> at scala.util.Try.orElse(Try.scala:84)
> at org.apache.spark.sql.execution.datasources.DataSource.lookupDataSource(DataSource.scala:132)
> ... 8 more
> 16/12/07 15:16:46 INFO SparkContext: Invoking stop() from shutdown hook
> 16/12/07 15:16:46 INFO SparkUI: Stopped Spark web UI at http://192.168.19.2:4040
> 16/12/07 15:16:46 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
> 16/12/07 15:16:46 INFO MemoryStore: MemoryStore cleared
> 16/12/07 15:16:46 INFO BlockManager: BlockManager stopped
> 16/12/07 15:16:46 INFO BlockManagerMaster: BlockManagerMaster stopped
> 16/12/07 15:16:46 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
> 16/12/07 15:16:46 INFO SparkContext: Successfully stopped SparkContext
> 16/12/07 15:16:46 INFO ShutdownHookManager: Shutdown hook called
> 16/12/07 15:16:46 INFO ShutdownHookManager: Deleting directory C:\Users\Owner\AppData\Local\Temp\spark-dab2587b-a794-4947-ac13-d40056cf71d8
>
> C:\Users\Owner>
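
The decisive part of this trace is the failure inside DataSource.lookupDataSource: in Spark 2.0 the format name ("text") is resolved first through the data sources registered under META-INF/services, and then, in the Try.orElse fallback visible in the frames above, by loading the class text.DefaultSource by name. When the same program runs fine inside Eclipse but dies like this from a packaged jar, the usual culprit is that the fat-jar step dropped or clobbered those service registration files; see the shade-plugin note after the pom.xml at the end of this thread.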
> import java.util.Arrays;
> import java.util.Iterator;
> import java.util.List;
> import java.util.regex.Pattern;
>
> import org.apache.spark.SparkConf;
> import org.apache.spark.api.java.JavaPairRDD;
> import org.apache.spark.api.java.JavaRDD;
> import org.apache.spark.api.java.JavaSparkContext;
> import org.apache.spark.api.java.function.FlatMapFunction;
> import org.apache.spark.api.java.function.Function2;
> import org.apache.spark.api.java.function.PairFunction;
> import org.apache.spark.sql.SparkSession;
>
> import scala.Tuple2;
>
> public final class JavaWordCount {
>   private static final Pattern SPACE = Pattern.compile(" ");
>
>   public static void main(String[] args) throws Exception {
>
>     if (args.length < 1) {
>       System.err.println("Usage: JavaWordCount <file>");
>       System.exit(1);
>     }
>
>     // Boilerplate needed to run locally: creating the context first means
>     // the session builder below reuses it (and its local[*] master).
>     SparkConf conf = new SparkConf().setAppName("Word Count Application").setMaster("local[*]");
>     JavaSparkContext sc = new JavaSparkContext(conf);
>
>     SparkSession spark = SparkSession
>         .builder()
>         .appName("Word Count")
>         .getOrCreate()
>         .newSession();
>
>     // Read the input file as a Dataset<String>, then drop down to the RDD API.
>     JavaRDD<String> lines = spark.read().textFile(args[0]).javaRDD();
>
>     // Split each line on single spaces.
>     JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String, String>() {
>       @Override
>       public Iterator<String> call(String s) {
>         return Arrays.asList(SPACE.split(s)).iterator();
>       }
>     });
>
>     // Pair each word with a count of 1.
>     JavaPairRDD<String, Integer> ones = words.mapToPair(
>       new PairFunction<String, String, Integer>() {
>         @Override
>         public Tuple2<String, Integer> call(String s) {
>           return new Tuple2<>(s, 1);
>         }
>       });
>
>     // Sum the counts per word.
>     JavaPairRDD<String, Integer> counts = ones.reduceByKey(
>       new Function2<Integer, Integer, Integer>() {
>         @Override
>         public Integer call(Integer i1, Integer i2) {
>           return i1 + i2;
>         }
>       });
>
>     List<Tuple2<String, Integer>> output = counts.collect();
>     for (Tuple2<String, Integer> tuple : output) {
>       System.out.println(tuple._1() + ": " + tuple._2());
>     }
>     spark.stop();
>   }
> }
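
On Java 8, the three anonymous classes in the listing collapse to lambdas. This is an equivalent sketch of the same pipeline, using only the names and types already present above:

    JavaRDD<String> words = lines.flatMap(s -> Arrays.asList(SPACE.split(s)).iterator());
    JavaPairRDD<String, Integer> counts = words
        .mapToPair(s -> new Tuple2<>(s, 1))   // pair each word with a count of 1
        .reduceByKey((i1, i2) -> i1 + i2);    // sum counts per word
    for (Tuple2<String, Integer> tuple : counts.collect()) {
      System.out.println(tuple._1() + ": " + tuple._2());
    }

With the jar-with-dependencies assembly from the pom further down, something like spark-submit --class JavaWordCount <jar> <file> would then run it outside Eclipse.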
>
> --
> View this message in context: http://apache-spark-user-list.1001560.n3.nabble.com/Running-spark-from-Eclipse-and-then-Jar-tp28182.html
> Sent from the Apache Spark User List mailing list archive at Nabble.com.


Re: Running spark from Eclipse and then Jar

2016-12-10 Thread Md. Rezaul Karim
>> at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:149)
>> at org.apache.spark.sql.DataFrameReader.text(DataFrameReader.scala:504)
>> at org.apache.spark.sql.DataFrameReader.textFile(DataFrameReader.scala:540)
>> at org.apache.spark.sql.DataFrameReader.textFile(DataFrameReader.scala:513)
>> at JavaWordCount.main(JavaWordCount.java:57)
>> Caused by: java.lang.ClassNotFoundException: text.DefaultSource
>> at java.net.URLClassLoader.findClass(Unknown Source)
>> at java.lang.ClassLoader.loadClass(Unknown Source)
>> at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source)
>> at java.lang.ClassLoader.loadClass(Unknown Source)
>> [remaining frames and shutdown log identical to the quote above]


Re: Running spark from Eclipse and then Jar

2016-12-07 Thread Iman Mohtashemi
> [quoted log output and JavaWordCount listing identical to the messages above; trimmed]


Re: Running spark from Eclipse and then Jar

2016-12-07 Thread Md. Rezaul Karim
>> [quoted stack trace, log output, and JavaWordCount listing identical to the messages above; trimmed]


Re: Running spark from Eclipse and then Jar

2016-12-07 Thread Iman Mohtashemi
nt ");
>   System.exit(1);
> }
>
> //boiler plate needed to run locally
> SparkConf conf = new SparkConf().setAppName("Word Count
> Application").setMaster("local[*]");
> JavaSparkContext sc = new JavaSparkContext(conf);
>
> SparkSession spark = SparkSession
> .builder()
> .appName("Word Count")
> .getOrCreate()
> .newSession();
>
>
> JavaRDD lines = spark.read().textFile(args[0]).javaRDD();
>
>
> JavaRDD words = lines.flatMap(new FlatMapFunction<String,
> String>() {
>   @Override
>   public Iterator call(String s) {
> return Arrays.asList(SPACE.split(s)).iterator();
>   }
> });
>
> JavaPairRDD<String, Integer> ones = words.mapToPair(
>   new PairFunction<String, String, Integer>() {
> @Override
> public Tuple2<String, Integer> call(String s) {
>   return new Tuple2<>(s, 1);
> }
>   });
>
> JavaPairRDD<String, Integer> counts = ones.reduceByKey(
>   new Function2<Integer, Integer, Integer>() {
> @Override
> public Integer call(Integer i1, Integer i2) {
>   return i1 + i2;
> }
>   });
>
> List<Tuple2String, Integer>> output = counts.collect();
> for (Tuple2 tuple : output) {
>   System.out.println(tuple._1() + ": " + tuple._2());
> }
> spark.stop();
>   }
> }
>
>
>
>
> --
> View this message in context:
> http://apache-spark-user-list.1001560.n3.nabble.com/Running-spark-from-Eclipse-and-then-Jar-tp28182.html
> Sent from the Apache Spark User List mailing list archive at Nabble.com.
>
> -
> To unsubscribe e-mail: user-unsubscr...@spark.apache.org
>
>
>


Re: Running spark from Eclipse and then Jar

2016-12-07 Thread Md. Rezaul Karim
> [quoted JavaWordCount listing identical to the messages above; trimmed]

<!-- pom.xml as posted; the archive stripped the XML tags, so element names
     below are reconstructed from the surviving values. Two spots where the
     original element name could not be recovered are marked inline. -->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
	<modelVersion>4.0.0</modelVersion>

	<groupId>com.examples</groupId>
	<artifactId>MillionSongsDatabase</artifactId>
	<version>0.0.1-SNAPSHOT</version>
	<packaging>jar</packaging>

	<name>MillionSongsDatabase</name>
	<url>http://maven.apache.org</url>

	<properties>
		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
		<jdk.version>1.8</jdk.version>
		<spark.version>2.0.0</spark.version>
	</properties>

	<dependencies>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-core_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-sql_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-streaming_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.bahir</groupId>
			<artifactId>spark-streaming-twitter_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-mllib_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-hive_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-graphx_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-yarn_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-network-shuffle_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-streaming-kafka_2.10</artifactId>
			<version>1.6.2</version>
		</dependency>
		<dependency>
			<groupId>org.apache.spark</groupId>
			<artifactId>spark-streaming-flume_2.11</artifactId>
			<version>${spark.version}</version>
		</dependency>
		<dependency>
			<groupId>com.databricks</groupId>
			<artifactId>spark-csv_2.11</artifactId>
			<version>1.3.0</version>
		</dependency>
		<dependency>
			<groupId>mysql</groupId>
			<artifactId>mysql-connector-java</artifactId>
			<version>5.1.38</version>
		</dependency>
		<dependency>
			<groupId>junit</groupId>
			<artifactId>junit</artifactId>
			<version>3.8.1</version>
			<scope>test</scope>
		</dependency>
	</dependencies>

	<build>
		<plugins>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-eclipse-plugin</artifactId>
				<version>2.9</version>
				<configuration>
					<downloadSources>true</downloadSources>
					<downloadJavadocs>false</downloadJavadocs>
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-compiler-plugin</artifactId>
				<version>3.5.1</version>
				<configuration>
					<source>${jdk.version}</source>
					<target>${jdk.version}</target>
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-shade-plugin</artifactId>
				<version>2.4.3</version>
				<configuration>
					<!-- a single flag with value "true" was set here; its element
					     name did not survive the archive formatting -->
				</configuration>
			</plugin>
			<plugin>
				<groupId>org.apache.maven.plugins</groupId>
				<artifactId>maven-assembly-plugin</artifactId>
				<version>2.4.1</version>
				<configuration>
					<descriptorRefs>
						<descriptorRef>jar-with-dependencies</descriptorRef>
					</descriptorRefs>
					<archive>
						<manifest>
							<mainClass>com.example.RandomForest.SongPredictionusingLinear</mainClass>
						</manifest>
					</archive>
					<!-- enclosing element name lost in the archive; the surviving
					     name/value pair was:
					     oozie.launcher.mapreduce.job.user.classpath.first = true -->
				</configuration>
				<executions>
					<execution>
						<id>make-assembly</id>
						<phase>package</phase>
						<goals>
							<goal>single</goal>
						</goals>
					</execution>
				</executions>
			</plugin>
		</plugins>
	</build>
</project>
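A note on this build, hedged because the replies in this digest were truncated by the archive: the java.lang.ClassNotFoundException: text.DefaultSource quoted above is the classic symptom of a fat jar in which the META-INF/services/org.apache.spark.sql.sources.DataSourceRegister files collided. Both spark-sql and spark-csv ship one, and plain shading or assembly keeps only a single copy, which can lose the registration of the built-in "text" source. A commonly suggested fix, sketched here against the shade plugin version already in the pom (untested for this exact project), is to merge the service files when shading:

	<plugin>
		<groupId>org.apache.maven.plugins</groupId>
		<artifactId>maven-shade-plugin</artifactId>
		<version>2.4.3</version>
		<executions>
			<execution>
				<phase>package</phase>
				<goals>
					<goal>shade</goal>
				</goals>
				<configuration>
					<transformers>
						<!-- concatenates META-INF/services entries across jars so
						     Spark's data source registrations survive shading -->
						<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
					</transformers>
				</configuration>
			</execution>
		</executions>
	</plugin>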



Re: Running spark from Eclipse and then Jar

2016-12-07 Thread Gmail
> [quoted stack trace, log output, and JavaWordCount listing identical to the messages above; trimmed]



Running spark from Eclipse and then Jar

2016-12-07 Thread im281
[log output and JavaWordCount listing identical to the quotes above; trimmed]