Notes — What is notes.io?

Notes brand slogan

Notes - notes.io

import scala.Tuple2;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
//import org.apache.spark.api.java.function.*;

import java.io.IOException;
import java.io.NotSerializableException;
import java.io.Serializable;
import java.text.NumberFormat;
import java.util.*;
import java.util.regex.Pattern;

/**
 * Spark word-count driver that reuses a Hadoop-style mapper.
 *
 * <p>Reads a text file (args[0]), tokenizes each line into (word, 1) pairs via
 * {@link #mapper}, reduces by key, and writes the counts to args[1].
 * Serializable because the lambdas passed to Spark capture this class's
 * static members and Spark ships closures to executors.
 */
public final class wordcount implements Serializable {

    private static final Pattern SPACE = Pattern.compile(" ");

    // Shared IntWritable(1): safe to reuse across emits because its value is
    // never mutated — every pair carries the constant count 1.
    private final static IntWritable one = new IntWritable(1);

    /**
     * Tokenizes one line of text and emits a (word, 1) pair per token through
     * the supplied collector, mimicking a classic Hadoop map() step.
     *
     * @param value  the input line wrapped in a Hadoop {@code Text}
     * @param output collector accumulating the emitted pairs; must be an
     *               instance of the project's {@code outputcollector}, since
     *               its accumulated list is what gets returned
     * @return the list of (word, count=1) pairs gathered by the collector
     * @throws IOException if {@code output.collect} fails
     */
    public static List<Tuple2<Text, IntWritable>> mapper(Text value, OutputCollector<Text, IntWritable> output) throws IOException {
        String line = value.toString();
        StringTokenizer tokenizer = new StringTokenizer(line);
        while (tokenizer.hasMoreTokens()) {
            // A fresh Text per token is required: the collector stores the
            // reference, so reusing one instance would alias every entry.
            Text word = new Text();
            word.set(tokenizer.nextToken());
            output.collect(word, one);
        }
        return ((outputcollector<Text, IntWritable>) output).getList();
    }

    public static void main(String[] args) throws Exception {

        // Bug fix: args[1] is used below as the output path, so TWO arguments
        // are required (the original only checked for one and then crashed
        // with ArrayIndexOutOfBoundsException on saveAsTextFile).
        if (args.length < 2) {
            System.err.println("Usage: wordcount <input-file> <output-dir>");
            System.exit(1);
        }

        // NOTE(review): master URL and executor memory are hard-coded for a
        // specific cluster; prefer supplying these via spark-submit.
        SparkConf sparkConf = new SparkConf()
                .setMaster("spark://IMPETUS-I0203:7077")
                .setAppName("wordcount")
                .set("spark.executor.memory", "2g");
        JavaSparkContext ctx = new JavaSparkContext(sparkConf);
        JavaRDD<String> lines = ctx.textFile(args[0], 1);

        // For each input line, run the Hadoop-style mapper and flatten the
        // resulting (word, 1) pairs into a single pair RDD. (The original
        // also built an unused `words` RDD here; it has been removed.)
        JavaPairRDD<Text, IntWritable> ones = lines.flatMapToPair(s -> {
            OutputCollector<Text, IntWritable> output = new outputcollector<Text, IntWritable>();
            return wordcount.mapper(new Text(s), output);
        });

        // Convert the Hadoop Writable types to plain String/Integer so the
        // pairs are cleanly serializable and comparable for the shuffle.
        JavaPairRDD<String, Integer> one = ones.mapToPair(a -> new Tuple2<String, Integer>(a._1.toString(), a._2.get()));
        JavaPairRDD<String, Integer> counts = one.reduceByKey((a, b) -> a + b);

        counts.saveAsTextFile(args[1]);
        ctx.stop();
    }
}









/*
import scala.Tuple2;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.PairFunction;

import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;

public final class wordcount {
private static final Pattern SPACE = Pattern.compile(" ");

public static void main(String[] args) throws Exception {

if (args.length < 2) {
System.err.println("Use arguments to give input & output file locations");
System.exit(1);
}

SparkConf sparkConf = new SparkConf().setMaster("spark://IMPETUS-I0203:7077").setAppName("wordcount");
JavaSparkContext ctx = new JavaSparkContext(sparkConf);
JavaRDD<String> lines = ctx.textFile(args[0], 1);

// A Pair RDD whose key=product_name, value=entire record
JavaPairRDD<String, String> product_name =
lines.mapToPair(s->new Tuple2<String, String>(s.split(" ")[0], s));

// reduce product name RDD by key
JavaPairRDD<String, String> reduced_product_name =
product_name.reduceByKey(new Function2<String, String, String>() {
@Override
public String call(String s, String d) {
return new String(s.split(" ")[0] + " " + s.split(" ")[1])
+ " "
+ (Integer.parseInt(s.split(" ")[2]) + Integer.parseInt(d
.split(" ")[2]));
}
});

reduced_product_name.cache();

reduced_product_name
.saveAsTextFile(args[1]+1);

// A pair RDD whose key=product_type, value=entire record
JavaPairRDD<String, String> product_type =
reduced_product_name
.mapToPair(new PairFunction<Tuple2<String, String>, String,String>() {
public Tuple2<String, String> call(Tuple2<String, String> keyValue) {
return new Tuple2<String, String>(keyValue._2().split(" ")[1],
keyValue._2());
}
});

// reduce product_name RDD by key
JavaPairRDD<String, String> reduced_product_type =
product_type.reduceByKey((s,d)->new String(s.split(" ")[0]
+ " "+s.split(" ")[1]
+ " "
+ (Integer.parseInt(s.split(" ")[2]) + Integer.parseInt(d
.split(" ")[2]))));

reduced_product_type
.saveAsTextFile(args[1]+2);

ctx.stop();
}
}
*/




/*
* JavaRDD<String> words = lines.flatMap(new FlatMapFunction<String,
* String>() {
*
* @Override public Iterable<String> call(String s) { return
* Arrays.asList(SPACE.split(s)); } });
*/


/*
* JavaPairRDD<String, Integer> ones = words.mapToPair(new
* PairFunction<String, String, Integer>() {
*
* @Override public Tuple2<String, Integer> call(String s) { return new
* Tuple2<String, Integer>(s,1); } });
*/

/*
* JavaPairRDD<String, Integer> counts = ones.reduceByKey(new
* Function2<Integer, Integer, Integer>() {
*
* @Override public Integer call(Integer i1, Integer i2) { return i1 + i2; }
* });
*/



     
 
what is notes.io
 

Notes.io is a web-based application for taking notes. You can take your notes and share them with other people. If you like taking long notes, notes.io is designed for you. To date, over 8,000,000,000 notes created and continuing...

With notes.io;

  • * You can take a note from anywhere and any device with internet connection.
  • * You can share the notes in social platforms (YouTube, Facebook, Twitter, instagram etc.).
  • * You can quickly share your contents without website, blog and e-mail.
  • * You don't need to create any Account to share a note. As you wish you can use quick, easy and best shortened notes with sms, websites, e-mail, or messaging services (WhatsApp, iMessage, Telegram, Signal).
  • * Notes.io has fabulous infrastructure design for a short link and allows you to share the note as an easy and understandable link.

Fast: Notes.io is built for speed and performance. You can take a note quickly and browse your archive.

Easy: Notes.io doesn’t require installation. Just write and share note!

Short: Notes.io’s url just 8 character. You’ll get shorten link of your note when you want to share. (Ex: notes.io/q )

Free: Notes.io works for 12 years and has been free since the day it was started.


You immediately create your first note and start sharing with the ones you wish. If you want to contact us, you can use the following communication channels;


Email: [email protected]

Twitter: http://twitter.com/notesio

Instagram: http://instagram.com/notes.io

Facebook: http://facebook.com/notesio



Regards;
Notes.io Team

     
 
Shortened Note Link
 
 
Loading Image
 
     
 
Long File
 
 

Notes larger than 18KB cannot be shortened.

To be smaller than 18KB, please organize your notes, or sign in.