// Combiner for reduceByKey: element-wise sum of two "a-b" encoded integer pairs,
// e.g. ("10-20", "11-22") -> "21-42". Throws NumberFormatException on malformed input.
@Override public String call(String arg0, String arg1) throws Exception {
    // Split each argument once; the original re-split the same string per field.
    String[] left = arg0.split("-");
    String[] right = arg1.split("-");
    int firstSum = Integer.parseInt(left[0]) + Integer.parseInt(right[0]);
    int secondSum = Integer.parseInt(left[1]) + Integer.parseInt(right[1]);
    return firstSum + "-" + secondSum;
} });
/**
 * Maps a space-separated line "key v1 v2" to a pair (key, "v1-v2").
 * E.g. "num 10 20" -> ("num", "10-20").
 *
 * @param arg0 input line with at least three space-separated tokens
 * @return key paired with the two values joined by "-"
 * @throws ArrayIndexOutOfBoundsException if the line has fewer than three tokens
 */
public Tuple2<String, String> call(String arg0) throws Exception {
    // Split once; the original split the same string three times.
    String[] parts = arg0.split(" ");
    return new Tuple2<String, String>(parts[0], parts[1] + "-" + parts[2]);
}
});
// Sums per-key value pairs encoded as "a-b" strings:
// reducing "10-20" and "11-22" yields "21-42".
JavaPairRDD<String, String> rdd2 = rdd1.reduceByKey(new Function2<String, String, String>() {
    @Override
    public String call(String arg0, String arg1) throws Exception {
        // Parse each encoded pair once rather than re-splitting per field
        // (the original called split("-") four times for two strings).
        String[] p0 = arg0.split("-");
        String[] p1 = arg1.split("-");
        int firstSum = Integer.parseInt(p0[0]) + Integer.parseInt(p1[0]);
        int secondSum = Integer.parseInt(p0[1]) + Integer.parseInt(p1[1]);
        return firstSum + "-" + secondSum;
    }
});
// Formats each (key, "a-b") pair back to the original line layout "key a b".
JavaRDD<String> rdd3 = rdd2.map(new Function<Tuple2<String, String>, String>() {
    @Override
    public String call(Tuple2<String, String> arg0) throws Exception {
        // Split the encoded value once instead of twice.
        String[] vals = arg0._2().split("-");
        return arg0._1() + " " + vals[0] + " " + vals[1];
    }
});
System.out.println(rdd3.collect());

输入 (input):
num 10 20
num 11 22
name 22 33
cmj 332 21
输出
[cmj 332 21, num 21 42, name 22 33]
scala> val c = sc.parallelize(List(("a", 1), ("a", 2), ("a", 3), ("b", 1), ("b", 2), ("c", 1)))
c: org.apache.spark.rdd.RDD[(String, Int)] = ParallelCollectionRDD[28] at parallelize at <console>:21

scala> val cm = c.map(e => (e._1, (e._2, 0)))
cm: org.apache.spark.rdd.RDD[(String, (Int, Int))] = MapPartitionsRDD[25] at map at <console>:23

scala> val cr = cm.reduceByKey((e1, e2) => (e1._1 + e2._1, e1._1/2 + e2._1/2))
cr: org.apache.spark.rdd.RDD[(String, (Int, Int))] = ShuffledRDD[26] at reduceByKey at <console>:25

scala> val cz = cr.map(e => (e._1, e._2._1, e._2._2))
cz: org.apache.spark.rdd.RDD[(String, Int, Int)] = MapPartitionsRDD[27] at map at <console>:27

scala> cz.collect
res15: Array[(String, Int, Int)] = Array((b,3,1), (a,6,2), (c,1,0))
.reduceByKey((x,y)=>(x._1+y._1,x._2+y._2))
楼上是对的
num 10 20 转成(num, (10, 20)) key为num
然后按照key进行求和, (key, (10+10, 20+20))
map(x => (x._1, (x._2, x._3))).reduceByKey((s, h) => (s._1 + h._1, s._2 + h._2))