From 6a35ea80b45e3d1c507fc1d82b0b44c4adef87f5 Mon Sep 17 00:00:00 2001
From: Larry Xiao
Date: Mon, 25 Aug 2014 10:27:31 +0800
Subject: [PATCH] [TEST] VertexRDD.apply mergeFunc

---
 .../apache/spark/graphx/VertexRDDSuite.scala       | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/graphx/src/test/scala/org/apache/spark/graphx/VertexRDDSuite.scala b/graphx/src/test/scala/org/apache/spark/graphx/VertexRDDSuite.scala
index cc86bafd2d644..f8fc7ace09773 100644
--- a/graphx/src/test/scala/org/apache/spark/graphx/VertexRDDSuite.scala
+++ b/graphx/src/test/scala/org/apache/spark/graphx/VertexRDDSuite.scala
@@ -99,4 +99,21 @@ class VertexRDDSuite extends FunSuite with LocalSparkContext {
     }
   }
 
+  test("mergeFunc") {
+    // Check that VertexRDD.apply combines the attributes of duplicate vertices with mergeFunc.
+    withSpark { sc =>
+      // The default VertexRDD constructor removes duplicate entries arbitrarily, so it
+      // cannot be used to verify the merge behavior. For example, this assertion
+      // would not hold:
+      //   val verts = VertexRDD(sc.parallelize(List((0L, 1), (0L, 2), (1L, 3), (1L, 3), (1L, 3))))
+      //   assert(verts.collect.toSet == Set((0L, 1), (0L, 2), (1L, 3), (1L, 3), (1L, 3)))
+
+      val verts = sc.parallelize(List((0L, 1), (0L, 2), (1L, 3), (1L, 3), (1L, 3)))
+      val edges = EdgeRDD.fromEdges(sc.parallelize(List.empty[Edge[Int]]))
+      val rdd = VertexRDD(verts, edges, 0, (a: Int, b: Int) => a + b)
+      // Duplicates are merged by summing: 1 + 2 = 3 for vertex 0 and 3 + 3 + 3 = 9 for vertex 1.
+      assert(rdd.collect.toSet == Set((0L, 3), (1L, 9)))
+    }
+  }
+
 }
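
For reference (outside the patch itself), a minimal sketch of how the VertexRDD.apply overload exercised by this test might be used interactively, assuming an active SparkContext named sc (for example in a spark-shell session); the example data and value names are hypothetical. Duplicate vertex attributes are combined with the supplied merge function instead of being dropped arbitrarily:

    import org.apache.spark.graphx.{Edge, EdgeRDD, VertexRDD}

    // Hypothetical example data: vertex 5L appears twice with different attributes.
    val vertices = sc.parallelize(Seq((5L, 10), (5L, 20), (6L, 1)))
    val edges = EdgeRDD.fromEdges(sc.parallelize(Seq.empty[Edge[Int]]))
    // With a summing mergeFunc, the two entries for 5L collapse to (5L, 30).
    val merged = VertexRDD(vertices, edges, 0, (a: Int, b: Int) => a + b)
    merged.collect.foreach(println)  // expected: (5,30) and (6,1), in either order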