import org.apache.log4j.{Level, Logger}
import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Minimal GraphX demo: builds a small property graph of people (vertices)
 * and weighted "likes" edges, then prints its contents to stdout.
 *
 * Fixes over the original:
 *  - `main` uses `: Unit =` instead of deprecated procedure syntax;
 *  - the graph is actually printed (the original built it and did nothing,
 *    despite the object being named RDD_println);
 *  - the SparkContext is stopped in a `finally` so the local Spark runtime
 *    is always released, even if an action throws.
 */
object RDD_println {

  def main(args: Array[String]): Unit = {
    // Silence noisy framework logging so the demo's own output is readable.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    val conf = new SparkConf().setAppName("XXXAppName").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Vertices: (vertexId, (name, age))
      val vertexArray = Array(
        (1L, ("Alice", 28)), (2L, ("Bob", 27)),
        (3L, ("Charlie", 65)), (4L, ("David", 42)),
        (5L, ("Ed", 55)), (6L, ("Fran", 50))
      )
      // Edges: Edge(srcId, dstId, weight)
      val edgeArray = Array(
        Edge(2L, 1L, 7), Edge(2L, 4L, 2), Edge(3L, 2L, 4),
        Edge(3L, 6L, 3), Edge(4L, 1L, 1), Edge(5L, 2L, 2),
        Edge(5L, 3L, 8), Edge(5L, 6L, 3)
      )

      val vertexRDD: RDD[(Long, (String, Int))] = sc.parallelize(vertexArray)
      val edgeRDD: RDD[Edge[Int]] = sc.parallelize(edgeArray)

      val graph: Graph[(String, Int), Int] = Graph(vertexRDD, edgeRDD)

      // Print the graph so the program actually does what its name says.
      // collect() is safe here: the demo data set is tiny.
      graph.vertices.collect().foreach { case (id, (name, age)) =>
        println(s"vertex $id: $name is $age years old")
      }
      graph.triplets.collect().foreach { t =>
        println(s"edge ${t.srcAttr._1} -> ${t.dstAttr._1} (weight ${t.attr})")
      }
    } finally {
      // Always release the SparkContext, even if an action above fails.
      sc.stop()
    }
  }
}