GraphX
Process of GraphX

scala> import org.apache.spark._

import org.apache.spark._

scala> import org.apache.spark.rdd.RDD


import org.apache.spark.rdd.RDD

scala> import org.apache.spark.graphx._


import org.apache.spark.graphx._

................................................................................
Creating Vertices Table ---->
-----------------------------
scala> val vertices=Array((1L, ("A")),(2L, ("B")),(3L, ("C")))
vertices: Array[(Long, String)] = Array((1,A), (2,B), (3,C))

scala> val vRDD= sc.parallelize(vertices)


vRDD: org.apache.spark.rdd.RDD[(Long, String)] = ParallelCollectionRDD[0] at
parallelize at <console>:31

scala> vRDD.take(1)
res0: Array[(Long, String)] = Array((1,A))
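
Note: GraphX vertex identifiers must be of type org.apache.spark.graphx.VertexId, an alias for Long, which is why the IDs above carry an L suffix. As a minimal sketch (not part of the original session), the same array with its type written out:

val typedVertices: Array[(VertexId, String)] = Array((1L, "A"), (2L, "B"), (3L, "C"))  // VertexId comes from the graphx._ import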

................................................................................
Creating Edges Table ------>
-------------------------------
scala> val edges=Array(Edge(1L,2L,1800),Edge(2L,3L,800),Edge(3L,1L,1400))
edges: Array[org.apache.spark.graphx.Edge[Int]] = Array(Edge(1,2,1800),
Edge(2,3,800), Edge(3,1,1400))

scala> val eRDD=sc.parallelize(edges)


eRDD: org.apache.spark.rdd.RDD[org.apache.spark.graphx.Edge[Int]] =
ParallelCollectionRDD[1] at parallelize at <console>:31

scala> eRDD.take(2)
res1: Array[org.apache.spark.graphx.Edge[Int]] = Array(Edge(1,2,1800),
Edge(2,3,800))
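
Each Edge carries a source vertex ID, a destination vertex ID, and an attribute (here an Int, such as a distance). A quick sketch (not run in the original session) that reads those fields back out:

eRDD.map(e => (e.srcId, e.dstId, e.attr)).collect.foreach(println)  // prints (1,2,1800), (2,3,800), (3,1,1400)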
..................................................................................

scala> val nowhere = "nowhere"


nowhere: String = nowhere

scala> val graph = Graph(vRDD,eRDD, nowhere)


graph: org.apache.spark.graphx.Graph[String,Int] =
org.apache.spark.graphx.impl.GraphImpl@27af34df

scala> graph.vertices.collect.foreach(println)
(1,A)
(2,B)
(3,C)

scala> graph.edges.collect.foreach(println)
Edge(1,2,1800)
Edge(2,3,800)
Edge(3,1,1400)
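
The third argument to Graph() is the default vertex attribute: any vertex ID that appears in the edge RDD but is missing from the vertex RDD is materialized with this value. A minimal sketch of that behavior (the edge to the undefined vertex 4L is hypothetical, not from the original session):

val g2 = Graph(vRDD, sc.parallelize(Array(Edge(1L, 4L, 100))), nowhere)
g2.vertices.collect.foreach(println)  // the undefined vertex shows up as (4,nowhere)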
..................................................................................

scala> val numvertice = graph.numVertices


numvertice: Long = 3

scala> val numroutes = graph.numEdges


numroutes: Long = 3

scala> graph.edges.filter { case Edge(src, dst, prop) => prop > 1000 }.collect.foreach(println)
Edge(1,2,1800)
Edge(3,1,1400)

scala> graph.triplets.take(3).foreach(println)
((1,A),(2,B),1800)
((2,B),(3,C),800)
((3,C),(1,A),1400)
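
A triplet joins an edge with the attributes of both endpoints, exposed as srcAttr, dstAttr, and attr. As an illustrative sketch (not from the original session), the triplets can be rendered as readable strings:

graph.triplets.map(t => t.srcAttr + " -> " + t.dstAttr + " : " + t.attr).collect.foreach(println)  // e.g. "A -> B : 1800"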

scala> val f= graph.inDegrees


f: org.apache.spark.graphx.VertexRDD[Int] = VertexRDDImpl[25] at RDD at
VertexRDD.scala:57

scala> f.collect()
res6: Array[(org.apache.spark.graphx.VertexId, Int)] = Array((1,1), (2,1), (3,1))
..................................................................................

scala> val fv= graph.outDegrees


fv: org.apache.spark.graphx.VertexRDD[Int] = VertexRDDImpl[33] at RDD at
VertexRDD.scala:57

scala> fv.collect()
res8: Array[(org.apache.spark.graphx.VertexId, Int)] = Array((1,1), (2,1), (3,1))

scala> val totalDegrees = graph.degrees


totalDegrees: org.apache.spark.graphx.VertexRDD[Int] = VertexRDDImpl[29] at RDD at
VertexRDD.scala:57

scala> totalDegrees.collect()
res9: Array[(org.apache.spark.graphx.VertexId, Int)] = Array((1,2), (2,2), (3,2))
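Since every vertex in this three-node cycle has exactly one incoming and one outgoing edge, all the degree counts above come out equal. A small sketch (not from the original session) for picking the vertex with the highest total degree:

def max(a: (VertexId, Int), b: (VertexId, Int)) = if (a._2 > b._2) a else b  // keep the pair with the larger degree
graph.degrees.reduce(max)  // here all three vertices tie at degree 2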
..................................................................................

scala> import org.apache.spark._


import org.apache.spark._

scala> import org.apache.spark.rdd.RDD


import org.apache.spark.rdd.RDD

scala> import org.apache.spark.graphx._


import org.apache.spark.graphx._

scala> val vertices=Array((3L, ("rxin","student")),(7L, ("jgonzal","postdoc")),(5L, ("franklin","prof")), (2L, ("istoica", "prof")))
vertices: Array[(Long, (String, String))] = Array((3,(rxin,student)), (7,(jgonzal,postdoc)), (5,(franklin,prof)), (2,(istoica,prof)))

scala> val vRDD= sc.parallelize(vertices)


vRDD: org.apache.spark.rdd.RDD[(Long, (String, String))] = ParallelCollectionRDD[0]
at parallelize at <console>:31

scala> val edges=Array(Edge(3L, 7L, "collab"), Edge(5L, 3L, "advisor"), Edge(2L, 5L, "colleague"), Edge(5L, 7L, "pi"))
edges: Array[org.apache.spark.graphx.Edge[String]] = Array(Edge(3,7,collab), Edge(5,3,advisor), Edge(2,5,colleague), Edge(5,7,pi))

scala> val eRDD=sc.parallelize(edges)


eRDD: org.apache.spark.rdd.RDD[org.apache.spark.graphx.Edge[String]] =
ParallelCollectionRDD[1] at parallelize at <console>:31

scala> val defaultUser = ("john doe", "missing")


defaultUser: (String, String) = (john doe,missing)

scala> val graph = Graph(vRDD,eRDD,defaultUser)


graph: org.apache.spark.graphx.Graph[(String, String),String] =
org.apache.spark.graphx.impl.GraphImpl@1f55bb2
...................................................................................

scala> graph.vertices.filter {case (id, (name, pos)) => pos == "postdoc" }.count
res0: Long = 1

scala> graph.edges.filter(e => e.srcId > e.dstId).count


res2: Long = 1

scala> graph.edges.filter { case Edge(src, dst, prop) => src > dst }.count
res4: Long = 1
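
The same pattern-matching style works on triplets, where the vertex attributes are (name, position) tuples. A sketch adapted from the GraphX programming guide (not run in the original session):

graph.triplets.map(t => t.srcAttr._1 + " is the " + t.attr + " of " + t.dstAttr._1).collect.foreach(println)  // e.g. "franklin is the advisor of rxin"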
...................................................................................
PageRank ::>
The argument to pageRank is the convergence tolerance: iteration continues until the ranks change by less than 0.0001.
scala> val rank = graph.pageRank(0.0001).vertices
rank: org.apache.spark.graphx.VertexRDD[Double] = VertexRDDImpl[106] at RDD at
VertexRDD.scala:57

scala> println(rank.collect().mkString("\n"))
(2,0.5037267911816329)
(3,0.8997819807481918)
(5,0.9318945636860209)
(7,1.6645966643841548)

scala> graph.vertices.collect.foreach(println)
(2,(istoica,prof))
(3,(rxin,student))
(5,(franklin,prof))
(7,(jgonzal,postdoc))

scala> graph.edges.collect.foreach(println)
Edge(3,7,collab)
Edge(5,3,advisor)
Edge(2,5,colleague)
Edge(5,7,pi)
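
To read the ranks against the usernames printed above, the rank RDD can be joined back to the vertex RDD. A minimal sketch (not from the original session), using the vRDD and rank values defined earlier:

val ranksByUser = vRDD.join(rank).map { case (id, ((name, pos), r)) => (name, r) }
ranksByUser.collect.foreach(println)  // e.g. (jgonzal,1.664...), the highest-ranked vertex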
...................................................................................
