Spark SparkTC source code
File path: /examples/src/main/scala/org/apache/spark/examples/SparkTC.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// scalastyle:off println
package org.apache.spark.examples

import scala.collection.mutable
import scala.util.Random

import org.apache.spark.sql.SparkSession
/**
 * Transitive closure on a graph.
 */
object SparkTC {
  val numEdges = 200
  val numVertices = 100
  val rand = new Random(42)

  def generateGraph: Seq[(Int, Int)] = {
    val edges: mutable.Set[(Int, Int)] = mutable.Set.empty
    while (edges.size < numEdges) {
      val from = rand.nextInt(numVertices)
      val to = rand.nextInt(numVertices)
      if (from != to) edges.+=((from, to))
    }
    edges.toSeq
  }
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder
      .appName("SparkTC")
      .getOrCreate()
    val slices = if (args.length > 0) args(0).toInt else 2
    var tc = spark.sparkContext.parallelize(generateGraph, slices).cache()

    // Linear transitive closure: each round grows paths by one edge,
    // by joining the graph's edges with the already-discovered paths.
    // e.g. join the path (y, z) from the TC with the edge (x, y) from
    // the graph to obtain the path (x, z).

    // Because join() joins on keys, the edges are stored in reversed order.
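    // For example, if tc holds the paths (1, 2) and (2, 3), reversing gives
    // edges = {(2, 1), (3, 2)}; joining tc's (2, 3) with edges' (2, 1)
    // yields (2, (3, 1)), which projects to the new path (1, 3).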
    val edges = tc.map(x => (x._2, x._1))

    // This join is iterated until a fixed point is reached.
    var oldCount = 0L
    var nextCount = tc.count()
    do {
      oldCount = nextCount
      // Perform the join, obtaining an RDD of (y, (z, x)) pairs,
      // then project the result to obtain the new (x, z) paths.
      tc = tc.union(tc.join(edges).map(x => (x._2._2, x._2._1))).distinct().cache()
      nextCount = tc.count()
    } while (nextCount != oldCount)
println(s"TC has ${tc.count()} edges.")
spark.stop()
}
}
// scalastyle:on println
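The job above reaches a fixed point when an iteration adds no new pairs, at which point the count stops growing and the loop exits. As a minimal sketch of the same idea without Spark (the TCSketch object and transitiveClosure method below are illustrative names, not part of the Spark sources), the closure can be computed on an in-memory Set:

object TCSketch {
  // Repeatedly extend paths by one edge until no new pairs appear,
  // mirroring the RDD join/union/distinct loop in SparkTC.
  def transitiveClosure(edges: Set[(Int, Int)]): Set[(Int, Int)] = {
    var tc = edges
    var grown = true
    while (grown) {
      // Extend every known path (x, y) with every edge (y, z) to get (x, z).
      val extended = for {
        (x, y) <- tc
        (y2, z) <- edges
        if y == y2
      } yield (x, z)
      val next = tc ++ extended
      grown = next.size > tc.size
      tc = next
    }
    tc
  }

  def main(args: Array[String]): Unit = {
    // 1 -> 2 -> 3 -> 4: the closure adds (1, 3), (2, 4) and (1, 4).
    println(transitiveClosure(Set((1, 2), (2, 3), (3, 4))))
  }
}

Within a Spark distribution, the bundled example itself can typically be launched with ./bin/run-example SparkTC [slices], where the optional argument sets the number of partitions for the edge RDD (defaulting to 2, per the code above).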