Spark JavaSerializer source code
File path: /core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.serializer

import java.io._
import java.nio.ByteBuffer

import scala.reflect.ClassTag

import org.apache.spark.SparkConf
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.internal.config._
import org.apache.spark.util.{ByteBufferInputStream, ByteBufferOutputStream, Utils}
private[spark] class JavaSerializationStream(
    out: OutputStream,
    counterReset: Int,
    extraDebugInfo: Boolean)
  extends SerializationStream {

  private val objOut = new ObjectOutputStream(out)
  private var counter = 0

  /**
   * Calling reset to avoid memory leak:
   * http://stackoverflow.com/questions/1281549/memory-leak-traps-in-the-java-standard-api
   * But only call it every 100th time to avoid bloated serialization streams (when
   * the stream 'resets' object class descriptions have to be re-written)
   */
  def writeObject[T: ClassTag](t: T): SerializationStream = {
    try {
      objOut.writeObject(t)
    } catch {
      case e: NotSerializableException if extraDebugInfo =>
        throw SerializationDebugger.improveException(t, e)
    }
    counter += 1
    if (counterReset > 0 && counter >= counterReset) {
      objOut.reset()
      counter = 0
    }
    this
  }

  def flush(): Unit = { objOut.flush() }

  def close(): Unit = { objOut.close() }
}
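The counterReset logic above works around a well-known ObjectOutputStream pitfall: the stream keeps a handle table referencing every object it has written, so a long-lived stream both leaks memory and silently skips re-serializing mutated objects. Calling reset() clears that table at the cost of re-writing class descriptors, which is why Spark only resets every counterReset writes (spark.serializer.objectStreamReset, 100 by default). A minimal standalone sketch of the underlying JDK behavior, not Spark code:

import java.io._

object ResetDemo {
  def main(args: Array[String]): Unit = {
    val out = new ObjectOutputStream(new ByteArrayOutputStream())
    val list = new java.util.ArrayList[Int]()
    out.writeObject(list)  // full encoding; a handle to `list` is cached
    list.add(42)
    out.writeObject(list)  // only a back-reference is written: the mutation is lost
    out.reset()            // drop the handle table; class descriptors must be re-sent
    out.writeObject(list)  // the updated list is fully re-encoded
    out.close()
  }
}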
private[spark] class JavaDeserializationStream(in: InputStream, loader: ClassLoader)
  extends DeserializationStream {

  private val objIn = new ObjectInputStream(in) {
    override def resolveClass(desc: ObjectStreamClass): Class[_] =
      try {
        // scalastyle:off classforname
        Class.forName(desc.getName, false, loader)
        // scalastyle:on classforname
      } catch {
        case e: ClassNotFoundException =>
          JavaDeserializationStream.primitiveMappings.getOrElse(desc.getName, throw e)
      }

    override def resolveProxyClass(ifaces: Array[String]): Class[_] = {
      // scalastyle:off classforname
      val resolved = ifaces.map(iface => Class.forName(iface, false, loader))
      // scalastyle:on classforname
      java.lang.reflect.Proxy.getProxyClass(loader, resolved: _*)
    }
  }

  def readObject[T: ClassTag](): T = objIn.readObject().asInstanceOf[T]

  def close(): Unit = { objIn.close() }
}
private object JavaDeserializationStream {
  val primitiveMappings = Map[String, Class[_]](
    "boolean" -> classOf[Boolean],
    "byte" -> classOf[Byte],
    "char" -> classOf[Char],
    "short" -> classOf[Short],
    "int" -> classOf[Int],
    "long" -> classOf[Long],
    "float" -> classOf[Float],
    "double" -> classOf[Double],
    "void" -> classOf[Unit])
}
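JavaDeserializationStream overrides resolveClass so that classes are looked up in the caller-supplied ClassLoader (for example, the loader holding classes shipped from the driver) rather than the JVM default. Class.forName cannot resolve primitive type names such as "int", which appear in a stream whenever a primitive Class object (classOf[Int], i.e. java.lang.Integer.TYPE) was serialized, so the override falls back to primitiveMappings, mirroring the fallback the JDK's default resolveClass performs internally. A standalone sketch (not Spark code) of the case the mapping covers:

import java.io._

object PrimitiveClassDemo {
  def main(args: Array[String]): Unit = {
    val bos = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(bos)
    out.writeObject(java.lang.Integer.TYPE)  // serializes the primitive class `int`
    out.close()

    // Class.forName("int", false, loader) throws ClassNotFoundException, so an
    // override that only delegates to Class.forName needs the primitiveMappings
    // fallback to read this stream back. The default ObjectInputStream succeeds:
    val in = new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray))
    println(in.readObject())  // prints: int
    in.close()
  }
}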
private[spark] class JavaSerializerInstance(
    counterReset: Int,
    extraDebugInfo: Boolean,
    defaultClassLoader: ClassLoader)
  extends SerializerInstance {

  override def serialize[T: ClassTag](t: T): ByteBuffer = {
    val bos = new ByteBufferOutputStream()
    val out = serializeStream(bos)
    out.writeObject(t)
    out.close()
    bos.toByteBuffer
  }

  override def deserialize[T: ClassTag](bytes: ByteBuffer): T = {
    val bis = new ByteBufferInputStream(bytes)
    val in = deserializeStream(bis)
    in.readObject()
  }

  override def deserialize[T: ClassTag](bytes: ByteBuffer, loader: ClassLoader): T = {
    val bis = new ByteBufferInputStream(bytes)
    val in = deserializeStream(bis, loader)
    in.readObject()
  }

  override def serializeStream(s: OutputStream): SerializationStream = {
    new JavaSerializationStream(s, counterReset, extraDebugInfo)
  }

  override def deserializeStream(s: InputStream): DeserializationStream = {
    new JavaDeserializationStream(s, defaultClassLoader)
  }

  def deserializeStream(s: InputStream, loader: ClassLoader): DeserializationStream = {
    new JavaDeserializationStream(s, loader)
  }
}
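JavaSerializerInstance builds its one-shot serialize/deserialize methods on top of the streaming API, buffering through ByteBufferOutputStream and ByteBufferInputStream. A hedged usage sketch of the round trip (obtaining the instance through the public JavaSerializer defined below, since JavaSerializerInstance itself is private[spark]; assumes spark-core on the classpath):

import org.apache.spark.SparkConf
import org.apache.spark.serializer.JavaSerializer

object RoundTripDemo {
  def main(args: Array[String]): Unit = {
    val instance = new JavaSerializer(new SparkConf()).newInstance()
    val buffer = instance.serialize(Seq(1, 2, 3))          // one-shot: value -> ByteBuffer
    val restored = instance.deserialize[Seq[Int]](buffer)  // ByteBuffer -> value
    assert(restored == Seq(1, 2, 3))
  }
}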
/**
 * :: DeveloperApi ::
 * A Spark serializer that uses Java's built-in serialization.
 *
 * @note This serializer is not guaranteed to be wire-compatible across different versions of
 * Spark. It is intended to be used to serialize/de-serialize data within a single
 * Spark application.
 */
@DeveloperApi
class JavaSerializer(conf: SparkConf) extends Serializer with Externalizable {
  private var counterReset = conf.get(SERIALIZER_OBJECT_STREAM_RESET)
  private var extraDebugInfo = conf.get(SERIALIZER_EXTRA_DEBUG_INFO)

  protected def this() = this(new SparkConf())  // For deserialization only

  override def newInstance(): SerializerInstance = {
    val classLoader = defaultClassLoader.getOrElse(Thread.currentThread.getContextClassLoader)
    new JavaSerializerInstance(counterReset, extraDebugInfo, classLoader)
  }

  override def writeExternal(out: ObjectOutput): Unit = Utils.tryOrIOException {
    out.writeInt(counterReset)
    out.writeBoolean(extraDebugInfo)
  }

  override def readExternal(in: ObjectInput): Unit = Utils.tryOrIOException {
    counterReset = in.readInt()
    extraDebugInfo = in.readBoolean()
  }
}
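Note that JavaSerializer is itself Externalizable: when the serializer object is shipped to executors, only the two settings it actually needs (counterReset and extraDebugInfo) travel with it, and the no-arg constructor rebuilds the rest from a fresh SparkConf on the receiving side. A hedged configuration sketch, using the documented Spark keys behind SERIALIZER_OBJECT_STREAM_RESET and SERIALIZER_EXTRA_DEBUG_INFO:

import org.apache.spark.SparkConf

object JavaSerializerConf {
  val conf = new SparkConf()
    .set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
    .set("spark.serializer.objectStreamReset", "100")  // counterReset: reset the stream every N writes
    .set("spark.serializer.extraDebugInfo", "true")    // enrich NotSerializableException with the reference path
}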