hadoop CompressedWritable 源码
hadoop CompressedWritable 代码
文件路径:/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.IOException;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.DataInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;
import java.util.zip.InflaterInputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/** A base-class for Writables which store themselves compressed and lazily
* inflate on field access. This is useful for large objects whose fields are
* not to be altered during a map or reduce operation: leaving the field data
* compressed makes copying the instance from one file to another much
* faster. */
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class CompressedWritable implements Writable {
  // If non-null, the deflate-compressed serialized form of this instance's
  // fields. Set by readFields(), cleared by ensureInflated() once decoded,
  // and (re)built by write() when no compressed form is cached.
  private byte[] compressed;

  public CompressedWritable() {}

  @Override
  public final void readFields(DataInput in) throws IOException {
    // Lazy deserialization: capture only the raw compressed bytes here.
    // Inflation is deferred to ensureInflated(), so instances that are merely
    // copied from one stream to another never pay the decompression cost.
    compressed = new byte[in.readInt()];
    in.readFully(compressed, 0, compressed.length);
  }

  /** Must be called by all methods which access fields to ensure that the data
   * has been uncompressed. */
  protected void ensureInflated() {
    if (compressed != null) {
      try {
        ByteArrayInputStream deflated = new ByteArrayInputStream(compressed);
        // try-with-resources closes the InflaterInputStream; because the
        // stream created its own Inflater, close() also ends it, releasing
        // its native memory immediately instead of waiting for finalization.
        try (DataInputStream inflater =
            new DataInputStream(new InflaterInputStream(deflated))) {
          readFieldsCompressed(inflater);
        }
        compressed = null;
      } catch (IOException e) {
        // Preserve the original contract: callers of field accessors do not
        // declare IOException, so wrap and rethrow unchecked with the cause.
        throw new RuntimeException(e);
      }
    }
  }

  /**
   * Subclasses implement this instead of {@link #readFields(DataInput)}.
   * @param in data input.
   * @throws IOException raised on errors performing I/O.
   */
  protected abstract void readFieldsCompressed(DataInput in)
      throws IOException;

  @Override
  public final void write(DataOutput out) throws IOException {
    if (compressed == null) {
      // No cached compressed form: serialize the fields through a Deflater
      // at BEST_SPEED (throughput matters more than ratio for shuffle data).
      ByteArrayOutputStream deflated = new ByteArrayOutputStream();
      Deflater deflater = new Deflater(Deflater.BEST_SPEED);
      try {
        try (DataOutputStream dout =
            new DataOutputStream(new DeflaterOutputStream(deflated, deflater))) {
          writeCompressed(dout);
        }
      } finally {
        // Release the Deflater's native buffers even if writeCompressed
        // throws; previously a failure here leaked them until finalization.
        deflater.end();
      }
      compressed = deflated.toByteArray();
    }
    out.writeInt(compressed.length);
    out.write(compressed);
  }

  /**
   * Subclasses implement this instead of {@link #write(DataOutput)}.
   *
   * @param out data output.
   * @throws IOException raised on errors performing I/O.
   */
  protected abstract void writeCompressed(DataOutput out) throws IOException;
}
相关信息
相关文章
hadoop ArrayPrimitiveWritable 源码
0
赞
热门推荐
-
2、 - 优质文章
-
3、 gate.io
-
8、 golang
-
9、 openharmony
-
10、 Vue中input框自动聚焦