Java class org.apache.hadoop.hbase.protobuf.generated.HFileProtos example source code

Project: ditb    File: FixedFileTrailer.java
/**
 * Write trailer data as protobuf
 * @param output the stream to write the trailer to
 * @throws IOException
 */
void serializeAsPB(DataOutputStream output) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder()
    .setFileInfoOffset(fileInfoOffset)
    .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
    .setUncompressedDataIndexSize(uncompressedDataIndexSize)
    .setTotalUncompressedBytes(totalUncompressedBytes)
    .setDataIndexCount(dataIndexCount)
    .setMetaIndexCount(metaIndexCount)
    .setEntryCount(entryCount)
    .setNumDataIndexLevels(numDataIndexLevels)
    .setFirstDataBlockOffset(firstDataBlockOffset)
    .setLastDataBlockOffset(lastDataBlockOffset)
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    .setComparatorClassName(comparatorClassName)
    .setCompressionCodec(compressionCodec.ordinal());
  if (encryptionKey != null) {
    builder.setEncryptionKey(ByteStringer.wrap(encryptionKey));
  }
  // Unfortunately we need this extra copy to determine the final size of the
  // delimited output; see the use of baos.size() below.
  builder.build().writeDelimitedTo(baos);
  baos.writeTo(output);
  // Pad to make up the difference between variable PB encoding length and the
  // length when encoded as writable under earlier V2 formats. Failure to pad
  // properly, or a PB encoding that is too big, would mean the trailer won't
  // be read in properly by HFile.
  int padding = getTrailerSize() - NOT_PB_SIZE - baos.size();
  if (padding < 0) {
    throw new IOException("Pbuf encoding size exceeded fixed trailer size limit");
  }
  for (int i = 0; i < padding; i++) {
    output.write(0);
  }
}
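The padding arithmetic above is what keeps the variable-length protobuf inside the trailer's fixed on-disk slot: the delimited message, the zero padding, and the non-protobuf version section (NOT_PB_SIZE bytes) must always total getTrailerSize(), so a reader can locate the trailer by seeking a fixed distance back from the end of the file. Below is a minimal, self-contained sketch of the same scheme; TRAILER_SIZE and NOT_PB_SIZE are assumed stand-in values, not taken from HBase.

import java.io.DataOutputStream;
import java.io.IOException;

// Hedged sketch of the fixed-slot padding scheme used by serializeAsPB above.
class TrailerPaddingSketch {
  static final int TRAILER_SIZE = 4096; // assumed stand-in for getTrailerSize()
  static final int NOT_PB_SIZE = 4;     // assumed: bytes reserved for the version section

  static void writePadded(byte[] delimitedPb, DataOutputStream out) throws IOException {
    int padding = TRAILER_SIZE - NOT_PB_SIZE - delimitedPb.length;
    if (padding < 0) {
      // The protobuf outgrew the fixed slot: fail at write time rather than
      // emit a trailer the reader cannot locate.
      throw new IOException("PB encoding exceeded fixed trailer size limit");
    }
    out.write(delimitedPb);
    for (int i = 0; i < padding; i++) {
      out.write(0); // zero padding; the reader skips it after parsing the PB
    }
  }
}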
Project: ditb    File: HFile.java
/**
 * Write out this instance on the passed-in <code>out</code> stream.
 * We write it as a protobuf.
 * @param out
 * @throws IOException
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));
    bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
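The @see #read(DataInputStream) counterpart is not reproduced on this page. Here is a hedged sketch of what the matching read side has to do, assuming ProtobufUtil.PB_MAGIC is the byte[] preamble written above and that the generated FileInfoProto exposes the standard parseDelimitedFrom; this is an illustration, not the project's actual read method.

// Hedged sketch of the read path matching write() above; not verbatim project code.
void read(final DataInputStream in) throws IOException {
  byte[] magic = new byte[ProtobufUtil.PB_MAGIC.length];
  in.readFully(magic); // consume the magic preamble written by write()
  if (!java.util.Arrays.equals(magic, ProtobufUtil.PB_MAGIC)) {
    throw new IOException("Invalid FileInfo: missing PB magic prefix");
  }
  // parseDelimitedFrom consumes the varint length written by writeDelimitedTo
  parsePB(HFileProtos.FileInfoProto.parseDelimitedFrom(in));
}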
Project: ditb    File: HFile.java
/**
 * Fill our map with the contents of the pb we read off disk
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair pair : fip.getMapEntryList()) {
    this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());
  }
}
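Since parsePB fills the map with raw byte[] keys and values, lookups only behave if the map compares keys by content; in HBase, FileInfo backs this with a TreeMap over Bytes.BYTES_COMPARATOR. A small hedged usage sketch follows; the key name is illustrative, not guaranteed present in any given file.

// Hedged usage sketch: byte[] keys need a content-comparing map, e.g.
// new TreeMap<byte[], byte[]>(Bytes.BYTES_COMPARATOR) as HBase's FileInfo uses.
byte[] raw = this.map.get(Bytes.toBytes("hfile.AVG_KEY_LEN")); // illustrative key
if (raw != null) {
  int avgKeyLen = Bytes.toInt(raw); // FileInfo values are decoded by the caller
}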
Project: pbase    File: FixedFileTrailer.java
/**
 * Write trailer data as protobuf
 * @param output the stream to write the trailer to
 * @throws IOException
 */
void serializeAsPB(DataOutputStream output) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder()
    .setFileInfoOffset(fileInfoOffset)
    .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
    .setUncompressedDataIndexSize(uncompressedDataIndexSize)
    .setTotalUncompressedBytes(totalUncompressedBytes)
    .setDataIndexCount(dataIndexCount)
    .setMetaIndexCount(metaIndexCount)
    .setEntryCount(entryCount)
    .setNumDataIndexLevels(numDataIndexLevels)
    .setFirstDataBlockOffset(firstDataBlockOffset)
    .setLastDataBlockOffset(lastDataBlockOffset)
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    .setComparatorClassName(comparatorClassName)
    .setCompressionCodec(compressionCodec.ordinal());
  if (encryptionKey != null) {
    builder.setEncryptionKey(ByteStringer.wrap(encryptionKey));
  }
  // Unfortunately we need this extra copy to determine the final size of the
  // delimited output; see the use of baos.size() below.
  builder.build().writeDelimitedTo(baos);
  baos.writeTo(output);
  // Pad to make up the difference between variable PB encoding length and the
  // length when encoded as writable under earlier V2 formats. Failure to pad
  // properly, or a PB encoding that is too big, would mean the trailer won't
  // be read in properly by HFile.
  int padding = getTrailerSize() - NOT_PB_SIZE - baos.size();
  if (padding < 0) {
    throw new IOException("Pbuf encoding size exceeded fixed trailer size limit");
  }
  for (int i = 0; i < padding; i++) {
    output.write(0);
  }
}
Project: pbase    File: HFile.java
/**
 * Write out this instance on the passed-in <code>out</code> stream.
 * We write it as a protobuf.
 * @param out
 * @throws IOException
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(ByteStringer.wrap(e.getKey()));
    bbpBuilder.setSecond(ByteStringer.wrap(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
Project: pbase    File: HFile.java
/**
 * Fill our map with the contents of the pb we read off disk
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair pair : fip.getMapEntryList()) {
    this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());
  }
}
Project: HIndex    File: FixedFileTrailer.java
/**
 * Write trailer data as protobuf
 * @param output the stream to write the trailer to
 * @throws IOException
 */
void serializeAsPB(DataOutputStream output) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder()
    .setFileInfoOffset(fileInfoOffset)
    .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
    .setUncompressedDataIndexSize(uncompressedDataIndexSize)
    .setTotalUncompressedBytes(totalUncompressedBytes)
    .setDataIndexCount(dataIndexCount)
    .setMetaIndexCount(metaIndexCount)
    .setEntryCount(entryCount)
    .setNumDataIndexLevels(numDataIndexLevels)
    .setFirstDataBlockOffset(firstDataBlockOffset)
    .setLastDataBlockOffset(lastDataBlockOffset)
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    .setComparatorClassName(comparatorClassName)
    .setCompressionCodec(compressionCodec.ordinal());
  if (encryptionKey != null) {
    builder.setEncryptionKey(HBaseZeroCopyByteString.wrap(encryptionKey));
  }
  // Unfortunately we need this extra copy to determine the final size of the
  // delimited output; see the use of baos.size() below.
  builder.build().writeDelimitedTo(baos);
  baos.writeTo(output);
  // Pad to make up the difference between variable PB encoding length and the
  // length when encoded as writable under earlier V2 formats. Failure to pad
  // properly, or a PB encoding that is too big, would mean the trailer won't
  // be read in properly by HFile.
  int padding = getTrailerSize() - NOT_PB_SIZE - baos.size();
  if (padding < 0) {
    throw new IOException("Pbuf encoding size exceeded fixed trailer size limit");
  }
  for (int i = 0; i < padding; i++) {
    output.write(0);
  }
}
Project: HIndex    File: HFile.java
/**
 * Write out this instance on the passed-in <code>out</code> stream.
 * We write it as a protobuf.
 * @param out
 * @throws IOException
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(HBaseZeroCopyByteString.wrap(e.getKey()));
    bbpBuilder.setSecond(HBaseZeroCopyByteString.wrap(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
Project: HIndex    File: HFile.java
/**
 * Fill our map with the contents of the pb we read off disk
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair pair : fip.getMapEntryList()) {
    this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());
  }
}
Project: PyroDB    File: FixedFileTrailer.java
/**
 * Write trailer data as protobuf
 * @param output the stream to write the trailer to
 * @throws IOException
 */
void serializeAsPB(DataOutputStream output) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder()
    .setFileInfoOffset(fileInfoOffset)
    .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
    .setUncompressedDataIndexSize(uncompressedDataIndexSize)
    .setTotalUncompressedBytes(totalUncompressedBytes)
    .setDataIndexCount(dataIndexCount)
    .setMetaIndexCount(metaIndexCount)
    .setEntryCount(entryCount)
    .setNumDataIndexLevels(numDataIndexLevels)
    .setFirstDataBlockOffset(firstDataBlockOffset)
    .setLastDataBlockOffset(lastDataBlockOffset)
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    .setComparatorClassName(comparatorClassName)
    .setCompressionCodec(compressionCodec.ordinal());
  if (encryptionKey != null) {
    builder.setEncryptionKey(HBaseZeroCopyByteString.wrap(encryptionKey));
  }
  // Unfortunately we need this extra copy to determine the final size of the
  // delimited output; see the use of baos.size() below.
  builder.build().writeDelimitedTo(baos);
  baos.writeTo(output);
  // Pad to make up the difference between variable PB encoding length and the
  // length when encoded as writable under earlier V2 formats. Failure to pad
  // properly, or a PB encoding that is too big, would mean the trailer won't
  // be read in properly by HFile.
  int padding = getTrailerSize() - NOT_PB_SIZE - baos.size();
  if (padding < 0) {
    throw new IOException("Pbuf encoding size exceeded fixed trailer size limit");
  }
  for (int i = 0; i < padding; i++) {
    output.write(0);
  }
}
Project: PyroDB    File: HFile.java
/**
 * Write out this instance on the passed-in <code>out</code> stream.
 * We write it as a protobuf.
 * @param out
 * @throws IOException
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(HBaseZeroCopyByteString.wrap(e.getKey()));
    bbpBuilder.setSecond(HBaseZeroCopyByteString.wrap(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
Project: PyroDB    File: HFile.java
/**
 * Fill our map with the contents of the pb we read off disk
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair pair : fip.getMapEntryList()) {
    this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());
  }
}
Project: c5    File: FixedFileTrailer.java
/**
 * Write trailer data as protobuf
 * @param output the stream to write the trailer to
 * @throws IOException
 */
void serializeAsPB(DataOutputStream output) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  HFileProtos.FileTrailerProto.newBuilder()
    .setFileInfoOffset(fileInfoOffset)
    .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
    .setUncompressedDataIndexSize(uncompressedDataIndexSize)
    .setTotalUncompressedBytes(totalUncompressedBytes)
    .setDataIndexCount(dataIndexCount)
    .setMetaIndexCount(metaIndexCount)
    .setEntryCount(entryCount)
    .setNumDataIndexLevels(numDataIndexLevels)
    .setFirstDataBlockOffset(firstDataBlockOffset)
    .setLastDataBlockOffset(lastDataBlockOffset)
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    .setComparatorClassName(comparatorClassName)
    .setCompressionCodec(compressionCodec.ordinal())
    .build().writeDelimitedTo(baos);
  output.write(baos.toByteArray());
  // Pad to make up the difference between variable PB encoding length and the
  // length when encoded as writable under earlier V2 formats. Failure to pad
  // properly, or a PB encoding that is too big, would mean the trailer won't
  // be read in properly by HFile.
  int padding = getTrailerSize() - NOT_PB_SIZE - baos.size();
  if (padding < 0) {
    throw new IOException("Pbuf encoding size exceeded fixed trailer size limit");
  }
  for (int i = 0; i < padding; i++) {
    output.write(0);
  }
}
Project: c5    File: HFile.java
/**
 * Write out this instance on the passed-in <code>out</code> stream.
 * We write it as a protobuf.
 * @param out
 * @throws IOException
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(ZeroCopyLiteralByteString.wrap(e.getKey()));
    bbpBuilder.setSecond(ZeroCopyLiteralByteString.wrap(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
Project: c5    File: HFile.java
/**
 * Fill our map with the contents of the pb we read off disk
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair pair : fip.getMapEntryList()) {
    this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());
  }
}
Project: DominoHBase    File: FixedFileTrailer.java
/**
 * Write trailer data as protobuf
 * @param output the stream to write the trailer to
 * @throws IOException
 */
void serializeAsPB(DataOutputStream output) throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  HFileProtos.FileTrailerProto.newBuilder()
    .setFileInfoOffset(fileInfoOffset)
    .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
    .setUncompressedDataIndexSize(uncompressedDataIndexSize)
    .setTotalUncompressedBytes(totalUncompressedBytes)
    .setDataIndexCount(dataIndexCount)
    .setMetaIndexCount(metaIndexCount)
    .setEntryCount(entryCount)
    .setNumDataIndexLevels(numDataIndexLevels)
    .setFirstDataBlockOffset(firstDataBlockOffset)
    .setLastDataBlockOffset(lastDataBlockOffset)
    .setComparatorClassName(comparatorClassName)
    .setCompressionCodec(compressionCodec.ordinal())
    .build().writeDelimitedTo(baos);
  output.write(baos.toByteArray());
  // Pad to make up the difference between variable PB encoding length and the
  // length when encoded as writable under earlier V2 formats. Failure to pad
  // properly, or a PB encoding that is too big, would mean the trailer won't
  // be read in properly by HFile.
  int padding = getTrailerSize() - NOT_PB_SIZE - baos.size();
  if (padding < 0) {
    throw new IOException("Pbuf encoding size exceeded fixed trailer size limit");
  }
  for (int i = 0; i < padding; i++) {
    output.write(0);
  }
}
Project: DominoHBase    File: HFile.java
/**
 * Write out this instance on the passed-in <code>out</code> stream.
 * We write it as a protobuf.
 * @param out
 * @throws IOException
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(ByteString.copyFrom(e.getKey()));
    bbpBuilder.setSecond(ByteString.copyFrom(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufUtil.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
Project: DominoHBase    File: HFile.java
/**
 * Fill our map with the contents of the pb we read off disk
 * @param fip protobuf message to read
 */
void parsePB(final HFileProtos.FileInfoProto fip) {
  this.map.clear();
  for (BytesBytesPair pair : fip.getMapEntryList()) {
    this.map.put(pair.getFirst().toByteArray(), pair.getSecond().toByteArray());
  }
}
Project: ditb    File: FixedFileTrailer.java
/**
 * Deserialize the file trailer as protobuf
 * @param inputStream
 * @throws IOException
 */
void deserializeFromPB(DataInputStream inputStream) throws IOException {
  // read PB and skip padding
  int start = inputStream.available();
  HFileProtos.FileTrailerProto trailerProto =
      HFileProtos.FileTrailerProto.PARSER.parseDelimitedFrom(inputStream);
  int size = start - inputStream.available();
  inputStream.skip(getTrailerSize() - NOT_PB_SIZE - size);

  // process the PB
  if (trailerProto.hasFileInfoOffset()) {
    fileInfoOffset = trailerProto.getFileInfoOffset();
  }
  if (trailerProto.hasLoadOnOpenDataOffset()) {
    loadOnOpenDataOffset = trailerProto.getLoadOnOpenDataOffset();
  }
  if (trailerProto.hasUncompressedDataIndexSize()) {
    uncompressedDataIndexSize = trailerProto.getUncompressedDataIndexSize();
  }
  if (trailerProto.hasTotalUncompressedBytes()) {
    totalUncompressedBytes = trailerProto.getTotalUncompressedBytes();
  }
  if (trailerProto.hasDataIndexCount()) {
    dataIndexCount = trailerProto.getDataIndexCount();
  }
  if (trailerProto.hasMetaIndexCount()) {
    metaIndexCount = trailerProto.getMetaIndexCount();
  }
  if (trailerProto.hasEntryCount()) {
    entryCount = trailerProto.getEntryCount();
  }
  if (trailerProto.hasNumDataIndexLevels()) {
    numDataIndexLevels = trailerProto.getNumDataIndexLevels();
  }
  if (trailerProto.hasFirstDataBlockOffset()) {
    firstDataBlockOffset = trailerProto.getFirstDataBlockOffset();
  }
  if (trailerProto.hasLastDataBlockOffset()) {
    lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
  }
  if (trailerProto.hasComparatorClassName()) {
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    setComparatorClass(getComparatorClass(trailerProto.getComparatorClassName()));
  }
  if (trailerProto.hasCompressionCodec()) {
    compressionCodec = Compression.Algorithm.values()[trailerProto.getCompressionCodec()];
  } else {
    compressionCodec = Compression.Algorithm.NONE;
  }
  if (trailerProto.hasEncryptionKey()) {
    encryptionKey = trailerProto.getEncryptionKey().toByteArray();
  }
}
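Note how deserializeFromPB measures the parsed length by diffing inputStream.available() before and after the parse. That is only exact when the stream wraps an in-memory buffer; on arbitrary streams available() is merely an estimate. A hedged sketch of the assumed calling pattern, where the fixed-size trailer bytes are loaded into memory first:

// Hedged sketch of the assumed caller: read the fixed-size trailer into a
// byte[] first so available() reports the exact count of unread bytes.
byte[] trailerBytes = new byte[trailerSize];   // trailerSize: assumed known here
in.readFully(trailerBytes);                    // in: DataInputStream over the HFile
DataInputStream dis =
    new DataInputStream(new ByteArrayInputStream(trailerBytes));
trailer.deserializeFromPB(dis);                // start - available() is now exact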
Project: pbase    File: FixedFileTrailer.java
/**
 * Deserialize the file trailer as protobuf
 * @param inputStream
 * @throws IOException
 */
void deserializeFromPB(DataInputStream inputStream) throws IOException {
  // read PB and skip padding
  int start = inputStream.available();
  HFileProtos.FileTrailerProto trailerProto =
      HFileProtos.FileTrailerProto.PARSER.parseDelimitedFrom(inputStream);
  int size = start - inputStream.available();
  inputStream.skip(getTrailerSize() - NOT_PB_SIZE - size);

  // process the PB
  if (trailerProto.hasFileInfoOffset()) {
    fileInfoOffset = trailerProto.getFileInfoOffset();
  }
  if (trailerProto.hasLoadOnOpenDataOffset()) {
    loadOnOpenDataOffset = trailerProto.getLoadOnOpenDataOffset();
  }
  if (trailerProto.hasUncompressedDataIndexSize()) {
    uncompressedDataIndexSize = trailerProto.getUncompressedDataIndexSize();
  }
  if (trailerProto.hasTotalUncompressedBytes()) {
    totalUncompressedBytes = trailerProto.getTotalUncompressedBytes();
  }
  if (trailerProto.hasDataIndexCount()) {
    dataIndexCount = trailerProto.getDataIndexCount();
  }
  if (trailerProto.hasMetaIndexCount()) {
    metaIndexCount = trailerProto.getMetaIndexCount();
  }
  if (trailerProto.hasEntryCount()) {
    entryCount = trailerProto.getEntryCount();
  }
  if (trailerProto.hasNumDataIndexLevels()) {
    numDataIndexLevels = trailerProto.getNumDataIndexLevels();
  }
  if (trailerProto.hasFirstDataBlockOffset()) {
    firstDataBlockOffset = trailerProto.getFirstDataBlockOffset();
  }
  if (trailerProto.hasLastDataBlockOffset()) {
    lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
  }
  if (trailerProto.hasComparatorClassName()) {
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    setComparatorClass(getComparatorClass(trailerProto.getComparatorClassName()));
  }
  if (trailerProto.hasCompressionCodec()) {
    compressionCodec = Compression.Algorithm.values()[trailerProto.getCompressionCodec()];
  } else {
    compressionCodec = Compression.Algorithm.NONE;
  }
  if (trailerProto.hasEncryptionKey()) {
    encryptionKey = trailerProto.getEncryptionKey().toByteArray();
  }
}
Project: HIndex    File: FixedFileTrailer.java
/**
 * Deserialize the file trailer as protobuf
 * @param inputStream
 * @throws IOException
 */
void deserializeFromPB(DataInputStream inputStream) throws IOException {
  // read PB and skip padding
  int start = inputStream.available();
  HFileProtos.FileTrailerProto trailerProto =
      HFileProtos.FileTrailerProto.PARSER.parseDelimitedFrom(inputStream);
  int size = start - inputStream.available();
  inputStream.skip(getTrailerSize() - NOT_PB_SIZE - size);

  // process the PB
  if (trailerProto.hasFileInfoOffset()) {
    fileInfoOffset = trailerProto.getFileInfoOffset();
  }
  if (trailerProto.hasLoadOnOpenDataOffset()) {
    loadOnOpenDataOffset = trailerProto.getLoadOnOpenDataOffset();
  }
  if (trailerProto.hasUncompressedDataIndexSize()) {
    uncompressedDataIndexSize = trailerProto.getUncompressedDataIndexSize();
  }
  if (trailerProto.hasTotalUncompressedBytes()) {
    totalUncompressedBytes = trailerProto.getTotalUncompressedBytes();
  }
  if (trailerProto.hasDataIndexCount()) {
    dataIndexCount = trailerProto.getDataIndexCount();
  }
  if (trailerProto.hasMetaIndexCount()) {
    metaIndexCount = trailerProto.getMetaIndexCount();
  }
  if (trailerProto.hasEntryCount()) {
    entryCount = trailerProto.getEntryCount();
  }
  if (trailerProto.hasNumDataIndexLevels()) {
    numDataIndexLevels = trailerProto.getNumDataIndexLevels();
  }
  if (trailerProto.hasFirstDataBlockOffset()) {
    firstDataBlockOffset = trailerProto.getFirstDataBlockOffset();
  }
  if (trailerProto.hasLastDataBlockOffset()) {
    lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
  }
  if (trailerProto.hasComparatorClassName()) {
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    setComparatorClass(getComparatorClass(trailerProto.getComparatorClassName()));
  }
  if (trailerProto.hasCompressionCodec()) {
    compressionCodec = Compression.Algorithm.values()[trailerProto.getCompressionCodec()];
  } else {
    compressionCodec = Compression.Algorithm.NONE;
  }
  if (trailerProto.hasEncryptionKey()) {
    encryptionKey = trailerProto.getEncryptionKey().toByteArray();
  }
}
Project: PyroDB    File: FixedFileTrailer.java
/**
 * Deserialize the file trailer as protobuf
 * @param inputStream
 * @throws IOException
 */
void deserializeFromPB(DataInputStream inputStream) throws IOException {
  // read PB and skip padding
  int start = inputStream.available();
  HFileProtos.FileTrailerProto trailerProto =
      HFileProtos.FileTrailerProto.PARSER.parseDelimitedFrom(inputStream);
  int size = start - inputStream.available();
  inputStream.skip(getTrailerSize() - NOT_PB_SIZE - size);

  // process the PB
  if (trailerProto.hasFileInfoOffset()) {
    fileInfoOffset = trailerProto.getFileInfoOffset();
  }
  if (trailerProto.hasLoadOnOpenDataOffset()) {
    loadOnOpenDataOffset = trailerProto.getLoadOnOpenDataOffset();
  }
  if (trailerProto.hasUncompressedDataIndexSize()) {
    uncompressedDataIndexSize = trailerProto.getUncompressedDataIndexSize();
  }
  if (trailerProto.hasTotalUncompressedBytes()) {
    totalUncompressedBytes = trailerProto.getTotalUncompressedBytes();
  }
  if (trailerProto.hasDataIndexCount()) {
    dataIndexCount = trailerProto.getDataIndexCount();
  }
  if (trailerProto.hasMetaIndexCount()) {
    metaIndexCount = trailerProto.getMetaIndexCount();
  }
  if (trailerProto.hasEntryCount()) {
    entryCount = trailerProto.getEntryCount();
  }
  if (trailerProto.hasNumDataIndexLevels()) {
    numDataIndexLevels = trailerProto.getNumDataIndexLevels();
  }
  if (trailerProto.hasFirstDataBlockOffset()) {
    firstDataBlockOffset = trailerProto.getFirstDataBlockOffset();
  }
  if (trailerProto.hasLastDataBlockOffset()) {
    lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
  }
  if (trailerProto.hasComparatorClassName()) {
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    setComparatorClass(getComparatorClass(trailerProto.getComparatorClassName()));
  }
  if (trailerProto.hasCompressionCodec()) {
    compressionCodec = Compression.Algorithm.values()[trailerProto.getCompressionCodec()];
  } else {
    compressionCodec = Compression.Algorithm.NONE;
  }
  if (trailerProto.hasEncryptionKey()) {
    encryptionKey = trailerProto.getEncryptionKey().toByteArray();
  }
}
Project: c5    File: FixedFileTrailer.java
/**
 * Deserialize the file trailer as protobuf
 * @param inputStream
 * @throws IOException
 */
void deserializeFromPB(DataInputStream inputStream) throws IOException {
  // read PB and skip padding
  int start = inputStream.available();
  HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder();
  builder.mergeDelimitedFrom(inputStream);
  int size = start - inputStream.available();
  inputStream.skip(getTrailerSize() - NOT_PB_SIZE - size);

  // process the PB
  if (builder.hasFileInfoOffset()) {
    fileInfoOffset = builder.getFileInfoOffset();
  }
  if (builder.hasLoadOnOpenDataOffset()) {
    loadOnOpenDataOffset = builder.getLoadOnOpenDataOffset();
  }
  if (builder.hasUncompressedDataIndexSize()) {
    uncompressedDataIndexSize = builder.getUncompressedDataIndexSize();
  }
  if (builder.hasTotalUncompressedBytes()) {
    totalUncompressedBytes = builder.getTotalUncompressedBytes();
  }
  if (builder.hasDataIndexCount()) {
    dataIndexCount = builder.getDataIndexCount();
  }
  if (builder.hasMetaIndexCount()) {
    metaIndexCount = builder.getMetaIndexCount();
  }
  if (builder.hasEntryCount()) {
    entryCount = builder.getEntryCount();
  }
  if (builder.hasNumDataIndexLevels()) {
    numDataIndexLevels = builder.getNumDataIndexLevels();
  }
  if (builder.hasFirstDataBlockOffset()) {
    firstDataBlockOffset = builder.getFirstDataBlockOffset();
  }
  if (builder.hasLastDataBlockOffset()) {
    lastDataBlockOffset = builder.getLastDataBlockOffset();
  }
  if (builder.hasComparatorClassName()) {
    // TODO this is a classname encoded into an HFile's trailer. We are going to need
    // some compat code here.
    setComparatorClass(getComparatorClass(builder.getComparatorClassName()));
  }
  if (builder.hasCompressionCodec()) {
    compressionCodec = Compression.Algorithm.values()[builder.getCompressionCodec()];
  } else {
    compressionCodec = Compression.Algorithm.NONE;
  }
}
Project: DominoHBase    File: FixedFileTrailer.java
/**
 * Deserialize the file trailer as protobuf
 * @param inputStream
 * @throws IOException
 */
void deserializeFromPB(DataInputStream inputStream) throws IOException {
  // read PB and skip padding
  int start = inputStream.available();
  HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder();
  builder.mergeDelimitedFrom(inputStream);
  int size = start - inputStream.available();
  inputStream.skip(getTrailerSize() - NOT_PB_SIZE - size);

  // process the PB
  if (builder.hasFileInfoOffset()) {
    fileInfoOffset = builder.getFileInfoOffset();
  }
  if (builder.hasLoadOnOpenDataOffset()) {
    loadOnOpenDataOffset = builder.getLoadOnOpenDataOffset();
  }
  if (builder.hasUncompressedDataIndexSize()) {
    uncompressedDataIndexSize = builder.getUncompressedDataIndexSize();
  }
  if (builder.hasTotalUncompressedBytes()) {
    totalUncompressedBytes = builder.getTotalUncompressedBytes();
  }
  if (builder.hasDataIndexCount()) {
    dataIndexCount = builder.getDataIndexCount();
  }
  if (builder.hasMetaIndexCount()) {
    metaIndexCount = builder.getMetaIndexCount();
  }
  if (builder.hasEntryCount()) {
    entryCount = builder.getEntryCount();
  }
  if (builder.hasNumDataIndexLevels()) {
    numDataIndexLevels = builder.getNumDataIndexLevels();
  }
  if (builder.hasFirstDataBlockOffset()) {
    firstDataBlockOffset = builder.getFirstDataBlockOffset();
  }
  if (builder.hasLastDataBlockOffset()) {
    lastDataBlockOffset = builder.getLastDataBlockOffset();
  }
  if (builder.hasComparatorClassName()) {
    comparatorClassName = builder.getComparatorClassName();
  }
  if (builder.hasCompressionCodec()) {
    compressionCodec = Compression.Algorithm.values()[builder.getCompressionCodec()];
  } else {
    compressionCodec = Compression.Algorithm.NONE;
  }
}
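The older c5 and DominoHBase variants above parse the trailer with Builder.mergeDelimitedFrom and read fields straight off the builder, while the ditb, pbase, HIndex, and PyroDB variants use the generated PARSER. Both consume the same varint-delimited framing; a hedged sketch of the two equivalent call shapes, assuming the protobuf-java 2.x generated API:

// Style A: later variants parse an immutable message via the generated PARSER.
HFileProtos.FileTrailerProto viaParser =
    HFileProtos.FileTrailerProto.PARSER.parseDelimitedFrom(inputStream);

// Style B: older variants merge into a builder and query it directly.
HFileProtos.FileTrailerProto.Builder builder =
    HFileProtos.FileTrailerProto.newBuilder();
builder.mergeDelimitedFrom(inputStream); // returns false on clean end-of-stream
long fileInfoOffset = builder.getFileInfoOffset(); // same data either way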