Example source code for the Java class org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext

Project: hbase    File: HFileBlock.java
/**
 * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its
 * encoded structure. Internal structures are shared between instances where applicable.
 */
HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {
  if (!fileContext.isCompressedOrEncrypted()) {
    // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),
    // which is used for block serialization to L2 cache, does not preserve encoding and
    // encryption details.
    return this;
  }

  HFileBlock unpacked = new HFileBlock(this);
  unpacked.allocateBuffer(); // allocates space for the decompressed block

  HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?
    reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();

  ByteBuff dup = this.buf.duplicate();
  dup.position(this.headerSize());
  dup = dup.slice();
  ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),
    unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),
    dup);
  return unpacked;
}
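The two accessors used in the ternary above are typically wired once per reader: a default context that only decompresses/decrypts, and an encoder-aware context for ENCODED_DATA blocks. A minimal sketch of that wiring, assuming the HBase 2.x API shown above; the class name MyReader and its constructor arguments are illustrative, not the actual FSReaderImpl code:

import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;

// Illustrative reader skeleton (not FSReaderImpl): one default context for plain
// (merely compressed/encrypted) blocks, one encoder-aware context for ENCODED_DATA.
class MyReader {
  private final HFileBlockDecodingContext defaultDecodingCtx;
  private final HFileBlockDecodingContext encodedBlockDecodingCtx;

  MyReader(HFileContext fileContext, HFileDataBlockEncoder dataBlockEncoder) {
    this.defaultDecodingCtx = new HFileBlockDefaultDecodingContext(fileContext);
    this.encodedBlockDecodingCtx = dataBlockEncoder.newDataBlockDecodingContext(fileContext);
  }

  HFileBlockDecodingContext getBlockDecodingContext() {
    return encodedBlockDecodingCtx; // chosen for BlockType.ENCODED_DATA
  }

  HFileBlockDecodingContext getDefaultBlockDecodingContext() {
    return defaultDecodingCtx; // chosen for everything else
  }
}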
Project: ditb    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext fileContext) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(fileContext);
  }
  return new HFileBlockDefaultDecodingContext(fileContext);
}
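A hedged caller-side sketch of this factory, assuming the same 1.x-era API; the class DecodingContextDemo is invented for illustration, and DataBlockEncoding.FAST_DIFF is only an example encoding:

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;

public class DecodingContextDemo {
  public static void main(String[] args) {
    // The context must know, among other things, whether MVCC vlongs trail each cell.
    HFileContext fileContext = new HFileContextBuilder().withIncludesMvcc(true).build();
    HFileDataBlockEncoderImpl blockEncoder =
        new HFileDataBlockEncoderImpl(DataBlockEncoding.FAST_DIFF);
    // FAST_DIFF has a real encoder, so this delegates to the encoder's factory;
    // DataBlockEncoding.NONE has no encoder and falls back to the default context.
    HFileBlockDecodingContext ctx = blockEncoder.newDataBlockDecodingContext(fileContext);
    System.out.println(ctx.getClass().getName());
  }
}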
Project: ditb    File: PrefixTreeCodec.java
/**
 * I don't think this method is called during normal HBase operation, so efficiency is not
 * important.
 */
public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength,
    int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
  ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source); // wasteful extra copy
  sourceAsBuffer.mark();
  PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
  sourceAsBuffer.rewind();
  int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
  byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
  ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
  result.rewind();
  CellSearcher searcher = null;
  try {
    boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
    searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc);
    while (searcher.advance()) {
      KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
      // Needs to be modified for DirectByteBuffers: there are no existing
      // methods for writing VLongs into a byte[].
      int offset = result.arrayOffset() + result.position();
      System.arraycopy(currentCell.getBuffer(), currentCell.getOffset(), result.array(), offset,
          currentCell.getLength());
      int keyValueLength = KeyValueUtil.length(currentCell);
      ByteBufferUtils.skip(result, keyValueLength);
      offset += keyValueLength;
      if (includesMvcc) {
        ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
      }
    }
    result.position(result.limit()); // make it appear as if we were appending
    return result;
  } finally {
    DecoderFactory.checkIn(searcher);
  }
}
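In practice this four-argument form is reached through the two-argument overload shown further down this page. A hedged roundtrip sketch; encodedBlockBytes is assumed to hold a block previously written by the prefix-tree encoder (not constructed here), and PrefixTreeDecodeDemo is an invented wrapper:

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class PrefixTreeDecodeDemo {
  // encodedBlockBytes: assumed output of a prior prefix-tree encoding pass.
  public static ByteBuffer decode(byte[] encodedBlockBytes) throws IOException {
    PrefixTreeCodec codec = new PrefixTreeCodec();
    HFileContext meta = new HFileContextBuilder().withIncludesMvcc(false).build();
    HFileBlockDecodingContext decodingCtx = codec.newDataBlockDecodingContext(meta);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(encodedBlockBytes));
    // Returns the cells re-materialized in the unencoded ("V1") KeyValue layout.
    return codec.decodeKeyValues(in, decodingCtx);
  }
}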
Project: ditb    File: PrefixTreeCodec.java
/**
 * Is this the correct handling of an illegal comparator? How can we prevent one from
 * getting all the way to this point?
 */
@Override
public EncodedSeeker createSeeker(KVComparator comparator, HFileBlockDecodingContext decodingCtx) {
  if (comparator instanceof RawBytesComparator){
    throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator");
  } else if (comparator instanceof MetaComparator){
    throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with hbase:meta "
        +"table");
  }

  return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc());
}
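A hedged sketch of driving the returned seeker, assuming the 1.x DataBlockEncoder.EncodedSeeker API; PrefixTreeSeekDemo is invented, and encodedBlockBuffer is assumed to hold a prefix-tree-encoded block body obtained elsewhere:

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.codec.prefixtree.PrefixTreeCodec;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoder;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;

public class PrefixTreeSeekDemo {
  public static void scanKeys(PrefixTreeCodec codec, HFileBlockDecodingContext decodingCtx,
      ByteBuffer encodedBlockBuffer) {
    // KeyValue.COMPARATOR passes the instanceof checks above (it is neither a
    // RawBytesComparator nor a MetaComparator).
    DataBlockEncoder.EncodedSeeker seeker =
        codec.createSeeker(KeyValue.COMPARATOR, decodingCtx);
    seeker.setCurrentBuffer(encodedBlockBuffer); // positions the seeker on the first cell
    do {
      ByteBuffer key = seeker.getKeyDeepCopy(); // copy of the current cell's key
      System.out.println(key.remaining() + " key bytes");
    } while (seeker.next()); // false once the last cell has been visited
  }
}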
Project: pbase    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext fileContext) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(fileContext);
  }
  return new HFileBlockDefaultDecodingContext(fileContext);
}
Project: pbase    File: PrefixTreeCodec.java
/**
 * I don't think this method is called during normal HBase operation, so efficiency is not
 * important.
 */
public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength,
    int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
  ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source); // wasteful extra copy
  sourceAsBuffer.mark();
  PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
  sourceAsBuffer.rewind();
  int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
  byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
  ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
  result.rewind();
  CellSearcher searcher = null;
  try {
    boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
    searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc);
    while (searcher.advance()) {
      KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
      // Needs to be modified for DirectByteBuffers: there are no existing
      // methods for writing VLongs into a byte[].
      int offset = result.arrayOffset() + result.position();
      System.arraycopy(currentCell.getBuffer(), currentCell.getOffset(), result.array(), offset,
          currentCell.getLength());
      int keyValueLength = KeyValueUtil.length(currentCell);
      ByteBufferUtils.skip(result, keyValueLength);
      offset += keyValueLength;
      if (includesMvcc) {
        ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
      }
    }
    result.position(result.limit()); // make it appear as if we were appending
    return result;
  } finally {
    DecoderFactory.checkIn(searcher);
  }
}
Project: pbase    File: PrefixTreeCodec.java
/**
 * Is this the correct handling of an illegal comparator? How can we prevent one from
 * getting all the way to this point?
 */
@Override
public EncodedSeeker createSeeker(KVComparator comparator, HFileBlockDecodingContext decodingCtx) {
  if (comparator instanceof RawBytesComparator){
    throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator");
  } else if (comparator instanceof MetaComparator){
    throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with hbase:meta "
        +"table");
  }

  return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc());
}
Project: HIndex    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext fileContext) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(fileContext);
  }
  return new HFileBlockDefaultDecodingContext(fileContext);
}
Project: HIndex    File: PrefixTreeCodec.java
/**
 * I don't think this method is called during normal HBase operation, so efficiency is not
 * important.
 */
public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength,
    int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
  ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source); // wasteful extra copy
  sourceAsBuffer.mark();
  PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
  sourceAsBuffer.rewind();
  int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
  byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
  ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
  result.rewind();
  CellSearcher searcher = null;
  try {
    boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
    searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc);
    while (searcher.advance()) {
      KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
      // Needs to be modified for DirectByteBuffers: there are no existing
      // methods for writing VLongs into a byte[].
      int offset = result.arrayOffset() + result.position();
      System.arraycopy(currentCell.getBuffer(), currentCell.getOffset(), result.array(), offset,
          currentCell.getLength());
      int keyValueLength = KeyValueUtil.length(currentCell);
      ByteBufferUtils.skip(result, keyValueLength);
      offset += keyValueLength;
      if (includesMvcc) {
        ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
      }
    }
    result.position(result.limit()); // make it appear as if we were appending
    return result;
  } finally {
    DecoderFactory.checkIn(searcher);
  }
}
Project: HIndex    File: PrefixTreeCodec.java
/**
 * Is this the correct handling of an illegal comparator? How can we prevent one from
 * getting all the way to this point?
 */
@Override
public EncodedSeeker createSeeker(KVComparator comparator, HFileBlockDecodingContext decodingCtx) {
  if (comparator instanceof RawBytesComparator){
    throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator");
  } else if (comparator instanceof MetaComparator){
    throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with hbase:meta "
        +"table");
  }

  return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc());
}
Project: hbase    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext fileContext) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(fileContext);
  }
  return new HFileBlockDefaultDecodingContext(fileContext);
}
Project: PyroDB    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext fileContext) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(fileContext);
  }
  return new HFileBlockDefaultDecodingContext(fileContext);
}
Project: PyroDB    File: PrefixTreeCodec.java
/**
 * I don't think this method is called during normal HBase operation, so efficiency is not
 * important.
 */
public ByteBuffer decodeKeyValues(DataInputStream source, int allocateHeaderLength,
    int skipLastBytes, HFileBlockDecodingContext decodingCtx) throws IOException {
  ByteBuffer sourceAsBuffer = ByteBufferUtils.drainInputStreamToBuffer(source); // wasteful extra copy
  sourceAsBuffer.mark();
  PrefixTreeBlockMeta blockMeta = new PrefixTreeBlockMeta(sourceAsBuffer);
  sourceAsBuffer.rewind();
  int numV1BytesWithHeader = allocateHeaderLength + blockMeta.getNumKeyValueBytes();
  byte[] keyValueBytesWithHeader = new byte[numV1BytesWithHeader];
  ByteBuffer result = ByteBuffer.wrap(keyValueBytesWithHeader);
  result.rewind();
  CellSearcher searcher = null;
  try {
    boolean includesMvcc = decodingCtx.getHFileContext().isIncludesMvcc();
    searcher = DecoderFactory.checkOut(sourceAsBuffer, includesMvcc);
    while (searcher.advance()) {
      KeyValue currentCell = KeyValueUtil.copyToNewKeyValue(searcher.current());
      // Needs to be modified for DirectByteBuffers: there are no existing
      // methods for writing VLongs into a byte[].
      int offset = result.arrayOffset() + result.position();
      System.arraycopy(currentCell.getBuffer(), currentCell.getOffset(), result.array(), offset,
          currentCell.getLength());
      int keyValueLength = KeyValueUtil.length(currentCell);
      ByteBufferUtils.skip(result, keyValueLength);
      offset += keyValueLength;
      if (includesMvcc) {
        ByteBufferUtils.writeVLong(result, currentCell.getMvccVersion());
      }
    }
    result.position(result.limit()); // make it appear as if we were appending
    return result;
  } finally {
    DecoderFactory.checkIn(searcher);
  }
}
Project: PyroDB    File: PrefixTreeCodec.java
/**
 * Is this the correct handling of an illegal comparator? How can we prevent one from
 * getting all the way to this point?
 */
@Override
public EncodedSeeker createSeeker(KVComparator comparator, HFileBlockDecodingContext decodingCtx) {
  if (comparator instanceof RawBytesComparator){
    throw new IllegalArgumentException("comparator must be KeyValue.KeyComparator");
  } else if (comparator instanceof MetaComparator){
    throw new IllegalArgumentException("DataBlockEncoding.PREFIX_TREE not compatible with hbase:meta "
        +"table");
  }

  return new PrefixTreeSeeker(decodingCtx.getHFileContext().isIncludesMvcc());
}
Project: c5    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(
    Algorithm compressionAlgorithm) {
  DataBlockEncoder encoder = encoding.getEncoder();
  if (encoder != null) {
    return encoder.newDataBlockDecodingContext(compressionAlgorithm);
  }
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}
Project: DominoHBase    File: HFileDataBlockEncoderImpl.java
@Override
public HFileBlockDecodingContext newOnDiskDataBlockDecodingContext(
    Algorithm compressionAlgorithm) {
  if (onDisk != null) {
    DataBlockEncoder encoder = onDisk.getEncoder();
    if (encoder != null) {
      return encoder.newDataBlockDecodingContext(
          compressionAlgorithm);
    }
  }
  return new HFileBlockDefaultDecodingContext(compressionAlgorithm);
}
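The c5 and DominoHBase variants predate HFileContext: their decoding contexts are keyed off the compression Algorithm alone. A minimal sketch against that older constructor; LegacyDecodingContextDemo is invented, GZ is only an example algorithm, and the import location of Compression varies across these forks:

import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; // package differs by era
import org.apache.hadoop.hbase.io.encoding.HFileBlockDecodingContext;
import org.apache.hadoop.hbase.io.encoding.HFileBlockDefaultDecodingContext;

public class LegacyDecodingContextDemo {
  public static void main(String[] args) {
    // A default context that only knows how to decompress GZ data;
    // Algorithm.NONE would create a pass-through context.
    HFileBlockDecodingContext ctx = new HFileBlockDefaultDecodingContext(Algorithm.GZ);
    System.out.println(ctx.getClass().getName());
  }
}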
Project: ditb    File: NoOpDataBlockEncoder.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: ditb    File: HFileBlock.java
/**
 * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its
 * encoded structure. Internal structures are shared between instances where applicable.
 */
HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {
  if (!fileContext.isCompressedOrEncrypted()) {
    // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),
    // which is used for block serialization to L2 cache, does not preserve encoding and
    // encryption details.
    return this;
  }

  HFileBlock unpacked = new HFileBlock(this);
  unpacked.allocateBuffer(); // allocates space for the decompressed block

  HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?
    reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();

  ByteBuffer dup = this.buf.duplicate();
  dup.position(this.headerSize());
  dup = dup.slice();
  ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),
    unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),
    dup);

  // Preserve the next block's header bytes in the new block if we have them.
  if (unpacked.hasNextBlockHeader()) {
    // Both buffers are limited to the end of the checksum bytes so that the next
    // block's header is excluded. The copyFromBufferToBuffer() call below does
    // positional reads/writes when either buffer is a DirectByteBuffer, so we only
    // raise the limit on duplicates; no data is copied, just new ByteBuffer objects.
    ByteBuffer inDup = this.buf.duplicate();
    inDup.limit(inDup.limit() + headerSize());
    ByteBuffer outDup = unpacked.buf.duplicate();
    outDup.limit(outDup.limit() + unpacked.headerSize());
    ByteBufferUtils.copyFromBufferToBuffer(
        outDup,
        inDup,
        this.onDiskDataSizeWithHeader,
        unpacked.headerSize() + unpacked.uncompressedSizeWithoutHeader
            + unpacked.totalChecksumBytes(), unpacked.headerSize());
  }
  return unpacked;
}
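The header-preservation branch relies on a standard java.nio property: duplicate() shares the backing bytes but keeps an independent position and limit, so widening the limit of a duplicate exposes the cached next-block header without disturbing the source buffer's bounds. A standalone illustration:

import java.nio.ByteBuffer;

public class DuplicateLimitDemo {
  public static void main(String[] args) {
    ByteBuffer original = ByteBuffer.allocate(16);
    original.limit(8);                        // pretend bytes 8..15 hold the next block's header
    ByteBuffer window = original.duplicate(); // shares content, independent position/limit
    window.limit(window.limit() + 8);         // widen only the duplicate's window
    System.out.println(original.limit());     // still 8
    System.out.println(window.limit());       // 16
  }
}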
Project: ditb    File: HFileBlock.java
/** Get a decoder for {@link BlockType#ENCODED_DATA} blocks from this file. */
HFileBlockDecodingContext getBlockDecodingContext();
Project: ditb    File: HFileBlock.java
/** Get the default decoder for blocks from this file. */
HFileBlockDecodingContext getDefaultBlockDecodingContext();
Project: ditb    File: HFileBlock.java
@Override
public HFileBlockDecodingContext getBlockDecodingContext() {
  return this.encodedBlockDecodingCtx;
}
Project: ditb    File: HFileBlock.java
@Override
public HFileBlockDecodingContext getDefaultBlockDecodingContext() {
  return this.defaultDecodingCtx;
}
Project: ditb    File: PrefixTreeCodec.java
@Override
public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)
    throws IOException {
  return decodeKeyValues(source, 0, 0, decodingCtx);
}
Project: ditb    File: PrefixTreeCodec.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: pbase    File: NoOpDataBlockEncoder.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: pbase    File: HFileBlock.java
/**
 * Retrieves the decompressed/decrypted view of this block. An encoded block remains in its
 * encoded structure. Internal structures are shared between instances where applicable.
 */
HFileBlock unpack(HFileContext fileContext, FSReader reader) throws IOException {
  if (!fileContext.isCompressedOrEncrypted()) {
    // TODO: cannot use our own fileContext here because HFileBlock(ByteBuffer, boolean),
    // which is used for block serialization to L2 cache, does not preserve encoding and
    // encryption details.
    return this;
  }

  HFileBlock unpacked = new HFileBlock(this);
  unpacked.allocateBuffer(); // allocates space for the decompressed block

  HFileBlockDecodingContext ctx = blockType == BlockType.ENCODED_DATA ?
    reader.getBlockDecodingContext() : reader.getDefaultBlockDecodingContext();

  ByteBuffer dup = this.buf.duplicate();
  dup.position(this.headerSize());
  dup = dup.slice();
  ctx.prepareDecoding(unpacked.getOnDiskSizeWithoutHeader(),
    unpacked.getUncompressedSizeWithoutHeader(), unpacked.getBufferWithoutHeader(),
    dup);

  // Preserve the next block's header bytes in the new block if we have them.
  if (unpacked.hasNextBlockHeader()) {
    // Both buffers are limited to the end of the checksum bytes so that the next
    // block's header is excluded. The copyFromBufferToBuffer() call below does
    // positional reads/writes when either buffer is a DirectByteBuffer, so we only
    // raise the limit on duplicates; no data is copied, just new ByteBuffer objects.
    ByteBuffer inDup = this.buf.duplicate();
    inDup.limit(inDup.limit() + headerSize());
    ByteBuffer outDup = unpacked.buf.duplicate();
    outDup.limit(outDup.limit() + unpacked.headerSize());
    ByteBufferUtils.copyFromBufferToBuffer(
        outDup,
        inDup,
        this.onDiskDataSizeWithHeader,
        unpacked.headerSize() + unpacked.uncompressedSizeWithoutHeader
            + unpacked.totalChecksumBytes(), unpacked.headerSize());
  }
  return unpacked;
}
Project: pbase    File: HFileBlock.java
/** Get a decoder for {@link BlockType#ENCODED_DATA} blocks from this file. */
HFileBlockDecodingContext getBlockDecodingContext();
Project: pbase    File: HFileBlock.java
/** Get the default decoder for blocks from this file. */
HFileBlockDecodingContext getDefaultBlockDecodingContext();
Project: pbase    File: HFileBlock.java
@Override
public HFileBlockDecodingContext getBlockDecodingContext() {
  return this.encodedBlockDecodingCtx;
}
Project: pbase    File: HFileBlock.java
@Override
public HFileBlockDecodingContext getDefaultBlockDecodingContext() {
  return this.defaultDecodingCtx;
}
Project: pbase    File: PrefixTreeCodec.java
@Override
public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)
    throws IOException {
  return decodeKeyValues(source, 0, 0, decodingCtx);
}
Project: pbase    File: PrefixTreeCodec.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: HIndex    File: NoOpDataBlockEncoder.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: HIndex    File: PrefixTreeCodec.java
@Override
public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext decodingCtx)
    throws IOException {
  return decodeKeyValues(source, 0, 0, decodingCtx);
}
Project: HIndex    File: PrefixTreeCodec.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: hbase    File: NoOpDataBlockEncoder.java
@Override
public HFileBlockDecodingContext newDataBlockDecodingContext(HFileContext meta) {
  return new HFileBlockDefaultDecodingContext(meta);
}
Project: hbase    File: HFileBlock.java
/** Get a decoder for {@link BlockType#ENCODED_DATA} blocks from this file. */
HFileBlockDecodingContext getBlockDecodingContext();
Project: hbase    File: HFileBlock.java
/** Get the default decoder for blocks from this file. */
HFileBlockDecodingContext getDefaultBlockDecodingContext();
Project: hbase    File: HFileBlock.java
@Override
public HFileBlockDecodingContext getBlockDecodingContext() {
  return this.encodedBlockDecodingCtx;
}
Project: hbase    File: HFileBlock.java
@Override
public HFileBlockDecodingContext getDefaultBlockDecodingContext() {
  return this.defaultDecodingCtx;
}