Java 类org.apache.hadoop.hbase.io.TagCompressionContext 实例源码

项目:ditb    文件:CompressionContext.java   
public CompressionContext(Class<? extends Dictionary> dictType, boolean recoveredEdits,
    boolean hasTagCompression) throws SecurityException, NoSuchMethodException,
    InstantiationException, IllegalAccessException, InvocationTargetException {
  Constructor<? extends Dictionary> dictConstructor =
      dictType.getConstructor();
  regionDict = dictConstructor.newInstance();
  tableDict = dictConstructor.newInstance();
  familyDict = dictConstructor.newInstance();
  qualifierDict = dictConstructor.newInstance();
  rowDict = dictConstructor.newInstance();
  if (recoveredEdits) {
    // This will never change
    regionDict.init(1);
    tableDict.init(1);
  } else {
    regionDict.init(Short.MAX_VALUE);
    tableDict.init(Short.MAX_VALUE);
  }
  rowDict.init(Short.MAX_VALUE);
  familyDict.init(Byte.MAX_VALUE);
  qualifierDict.init(Byte.MAX_VALUE);
  if (hasTagCompression) {
    tagCompressionContext = new TagCompressionContext(dictType, Short.MAX_VALUE);
  }
}
项目:ditb    文件:BufferedDataBlockEncoder.java   
/**
 * Snapshot of a seeker's current cell: copies the key bytes (and the tag bytes
 * when tag compression is in use) so the clone stays valid after the seeker
 * advances past this cell.
 */
protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength,
    int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength,
    long timeStamp, byte typeByte, int valueLen, int valueOffset, long seqId,
    int tagsOffset, int tagsLength, TagCompressionContext tagCompressionContext,
    byte[] tagsBuffer) {
  this.currentBuffer = currentBuffer;
  this.tagCompressionContext = tagCompressionContext;
  // Cell geometry within the buffers.
  this.rowLength = rowLength;
  this.familyOffset = familyOffset;
  this.familyLength = familyLength;
  this.qualifierOffset = qualOffset;
  this.qualifierLength = qualLength;
  this.valueOffset = valueOffset;
  this.valueLength = valueLen;
  this.timestamp = timeStamp;
  this.typeByte = typeByte;
  this.tagsOffset = tagsOffset;
  this.tagsLength = tagsLength;
  // Defensive copy of the key: the source buffer is reused by the seeker.
  this.keyOnlyBuffer = new byte[keyLength];
  System.arraycopy(keyBuffer, 0, this.keyOnlyBuffer, 0, keyLength);
  if (tagCompressionContext != null) {
    // Uncompressed tags only live in tagsBuffer, so copy them as well.
    this.cloneTagsBuffer = new byte[tagsLength];
    System.arraycopy(tagsBuffer, 0, this.cloneTagsBuffer, 0, tagsLength);
  }
  setSequenceId(seqId);
}
项目:pbase    文件:CompressionContext.java   
public CompressionContext(Class<? extends Dictionary> dictType, boolean recoveredEdits,
    boolean hasTagCompression) throws SecurityException, NoSuchMethodException,
    InstantiationException, IllegalAccessException, InvocationTargetException {
  Constructor<? extends Dictionary> dictConstructor =
      dictType.getConstructor();
  regionDict = dictConstructor.newInstance();
  tableDict = dictConstructor.newInstance();
  familyDict = dictConstructor.newInstance();
  qualifierDict = dictConstructor.newInstance();
  rowDict = dictConstructor.newInstance();
  if (recoveredEdits) {
    // This will never change
    regionDict.init(1);
    tableDict.init(1);
  } else {
    regionDict.init(Short.MAX_VALUE);
    tableDict.init(Short.MAX_VALUE);
  }
  rowDict.init(Short.MAX_VALUE);
  familyDict.init(Byte.MAX_VALUE);
  qualifierDict.init(Byte.MAX_VALUE);
  if (hasTagCompression) {
    tagCompressionContext = new TagCompressionContext(dictType, Short.MAX_VALUE);
  }
}
项目:pbase    文件:BufferedDataBlockEncoder.java   
/**
 * Snapshot of a seeker's current cell: copies the key bytes (and the tag bytes
 * when tag compression is in use) so the clone stays valid after the seeker
 * advances past this cell.
 */
protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength,
    int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength,
    long timeStamp, byte typeByte, int valueLen, int valueOffset, long seqId,
    int tagsOffset, int tagsLength, TagCompressionContext tagCompressionContext,
    byte[] tagsBuffer) {
  this.currentBuffer = currentBuffer;
  this.tagCompressionContext = tagCompressionContext;
  // Cell geometry within the buffers.
  this.rowLength = rowLength;
  this.familyOffset = familyOffset;
  this.familyLength = familyLength;
  this.qualifierOffset = qualOffset;
  this.qualifierLength = qualLength;
  this.valueOffset = valueOffset;
  this.valueLength = valueLen;
  this.timestamp = timeStamp;
  this.typeByte = typeByte;
  this.tagsOffset = tagsOffset;
  this.tagsLength = tagsLength;
  // Defensive copy of the key: the source buffer is reused by the seeker.
  this.keyOnlyBuffer = new byte[keyLength];
  System.arraycopy(keyBuffer, 0, this.keyOnlyBuffer, 0, keyLength);
  if (tagCompressionContext != null) {
    // Uncompressed tags only live in tagsBuffer, so copy them as well.
    this.cloneTagsBuffer = new byte[tagsLength];
    System.arraycopy(tagsBuffer, 0, this.cloneTagsBuffer, 0, tagsLength);
  }
  setSequenceId(seqId);
}
项目:HIndex    文件:CompressionContext.java   
public CompressionContext(Class<? extends Dictionary> dictType, boolean recoveredEdits,
    boolean hasTagCompression) throws SecurityException, NoSuchMethodException,
    InstantiationException, IllegalAccessException, InvocationTargetException {
  Constructor<? extends Dictionary> dictConstructor =
      dictType.getConstructor();
  regionDict = dictConstructor.newInstance();
  tableDict = dictConstructor.newInstance();
  familyDict = dictConstructor.newInstance();
  qualifierDict = dictConstructor.newInstance();
  rowDict = dictConstructor.newInstance();
  if (recoveredEdits) {
    // This will never change
    regionDict.init(1);
    tableDict.init(1);
  } else {
    regionDict.init(Short.MAX_VALUE);
    tableDict.init(Short.MAX_VALUE);
  }
  rowDict.init(Short.MAX_VALUE);
  familyDict.init(Byte.MAX_VALUE);
  qualifierDict.init(Byte.MAX_VALUE);
  if (hasTagCompression) {
    tagCompressionContext = new TagCompressionContext(dictType, Short.MAX_VALUE);
  }
}
项目:hbase    文件:CompressionContext.java   
public CompressionContext(Class<? extends Dictionary> dictType, boolean recoveredEdits,
    boolean hasTagCompression) throws SecurityException, NoSuchMethodException,
    InstantiationException, IllegalAccessException, InvocationTargetException {
  Constructor<? extends Dictionary> dictConstructor =
      dictType.getConstructor();
  regionDict = dictConstructor.newInstance();
  tableDict = dictConstructor.newInstance();
  familyDict = dictConstructor.newInstance();
  qualifierDict = dictConstructor.newInstance();
  rowDict = dictConstructor.newInstance();
  if (recoveredEdits) {
    // This will never change
    regionDict.init(1);
    tableDict.init(1);
  } else {
    regionDict.init(Short.MAX_VALUE);
    tableDict.init(Short.MAX_VALUE);
  }
  rowDict.init(Short.MAX_VALUE);
  familyDict.init(Byte.MAX_VALUE);
  qualifierDict.init(Byte.MAX_VALUE);
  if (hasTagCompression) {
    tagCompressionContext = new TagCompressionContext(dictType, Short.MAX_VALUE);
  }
}
项目:PyroDB    文件:CompressionContext.java   
public CompressionContext(Class<? extends Dictionary> dictType, boolean recoveredEdits,
    boolean hasTagCompression) throws SecurityException, NoSuchMethodException,
    InstantiationException, IllegalAccessException, InvocationTargetException {
  Constructor<? extends Dictionary> dictConstructor =
      dictType.getConstructor();
  regionDict = dictConstructor.newInstance();
  tableDict = dictConstructor.newInstance();
  familyDict = dictConstructor.newInstance();
  qualifierDict = dictConstructor.newInstance();
  rowDict = dictConstructor.newInstance();
  if (recoveredEdits) {
    // This will never change
    regionDict.init(1);
    tableDict.init(1);
  } else {
    regionDict.init(Short.MAX_VALUE);
    tableDict.init(Short.MAX_VALUE);
  }
  rowDict.init(Short.MAX_VALUE);
  familyDict.init(Byte.MAX_VALUE);
  qualifierDict.init(Byte.MAX_VALUE);
  if (hasTagCompression) {
    tagCompressionContext = new TagCompressionContext(dictType, Short.MAX_VALUE);
  }
}
项目:PyroDB    文件:BufferedDataBlockEncoder.java   
/**
 * Snapshot of a seeker's current cell: copies the key bytes (and the tag bytes
 * when tag compression is in use) so the clone stays valid after the seeker
 * advances past this cell.
 */
protected ClonedSeekerState(ByteBuffer currentBuffer, byte[] keyBuffer, short rowLength,
    int familyOffset, byte familyLength, int keyLength, int qualOffset, int qualLength,
    long timeStamp, byte typeByte, int valueLen, int valueOffset, long memStoreTS,
    int tagsOffset, int tagsLength, TagCompressionContext tagCompressionContext,
    byte[] tagsBuffer) {
  this.currentBuffer = currentBuffer;
  this.tagCompressionContext = tagCompressionContext;
  // Cell geometry within the buffers.
  this.rowLength = rowLength;
  this.familyOffset = familyOffset;
  this.familyLength = familyLength;
  this.qualifierOffset = qualOffset;
  this.qualifierLength = qualLength;
  this.valueOffset = valueOffset;
  this.valueLength = valueLen;
  this.timestamp = timeStamp;
  this.typeByte = typeByte;
  this.memstoreTS = memStoreTS;
  this.tagsOffset = tagsOffset;
  this.tagsLength = tagsLength;
  // Defensive copy of the key: the source buffer is reused by the seeker.
  this.keyOnlyBuffer = new byte[keyLength];
  System.arraycopy(keyBuffer, 0, this.keyOnlyBuffer, 0, keyLength);
  if (tagCompressionContext != null) {
    // Uncompressed tags only live in tagsBuffer, so copy them as well.
    this.cloneTagsBuffer = new byte[tagsLength];
    System.arraycopy(tagsBuffer, 0, this.cloneTagsBuffer, 0, tagsLength);
  }
}
项目:ditb    文件:BufferedDataBlockEncoder.java   
/**
 * Decodes the serialized key/values from {@code source}, resetting or lazily
 * creating the shared tag-compression context when the file stores
 * dictionary-compressed tags.
 */
@Override
public ByteBuffer decodeKeyValues(DataInputStream source,
    HFileBlockDecodingContext blkDecodingCtx) throws IOException {
  if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
  }

  HFileBlockDefaultDecodingContext decodingCtx =
      (HFileBlockDefaultDecodingContext) blkDecodingCtx;
  boolean compressedTags = decodingCtx.getHFileContext().isIncludesTags()
      && decodingCtx.getHFileContext().isCompressTags();
  if (compressedTags) {
    TagCompressionContext existing = decodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        decodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  return internalDecodeKeyValues(source, 0, 0, decodingCtx);
}
项目:ditb    文件:BufferedDataBlockEncoder.java   
/**
 * Seeker over an encoded block; eagerly builds a tag-compression context when
 * the file stores dictionary-compressed tags.
 */
public BufferedEncodedSeeker(KVComparator comparator,
    HFileBlockDecodingContext decodingCtx) {
  this.comparator = comparator;
  this.samePrefixComparator = comparator;
  this.decodingCtx = decodingCtx;
  if (!decodingCtx.getHFileContext().isCompressTags()) {
    return;
  }
  try {
    this.tagCompressionContext =
        new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to initialize TagCompressionContext", e);
  }
}
项目:ditb    文件:BufferedDataBlockEncoder.java   
/**
 * Writes the per-cell trailer (tags and, when enabled, the mvcc/sequence id)
 * that follows an encoded key/value.
 *
 * @param cell cell whose tags and sequence id should be appended
 * @param out destination stream
 * @param encodingCtx supplies the HFile context and optional tag compression
 * @return unencoded size added
 * @throws IOException if writing to {@code out} fails
 */
protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int size = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    final int tagsLength = cell.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLength);
    if (tagsLength > 0) {
      // A non-null context means this file dictionary-compresses its tags.
      TagCompressionContext ctx = encodingCtx.getTagCompressionContext();
      if (ctx == null) {
        out.write(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
      } else {
        ctx.compressTags(out, cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
      }
    }
    size += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The sequence id is stored as a vint; size accounting parses it again.
    // TODO use a writeVLong which returns the #bytes written to avoid that.
    final long memstoreTS = cell.getSequenceId();
    WritableUtils.writeVLong(out, memstoreTS);
    size += WritableUtils.getVIntSize(memstoreTS);
  }
  return size;
}
项目:ditb    文件:BufferedDataBlockEncoder.java   
/**
 * Reads the per-cell trailer (tags and, when enabled, the mvcc/sequence id)
 * that follows a decoded key/value and copies it into {@code dest}.
 *
 * @param source stream positioned just past the decoded key/value
 * @param dest buffer receiving the tags length, tags, and mvcc vlong
 * @param decodingCtx supplies the HFile context and optional tag compression
 * @throws IOException if reading the tags from {@code source} fails
 */
protected final void afterDecodingKeyValue(DataInputStream source,
    ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
  if (decodingCtx.getHFileContext().isIncludesTags()) {
    int tagsLength = ByteBufferUtils.readCompressedInt(source);
    // Put as unsigned short
    dest.put((byte) ((tagsLength >> 8) & 0xff));
    dest.put((byte) (tagsLength & 0xff));
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = decodingCtx.getTagCompressionContext();
      // When tag compression is been used in this file, tagCompressionContext will have a not
      // null value passed.
      if (tagCompressionContext != null) {
        tagCompressionContext.uncompressTags(source, dest, tagsLength);
      } else {
        ByteBufferUtils.copyFromStreamToBuffer(dest, source, tagsLength);
      }
    }
  }
  if (decodingCtx.getHFileContext().isIncludesMvcc()) {
    long memstoreTS = -1;
    try {
      // Copy memstore timestamp from the data input stream to the byte
      // buffer.
      memstoreTS = WritableUtils.readVLong(source);
      ByteBufferUtils.writeVLong(dest, memstoreTS);
    } catch (IOException ex) {
      // Preserve the underlying IOException as the cause instead of dropping it.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after decoding a key/value", ex);
    }
  }
}
项目:ditb    文件:BufferedDataBlockEncoder.java   
/**
 * Starts encoding a new block: prepares the encoding context, resets or lazily
 * creates the shared tag-compression context, and reserves space for the block
 * length that endBlockEncoding() fills in later.
 */
@Override
public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out)
    throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException (this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the " +
        "encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding(out);
  boolean compressTags = encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags();
  if (compressTags) {
    TagCompressionContext existing = encodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        encodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  ByteBufferUtils.putInt(out, 0); // DUMMY length. This will be updated in endBlockEncoding()
  blkEncodingCtx.setEncodingState(new BufferedDataBlockEncodingState());
}
项目:pbase    文件:BufferedDataBlockEncoder.java   
/**
 * Decodes the serialized key/values from {@code source}, resetting or lazily
 * creating the shared tag-compression context when the file stores
 * dictionary-compressed tags.
 */
@Override
public ByteBuffer decodeKeyValues(DataInputStream source,
    HFileBlockDecodingContext blkDecodingCtx) throws IOException {
  if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
  }

  HFileBlockDefaultDecodingContext decodingCtx =
      (HFileBlockDefaultDecodingContext) blkDecodingCtx;
  boolean compressedTags = decodingCtx.getHFileContext().isIncludesTags()
      && decodingCtx.getHFileContext().isCompressTags();
  if (compressedTags) {
    TagCompressionContext existing = decodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        decodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  return internalDecodeKeyValues(source, 0, 0, decodingCtx);
}
项目:pbase    文件:BufferedDataBlockEncoder.java   
/**
 * Seeker over an encoded block; eagerly builds a tag-compression context when
 * the file stores dictionary-compressed tags.
 */
public BufferedEncodedSeeker(KVComparator comparator,
    HFileBlockDecodingContext decodingCtx) {
  this.comparator = comparator;
  this.samePrefixComparator = comparator;
  this.decodingCtx = decodingCtx;
  if (!decodingCtx.getHFileContext().isCompressTags()) {
    return;
  }
  try {
    this.tagCompressionContext =
        new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to initialize TagCompressionContext", e);
  }
}
项目:pbase    文件:BufferedDataBlockEncoder.java   
/**
 * Writes the per-cell trailer (tags and, when enabled, the mvcc/sequence id)
 * that follows an encoded key/value.
 *
 * @param cell cell whose tags and sequence id should be appended
 * @param out destination stream
 * @param encodingCtx supplies the HFile context and optional tag compression
 * @return unencoded size added
 * @throws IOException if writing to {@code out} fails
 */
protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int size = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    final int tagsLength = cell.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLength);
    if (tagsLength > 0) {
      // A non-null context means this file dictionary-compresses its tags.
      TagCompressionContext ctx = encodingCtx.getTagCompressionContext();
      if (ctx == null) {
        out.write(cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
      } else {
        ctx.compressTags(out, cell.getTagsArray(), cell.getTagsOffset(), tagsLength);
      }
    }
    size += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The sequence id is stored as a vint; size accounting parses it again.
    // TODO use a writeVLong which returns the #bytes written to avoid that.
    final long memstoreTS = cell.getSequenceId();
    WritableUtils.writeVLong(out, memstoreTS);
    size += WritableUtils.getVIntSize(memstoreTS);
  }
  return size;
}
项目:pbase    文件:BufferedDataBlockEncoder.java   
/**
 * Reads the per-cell trailer (tags and, when enabled, the mvcc/sequence id)
 * that follows a decoded key/value and copies it into {@code dest}.
 *
 * @param source stream positioned just past the decoded key/value
 * @param dest buffer receiving the tags length, tags, and mvcc vlong
 * @param decodingCtx supplies the HFile context and optional tag compression
 * @throws IOException if reading the tags from {@code source} fails
 */
protected final void afterDecodingKeyValue(DataInputStream source,
    ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
  if (decodingCtx.getHFileContext().isIncludesTags()) {
    int tagsLength = ByteBufferUtils.readCompressedInt(source);
    // Put as unsigned short
    dest.put((byte) ((tagsLength >> 8) & 0xff));
    dest.put((byte) (tagsLength & 0xff));
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = decodingCtx.getTagCompressionContext();
      // When tag compression is been used in this file, tagCompressionContext will have a not
      // null value passed.
      if (tagCompressionContext != null) {
        tagCompressionContext.uncompressTags(source, dest, tagsLength);
      } else {
        ByteBufferUtils.copyFromStreamToBuffer(dest, source, tagsLength);
      }
    }
  }
  if (decodingCtx.getHFileContext().isIncludesMvcc()) {
    long memstoreTS = -1;
    try {
      // Copy memstore timestamp from the data input stream to the byte
      // buffer.
      memstoreTS = WritableUtils.readVLong(source);
      ByteBufferUtils.writeVLong(dest, memstoreTS);
    } catch (IOException ex) {
      // Preserve the underlying IOException as the cause instead of dropping it.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after decoding a key/value", ex);
    }
  }
}
项目:pbase    文件:BufferedDataBlockEncoder.java   
/**
 * Starts encoding a new block: prepares the encoding context, resets or lazily
 * creates the shared tag-compression context, and reserves space for the block
 * length that endBlockEncoding() fills in later.
 */
@Override
public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out)
    throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException (this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the " +
        "encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding(out);
  boolean compressTags = encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags();
  if (compressTags) {
    TagCompressionContext existing = encodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        encodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  ByteBufferUtils.putInt(out, 0); // DUMMY length. This will be updated in endBlockEncoding()
  blkEncodingCtx.setEncodingState(new BufferedDataBlockEncodingState());
}
项目:HIndex    文件:BufferedDataBlockEncoder.java   
/**
 * Decodes the serialized key/values from {@code source}, resetting or lazily
 * creating the shared tag-compression context when the file stores
 * dictionary-compressed tags.
 */
@Override
public ByteBuffer decodeKeyValues(DataInputStream source,
    HFileBlockDecodingContext blkDecodingCtx) throws IOException {
  if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
  }

  HFileBlockDefaultDecodingContext decodingCtx =
      (HFileBlockDefaultDecodingContext) blkDecodingCtx;
  boolean compressedTags = decodingCtx.getHFileContext().isIncludesTags()
      && decodingCtx.getHFileContext().isCompressTags();
  if (compressedTags) {
    TagCompressionContext existing = decodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        decodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  return internalDecodeKeyValues(source, 0, 0, decodingCtx);
}
项目:HIndex    文件:BufferedDataBlockEncoder.java   
/**
 * Seeker over an encoded block; eagerly builds a tag-compression context when
 * the file stores dictionary-compressed tags.
 */
public BufferedEncodedSeeker(KVComparator comparator,
    HFileBlockDecodingContext decodingCtx) {
  this.comparator = comparator;
  this.samePrefixComparator = comparator;
  this.decodingCtx = decodingCtx;
  if (!decodingCtx.getHFileContext().isCompressTags()) {
    return;
  }
  try {
    this.tagCompressionContext =
        new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
  } catch (Exception e) {
    throw new RuntimeException("Failed to initialize TagCompressionContext", e);
  }
}
项目:HIndex    文件:BufferedDataBlockEncoder.java   
/**
 * Writes the per-cell trailer (tags and, when enabled, the mvcc timestamp)
 * that follows an encoded key/value, consuming it from {@code in}.
 *
 * @param in buffer positioned at the cell's tags length
 * @param out destination stream
 * @param encodingCtx supplies the HFile context and optional tag compression
 * @throws IOException if writing to {@code out} fails
 */
protected final void afterEncodingKeyValue(ByteBuffer in,
    DataOutputStream out, HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    short tagsLength = in.getShort();
    ByteBufferUtils.putCompressedInt(out, tagsLength);
    // There are some tags to be written
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = encodingCtx.getTagCompressionContext();
      // When tag compression is enabled, tagCompressionContext will have a not null value. Write
      // the tags using Dictionary compression in such a case
      if (tagCompressionContext != null) {
        tagCompressionContext.compressTags(out, in, tagsLength);
      } else {
        ByteBufferUtils.moveBufferToStream(out, in, tagsLength);
      }
    }
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // Copy memstore timestamp from the byte buffer to the output stream.
    long memstoreTS = -1;
    try {
      memstoreTS = ByteBufferUtils.readVLong(in);
      WritableUtils.writeVLong(out, memstoreTS);
    } catch (IOException ex) {
      // Preserve the underlying IOException as the cause instead of dropping it.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after encoding a key/value", ex);
    }
  }
}
项目:HIndex    文件:BufferedDataBlockEncoder.java   
/**
 * Reads the per-cell trailer (tags and, when enabled, the mvcc timestamp)
 * that follows a decoded key/value and copies it into {@code dest}.
 *
 * @param source stream positioned just past the decoded key/value
 * @param dest buffer receiving the tags length, tags, and mvcc vlong
 * @param decodingCtx supplies the HFile context and optional tag compression
 * @throws IOException if reading the tags from {@code source} fails
 */
protected final void afterDecodingKeyValue(DataInputStream source,
    ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
  if (decodingCtx.getHFileContext().isIncludesTags()) {
    short tagsLength = (short) ByteBufferUtils.readCompressedInt(source);
    dest.putShort(tagsLength);
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = decodingCtx.getTagCompressionContext();
      // When tag compression is been used in this file, tagCompressionContext will have a not
      // null value passed.
      if (tagCompressionContext != null) {
        tagCompressionContext.uncompressTags(source, dest, tagsLength);
      } else {
        ByteBufferUtils.copyFromStreamToBuffer(dest, source, tagsLength);
      }
    }
  }
  if (decodingCtx.getHFileContext().isIncludesMvcc()) {
    long memstoreTS = -1;
    try {
      // Copy memstore timestamp from the data input stream to the byte
      // buffer.
      memstoreTS = WritableUtils.readVLong(source);
      ByteBufferUtils.writeVLong(dest, memstoreTS);
    } catch (IOException ex) {
      // Preserve the underlying IOException as the cause instead of dropping it.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after decoding a key/value", ex);
    }
  }
}
项目:HIndex    文件:BufferedDataBlockEncoder.java   
/**
 * Encodes the key/values held in {@code in}: resets or lazily creates the
 * shared tag-compression context, runs the internal encoder, then finalizes
 * the block as encoded or plain data depending on the configured encoding.
 */
@Override
public void encodeKeyValues(ByteBuffer in,
    HFileBlockEncodingContext blkEncodingCtx) throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException (this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the " +
        "encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding();
  DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
  boolean compressTags = encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags();
  if (compressTags) {
    TagCompressionContext existing = encodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        encodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  internalEncodeKeyValues(dataOut, in, encodingCtx);
  BlockType blockType = encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE
      ? BlockType.ENCODED_DATA : BlockType.DATA;
  encodingCtx.postEncoding(blockType);
}
项目:hbase    文件:PrivateCellUtil.java   
/**
 * Writes the cell's tags to {@code out} via the given TagCompressionContext,
 * using the off-heap tags ByteBuffer directly when the cell exposes one.
 *
 * @param out the outputstream to which the compression should happen
 * @param cell the cell which has tags
 * @param tagCompressionContext the TagCompressionContext
 * @throws IOException can throw IOException if the compression encounters issue
 */
public static void compressTags(OutputStream out, Cell cell,
    TagCompressionContext tagCompressionContext) throws IOException {
  final int tagsLength = cell.getTagsLength();
  if (cell instanceof ByteBufferExtendedCell) {
    // Off-heap cells expose tags as a ByteBuffer; avoid copying into a byte[].
    ByteBufferExtendedCell bbCell = (ByteBufferExtendedCell) cell;
    tagCompressionContext.compressTags(out, bbCell.getTagsByteBuffer(),
      bbCell.getTagsPosition(), tagsLength);
  } else {
    tagCompressionContext.compressTags(out, cell.getTagsArray(), cell.getTagsOffset(),
      tagsLength);
  }
}
项目:hbase    文件:BufferedDataBlockEncoder.java   
/**
 * Decodes the serialized key/values from {@code source}, resetting or lazily
 * creating the shared tag-compression context when the file stores
 * dictionary-compressed tags.
 */
@Override
public ByteBuffer decodeKeyValues(DataInputStream source,
    HFileBlockDecodingContext blkDecodingCtx) throws IOException {
  if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
  }

  HFileBlockDefaultDecodingContext decodingCtx =
      (HFileBlockDefaultDecodingContext) blkDecodingCtx;
  boolean compressedTags = decodingCtx.getHFileContext().isIncludesTags()
      && decodingCtx.getHFileContext().isCompressTags();
  if (compressedTags) {
    TagCompressionContext existing = decodingCtx.getTagCompressionContext();
    if (existing == null) {
      try {
        decodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the context across blocks; rebuilding it per block is overhead.
      existing.clear();
    }
  }
  return internalDecodeKeyValues(source, 0, 0, decodingCtx);
}
项目:hbase    文件:BufferedDataBlockEncoder.java   
/**
 * Seeker over an encoded block; eagerly builds a tag-compression context when
 * the file stores dictionary-compressed tags, then allocates the two seeker
 * states used while iterating.
 */
public BufferedEncodedSeeker(CellComparator comparator,
    HFileBlockDecodingContext decodingCtx) {
  super(comparator, decodingCtx);
  if (decodingCtx.getHFileContext().isCompressTags()) {
    try {
      this.tagCompressionContext =
          new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
    } catch (Exception e) {
      throw new RuntimeException("Failed to initialize TagCompressionContext", e);
    }
  }
  current = createSeekerState(); // always valid
  previous = createSeekerState(); // may not be valid
}
项目:hbase    文件:BufferedDataBlockEncoder.java   
/**
 * Writes the per-cell trailer (tags and, when enabled, the mvcc/sequence id)
 * that follows an encoded key/value.
 *
 * @param cell cell whose tags and sequence id should be appended
 * @param out destination stream
 * @param encodingCtx supplies the HFile context and optional tag compression
 * @return unencoded size added
 * @throws IOException if writing to {@code out} fails
 */
protected final int afterEncodingKeyValue(Cell cell, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int size = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    final int tagsLength = cell.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLength);
    if (tagsLength > 0) {
      // A non-null context means this file dictionary-compresses its tags.
      TagCompressionContext ctx = encodingCtx.getTagCompressionContext();
      if (ctx == null) {
        PrivateCellUtil.writeTags(out, cell, tagsLength);
      } else {
        // Not passing tagsLength considering that parsing of the tagsLength is not costly
        PrivateCellUtil.compressTags(out, cell, ctx);
      }
    }
    size += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The sequence id is stored as a vint; size accounting parses it again.
    // TODO use a writeVLong which returns the #bytes written to avoid that.
    final long memstoreTS = cell.getSequenceId();
    WritableUtils.writeVLong(out, memstoreTS);
    size += WritableUtils.getVIntSize(memstoreTS);
  }
  return size;
}
项目:hbase    文件:BufferedDataBlockEncoder.java   
/**
 * Reads the data that trails a decoded key/value: the tags length and bytes
 * (uncompressing via the tag dictionary when one is configured), then the
 * memstore timestamp when MVCC is included, copying everything into
 * {@code dest}.
 *
 * @param source encoded input positioned just past the key/value
 * @param dest buffer receiving the decoded bytes
 * @param decodingCtx context holding the HFile settings and tag dictionary
 * @throws IOException if reading the tag bytes fails
 */
protected final void afterDecodingKeyValue(DataInputStream source,
    ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
  if (decodingCtx.getHFileContext().isIncludesTags()) {
    int tagsLength = ByteBufferUtils.readCompressedInt(source);
    // Put as unsigned short
    dest.put((byte) ((tagsLength >> 8) & 0xff));
    dest.put((byte) (tagsLength & 0xff));
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = decodingCtx.getTagCompressionContext();
      // When tag compression is in use for this file, tagCompressionContext
      // will have a non-null value.
      if (tagCompressionContext != null) {
        tagCompressionContext.uncompressTags(source, dest, tagsLength);
      } else {
        ByteBufferUtils.copyFromStreamToBuffer(dest, source, tagsLength);
      }
    }
  }
  if (decodingCtx.getHFileContext().isIncludesMvcc()) {
    long memstoreTS = -1;
    try {
      // Copy memstore timestamp from the data input stream to the byte
      // buffer.
      memstoreTS = WritableUtils.readVLong(source);
      ByteBufferUtils.writeVLong(dest, memstoreTS);
    } catch (IOException ex) {
      // BUGFIX: chain the original IOException as the cause so the root
      // failure is not lost when this is rethrown as unchecked.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after decoding a key/value", ex);
    }
  }
}
项目:hbase    文件:BufferedDataBlockEncoder.java   
/**
 * Prepares per-block encoder state: validates the context type, primes the
 * encoding pipeline, (re)initialises the shared tag dictionary when tag
 * compression is enabled, and reserves a placeholder for the block length
 * that endBlockEncoding() fills in later.
 *
 * @param blkEncodingCtx must be a HFileBlockDefaultEncodingContext
 * @param out stream the block will be encoded into
 * @throws IOException on an unsupported context type or a stream failure
 */
@Override
public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out)
    throws IOException {
  // Only the default encoding context is supported; reject anything else.
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName()
        + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding(out);
  boolean compressingTags = encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags();
  if (compressingTags) {
    TagCompressionContext existing = encodingCtx.getTagCompressionContext();
    if (existing == null) {
      // First block for this context: build the dictionary-backed compressor
      // once; rebuilding it per block would be needless overhead.
      try {
        encodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the existing compressor; just reset its dictionaries.
      existing.clear();
    }
  }
  StreamUtils.writeInt(out, 0); // DUMMY length. This will be updated in endBlockEncoding()
  blkEncodingCtx.setEncodingState(new BufferedDataBlockEncodingState());
}
项目:PyroDB    文件:BufferedDataBlockEncoder.java   
/**
 * Decodes an entire block of key/values: validates the context type,
 * (re)initialises the shared tag dictionary when tag compression is enabled,
 * then delegates to internalDecodeKeyValues().
 *
 * @param source encoded block input
 * @param blkDecodingCtx must be a HFileBlockDefaultDecodingContext
 * @return buffer holding the decoded key/values
 * @throws IOException on an unsupported context type or a decode failure
 */
@Override
public ByteBuffer decodeKeyValues(DataInputStream source,
    HFileBlockDecodingContext blkDecodingCtx) throws IOException {
  // Only the default decoding context is supported; reject anything else.
  if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context.");
  }

  HFileBlockDefaultDecodingContext decodingCtx =
      (HFileBlockDefaultDecodingContext) blkDecodingCtx;
  boolean compressedTags = decodingCtx.getHFileContext().isIncludesTags()
      && decodingCtx.getHFileContext().isCompressTags();
  if (compressedTags) {
    TagCompressionContext existing = decodingCtx.getTagCompressionContext();
    if (existing == null) {
      // First block for this context: create the dictionary-backed
      // decompressor once; rebuilding it per block would be needless overhead.
      try {
        decodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the existing decompressor; just reset its dictionaries.
      existing.clear();
    }
  }
  return internalDecodeKeyValues(source, 0, 0, decodingCtx);
}
项目:PyroDB    文件:BufferedDataBlockEncoder.java   
public BufferedEncodedSeeker(KVComparator comparator,
    HFileBlockDecodingContext decodingCtx) {
  this.comparator = comparator;
  this.samePrefixComparator = comparator;
  this.decodingCtx = decodingCtx;
  if (decodingCtx.getHFileContext().isCompressTags()) {
    try {
      tagCompressionContext = new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE);
    } catch (Exception e) {
      throw new RuntimeException("Failed to initialize TagCompressionContext", e);
    }
  }
}
项目:PyroDB    文件:BufferedDataBlockEncoder.java   
/**
 * Writes the post-key/value payload for {@code kv}: its tags (dictionary-
 * compressed when a TagCompressionContext is configured) and, when MVCC is
 * included, its memstore timestamp.
 *
 * @param kv key/value whose trailing data is written
 * @param out destination stream for the encoded block
 * @param encodingCtx carries the HFile settings and the shared tag dictionary
 * @return unencoded size added
 * @throws IOException if a write to the underlying stream fails
 */
protected final int afterEncodingKeyValue(KeyValue kv, DataOutputStream out,
    HFileBlockDefaultEncodingContext encodingCtx) throws IOException {
  int addedSize = 0;
  if (encodingCtx.getHFileContext().isIncludesTags()) {
    short tagsLen = kv.getTagsLength();
    ByteBufferUtils.putCompressedInt(out, tagsLen);
    if (tagsLen > 0) {
      // A non-null context means dictionary compression of tags is active;
      // fall back to writing the raw tag bytes otherwise.
      TagCompressionContext tagCtx = encodingCtx.getTagCompressionContext();
      if (tagCtx == null) {
        out.write(kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
      } else {
        tagCtx.compressTags(out, kv.getTagsArray(), kv.getTagsOffset(), tagsLen);
      }
    }
    addedSize += tagsLen + KeyValue.TAGS_LENGTH_SIZE;
  }
  if (encodingCtx.getHFileContext().isIncludesMvcc()) {
    // The mvcc version (memstore timestamp) rides along after the tags.
    long memstoreTS = kv.getMvccVersion();
    WritableUtils.writeVLong(out, memstoreTS);
    // TODO use a writeVLong which returns the #bytes written so that 2 time parsing can be
    // avoided.
    addedSize += WritableUtils.getVIntSize(memstoreTS);
  }
  return addedSize;
}
项目:PyroDB    文件:BufferedDataBlockEncoder.java   
/**
 * Reads the data that trails a decoded key/value: the tags length and bytes
 * (uncompressing via the tag dictionary when one is configured), then the
 * memstore timestamp when MVCC is included, copying everything into
 * {@code dest}.
 *
 * @param source encoded input positioned just past the key/value
 * @param dest buffer receiving the decoded bytes
 * @param decodingCtx context holding the HFile settings and tag dictionary
 * @throws IOException if reading the tag bytes fails
 */
protected final void afterDecodingKeyValue(DataInputStream source,
    ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
  if (decodingCtx.getHFileContext().isIncludesTags()) {
    short tagsLength = (short) ByteBufferUtils.readCompressedInt(source);
    dest.putShort(tagsLength);
    if (tagsLength > 0) {
      TagCompressionContext tagCompressionContext = decodingCtx.getTagCompressionContext();
      // When tag compression is in use for this file, tagCompressionContext
      // will have a non-null value.
      if (tagCompressionContext != null) {
        tagCompressionContext.uncompressTags(source, dest, tagsLength);
      } else {
        ByteBufferUtils.copyFromStreamToBuffer(dest, source, tagsLength);
      }
    }
  }
  if (decodingCtx.getHFileContext().isIncludesMvcc()) {
    long memstoreTS = -1;
    try {
      // Copy memstore timestamp from the data input stream to the byte
      // buffer.
      memstoreTS = WritableUtils.readVLong(source);
      ByteBufferUtils.writeVLong(dest, memstoreTS);
    } catch (IOException ex) {
      // BUGFIX: chain the original IOException as the cause so the root
      // failure is not lost when this is rethrown as unchecked.
      throw new RuntimeException("Unable to copy memstore timestamp " +
          memstoreTS + " after decoding a key/value", ex);
    }
  }
}
项目:PyroDB    文件:BufferedDataBlockEncoder.java   
/**
 * Prepares per-block encoder state: validates the context type, primes the
 * encoding pipeline, (re)initialises the shared tag dictionary when tag
 * compression is enabled, and reserves a placeholder for the block length
 * that endBlockEncoding() fills in later.
 *
 * @param blkEncodingCtx must be a HFileBlockDefaultEncodingContext
 * @param out stream the block will be encoded into
 * @throws IOException on an unsupported context type or a stream failure
 */
@Override
public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out)
    throws IOException {
  // Only the default encoding context is supported; reject anything else.
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName()
        + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding(out);
  boolean compressingTags = encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags();
  if (compressingTags) {
    TagCompressionContext existing = encodingCtx.getTagCompressionContext();
    if (existing == null) {
      // First block for this context: build the dictionary-backed compressor
      // once; rebuilding it per block would be needless overhead.
      try {
        encodingCtx.setTagCompressionContext(
            new TagCompressionContext(LRUDictionary.class, Byte.MAX_VALUE));
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    } else {
      // Reuse the existing compressor; just reset its dictionaries.
      existing.clear();
    }
  }
  ByteBufferUtils.putInt(out, 0); // DUMMY length. This will be updated in endBlockEncoding()
  blkEncodingCtx.setEncodingState(new BufferedDataBlockEncodingState());
}
项目:ditb    文件:HFileBlockDefaultDecodingContext.java   
/**
 * Returns the {@link TagCompressionContext} used for dictionary-based tag
 * decompression, or {@code null} when tag compression is not in use.
 */
public TagCompressionContext getTagCompressionContext() {
  return tagCompressionContext;
}
项目:ditb    文件:HFileBlockDefaultDecodingContext.java   
/**
 * Sets the {@link TagCompressionContext} to use for dictionary-based tag
 * decompression; pass {@code null} to disable it.
 */
public void setTagCompressionContext(TagCompressionContext tagCompressionContext) {
  this.tagCompressionContext = tagCompressionContext;
}
项目:ditb    文件:HFileBlockDefaultEncodingContext.java   
/**
 * Returns the {@link TagCompressionContext} used for dictionary-based tag
 * compression, or {@code null} when tag compression is not in use.
 */
public TagCompressionContext getTagCompressionContext() {
  return tagCompressionContext;
}
项目:ditb    文件:HFileBlockDefaultEncodingContext.java   
/**
 * Sets the {@link TagCompressionContext} to use for dictionary-based tag
 * compression; pass {@code null} to disable it.
 */
public void setTagCompressionContext(TagCompressionContext tagCompressionContext) {
  this.tagCompressionContext = tagCompressionContext;
}
项目:pbase    文件:HFileBlockDefaultDecodingContext.java   
/**
 * Returns the {@link TagCompressionContext} used for dictionary-based tag
 * decompression, or {@code null} when tag compression is not in use.
 */
public TagCompressionContext getTagCompressionContext() {
  return tagCompressionContext;
}
项目:pbase    文件:HFileBlockDefaultDecodingContext.java   
/**
 * Sets the {@link TagCompressionContext} to use for dictionary-based tag
 * decompression; pass {@code null} to disable it.
 */
public void setTagCompressionContext(TagCompressionContext tagCompressionContext) {
  this.tagCompressionContext = tagCompressionContext;
}