Java class org.apache.hadoop.hbase.io.hfile.BlockType: example source code
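The excerpts below are collected from a number of HBase forks and cover the recurring uses of BlockType: tagging a freshly encoded block as ENCODED_DATA or plain DATA, requesting a BLOOM_CHUNK from the block reader, and dispatching on the Bloom meta block types. As a quick orientation, here is a minimal sketch that exercises the enum directly; it assumes nothing beyond values(), isData(), and the constants that appear in the excerpts themselves.

import org.apache.hadoop.hbase.io.hfile.BlockType;

public class BlockTypeTour {
  public static void main(String[] args) {
    // Walk every block type defined by the enum.
    for (BlockType t : BlockType.values()) {
      // isData() is the check the cache-key example below uses to decide
      // whether a data block encoding matters for this block type.
      System.out.println(t + " isData=" + t.isData());
    }
    // The dispatch that recurs throughout the encoder examples below.
    BlockType written = BlockType.ENCODED_DATA;
    System.out.println(written == BlockType.DATA ? "plain data" : "encoded data");
  }
}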

Project: HIndex    File: PrefixTreeCodec.java
/**
 * Copied from BufferedDataBlockEncoder. Almost definitely can be improved, but I'm not familiar
 * enough with the concept of the HFileBlockEncodingContext.
 */
@Override
public void encodeKeyValues(ByteBuffer in,
    HFileBlockEncodingContext blkEncodingCtx) throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx
      = (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding();
  DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
  internalEncodeKeyValues(dataOut, in, encodingCtx.getHFileContext().isIncludesMvcc(),
      encodingCtx.getHFileContext().isIncludesTags());

  // Do I need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: c5    File: PrefixTreeCodec.java
/**
 * Copied from BufferedDataBlockEncoder. Almost definitely can be improved, but I'm not familiar
 * enough with the concept of the HFileBlockEncodingContext.
 */
@Override
public void encodeKeyValues(ByteBuffer in, boolean includesMvccVersion,
    HFileBlockEncodingContext blkEncodingCtx) throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx
      = (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding();
  DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
  internalEncodeKeyValues(dataOut, in, includesMvccVersion);

  // Do I need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: c5    File: BufferedDataBlockEncoder.java
@Override
public void encodeKeyValues(ByteBuffer in,
    boolean includesMemstoreTS,
    HFileBlockEncodingContext blkEncodingCtx) throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding();
  DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
  internalEncodeKeyValues(dataOut, in, includesMemstoreTS);
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: c5    File: HFileBlockDefaultEncodingContext.java
/**
 * @param uncompressedBytesWithHeader the uncompressed block bytes, including the block header
 * @param blockType the type to record for this block
 * @param headerBytes the header bytes to copy through uncompressed
 * @throws IOException if writing to the compression stream fails
 */
protected void compressAfterEncoding(byte[] uncompressedBytesWithHeader,
    BlockType blockType, byte[] headerBytes) throws IOException {
  this.uncompressedBytesWithHeader = uncompressedBytesWithHeader;
  if (compressionAlgorithm != NONE) {
    compressedByteStream.reset();
    compressedByteStream.write(headerBytes);
    compressionStream.resetState();
    compressionStream.write(uncompressedBytesWithHeader,
        headerBytes.length, uncompressedBytesWithHeader.length
            - headerBytes.length);

    compressionStream.flush();
    compressionStream.finish();
    onDiskBytesWithHeader = compressedByteStream.toByteArray();
  } else {
    onDiskBytesWithHeader = uncompressedBytesWithHeader;
  }
  this.blockType = blockType;
}
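The pattern above, copying the header bytes through verbatim and compressing only the payload, can be reproduced with the JDK alone. Below is a minimal sketch using java.util.zip as a stand-in for HBase's compression streams; the class and method names are illustrative, not part of HBase.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.DeflaterOutputStream;

public class HeaderPreservingCompress {
  /** Compresses everything after the header, keeping the header bytes as-is. */
  static byte[] compressAfterHeader(byte[] bytesWithHeader, int headerLen)
      throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    // 1. The header passes through uncompressed, as in compressAfterEncoding above.
    out.write(bytesWithHeader, 0, headerLen);
    // 2. The payload past the header is compressed; closing the stream finishes it.
    try (DeflaterOutputStream deflate = new DeflaterOutputStream(out)) {
      deflate.write(bytesWithHeader, headerLen, bytesWithHeader.length - headerLen);
    }
    return out.toByteArray();
  }
}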
Project: DominoHBase    File: BufferedDataBlockEncoder.java
@Override
public void encodeKeyValues(ByteBuffer in,
    boolean includesMemstoreTS,
    HFileBlockEncodingContext blkEncodingCtx) throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding();
  DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
  internalEncodeKeyValues(dataOut, in, includesMemstoreTS);
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: DominoHBase    File: HFileBlockDefaultEncodingContext.java
/**
 * @param uncompressedBytesWithHeader the uncompressed block bytes, including the block header
 * @param blockType the type to record for this block
 * @param headerBytes the header bytes to copy through uncompressed
 * @throws IOException if writing to the compression stream fails
 */
protected void compressAfterEncoding(byte[] uncompressedBytesWithHeader,
    BlockType blockType, byte[] headerBytes) throws IOException {
  this.uncompressedBytesWithHeader = uncompressedBytesWithHeader;
  if (compressionAlgorithm != NONE) {
    compressedByteStream.reset();
    compressedByteStream.write(headerBytes);
    compressionStream.resetState();
    compressionStream.write(uncompressedBytesWithHeader,
        headerBytes.length, uncompressedBytesWithHeader.length
            - headerBytes.length);

    compressionStream.flush();
    compressionStream.finish();
    onDiskBytesWithHeader = compressedByteStream.toByteArray();
  } else {
    onDiskBytesWithHeader = uncompressedBytesWithHeader;
  }
  this.blockType = blockType;
}
Project: ditb    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset,
      keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false, true,
          BlockType.BLOOM_CHUNK, null);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf, bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
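This contains() implementation recurs, with small signature variations, across the forks below: map the key to a chunk through the root index, read that chunk as a BLOOM_CHUNK block, then probe the chunk's bit array. Reduced to a runnable skeleton, with hypothetical stubs standing in for the index, reader, and ByteBloomFilter calls:

public class CompoundBloomProbeSketch {
  /** Stub for index.rootBlockContainingKey; negative means the key maps to no chunk. */
  static int chunkIndexFor(byte[] key) { return key.length == 0 ? -1 : 0; }

  /** Stub for reader.readBlock(..., BlockType.BLOOM_CHUNK, ...). */
  static byte[] loadBloomChunk(int chunk) { return new byte[64]; }

  /** Stub for ByteBloomFilter.contains over the chunk's bit array. */
  static boolean probeChunk(byte[] chunk, byte[] key) { return false; }

  static boolean contains(byte[] key) {
    int chunk = chunkIndexFor(key);
    if (chunk < 0) {
      return false; // definitely absent: no Bloom chunk covers this key
    }
    // Load the chunk (a cached, positional read in the real code) and probe it.
    return probeChunk(loadBloomChunk(chunk), key);
  }

  public static void main(String[] args) {
    System.out.println(contains("row1".getBytes()));
  }
}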
Project: ditb    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: ditb    File: PrefixTreeCodec.java
@Override
public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
    byte[] uncompressedBytesWithHeader) throws IOException {
  PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx.getEncodingState();
  PrefixTreeEncoder builder = state.builder;
  builder.flush();
  EncoderFactory.checkIn(builder);
  // Do I need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: ditb    File: BufferedDataBlockEncoder.java
@Override
public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
    byte[] uncompressedBytesWithHeader) throws IOException {
  BufferedDataBlockEncodingState state = (BufferedDataBlockEncodingState) encodingCtx
      .getEncodingState();
  // Patch the unencoded payload size into the buffer, just past the block header
  // and the encoding id.
  Bytes.putInt(uncompressedBytesWithHeader,
      HConstants.HFILEBLOCK_HEADER_SIZE + DataBlockEncoding.ID_SIZE,
      state.unencodedDataSizeWritten);
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
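Bytes.putInt writes a big-endian 4-byte int at the given array offset. Here is a plain-JDK sketch of the same header patch, with illustrative constants standing in for HConstants.HFILEBLOCK_HEADER_SIZE and DataBlockEncoding.ID_SIZE:

import java.nio.ByteBuffer;

public class PatchUnencodedSize {
  // Illustrative stand-ins; the real values come from HConstants and DataBlockEncoding.
  static final int HEADER_SIZE = 33;
  static final int ENCODING_ID_SIZE = 2;

  /** Writes the unencoded payload size just past the header and the encoding id. */
  static void patch(byte[] blockWithHeader, int unencodedDataSizeWritten) {
    // ByteBuffer defaults to big-endian, matching Bytes.putInt.
    ByteBuffer.wrap(blockWithHeader)
        .putInt(HEADER_SIZE + ENCODING_ID_SIZE, unencodedDataSizeWritten);
  }
}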
Project: LCIndex-HBase-0.94.16    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset, keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false,
          BlockType.BLOOM_CHUNK);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: LCIndex-HBase-0.94.16    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: LCIndex-HBase-0.94.16    File: TestSchemaMetrics.java
@Test
public void testIncrements() {
  Random rand = new Random(23982737L);
  for (int i = 1; i <= 3; ++i) {
    final String tableName = "table" + i;
    for (int j = 1; j <= 3; ++j) {
      final String cfName = "cf" + j;
      SchemaMetrics sm = SchemaMetrics.getInstance(tableName, cfName);
      for (boolean isInBloom : BOOL_VALUES) {
        sm.updateBloomMetrics(isInBloom);
        checkMetrics();
      }

      for (BlockCategory blockCat : BlockType.BlockCategory.values()) {
        if (blockCat == BlockCategory.ALL_CATEGORIES) {
          continue;
        }

        for (boolean isCompaction : BOOL_VALUES) {
          sm.updateOnCacheHit(blockCat, isCompaction);
          checkMetrics();
          sm.updateOnCacheMiss(blockCat, isCompaction, rand.nextInt());
          checkMetrics();
        }

        for (boolean isEviction : BOOL_VALUES) {
          sm.updateOnCachePutOrEvict(blockCat, (isEviction ? -1 : 1)
              * rand.nextInt(1024 * 1024), isEviction);
        }
      }
    }
  }
}
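The test iterates every BlockCategory except the ALL_CATEGORIES aggregate. A minimal sketch of that same iteration, keeping per-category counters in an EnumMap; it assumes only the nested BlockType.BlockCategory enum used above, plus its BLOOM constant.

import java.util.EnumMap;

import org.apache.hadoop.hbase.io.hfile.BlockType.BlockCategory;

public class PerCategoryCounters {
  public static void main(String[] args) {
    EnumMap<BlockCategory, Long> cacheHits = new EnumMap<>(BlockCategory.class);
    for (BlockCategory cat : BlockCategory.values()) {
      if (cat == BlockCategory.ALL_CATEGORIES) {
        continue; // the aggregate bucket is maintained implicitly by the real metrics
      }
      cacheHits.put(cat, 0L);
    }
    // Record one hit against the Bloom category.
    cacheHits.computeIfPresent(BlockCategory.BLOOM, (cat, n) -> n + 1);
    System.out.println(cacheHits);
  }
}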
Project: pbase    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset,
      keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false, true,
          BlockType.BLOOM_CHUNK, null);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf, bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: pbase    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: pbase    File: PrefixTreeCodec.java
@Override
public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
    byte[] uncompressedBytesWithHeader) throws IOException {
  PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx.getEncodingState();
  PrefixTreeEncoder builder = state.builder;
  builder.flush();
  EncoderFactory.checkIn(builder);
  // Do I need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: pbase    File: BufferedDataBlockEncoder.java
@Override
public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
    byte[] uncompressedBytesWithHeader) throws IOException {
  BufferedDataBlockEncodingState state = (BufferedDataBlockEncodingState) encodingCtx
      .getEncodingState();
  // Patch the unencoded payload size into the buffer, just past the block header
  // and the encoding id.
  Bytes.putInt(uncompressedBytesWithHeader,
      HConstants.HFILEBLOCK_HEADER_SIZE + DataBlockEncoding.ID_SIZE,
      state.unencodedDataSizeWritten);
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: HIndex    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset, keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false, true,
          BlockType.BLOOM_CHUNK);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: HIndex    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: HIndex    File: BufferedDataBlockEncoder.java
@Override
public void encodeKeyValues(ByteBuffer in,
    HFileBlockEncodingContext blkEncodingCtx) throws IOException {
  if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) {
    throw new IOException(this.getClass().getName() + " only accepts "
        + HFileBlockDefaultEncodingContext.class.getName() + " as the encoding context.");
  }

  HFileBlockDefaultEncodingContext encodingCtx =
      (HFileBlockDefaultEncodingContext) blkEncodingCtx;
  encodingCtx.prepareEncoding();
  DataOutputStream dataOut = encodingCtx.getOutputStreamForEncoder();
  if (encodingCtx.getHFileContext().isIncludesTags()
      && encodingCtx.getHFileContext().isCompressTags()) {
    if (encodingCtx.getTagCompressionContext() != null) {
      // Reuse the existing TagCompressionContext; creating a new one for every
      // block encoding would be needless overhead.
      encodingCtx.getTagCompressionContext().clear();
    } else {
      try {
        TagCompressionContext tagCompressionContext = new TagCompressionContext(
            LRUDictionary.class, Byte.MAX_VALUE);
        encodingCtx.setTagCompressionContext(tagCompressionContext);
      } catch (Exception e) {
        throw new IOException("Failed to initialize TagCompressionContext", e);
      }
    }
  }
  internalEncodeKeyValues(dataOut, in, encodingCtx);
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: HIndex    File: HFileBlockDefaultEncodingContext.java
@Override
public void postEncoding(BlockType blockType)
    throws IOException {
  dataOut.flush();
  compressAfterEncodingWithBlockType(encodedStream.toByteArray(), blockType);
  this.blockType = blockType;
}
Project: IRIndex    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset, keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false,
          BlockType.BLOOM_CHUNK);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: IRIndex    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: IRIndex    File: TestSchemaMetrics.java
@Test
public void testIncrements() {
  Random rand = new Random(23982737L);
  for (int i = 1; i <= 3; ++i) {
    final String tableName = "table" + i;
    for (int j = 1; j <= 3; ++j) {
      final String cfName = "cf" + j;
      SchemaMetrics sm = SchemaMetrics.getInstance(tableName, cfName);
      for (boolean isInBloom : BOOL_VALUES) {
        sm.updateBloomMetrics(isInBloom);
        checkMetrics();
      }

      for (BlockCategory blockCat : BlockType.BlockCategory.values()) {
        if (blockCat == BlockCategory.ALL_CATEGORIES) {
          continue;
        }

        for (boolean isCompaction : BOOL_VALUES) {
          sm.updateOnCacheHit(blockCat, isCompaction);
          checkMetrics();
          sm.updateOnCacheMiss(blockCat, isCompaction, rand.nextInt());
          checkMetrics();
        }

        for (boolean isEviction : BOOL_VALUES) {
          sm.updateOnCachePutOrEvict(blockCat, (isEviction ? -1 : 1)
              * rand.nextInt(1024 * 1024), isEviction);
        }
      }
    }
  }
}
Project: hbase    File: StoreFileReader.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: hbase    File: AbstractDataBlockEncoder.java
protected void postEncoding(HFileBlockEncodingContext encodingCtx)
    throws IOException {
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: bigbase    File: OffHeapBlockCache.java
/**
 * Add block to cache.
 * @param cacheKey The block's cache key.
 * @param buf The block contents wrapped in a ByteBuffer.
 * @param inMemory Whether block should be treated as in-memory
 */
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
  boolean contains = false;
  try {
    String blockName = cacheKey.toString();
    contains = offHeapCache.contains(blockName);
    if (contains) {
      // TODO - what does it mean? Can we ignore this?
      throw new RuntimeException("Cached an already cached block: " + blockName);
    }
    // Always cache the block in the off-heap cache first
    offHeapCache.put(blockName, buf);

    if (buf.getBlockType() != BlockType.DATA && onHeapEnabled()) {
      // Cache only non-data blocks on-heap
      onHeapCache.cacheBlock(cacheKey, buf, inMemory);
    }
    if (isExternalStorageEnabled()) {
      // FIXME This code disables storage in non-test mode???
      byte[] hashed = Utils.hash128(blockName);
      // TODO : do we really need to check this?
      if (!extStorageCache.contains(hashed)) {
        // Store externally only if the object was found in a block cache
        // but not in the external cache (ONLY IN TEST MODE)
        storeExternalWithCodec(blockName, buf, false);
      }
    }
  } catch (Exception e) {
    LOG.error(e);
    throw new RuntimeException(e);
  }
}
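The core of this method is a tiered-cache policy: every block goes to the off-heap cache, and only blocks other than plain DATA are also cached on-heap. Isolated as a pure function, assuming Cacheable is the standard HBase interface used in the signature above, the decision is:

import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.Cacheable;

public class OnHeapPolicy {
  /** Non-DATA blocks (e.g. index, Bloom, meta) are small and hot, so they may also live on-heap. */
  static boolean shouldCacheOnHeap(Cacheable buf, boolean onHeapEnabled) {
    return onHeapEnabled && buf.getBlockType() != BlockType.DATA;
  }
}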
Project: bigbase    File: BlockCacheKeyTest.java
/**
 * Instantiates a new block cache key.
 *
 * @param file the HFile name
 * @param offset the block's offset within the file
 * @param encoding the data block encoding in effect for the block
 * @param blockType the block type, or null if unknown
 */
public BlockCacheKeyTest(String file, long offset, DataBlockEncoding encoding,
    BlockType blockType) {
  this.hfileName = file;
  this.offset = offset;
  // We add encoding to the cache key only for data blocks. If the block type
  // is unknown (this should never be the case in production), we just use
  // the provided encoding, because it might be a data block.
  this.encoding = (blockType == null || blockType.isData()) ? encoding :
      DataBlockEncoding.NONE;
}
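A short, hypothetical usage sketch of the constructor above: for a data block the encoding survives into the key, while for an index block it is normalized to NONE, so readers configured with different encodings share one cached copy. DataBlockEncoding.PREFIX and BlockType.LEAF_INDEX are assumed here as representative values.

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.BlockType;

public class BlockCacheKeyTestDemo {
  public static void main(String[] args) {
    // Data block: isData() is true, so the encoding stays part of the key.
    BlockCacheKeyTest dataKey =
        new BlockCacheKeyTest("hfile-1", 0L, DataBlockEncoding.PREFIX, BlockType.DATA);
    // Index block: the encoding is normalized to NONE, so readers configured
    // with different encodings still share one cached index block.
    BlockCacheKeyTest indexKey =
        new BlockCacheKeyTest("hfile-1", 4096L, DataBlockEncoding.PREFIX, BlockType.LEAF_INDEX);
  }
}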
Project: PyroDB    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset,
      keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false, true,
          BlockType.BLOOM_CHUNK, null);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: PyroDB    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: PyroDB    File: PrefixTreeCodec.java
@Override
public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
    byte[] uncompressedBytesWithHeader) throws IOException {
  PrefixTreeEncodingState state = (PrefixTreeEncodingState) encodingCtx.getEncodingState();
  PrefixTreeEncoder builder = state.builder;
  builder.flush();
  EncoderFactory.checkIn(builder);
  // Do I need to check this, or will it always be DataBlockEncoding.PREFIX_TREE?
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: PyroDB    File: BufferedDataBlockEncoder.java
@Override
public void endBlockEncoding(HFileBlockEncodingContext encodingCtx, DataOutputStream out,
    byte[] uncompressedBytesWithHeader) throws IOException {
  BufferedDataBlockEncodingState state = (BufferedDataBlockEncodingState) encodingCtx
      .getEncodingState();
  // Patch the unencoded payload size into the buffer, just past the block header
  // and the encoding id.
  Bytes.putInt(uncompressedBytesWithHeader,
      HConstants.HFILEBLOCK_HEADER_SIZE + DataBlockEncoding.ID_SIZE,
      state.unencodedDataSizeWritten);
  if (encodingCtx.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    encodingCtx.postEncoding(BlockType.ENCODED_DATA);
  } else {
    encodingCtx.postEncoding(BlockType.DATA);
  }
}
Project: c5    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset, keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false,
          BlockType.BLOOM_CHUNK);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: c5    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: c5    File: HFileBlockDefaultEncodingContext.java
@Override
public void postEncoding(BlockType blockType)
    throws IOException {
  dataOut.flush();
  compressAfterEncodingWithBlockType(encodedStream.toByteArray(), blockType);
  this.blockType = blockType;
}
Project: HBase-Research    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset, keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false,
          BlockType.BLOOM_CHUNK);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: HBase-Research    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}
Project: HBase-Research    File: TestSchemaMetrics.java
@Test
public void testIncrements() {
  Random rand = new Random(23982737L);
  for (int i = 1; i <= 3; ++i) {
    final String tableName = "table" + i;
    for (int j = 1; j <= 3; ++j) {
      final String cfName = "cf" + j;
      SchemaMetrics sm = SchemaMetrics.getInstance(tableName, cfName);
      for (boolean isInBloom : BOOL_VALUES) {
        sm.updateBloomMetrics(isInBloom);
        checkMetrics();
      }

      for (BlockCategory blockCat : BlockType.BlockCategory.values()) {
        if (blockCat == BlockCategory.ALL_CATEGORIES) {
          continue;
        }

        for (boolean isCompaction : BOOL_VALUES) {
          sm.updateOnCacheHit(blockCat, isCompaction);
          checkMetrics();
          sm.updateOnCacheMiss(blockCat, isCompaction, rand.nextInt());
          checkMetrics();
        }

        for (boolean isEviction : BOOL_VALUES) {
          sm.updateOnCachePutOrEvict(blockCat, (isEviction ? -1 : 1)
              * rand.nextInt(1024 * 1024), isEviction);
        }
      }
    }
  }
}
Project: hbase-0.94.8-qod    File: CompoundBloomFilter.java
@Override
public boolean contains(byte[] key, int keyOffset, int keyLength,
    ByteBuffer bloom) {
  // We try to store the result in this variable so we can update stats for
  // testing, but when an error happens, we log a message and return.
  boolean result;

  int block = index.rootBlockContainingKey(key, keyOffset, keyLength);
  if (block < 0) {
    result = false; // This key is not in the file.
  } else {
    HFileBlock bloomBlock;
    try {
      // We cache the block and use a positional read.
      bloomBlock = reader.readBlock(index.getRootBlockOffset(block),
          index.getRootBlockDataSize(block), true, true, false,
          BlockType.BLOOM_CHUNK);
    } catch (IOException ex) {
      // The Bloom filter is broken, turn it off.
      throw new IllegalArgumentException(
          "Failed to load Bloom block for key "
              + Bytes.toStringBinary(key, keyOffset, keyLength), ex);
    }

    ByteBuffer bloomBuf = bloomBlock.getBufferReadOnly();
    result = ByteBloomFilter.contains(key, keyOffset, keyLength,
        bloomBuf.array(), bloomBuf.arrayOffset() + bloomBlock.headerSize(),
        bloomBlock.getUncompressedSizeWithoutHeader(), hash, hashCount);
  }

  if (numQueriesPerChunk != null && block >= 0) {
    // Update statistics. Only used in unit tests.
    ++numQueriesPerChunk[block];
    if (result)
      ++numPositivesPerChunk[block];
  }

  return result;
}
Project: hbase-0.94.8-qod    File: StoreFile.java
private void setBloomFilterFaulty(BlockType blockType) {
  if (blockType == BlockType.GENERAL_BLOOM_META) {
    setGeneralBloomFilterFaulty();
  } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
    setDeleteFamilyBloomFilterFaulty();
  }
}