Example source code for the Java class org.apache.hadoop.hbase.io.hfile.HFileBlockIndex

Project: ditb    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
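
This constructor is reached through the Bloom filter metadata block of an HFile; the same pattern recurs verbatim in the forks listed below. The following is a hedged usage sketch, assuming the 0.94/0.98-era reader APIs (a three-argument HFile.createReader, getGeneralBloomFilterMetadata, BloomFilterFactory.createFromMeta, and the byte[]-based contains overload, which tolerates a null ByteBuffer for compound filters); exact signatures vary between the forks on this page.

import java.io.DataInput;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.util.BloomFilter;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class BloomMetaReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    Path hfilePath = new Path(args[0]); // path to an existing HFile (hypothetical input)

    // The createReader signature differs across HBase versions; this is the older form.
    HFile.Reader reader = HFile.createReader(fs, hfilePath, new CacheConfig(conf));
    try {
      // The general Bloom filter metadata block is the DataInput the constructor consumes.
      DataInput bloomMeta = reader.getGeneralBloomFilterMetadata();
      if (bloomMeta != null) {
        // The factory reads a leading version int and then delegates to
        // new CompoundBloomFilter(bloomMeta, reader), i.e. the constructor shown above.
        BloomFilter bloom = BloomFilterFactory.createFromMeta(bloomMeta, reader);
        byte[] key = Bytes.toBytes("row-0001"); // hypothetical row key
        boolean maybePresent = bloom.contains(key, 0, key.length, null);
        System.out.println("key possibly present: " + maybePresent);
      }
    } finally {
      reader.close();
    }
  }
}
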
Project: LCIndex-HBase-0.94.16    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: pbase    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: HIndex    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: IRIndex    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: RStore    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: PyroDB    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: PyroDB    File: PFileWriter.java
@Override
protected void finishInit(final Configuration conf) {
  if (null != fsBlockWriter)
    throw new IllegalStateException("finishInit called twice");

  fsBlockWriter = new PFileBlockWriter(blockEncoder, hFileContext);

  boolean cacheIndexesOnWrite = cacheConf.shouldCacheIndexesOnWrite();
  dataBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter(fsBlockWriter,
      cacheIndexesOnWrite ? cacheConf.getBlockCache() : null,
      cacheIndexesOnWrite ? name : null);
  dataBlockIndexWriter.setMaxChunkSize(
      HFileBlockIndex.getMaxChunkSize(conf));
  inlineBlockWriters.add(dataBlockIndexWriter);

  // Meta data block index writer
  metaBlockIndexWriter = new HFileBlockIndex.BlockIndexWriter();
  if (LOG.isTraceEnabled()) LOG.trace("Initialized with " + cacheConf);
}
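
The data block index writer above splits its index into inline leaf chunks once a chunk exceeds the value returned by HFileBlockIndex.getMaxChunkSize(conf). Below is a minimal configuration sketch, assuming the stock Apache HBase key "hfile.index.block.max.size" (default 128 KB); the PyroDB fork may read a different key.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.hfile.HFileBlockIndex;

public class IndexChunkSizeSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Caps how large an index chunk grows before BlockIndexWriter writes it out
    // as an inline (leaf) index block; 64 KB here is a hypothetical tuning value.
    conf.setInt("hfile.index.block.max.size", 64 * 1024);
    System.out.println("max index chunk size = " + HFileBlockIndex.getMaxChunkSize(conf));
  }
}
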
Project: c5    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: HBase-Research    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: hbase-0.94.8-qod    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: hbase-0.94.8-qod    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: DominoHBase    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: hindex    File: CompoundBloomFilter.java
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  comparator = FixedFileTrailer.createComparator(
      Bytes.toString(Bytes.readByteArray(meta)));

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }

  index = new HFileBlockIndex.BlockIndexReader(comparator, 1);
  index.readRootIndex(meta, numChunks);
}
Project: spliceengine    File: HRegionUtil.java
private static int addStoreFileCutpoints(List<byte[]> cutpoints, HFile.Reader fileReader, long storeFileInBytes, int carry, Pair<byte[], byte[]> range, int splitBlockSize) throws IOException {
    HFileBlockIndex.BlockIndexReader indexReader = fileReader.getDataBlockIndexReader();
    int size = indexReader.getRootBlockCount();
    int levels = fileReader.getTrailer().getNumDataIndexLevels();
    if (levels == 1) {
        int incrementalSize = (int) (size > 0 ? storeFileInBytes / (float) size : storeFileInBytes);
        int sizeCounter = 0;
        for (int i = 0; i < size; ++i) {
            if (sizeCounter >= splitBlockSize) {
                sizeCounter = 0;
                KeyValue tentative = KeyValue.createKeyValueFromKey(indexReader.getRootBlockKey(i));
                if (CellUtils.isKeyValueInRange(tentative, range)) {
                    cutpoints.add(tentative.getRow());
                }
            }
            sizeCounter += incrementalSize;
        }
        return sizeCounter;
    } else {
        for (int i = 0; i < size; ++i) {
            HFileBlock block = fileReader.readBlock(
                    indexReader.getRootBlockOffset(i),
                    indexReader.getRootBlockDataSize(i),
                    true, true, false, true,
                    levels == 2 ? BlockType.LEAF_INDEX : BlockType.INTERMEDIATE_INDEX,
                    fileReader.getDataBlockEncoding());
            carry = addIndexCutpoints(fileReader, block.getBufferWithoutHeader(), levels - 1,  cutpoints, storeFileInBytes / size, carry, range, splitBlockSize);
        }
        return carry;
    }
}
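
In the single-level case above, the cutpoint spacing is driven purely by counter arithmetic: each root index entry is assumed to cover roughly storeFileInBytes / size bytes, and a cutpoint is taken whenever the running counter crosses splitBlockSize. The following is a self-contained sketch of just that sampling logic; plain ints stand in for the root block keys, and the range check is omitted.

import java.util.ArrayList;
import java.util.List;

public class CutpointSpacingSketch {
    static List<Integer> sampleCutpoints(int size, long storeFileInBytes, int splitBlockSize) {
        List<Integer> cutpoints = new ArrayList<>();
        // Estimated bytes covered by each root index entry.
        int incrementalSize = (int) (size > 0 ? storeFileInBytes / (float) size : storeFileInBytes);
        int sizeCounter = 0;
        for (int i = 0; i < size; ++i) {
            if (sizeCounter >= splitBlockSize) {
                sizeCounter = 0;
                cutpoints.add(i); // in the real method: the i-th root block key, range-checked
            }
            sizeCounter += incrementalSize;
        }
        return cutpoints;
    }

    public static void main(String[] args) {
        // e.g. 1000 root index entries in a 1 GB store file with a 64 MB split block size
        System.out.println(sampleCutpoints(1000, 1L << 30, 64 << 20));
    }
}
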