Java 类org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder 实例源码

项目:ditb    文件:TestMajorCompaction.java   
/**
 * Replaces every store's data block encoder, runs a major compaction, and
 * then restores the original encoders.
 *
 * @param inCacheOnly when {@code true} the temporary encoder is built with
 *          {@link DataBlockEncoding#NONE}; otherwise with PREFIX.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  final DataBlockEncoding cacheEncoding = DataBlockEncoding.PREFIX;
  Map<Store, HFileDataBlockEncoder> savedEncoders =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Store store : r.getStores()) {
    // Remember the current encoder so it can be restored after compaction.
    savedEncoders.put(store, store.getDataBlockEncoder());
    DataBlockEncoding diskEncoding =
        inCacheOnly ? DataBlockEncoding.NONE : cacheEncoding;
    ((HStore) store).setDataBlockEncoderInTest(
        new HFileDataBlockEncoderImpl(diskEncoding));
  }

  majorCompaction();

  // Put the original encoders back in place.
  for (Entry<Store, HFileDataBlockEncoder> saved : savedEncoders.entrySet()) {
    ((HStore) saved.getKey()).setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:LCIndex-HBase-0.94.16    文件:TestCompaction.java   
/**
 * Temporarily switches every store to a PREFIX data block encoder, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding is
 *          {@link DataBlockEncoding#NONE} while the cache encoding is
 *          PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    // Use the local 'store' (same object as pair.getValue()) for consistency.
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk, inCache));
  }

  majorCompaction();

  // restore settings
  for (Entry<Store, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:LCIndex-HBase-0.94.16    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:pbase    文件:TestMajorCompaction.java   
/**
 * Replaces each store's data block encoder with a PREFIX-based one, performs
 * a major compaction, then restores the original encoders.
 *
 * @param inCacheOnly when {@code true} the replacement encoder uses
 *          {@link DataBlockEncoding#NONE}; otherwise PREFIX.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  final DataBlockEncoding prefixEncoding = DataBlockEncoding.PREFIX;
  Map<HStore, HFileDataBlockEncoder> savedEncoders =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore hstore = (HStore) pair.getValue();
    // Keep the current encoder so it can be put back after the compaction.
    savedEncoders.put(hstore, hstore.getDataBlockEncoder());
    DataBlockEncoding diskEncoding =
        inCacheOnly ? DataBlockEncoding.NONE : prefixEncoding;
    hstore.setDataBlockEncoderInTest(
        new HFileDataBlockEncoderImpl(diskEncoding));
  }

  majorCompaction();

  // Put the original encoders back.
  for (Entry<HStore, HFileDataBlockEncoder> saved : savedEncoders.entrySet()) {
    saved.getKey().setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:HIndex    文件:TestMajorCompaction.java   
/**
 * Temporarily replaces each store's data block encoder, triggers a major
 * compaction, and restores the saved encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the replacement encoder is built with
 *          {@link DataBlockEncoding#NONE}; otherwise with PREFIX.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  // Remember each store's original encoder so it can be restored below.
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore store = (HStore) pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings
  for (Entry<HStore, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:IRIndex    文件:TestCompaction.java   
/**
 * Temporarily switches every store to a PREFIX data block encoder, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding is
 *          {@link DataBlockEncoding#NONE} while the cache encoding is
 *          PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    // Use the local 'store' (same object as pair.getValue()) for consistency.
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:IRIndex    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:hbase    文件:TestMajorCompaction.java   
/**
 * Swaps every store's data block encoder for a PREFIX-based one, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the temporary encoder uses
 *          {@link DataBlockEncoding#NONE}; otherwise PREFIX.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
  for (HStore store : r.getStores()) {
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    // 'store' is already an HStore here, so no cast is needed.
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings; the map key is already an HStore, no cast needed
  for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:PyroDB    文件:TestMajorCompaction.java   
/**
 * Temporarily replaces each store's data block encoder, triggers a major
 * compaction, and restores the saved encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the replacement encoder is built with
 *          {@link DataBlockEncoding#NONE}; otherwise with PREFIX.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  // Remember each store's original encoder so it can be restored below.
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore store = (HStore) pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings
  for (Entry<HStore, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:c5    文件:TestCompaction.java   
/**
 * Temporarily replaces each store's data block encoder, triggers a major
 * compaction, and restores the saved encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the replacement encoder is built with
 *          {@link DataBlockEncoding#NONE}; otherwise with PREFIX.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  // Remember each store's original encoder so it can be restored below.
  Map<HStore, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore store = (HStore) pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
  }

  majorCompaction();

  // restore settings
  for (Entry<HStore, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:HBase-Research    文件:TestCompaction.java   
/**
 * Temporarily switches every store to a PREFIX data block encoder, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding is
 *          {@link DataBlockEncoding#NONE} while the cache encoding is
 *          PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    // Use the local 'store' (same object as pair.getValue()) for consistency.
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:HBase-Research    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:hbase-0.94.8-qod    文件:TestCompaction.java   
/**
 * Temporarily switches every store to a PREFIX data block encoder, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding is
 *          {@link DataBlockEncoding#NONE} while the cache encoding is
 *          PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    // Use the local 'store' (same object as pair.getValue()) for consistency.
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:hbase-0.94.8-qod    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:hbase-0.94.8-qod    文件:TestCompaction.java   
/**
 * Temporarily switches every store to a PREFIX data block encoder, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding is
 *          {@link DataBlockEncoding#NONE} while the cache encoding is
 *          PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    // Use the local 'store' (same object as pair.getValue()) for consistency.
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:hbase-0.94.8-qod    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:DominoHBase    文件:TestCompaction.java   
/**
 * Swaps each store's data block encoder for a PREFIX-based one, runs a
 * major compaction, and then reinstates the saved encoders.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding of the
 *          temporary encoder is {@link DataBlockEncoding#NONE} while the
 *          cache encoding stays PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  final DataBlockEncoding cacheEncoding = DataBlockEncoding.PREFIX;
  Map<HStore, HFileDataBlockEncoder> savedEncoders =
      new HashMap<HStore, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    HStore hstore = (HStore) pair.getValue();
    // Keep the current encoder so it can be reinstated afterwards.
    savedEncoders.put(hstore, hstore.getDataBlockEncoder());
    DataBlockEncoding diskEncoding =
        inCacheOnly ? DataBlockEncoding.NONE : cacheEncoding;
    hstore.setDataBlockEncoderInTest(
        new HFileDataBlockEncoderImpl(diskEncoding, cacheEncoding));
  }

  majorCompaction();

  // Reinstate the encoders that were in place before the test.
  for (Entry<HStore, HFileDataBlockEncoder> saved : savedEncoders.entrySet()) {
    saved.getKey().setDataBlockEncoderInTest(saved.getValue());
  }
}
项目:DominoHBase    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:hindex    文件:TestCompaction.java   
/**
 * Temporarily switches every store to a PREFIX data block encoder, runs a
 * major compaction, and restores the original encoders afterwards.
 *
 * @param inCacheOnly when {@code true} the on-disk encoding is
 *          {@link DataBlockEncoding#NONE} while the cache encoding is
 *          PREFIX; otherwise PREFIX is used for both.
 * @throws Exception if the compaction fails
 */
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
    throws Exception {
  Map<Store, HFileDataBlockEncoder> replaceBlockCache =
      new HashMap<Store, HFileDataBlockEncoder>();
  for (Entry<byte[], Store> pair : r.getStores().entrySet()) {
    Store store = pair.getValue();
    HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
    // Use the local 'store' (same object as pair.getValue()) for consistency.
    replaceBlockCache.put(store, blockEncoder);
    final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
    final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
        inCache;
    store.setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(
        onDisk, inCache));
  }

  majorCompaction();

  // restore settings
  for (Entry<Store, HFileDataBlockEncoder> entry :
      replaceBlockCache.entrySet()) {
    entry.getKey().setDataBlockEncoderInTest(entry.getValue());
  }
}
项目:hindex    文件:EncodedSeekPerformanceTest.java   
/**
 * Command line interface:
 * @param args Takes one argument - the path of the HFile to benchmark.
 * @throws IOException if there is a bug while reading from disk
 */
public static void main(final String[] args) throws IOException {
  if (args.length < 1) {
    printUsage();
    System.exit(-1);
  }

  Path path = new Path(args[0]);
  List<HFileDataBlockEncoder> encoders =
      new ArrayList<HFileDataBlockEncoder>();

  // Baseline: no encoding at all.
  encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE));
  // One encoder per available algorithm; the first constructor argument
  // (the on-disk encoding) stays NONE, the algorithm is the second argument.
  for (DataBlockEncoding encodingAlgo : DataBlockEncoding.values()) {
    encoders.add(new HFileDataBlockEncoderImpl(DataBlockEncoding.NONE,
        encodingAlgo));
  }

  EncodedSeekPerformanceTest utility = new EncodedSeekPerformanceTest();
  utility.runTests(path, encoders);

  System.exit(0);
}
项目:ditb    文件:LoadIncrementalHFiles.java   
/**
 * Decides whether an HFile file-info entry should be copied over.
 *
 * @param key the raw file-info key
 * @return {@code false} for the data-block-encoding key and for reserved
 *         file-info keys; {@code true} otherwise
 */
private static boolean shouldCopyHFileMetaKey(byte[] key) {
  // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085
  boolean isEncodingKey =
      Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  return !isEncodingKey && !HFile.isReservedFileInfoKey(key);
}
项目:ditb    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:LCIndex-HBase-0.94.16    文件:StoreFile.java   
/**
 * Constructor, loads a reader and it's indices, etc. May allocate a substantial amount of ram
 * depending on the underlying files (10-20MB?).
 * @param fs The current file system to use.
 * @param p The path of the file.
 * @param conf The current configuration.
 * @param cacheConf The cache configuration and block cache reference.
 * @param cfBloomType The bloom type to use for this store file as specified by column family
 *          configuration. This may or may not be the same as the Bloom filter type actually
 *          present in the HFile, because column family configuration might change. If this is
 *          {@link BloomType#NONE}, the existing Bloom filter is ignored.
 * @param dataBlockEncoder data block encoding algorithm; {@code null} falls back to
 *          {@link NoOpDataBlockEncoder#INSTANCE}.
 * @throws IOException When opening the reader fails.
 */
public StoreFile(final FileSystem fs, final Path p, final Configuration conf,
    final CacheConfig cacheConf, final BloomType cfBloomType,
    final HFileDataBlockEncoder dataBlockEncoder) throws IOException {
  this.fs = fs;
  this.path = p;
  this.cacheConf = cacheConf;
  // A null encoder means "no encoding"; substitute the no-op singleton.
  this.dataBlockEncoder =
      dataBlockEncoder == null ? NoOpDataBlockEncoder.INSTANCE : dataBlockEncoder;
  if (BloomFilterFactory.isGeneralBloomEnabled(conf)) {
    this.cfBloomType = cfBloomType;
  } else {
    LOG.info("Ignoring bloom filter check for file " + path + ": " + "cfBloomType=" + cfBloomType
        + " (disabled in config)");
    this.cfBloomType = BloomType.NONE;
  }

  // cache the modification time stamp of this store file
  FileStatus[] stats = FSUtils.listStatus(fs, p, null);
  if (stats != null && stats.length == 1) {
    this.modificationTimeStamp = stats[0].getModificationTime();
  } else {
    // Missing or ambiguous listing: default to 0 rather than failing.
    this.modificationTimeStamp = 0;
  }
  SchemaMetrics.configureGlobally(conf);
  initPossibleIndexesAndReference(fs, p, conf);
}
项目:LCIndex-HBase-0.94.16    文件:EncodedSeekPerformanceTest.java   
/**
 * Runs the seek benchmark once per encoder against the given HFile.
 *
 * @param path Path to the HFile which will be used.
 * @param encoders List of encoders which will be used for tests.
 * @throws IOException if there is a bug while reading from disk
 */
public void runTests(Path path, List<HFileDataBlockEncoder> encoders)
    throws IOException {
  // The same seek list is reused for every encoder so results are comparable.
  final List<KeyValue> testSeeks = prepareListOfTestSeeks(path);
  for (HFileDataBlockEncoder encoder : encoders) {
    runTest(path, encoder, testSeeks);
  }
}
项目:LCIndex-HBase-0.94.16    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:pbase    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:HIndex    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:IRIndex    文件:EncodedSeekPerformanceTest.java   
/**
 * @param path Path to the HFile which will be used.
 * @param encoders List of encoders which will be used for tests.
 * @throws IOException if there is a bug while reading from disk
 */
public void runTests(Path path, List<HFileDataBlockEncoder> encoders)
    throws IOException {
  List<KeyValue> seeks = prepareListOfTestSeeks(path);

  for (HFileDataBlockEncoder blockEncoder : encoders) {
    runTest(path, blockEncoder, seeks);
  }
}
项目:IRIndex    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:hbase    文件:LoadIncrementalHFiles.java   
/**
 * Tells whether a file-info key from a source HFile should be copied into
 * the target file's metadata.
 *
 * @param key the raw file-info key
 * @return {@code true} unless the key is the data-block-encoding entry or a
 *         reserved file-info key
 */
private static boolean shouldCopyHFileMetaKey(byte[] key) {
  // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085
  if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)
      || HFile.isReservedFileInfoKey(key)) {
    return false;
  }
  return true;
}
项目:hbase    文件:TestHStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
@Test
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  HStoreFile storeFile =
      new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
  storeFile.initReader();
  StoreFileReader reader = storeFile.getReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
}
项目:PyroDB    文件:PFileBlockWriter.java   
/**
 * Creates a PFile block writer.
 *
 * @param dataBlockEncoder encoder passed through to the parent block writer
 * @param fileContext file context passed through to the parent block writer
 */
public PFileBlockWriter(HFileDataBlockEncoder dataBlockEncoder, 
                        HFileContext fileContext) {
  super(dataBlockEncoder, fileContext);
  this.offsets = new int[ARRAY_INIT_SIZE];
  this.ptrNum = new byte[ARRAY_INIT_SIZE];
  this.kvs = new ArrayList<KeyValue>();
  // this encoder handles the pointer array, offering APIs to encode ints
  // and bytes
  this.pDataBlockEncoder = PNoOpDataBlockEncoder.INSTANCE;
  //TODO: the following two variables are for offset calculation, figure
  //out more efficient ways to avoid another cache
  this.tmpBaosInMemory = new ByteArrayOutputStream();
  //LOG.info("Shen Li: in PFileBlockWriter constructor");
  //TODO
}
项目:PyroDB    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
      .withChecksumType(CKTYPE)
      .withBytesPerCheckSum(CKBYTES)
      .withDataBlockEncoding(dataBlockEncoderAlgo)
      .build();
  // Make a store file and write data to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, this.fs)
          .withFilePath(path)
          .withMaxKeyCount(2000)
          .withFileContext(meta)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
    cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:c5    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HConstants.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
  // assertEquals(Object, Object) compares byte[] by reference, not contents;
  // compare the array contents explicitly.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:HBase-Research    文件:EncodedSeekPerformanceTest.java   
/**
 * Runs the seek benchmark once per supplied encoder, reusing the same
 * pre-computed list of seek targets for every run so results are comparable.
 *
 * @param path Path to the HFile which will be used.
 * @param encoders List of encoders which will be used for tests.
 * @throws IOException if there is a bug while reading from disk
 */
public void runTests(Path path, List<HFileDataBlockEncoder> encoders)
    throws IOException {
  // Compute the seek targets once, up front, so each encoder sees the
  // identical workload.
  final List<KeyValue> seekTargets = prepareListOfTestSeeks(path);
  for (int i = 0; i < encoders.size(); i++) {
    runTest(path, encoders.get(i), seekTargets);
  }
}
项目:HBase-Research    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // Use a non-default encoding so the test can detect whether it was
  // actually persisted (NONE would be indistinguishable from the default).
  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  // This API generation takes on-disk and in-cache encodings separately;
  // use the same algorithm for both.
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  // Make a store file and write data (metadata only here) to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and verify the encoding name made it into the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // BUGFIX: assertEquals(Object, Object) on byte[] compares references, not
  // contents, so it cannot pass against a freshly deserialized array.
  // Compare the byte contents instead.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:hbase-0.94.8-qod    文件:EncodedSeekPerformanceTest.java   
/**
 * Runs the seek benchmark once per supplied encoder, reusing the same
 * pre-computed list of seek targets for every run so results are comparable.
 *
 * @param path Path to the HFile which will be used.
 * @param encoders List of encoders which will be used for tests.
 * @throws IOException if there is a bug while reading from disk
 */
public void runTests(Path path, List<HFileDataBlockEncoder> encoders)
    throws IOException {
  // Compute the seek targets once, up front, so each encoder sees the
  // identical workload.
  final List<KeyValue> seekTargets = prepareListOfTestSeeks(path);
  for (int i = 0; i < encoders.size(); i++) {
    runTest(path, encoders.get(i), seekTargets);
  }
}
项目:hbase-0.94.8-qod    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // Use a non-default encoding so the test can detect whether it was
  // actually persisted (NONE would be indistinguishable from the default).
  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  // This API generation takes on-disk and in-cache encodings separately;
  // use the same algorithm for both.
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  // Make a store file and write data (metadata only here) to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and verify the encoding name made it into the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // BUGFIX: assertEquals(Object, Object) on byte[] compares references, not
  // contents, so it cannot pass against a freshly deserialized array.
  // Compare the byte contents instead.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}
项目:hbase-0.94.8-qod    文件:EncodedSeekPerformanceTest.java   
/**
 * Runs the seek benchmark once per supplied encoder, reusing the same
 * pre-computed list of seek targets for every run so results are comparable.
 *
 * @param path Path to the HFile which will be used.
 * @param encoders List of encoders which will be used for tests.
 * @throws IOException if there is a bug while reading from disk
 */
public void runTests(Path path, List<HFileDataBlockEncoder> encoders)
    throws IOException {
  // Compute the seek targets once, up front, so each encoder sees the
  // identical workload.
  final List<KeyValue> seekTargets = prepareListOfTestSeeks(path);
  for (int i = 0; i < encoders.size(); i++) {
    runTest(path, encoders.get(i), seekTargets);
  }
}
项目:hbase-0.94.8-qod    文件:TestStoreFile.java   
/**
 * Check if data block encoding information is saved correctly in HFile's
 * file info.
 */
public void testDataBlockEncodingMetaData() throws IOException {
  // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
  Path dir = new Path(new Path(this.testDir, "7e0102"), "familyname");
  Path path = new Path(dir, "1234567890");

  // Use a non-default encoding so the test can detect whether it was
  // actually persisted (NONE would be indistinguishable from the default).
  DataBlockEncoding dataBlockEncoderAlgo =
      DataBlockEncoding.FAST_DIFF;
  // This API generation takes on-disk and in-cache encodings separately;
  // use the same algorithm for both.
  HFileDataBlockEncoder dataBlockEncoder =
      new HFileDataBlockEncoderImpl(
          dataBlockEncoderAlgo,
          dataBlockEncoderAlgo);
  cacheConf = new CacheConfig(conf);
  // Make a store file and write data (metadata only here) to it.
  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf, cacheConf, fs,
      HFile.DEFAULT_BLOCKSIZE)
          .withFilePath(path)
          .withDataBlockEncoder(dataBlockEncoder)
          .withMaxKeyCount(2000)
          .withChecksumType(CKTYPE)
          .withBytesPerChecksum(CKBYTES)
          .build();
  writer.close();

  // Re-open the file and verify the encoding name made it into the file info.
  StoreFile storeFile = new StoreFile(fs, writer.getPath(), conf,
      cacheConf, BloomType.NONE, dataBlockEncoder);
  StoreFile.Reader reader = storeFile.createReader();

  Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
  byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);

  // BUGFIX: assertEquals(Object, Object) on byte[] compares references, not
  // contents, so it cannot pass against a freshly deserialized array.
  // Compare the byte contents instead.
  assertTrue(java.util.Arrays.equals(dataBlockEncoderAlgo.getNameInBytes(), value));
}