Java class org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter usage examples (source code)
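
All of the snippets below drive HFilePrettyPrinter the same way: they assemble a command-line style argument array ("-m" prints file metadata, "-p" prints the key/values, "-f" names the target HFile) and hand it to run(). A minimal standalone sketch of that pattern, assuming a recent HBase version whose HFilePrettyPrinter constructor accepts a Configuration (as the ditb, pbase, and hbase snippets below do); the class name and the file path are placeholders for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;

public class PrettyPrintHFile {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Same flags as the tests below: dump the metadata and every cell of
    // one HFile. The path is a placeholder, not a real file.
    int exitCode = new HFilePrettyPrinter(conf)
        .run(new String[] { "-m", "-p", "-f", "/path/to/hfile" });
    System.exit(exitCode);
  }
}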

Project: ditb    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReader().getHFileReader().getPath();
      Path p2 = s2.getReader().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReader();

  new HFilePrettyPrinter(conf).run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<Cell> allResults = new ArrayList<Cell>();

  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
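
A comparator must return a negative value, zero, or a positive value for less-than, equal, and greater-than inputs. On Java 8 and later the whole anonymous class above can be replaced by a key-extracting comparator that satisfies that contract by construction. A sketch under that assumption, reusing the fs and scanners locals of scanColSet and keeping the original's unchecked rethrow of the checked IOException:

// Fragment: assumes java.util.Comparator is imported and runs inside
// scanColSet, where fs and scanners are in scope.
scanners.sort(Comparator.comparingLong(s -> {
  try {
    return fs.getFileStatus(
        s.getReader().getHFileReader().getPath()).getModificationTime();
  } catch (IOException ex) {
    throw new RuntimeException(ex);
  }
}));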
Project: LCIndex-HBase-0.94.16    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  Store store = new Store(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
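
Unlike the scan test, runMergeWorkload passes only "-m" and "-f", so the printer reports the file's metadata (trailer, file info, Bloom filter details) without dumping every cell. In recent HBase versions HFilePrettyPrinter also implements Hadoop's Tool interface, which is why run(String[]) returns an int exit code; under that assumption the same metadata dump can be driven through ToolRunner. A sketch, with HFileMetaDump and the args[0] path purely illustrative:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.io.hfile.HFilePrettyPrinter;
import org.apache.hadoop.util.ToolRunner;

public class HFileMetaDump {
  public static void main(String[] args) throws Exception {
    // Metadata-only dump of the HFile named on the command line; ToolRunner
    // strips generic Hadoop options before HFilePrettyPrinter parses its own.
    int rc = ToolRunner.run(HBaseConfiguration.create(),
        new HFilePrettyPrinter(), new String[] { "-m", "-f", args[0] });
    System.exit(rc);
  }
}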
Project: LCIndex-HBase-0.94.16    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<KeyValue> allResults = new ArrayList<KeyValue>();

  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: pbase    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReader().getHFileReader().getPath();
      Path p2 = s2.getReader().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReader();

  new HFilePrettyPrinter(conf).run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<Cell> allResults = new ArrayList<Cell>();

  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: HIndex    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd, null);
  HStore store = new HStore(region, columnDescriptor, conf);

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false, region.getReadpoint(IsolationLevel.READ_COMMITTED));

  StoreFile.Writer writer = store.createWriterInTmp(maxKeyCount, compression, false, true, false);

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: HIndex    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<Cell> allResults = new ArrayList<Cell>();

  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: IRIndex    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  Store store = new Store(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: IRIndex    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<KeyValue> allResults = new ArrayList<KeyValue>();

  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: hbase    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReader().getHFileReader().getPath();
      Path p2 = s2.getReader().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFileReader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReader();

  new HFilePrettyPrinter(conf).run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<Cell> allResults = new ArrayList<>();

  { // Limit the scope of results.
    List<Cell> results = new ArrayList<>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: PyroDB    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd, null);
  HStore store = new HStore(region, columnDescriptor, conf);

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false, region.getReadpoint(IsolationLevel.READ_COMMITTED));

  StoreFile.Writer writer = store.createWriterInTmp(maxKeyCount, compression, false, true, false);

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: PyroDB    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<Cell> allResults = new ArrayList<Cell>();

  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: c5    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd, null);
  HStore store = new HStore(region, columnDescriptor, conf);

  StoreFile.Writer writer = store.createWriterInTmp(maxKeyCount, compression, false, true);

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: c5    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<Cell> allResults = new ArrayList<Cell>();

  { // Limit the scope of results.
    List<Cell> results = new ArrayList<Cell>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (Cell kv : allResults) {
    String qual = Bytes.toString(CellUtil.cloneQualifier(kv));
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: HBase-Research    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  Store store = new Store(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: HBase-Research    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<KeyValue> allResults = new ArrayList<KeyValue>();

  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: hbase-0.94.8-qod    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  Store store = new Store(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: hbase-0.94.8-qod    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<KeyValue> allResults = new ArrayList<KeyValue>();

  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: DominoHBase    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  HStore store = new HStore(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: DominoHBase    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<KeyValue> allResults = new ArrayList<KeyValue>();

  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}
Project: hindex    File: HFileReadWriteTest.java
public void runMergeWorkload() throws IOException {
  long maxKeyCount = prepareForMerge();

  List<StoreFileScanner> scanners =
      StoreFileScanner.getScannersForStoreFiles(inputStoreFiles, false,
          false);

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(
      HFileReadWriteTest.class.getSimpleName());
  columnDescriptor.setBlocksize(blockSize);
  columnDescriptor.setBloomFilterType(bloomType);
  columnDescriptor.setCompressionType(compression);
  columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
  HRegionInfo regionInfo = new HRegionInfo();
  HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
  HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
      null);
  Store store = new Store(outputDir, region, columnDescriptor, fs, conf);

  StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
      new CacheConfig(conf), fs, blockSize)
          .withOutputDir(outputDir)
          .withCompression(compression)
          .withDataBlockEncoder(dataBlockEncoder)
          .withBloomType(bloomType)
          .withMaxKeyCount(maxKeyCount)
          .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
          .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
          .build();

  StatisticsPrinter statsPrinter = new StatisticsPrinter();
  statsPrinter.startThread();

  try {
    performMerge(scanners, store, writer);
    writer.close();
  } finally {
    statsPrinter.requestStop();
  }

  Path resultPath = writer.getPath();

  resultPath = tryUsingSimpleOutputPath(resultPath);

  long fileSize = fs.getFileStatus(resultPath).getLen();
  LOG.info("Created " + resultPath + ", size " + fileSize);

  System.out.println();
  System.out.println("HFile information for " + resultPath);
  System.out.println();

  HFilePrettyPrinter hfpp = new HFilePrettyPrinter();
  hfpp.run(new String[] { "-m", "-f", resultPath.toString() });
}
Project: hindex    File: TestScanWithBloomError.java
private void scanColSet(int[] colSet, int[] expectedResultCols)
    throws IOException {
  LOG.info("Scanning column set: " + Arrays.toString(colSet));
  Scan scan = new Scan(ROW_BYTES, ROW_BYTES);
  addColumnSetToScan(scan, colSet);
  RegionScannerImpl scanner = (RegionScannerImpl) region.getScanner(scan);
  KeyValueHeap storeHeap = scanner.getStoreHeapForTesting();
  assertEquals(0, storeHeap.getHeap().size());
  StoreScanner storeScanner =
      (StoreScanner) storeHeap.getCurrentForTesting();
  @SuppressWarnings({ "unchecked", "rawtypes" })
  List<StoreFileScanner> scanners = (List<StoreFileScanner>)
      (List) storeScanner.getAllScannersForTesting();

  // Sort scanners by their HFile's modification time.
  Collections.sort(scanners, new Comparator<StoreFileScanner>() {
    @Override
    public int compare(StoreFileScanner s1, StoreFileScanner s2) {
      Path p1 = s1.getReaderForTesting().getHFileReader().getPath();
      Path p2 = s2.getReaderForTesting().getHFileReader().getPath();
      long t1, t2;
      try {
        t1 = fs.getFileStatus(p1).getModificationTime();
        t2 = fs.getFileStatus(p2).getModificationTime();
      } catch (IOException ex) {
        throw new RuntimeException(ex);
      }
      return t1 < t2 ? -1 : t1 == t2 ? 0 : 1;
    }
  });

  StoreFile.Reader lastStoreFileReader = null;
  for (StoreFileScanner sfScanner : scanners)
    lastStoreFileReader = sfScanner.getReaderForTesting();

  new HFilePrettyPrinter().run(new String[]{ "-m", "-p", "-f",
      lastStoreFileReader.getHFileReader().getPath().toString()});

  // Disable Bloom filter for the last store file. The disabled Bloom filter
  // will always return "true".
  LOG.info("Disabling Bloom filter for: "
      + lastStoreFileReader.getHFileReader().getName());
  lastStoreFileReader.disableBloomFilterForTesting();

  List<KeyValue> allResults = new ArrayList<KeyValue>();

  { // Limit the scope of results.
    List<KeyValue> results = new ArrayList<KeyValue>();
    while (scanner.next(results) || results.size() > 0) {
      allResults.addAll(results);
      results.clear();
    }
  }

  List<Integer> actualIds = new ArrayList<Integer>();
  for (KeyValue kv : allResults) {
    String qual = Bytes.toString(kv.getQualifier());
    assertTrue(qual.startsWith(QUALIFIER_PREFIX));
    actualIds.add(Integer.valueOf(qual.substring(
        QUALIFIER_PREFIX.length())));
  }
  List<Integer> expectedIds = new ArrayList<Integer>();
  for (int expectedId : expectedResultCols)
    expectedIds.add(expectedId);

  LOG.info("Column ids returned: " + actualIds + ", expected: "
      + expectedIds);
  assertEquals(expectedIds.toString(), actualIds.toString());
}