Java 类org.apache.hadoop.fs.FSInputChecker 实例源码

项目:RDFS    文件:BlockXCodingMerger.java   
/**
 * Re-reads the bytes of the partially-filled checksum chunk that precede
 * {@code blkoff} and recomputes their CRC, verifying the result against the
 * CRC previously persisted in the meta file. On success the recomputed CRC
 * is left in the {@code partialCrc} field.
 *
 * @param blkoff offset into the block where new data will be appended
 * @param ckoff offset into the checksum (meta) file
 * @param bytesPerChecksum number of data bytes covered by one checksum
 * @throws IOException if the temporary streams cannot be read, or the
 *         recomputed CRC disagrees with the stored one
 */
private void computePartialChunkCrc(long blkoff, long ckoff,
        int bytesPerChecksum) throws IOException {

    // Bytes already written into the current (partial) chunk.
    int partialLen = (int) (blkoff % bytesPerChecksum);
    int crcLen = checksum.getChecksumSize();
    // Rewind to the beginning of that chunk.
    blkoff -= partialLen;

    byte[] dataBuf = new byte[partialLen];
    byte[] storedCrc = new byte[crcLen];

    // Pull the existing chunk bytes and the previously written CRC
    // from the temporary block/meta input streams.
    FSDataset.BlockInputStreams streams = null;
    try {
        streams = datanode.data.getTmpInputStreams(namespaceId, block,
                blkoff, ckoff);
        IOUtils.readFully(streams.dataIn, dataBuf, 0, partialLen);
        IOUtils.readFully(streams.checksumIn, storedCrc, 0, storedCrc.length);
    } finally {
        IOUtils.closeStream(streams);
    }

    // Recompute the CRC over the bytes just read back from disk.
    partialCrc = new CRC32();
    partialCrc.update(dataBuf, 0, partialLen);

    // Sanity check: the freshly computed CRC must equal the stored one.
    long expected = FSInputChecker.checksum2long(storedCrc);
    if (partialCrc.getValue() != expected) {
        String msg = "Partial CRC " + partialCrc.getValue()
                + " does not match value computed the "
                + " last time file was closed "
                + expected;
        throw new IOException(msg);
    }
}
项目:hadoop-EAR    文件:BlockInlineChecksumWriter.java   
/**
 * Reads back the bytes of the partially-filled checksum chunk that end at
 * {@code blkoff} and recomputes their CRC, verifying it against the CRC
 * previously written after the chunk data. On success the value is cached
 * in {@code partialCrcInt} so later writes can extend the chunk.
 *
 * @param blkoff offset in the block data where new bytes will be appended
 * @param bytesPerChecksum number of data bytes covered by one checksum
 * @param checksum checksum descriptor (supplies the per-checksum width)
 * @throws IOException if the block file cannot be read or the recomputed
 *         CRC disagrees with the stored one
 */
private void computePartialChunkCrc(long blkoff, int bytesPerChecksum,
    DataChecksum checksum) throws IOException {

  // find offset of the beginning of partial chunk.
  int sizePartialChunk = (int) (blkoff % bytesPerChecksum);
  int checksumSize = checksum.getChecksumSize();
  // Translate the chunk-start block offset into a raw file position
  // (data and checksums share one file in the inline-checksum format,
  // hence the translation).
  long fileOff = BlockInlineChecksumReader.getPosFromBlockOffset(blkoff
      - sizePartialChunk, bytesPerChecksum, checksumSize);
  LOG.info("computePartialChunkCrc sizePartialChunk " + sizePartialChunk
      + " block " + block + " offset in block " + blkoff);

  // Read the partial chunk's data bytes, then the CRC computed earlier,
  // which sits immediately after the data at fileOff + sizePartialChunk.
  byte[] buf = new byte[sizePartialChunk];
  byte[] crcbuf = new byte[checksumSize];
  BlockDataFile.Reader blockReader = blockDataFile.getReader(datanode);
  blockReader.readFully(buf, 0, sizePartialChunk, fileOff, true);
  blockReader.readFully(crcbuf, 0, crcbuf.length, fileOff + sizePartialChunk,
      true);

  // compute crc of partial chunk from data read in the block file.
  Checksum partialCrc = new CRC32();
  partialCrc.update(buf, 0, sizePartialChunk);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // paranoia! verify that the pre-computed crc matches what we
  // recalculated just now
  if (partialCrc.getValue() != FSInputChecker.checksum2long(crcbuf)) {
    String msg = "Partial CRC " + partialCrc.getValue()
        + " does not match value computed the "
        + " last time file was closed "
        + FSInputChecker.checksum2long(crcbuf);
    throw new IOException(msg);
  }
  partialCrcInt = (int) partialCrc.getValue();
}
项目:hadoop-on-lustre    文件:BlockReceiver.java   
/**
 * Reads back the bytes of the partially-written checksum chunk ending at
 * {@code blkoff} and recomputes their CRC, verifying the result against the
 * CRC previously stored in the meta file. On success the recomputed CRC is
 * left in the {@code partialCrc} field.
 *
 * @param blkoff offset in the block file where new data will be appended
 * @param ckoff offset in the checksum (meta) file
 * @param bytesPerChecksum data bytes covered by a single checksum
 * @throws IOException if the streams cannot be read or the CRCs disagree
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // Bytes already present in the current (partial) chunk.
  int partialLen = (int) (blkoff % bytesPerChecksum);
  int crcLen = checksum.getChecksumSize();
  // Rewind blkoff to the start of that chunk.
  blkoff -= partialLen;
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            partialLen +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  // Read the existing chunk bytes and their stored CRC from the
  // temporary block/meta input streams.
  byte[] dataBuf = new byte[partialLen];
  byte[] storedCrc = new byte[crcLen];
  FSDataset.BlockInputStreams streams = null;
  try {
    streams = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(streams.dataIn, dataBuf, 0, partialLen);
    IOUtils.readFully(streams.checksumIn, storedCrc, 0, storedCrc.length);
  } finally {
    IOUtils.closeStream(streams);
  }

  // Recompute the CRC over the bytes just read back from disk.
  partialCrc = new PureJavaCrc32();
  partialCrc.update(dataBuf, 0, partialLen);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // Paranoia: the recomputed CRC must match the value persisted when
  // the file was last closed.
  long stored = FSInputChecker.checksum2long(storedCrc);
  if (partialCrc.getValue() != stored) {
    throw new IOException("Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the " +
                 " last time file was closed " +
                 stored);
  }
}
项目:RDFS    文件:BlockReceiver.java   
/**
 * Re-reads the partially-filled checksum chunk that precedes {@code blkoff}
 * and recomputes its CRC, checking the result against the CRC stored in the
 * meta file. On success the recomputed CRC remains in {@code partialCrc}.
 *
 * @param blkoff offset in the block file where new data will be appended
 * @param ckoff offset in the checksum (meta) file
 * @param bytesPerChecksum data bytes covered by a single checksum
 * @throws IOException if the streams cannot be read or the CRCs disagree
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // How many bytes the partial chunk already holds.
  int partialLen = (int) (blkoff % bytesPerChecksum);
  int crcLen = checksum.getChecksumSize();
  blkoff -= partialLen;  // rewind to the chunk boundary
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            partialLen +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  byte[] chunkBytes = new byte[partialLen];
  byte[] savedCrc = new byte[crcLen];

  // Fetch the chunk bytes and the earlier-computed CRC through the
  // namespace-scoped temporary streams.
  FSDataset.BlockInputStreams in = null;
  try {
    in = datanode.data.getTmpInputStreams(namespaceId, block, blkoff, ckoff);
    IOUtils.readFully(in.dataIn, chunkBytes, 0, partialLen);
    IOUtils.readFully(in.checksumIn, savedCrc, 0, savedCrc.length);
  } finally {
    IOUtils.closeStream(in);
  }

  // CRC the bytes we just read back.
  partialCrc = new CRC32();
  partialCrc.update(chunkBytes, 0, partialLen);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // Paranoia: recomputed CRC must agree with the persisted value.
  long persisted = FSInputChecker.checksum2long(savedCrc);
  if (partialCrc.getValue() != persisted) {
    String msg = "Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the " +
                 " last time file was closed " +
                 persisted;
    throw new IOException(msg);
  }
}
项目:hadoop-0.20    文件:BlockReceiver.java   
/**
 * Reads back the bytes of the partially-written checksum chunk ending at
 * {@code blkoff} and recomputes their CRC, verifying the result against the
 * CRC previously stored in the meta file. The recomputed CRC is left in the
 * {@code partialCrc} field for subsequent writes.
 *
 * @param blkoff offset in the block file where new data will be appended
 * @param ckoff offset in the checksum (meta) file
 * @param bytesPerChecksum data bytes covered by a single checksum
 * @throws IOException if the streams cannot be read or the CRCs disagree
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // Bytes already present in the current (partial) chunk.
  int partialLen = (int) (blkoff % bytesPerChecksum);
  int crcLen = checksum.getChecksumSize();
  // Rewind blkoff to the start of that chunk.
  blkoff -= partialLen;
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            partialLen +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  // Read the existing chunk bytes and their stored CRC from the
  // temporary block/meta input streams.
  byte[] dataBuf = new byte[partialLen];
  byte[] storedCrc = new byte[crcLen];
  FSDataset.BlockInputStreams streams = null;
  try {
    streams = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(streams.dataIn, dataBuf, 0, partialLen);
    IOUtils.readFully(streams.checksumIn, storedCrc, 0, storedCrc.length);
  } finally {
    IOUtils.closeStream(streams);
  }

  // Recompute the CRC over the bytes just read back from disk.
  partialCrc = new CRC32();
  partialCrc.update(dataBuf, 0, partialLen);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // Paranoia: the recomputed CRC must match the value persisted when
  // the file was last closed.
  long stored = FSInputChecker.checksum2long(storedCrc);
  if (partialCrc.getValue() != stored) {
    throw new IOException("Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the " +
                 " last time file was closed " +
                 stored);
  }
}
项目:hortonworks-extension    文件:BlockReceiver.java   
/**
 * Re-reads the partially-filled checksum chunk that precedes {@code blkoff}
 * and recomputes its CRC, checking the result against the CRC stored in the
 * meta file. On success the recomputed CRC remains in {@code partialCrc}.
 *
 * @param blkoff offset in the block file where new data will be appended
 * @param ckoff offset in the checksum (meta) file
 * @param bytesPerChecksum data bytes covered by a single checksum
 * @throws IOException if the streams cannot be read or the CRCs disagree
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // How many bytes the partial chunk already holds.
  int partialLen = (int) (blkoff % bytesPerChecksum);
  int crcLen = checksum.getChecksumSize();
  blkoff -= partialLen;  // rewind to the chunk boundary
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            partialLen +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  byte[] chunkBytes = new byte[partialLen];
  byte[] savedCrc = new byte[crcLen];

  // Fetch the chunk bytes and the earlier-computed CRC via the
  // temporary block/meta input streams.
  FSDataset.BlockInputStreams in = null;
  try {
    in = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(in.dataIn, chunkBytes, 0, partialLen);
    IOUtils.readFully(in.checksumIn, savedCrc, 0, savedCrc.length);
  } finally {
    IOUtils.closeStream(in);
  }

  // CRC the bytes we just read back.
  partialCrc = new PureJavaCrc32();
  partialCrc.update(chunkBytes, 0, partialLen);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // Paranoia: recomputed CRC must agree with the persisted value.
  long persisted = FSInputChecker.checksum2long(savedCrc);
  if (partialCrc.getValue() != persisted) {
    String msg = "Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the " +
                 " last time file was closed " +
                 persisted;
    throw new IOException(msg);
  }
}
项目:hortonworks-extension    文件:BlockReceiver.java   
/**
 * Reads back the bytes of the partially-written checksum chunk ending at
 * {@code blkoff} and recomputes their CRC, verifying the result against the
 * CRC previously stored in the meta file. On success the recomputed CRC is
 * left in the {@code partialCrc} field.
 *
 * @param blkoff offset in the block file where new data will be appended
 * @param ckoff offset in the checksum (meta) file
 * @param bytesPerChecksum data bytes covered by a single checksum
 * @throws IOException if the streams cannot be read or the CRCs disagree
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // Bytes already present in the current (partial) chunk.
  int partialLen = (int) (blkoff % bytesPerChecksum);
  int crcLen = checksum.getChecksumSize();
  // Rewind blkoff to the start of that chunk.
  blkoff -= partialLen;
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            partialLen +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  // Read the existing chunk bytes and their stored CRC from the
  // temporary block/meta input streams.
  byte[] dataBuf = new byte[partialLen];
  byte[] storedCrc = new byte[crcLen];
  FSDataset.BlockInputStreams streams = null;
  try {
    streams = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(streams.dataIn, dataBuf, 0, partialLen);
    IOUtils.readFully(streams.checksumIn, storedCrc, 0, storedCrc.length);
  } finally {
    IOUtils.closeStream(streams);
  }

  // Recompute the CRC over the bytes just read back from disk.
  partialCrc = new PureJavaCrc32();
  partialCrc.update(dataBuf, 0, partialLen);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // Paranoia: the recomputed CRC must match the value persisted when
  // the file was last closed.
  long stored = FSInputChecker.checksum2long(storedCrc);
  if (partialCrc.getValue() != stored) {
    throw new IOException("Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the " +
                 " last time file was closed " +
                 stored);
  }
}
项目:hadoop-gpu    文件:BlockReceiver.java   
/**
 * Re-reads the partially-filled checksum chunk that precedes {@code blkoff}
 * and recomputes its CRC, checking the result against the CRC stored in the
 * meta file. On success the recomputed CRC remains in {@code partialCrc}.
 *
 * @param blkoff offset in the block file where new data will be appended
 * @param ckoff offset in the checksum (meta) file
 * @param bytesPerChecksum data bytes covered by a single checksum
 * @throws IOException if the streams cannot be read or the CRCs disagree
 */
private void computePartialChunkCrc(long blkoff, long ckoff, 
                                    int bytesPerChecksum) throws IOException {

  // How many bytes the partial chunk already holds.
  int partialLen = (int) (blkoff % bytesPerChecksum);
  int crcLen = checksum.getChecksumSize();
  blkoff -= partialLen;  // rewind to the chunk boundary
  LOG.info("computePartialChunkCrc sizePartialChunk " + 
            partialLen +
            " block " + block +
            " offset in block " + blkoff +
            " offset in metafile " + ckoff);

  byte[] chunkBytes = new byte[partialLen];
  byte[] savedCrc = new byte[crcLen];

  // Fetch the chunk bytes and the earlier-computed CRC via the
  // temporary block/meta input streams.
  FSDataset.BlockInputStreams in = null;
  try {
    in = datanode.data.getTmpInputStreams(block, blkoff, ckoff);
    IOUtils.readFully(in.dataIn, chunkBytes, 0, partialLen);
    IOUtils.readFully(in.checksumIn, savedCrc, 0, savedCrc.length);
  } finally {
    IOUtils.closeStream(in);
  }

  // CRC the bytes we just read back.
  partialCrc = new CRC32();
  partialCrc.update(chunkBytes, 0, partialLen);
  LOG.info("Read in partial CRC chunk from disk for block " + block);

  // Paranoia: recomputed CRC must agree with the persisted value.
  long persisted = FSInputChecker.checksum2long(savedCrc);
  if (partialCrc.getValue() != persisted) {
    String msg = "Partial CRC " + partialCrc.getValue() +
                 " does not match value computed the " +
                 " last time file was closed " +
                 persisted;
    throw new IOException(msg);
  }
}