Java class org.apache.hadoop.hdfs.server.common.GenerationStamp: example usages from open-source projects

Project: hadoop    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
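
The helper above relies on HDFS's on-disk naming convention: a block file is named blk_<blockId> and its meta-file blk_<blockId>_<genStamp>.meta, so the generation stamp can be recovered purely from the meta-file's name. Below is a minimal, self-contained sketch of that extraction, in the spirit of what Block.getGenerationStamp does; the class and method names are hypothetical, not Hadoop APIs.

// Hypothetical sketch: recovering the generation stamp from a meta-file name
// of the form "blk_<blockId>_<genStamp>.meta". Illustrative only.
public class GenStampNamingSketch {
  static final String METADATA_EXTENSION = ".meta";

  static long genStampFromMetaName(String metaFileName) {
    int stampStart = metaFileName.lastIndexOf('_') + 1;
    int stampEnd = metaFileName.lastIndexOf(METADATA_EXTENSION);
    return Long.parseLong(metaFileName.substring(stampStart, stampEnd));
  }

  public static void main(String[] args) {
    // The block file would be "blk_1073741825"; its meta-file encodes stamp 1001.
    System.out.println(genStampFromMetaName("blk_1073741825_1001.meta")); // 1001
  }
}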
Project: hadoop    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
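
For these lookups to succeed, Block's behavior as a HashMap key cannot depend on the generation stamp: the entries were inserted with stamp blkids[i] but are retrieved with GRANDFATHER_GENERATION_STAMP. A sketch of that equality contract follows; the Key class is hypothetical, not Hadoop's Block.

import java.util.HashMap;
import java.util.Map;

// Hypothetical key type whose equals()/hashCode() use only the id, mirroring
// the contract the test above exercises. Illustrative only.
public class IdOnlyKeySketch {
  static final class Key {
    final long id;
    final long stamp; // deliberately ignored by equals/hashCode
    Key(long id, long stamp) { this.id = id; this.stamp = stamp; }
    @Override public boolean equals(Object o) {
      return o instanceof Key && ((Key) o).id == id;
    }
    @Override public int hashCode() { return Long.hashCode(id); }
  }

  public static void main(String[] args) {
    Map<Key, Long> map = new HashMap<>();
    map.put(new Key(42L, 1001L), 42L);
    // Same id, different stamp: the entry is still found.
    System.out.println(map.get(new Key(42L, 0L))); // 42
  }
}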
Project: hadoop    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
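
The behavior being asserted is simple throttling: the pending-invalidation list hands out at most maxLimit blocks per call and keeps the remainder for the next call. A self-contained sketch of that pattern follows; the names are illustrative, not the DatanodeDescriptor API.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;

// Hypothetical bounded-drain queue mirroring getInvalidateBlocks(maxLimit).
public class BoundedDrainSketch<T> {
  private final Deque<T> pending = new ArrayDeque<>();

  void addAll(List<T> items) { pending.addAll(items); }

  // Returns at most maxLimit items; leftovers stay queued for the next call.
  List<T> drain(int maxLimit) {
    List<T> out = new ArrayList<>();
    while (out.size() < maxLimit && !pending.isEmpty()) {
      out.add(pending.poll());
    }
    return out;
  }

  public static void main(String[] args) {
    BoundedDrainSketch<Integer> q = new BoundedDrainSketch<>();
    q.addAll(List.of(0, 1, 2, 3, 4, 5, 6, 7, 8, 9));
    System.out.println(q.drain(8).size()); // 8, like MAX_LIMIT
    System.out.println(q.drain(8).size()); // 2, like REMAINING_BLOCKS
  }
}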
Project: aliyun-oss-hadoop-fs    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: big-c    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: big-c    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: big-c    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: hadoop-2.6.0-cdh5.4.3    File: FSNamesystem.java
/**
 * Clear all loaded data
 */
void clear() {
  dir.reset();
  dtSecretManager.reset();
  generationStampV1.setCurrentValue(GenerationStamp.LAST_RESERVED_STAMP);
  generationStampV2.setCurrentValue(GenerationStamp.LAST_RESERVED_STAMP);
  blockIdGenerator.setCurrentValue(
      SequentialBlockIdGenerator.LAST_RESERVED_BLOCK_ID);
  generationStampV1Limit = GenerationStamp.GRANDFATHER_GENERATION_STAMP;
  leaseManager.removeAllLeases();
  inodeId.setCurrentValue(INodeId.LAST_RESERVED_ID);
  snapshotManager.clearSnapshottableDirs();
  cacheManager.clear();
  setImageLoaded(false);
  blockManager.clear();
}
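
clear() rewinds every namesystem counter to its reserved floor so that a subsequent image load starts from a clean slate. Below is a minimal sketch of such a resettable counter, in the spirit of the generation-stamp generators being reset above; the class and its floor constant are illustrative, not Hadoop's SequentialNumber.

import java.util.concurrent.atomic.AtomicLong;

// Hypothetical resettable counter; illustrative only.
public class ResettableCounterSketch {
  static final long LAST_RESERVED = 1000L; // illustrative floor

  private final AtomicLong value = new AtomicLong(LAST_RESERVED);

  long nextValue() { return value.incrementAndGet(); }
  void setCurrentValue(long v) { value.set(v); } // what clear() invokes

  public static void main(String[] args) {
    ResettableCounterSketch gs = new ResettableCounterSketch();
    System.out.println(gs.nextValue()); // 1001: first value past the floor
    gs.setCurrentValue(LAST_RESERVED);  // reset, as in clear()
    System.out.println(gs.nextValue()); // 1001 again
  }
}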
Project: hadoop-2.6.0-cdh5.4.3    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestComputeInvalidateWork.java
/**
 * Reformatted DataNodes will replace the original UUID in the
 * {@link DatanodeManager#datanodeMap}. This tests if block
 * invalidation work on the original DataNode can be skipped.
 */
@Test(timeout=120000)
public void testDatanodeReformat() throws Exception {
  namesystem.writeLock();
  try {
    Block block = new Block(0, 0, GenerationStamp.LAST_RESERVED_STAMP);
    bm.addToInvalidates(block, nodes[0]);
    // Change the datanode UUID to emulate a reformation
    nodes[0].setDatanodeUuidForTesting("fortesting");
    // Since UUID has changed, the invalidation work should be skipped
    assertEquals(0, bm.computeInvalidateWork(1));
    assertEquals(0, bm.getPendingDeletionBlocksCount());
  } finally {
    namesystem.writeUnlock();
  }
}
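
The test works because pending invalidations are tracked per datanode identity: once the node re-registers under a new UUID, the work queued under the old UUID no longer resolves to a live node and is skipped. A hypothetical sketch of that bookkeeping follows; all names are illustrative, not the BlockManager API.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Hypothetical bookkeeping: invalidation work keyed by datanode UUID.
public class InvalidateByUuidSketch {
  private final Map<String, List<Long>> pendingByUuid = new HashMap<>();
  private final Set<String> registeredUuids = new HashSet<>();

  void register(String uuid) { registeredUuids.add(uuid); }
  void unregister(String uuid) { registeredUuids.remove(uuid); }

  void addToInvalidates(long blockId, String uuid) {
    pendingByUuid.computeIfAbsent(uuid, k -> new ArrayList<>()).add(blockId);
  }

  // Schedules work only for UUIDs that still belong to a registered node.
  long computeInvalidateWork() {
    return pendingByUuid.keySet().stream()
        .filter(registeredUuids::contains)
        .count();
  }

  public static void main(String[] args) {
    InvalidateByUuidSketch bm = new InvalidateByUuidSketch();
    bm.register("uuid-original");
    bm.addToInvalidates(0L, "uuid-original");
    // Reformat: the node comes back under a new UUID.
    bm.unregister("uuid-original");
    bm.register("fortesting");
    System.out.println(bm.computeInvalidateWork()); // 0, as the test asserts
  }
}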
Project: hadoop-2.6.0-cdh5.4.3    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: hadoop-EAR    File: TestGetBlocks.java
public void testGenerationStampWildCard() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" +  seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10]; 
  for(int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for(int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0, GenerationStamp.WILDCARD_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hadoop-EAR    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = new DatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.FIRST_VALID_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  BlockCommand bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.getBlocks().length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.getBlocks().length, REMAINING_BLOCKS);
}
Project: hadoop-EAR    File: TestDirectoryScanner.java
private void verifyAddition(long blockId, long genStamp, long size) throws IOException {
  final DatanodeBlockInfo replicainfo;
  replicainfo = fds.volumeMap.get(nsid, new Block(blockId, 0, GenerationStamp.WILDCARD_STAMP));
  assertNotNull(replicainfo);

  // Added block has the same file as the one created by the test
  File file = new File(getBlockFile(blockId));
  assertEquals(file.getName(), fds.getBlockFile(nsid, new Block(blockId)).getName());

  // Generation stamp is same as that of created file
  LOG.info("------------------: " + genStamp + " : " +
      replicainfo.getBlock().getGenerationStamp());
  assertEquals(genStamp, replicainfo.getBlock().getGenerationStamp());

  // File size matches
  assertEquals(size, replicainfo.getBlock().getNumBytes());
}
Project: hadoop-plus    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: hadoop-plus    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hadoop-plus    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: FlexMap    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: FlexMap    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: FlexMap    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: hops    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (File aListdir : listdir) {
    String fileName = aListdir.getName();
    if (!fileName.startsWith(blockName)) {
      continue;
    }
    if (blockFile == aListdir) {
      continue;
    }
    String blkName = blockFile.getName();
    if (fileName.startsWith(blkName + "_")) {
      return Block.getGenerationStamp(aListdir.getName());
    }
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
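
Note the extra startsWith(blkName + "_") check, which the other variants above lack: it requires the underscore separator after the block name, so a scan for blk_123 cannot accidentally match the meta-file of a different block such as blk_1234. A quick demonstration of the difference, using hypothetical file names:

// Demonstrates the prefix-collision guard used by the hops variant above.
public class PrefixGuardSketch {
  public static void main(String[] args) {
    String blockName = "blk_123";
    String otherMeta = "blk_1234_1001.meta"; // meta-file of a different block

    // The bare prefix check used by the other variants would match this file:
    System.out.println(otherMeta.startsWith(blockName));       // true (false positive)
    // Requiring the underscore separator rejects it:
    System.out.println(otherMeta.startsWith(blockName + "_")); // false (correct)
  }
}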
Project: hops    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (long blkid : blkids) {
    Block b =
        new Block(blkid, 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkid, v.longValue());
  }
}
Project: hops    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<>(MAX_BLOCKS);
  for (int i = 0; i < MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.FIRST_VALID_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: hadoop-TCP    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: hadoop-TCP    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hadoop-TCP    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: hadoop-on-lustre    File: TestGetBlocks.java
public void testGenerationStampWildCard() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" +  seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10]; 
  for(int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for(int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0, GenerationStamp.WILDCARD_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hadoop-on-lustre    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = new DatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.FIRST_VALID_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  BlockCommand bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.getBlocks().length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.getBlocks().length, REMAINING_BLOCKS);
}
Project: hardfs    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: hardfs    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hardfs    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: hadoop-on-lustre2    File: FsDatasetUtil.java
/**
 * Find the meta-file for the specified block file
 * and then return the generation stamp from the name of the meta-file.
 */
static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  FsDatasetImpl.LOG.warn("Block " + blockFile + " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: hadoop-on-lustre2    File: TestGetBlocks.java
@Test
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" + seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10];
  for (int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for (int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0,
        GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: hadoop-on-lustre2    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
@Test
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = DFSTestUtil.getLocalDatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.LAST_RESERVED_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  Block[] bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.length, REMAINING_BLOCKS);
}
Project: cumulus    File: FSEditLogLoader.java
static private BlockInfo[] readBlocks(
    DataInputStream in,
    int logVersion,
    boolean isFileUnderConstruction,
    short replication) throws IOException {
  int numBlocks = in.readInt();
  BlockInfo[] blocks = new BlockInfo[numBlocks];
  Block blk = new Block();
  BlockTwo oldblk = new BlockTwo();
  for (int i = 0; i < numBlocks; i++) {
    if (logVersion <= -14) {
      blk.readFields(in);
    } else {
      oldblk.readFields(in);
      blk.set(oldblk.blkid, oldblk.len,
              GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    }
    if(isFileUnderConstruction && i == numBlocks-1)
      blocks[i] = new BlockInfoUnderConstruction(blk, replication);
    else
      blocks[i] = new BlockInfo(blk, replication);
  }
  return blocks;
}
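
The version gate here backfills a stamp for records that predate generation stamps: layout versions are negative and decrease over time, so logVersion <= -14 means the block record already carries a stamp, while older records get GRANDFATHER_GENERATION_STAMP as a sentinel. A hypothetical sketch of that pattern follows; the types and constants are illustrative, not Hadoop's wire format.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Hypothetical version-gated block deserialization. Illustrative only.
public class VersionedBlockReadSketch {
  static final long GRANDFATHER_STAMP = 0L;   // illustrative sentinel
  static final int STAMP_AWARE_VERSION = -14; // stamp present at or below this

  static long[] readBlock(DataInputStream in, int logVersion) throws IOException {
    long id = in.readLong();
    long len = in.readLong();
    long stamp = (logVersion <= STAMP_AWARE_VERSION)
        ? in.readLong()      // newer record: stamp is on the wire
        : GRANDFATHER_STAMP; // legacy record: backfill the sentinel
    return new long[] { id, len, stamp };
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    out.writeLong(7L); out.writeLong(128L); out.writeLong(1001L);
    DataInputStream in = new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
    System.out.println(readBlock(in, -14)[2]); // 1001, read from the record
  }
}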
Project: cumulus    File: FSDataset.java
/** Find the metadata file for the specified block file.
 * Return the generation stamp from the name of the metafile.
 */
private static long getGenerationStampFromFile(File[] listdir, File blockFile) {
  String blockName = blockFile.getName();
  for (int j = 0; j < listdir.length; j++) {
    String path = listdir[j].getName();
    if (!path.startsWith(blockName)) {
      continue;
    }
    if (blockFile == listdir[j]) {
      continue;
    }
    return Block.getGenerationStamp(listdir[j].getName());
  }
  DataNode.LOG.warn("Block " + blockFile + 
                    " does not have a metafile!");
  return GenerationStamp.GRANDFATHER_GENERATION_STAMP;
}
Project: cumulus    File: TestGetBlocks.java
public void testBlockKey() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" +  seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10]; 
  for(int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for(int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0, GenerationStamp.GRANDFATHER_GENERATION_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}
Project: cumulus    File: TestDatanodeDescriptor.java
/**
 * Test that getInvalidateBlocks observes the maxlimit.
 */
public void testGetInvalidateBlocks() throws Exception {
  final int MAX_BLOCKS = 10;
  final int REMAINING_BLOCKS = 2;
  final int MAX_LIMIT = MAX_BLOCKS - REMAINING_BLOCKS;

  DatanodeDescriptor dd = new DatanodeDescriptor();
  ArrayList<Block> blockList = new ArrayList<Block>(MAX_BLOCKS);
  for (int i=0; i<MAX_BLOCKS; i++) {
    blockList.add(new Block(i, 0, GenerationStamp.FIRST_VALID_STAMP));
  }
  dd.addBlocksToBeInvalidated(blockList);
  BlockCommand bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.getBlocks().length, MAX_LIMIT);
  bc = dd.getInvalidateBlocks(MAX_LIMIT);
  assertEquals(bc.getBlocks().length, REMAINING_BLOCKS);
}
Project: RDFS    File: TestGetBlocks.java
public void testGenerationStampWildCard() {
  Map<Block, Long> map = new HashMap<Block, Long>();
  final Random RAN = new Random();
  final long seed = RAN.nextLong();
  System.out.println("seed=" +  seed);
  RAN.setSeed(seed);

  long[] blkids = new long[10]; 
  for(int i = 0; i < blkids.length; i++) {
    blkids[i] = 1000L + RAN.nextInt(100000);
    map.put(new Block(blkids[i], 0, blkids[i]), blkids[i]);
  }
  System.out.println("map=" + map.toString().replace(",", "\n  "));

  for(int i = 0; i < blkids.length; i++) {
    Block b = new Block(blkids[i], 0, GenerationStamp.WILDCARD_STAMP);
    Long v = map.get(b);
    System.out.println(b + " => " + v);
    assertEquals(blkids[i], v.longValue());
  }
}