Java class org.apache.hadoop.fs.slive.DataVerifier.VerifyOutput — usage examples
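VerifyOutput is the result object returned by DataVerifier.verifyFile(...) in Hadoop's SLive file-system load-testing tool (package org.apache.hadoop.fs.slive). As the snippets below show, it reports how many bytes were read and how many data chunks matched or differed from what DataWriter originally produced. All examples come from TestSlive.java and fall into two patterns: verifying a file written by DataWriter, and verifying a file of raw random bytes that is expected to fail.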

Project: hadoop    File: TestSlive.java
@Test
public void testDataWriting() throws Exception {
  long byteAm = 100;
  File fn = getTestFile();
  DataWriter writer = new DataWriter(rnd);
  FileOutputStream fs = new FileOutputStream(fn);
  // Write byteAm bytes of verifiable SLive data to the local file.
  GenerateOutput ostat = writer.writeSegment(byteAm, fs);
  LOG.info(ostat);
  fs.close();
  assertEquals(byteAm, ostat.getBytesWritten());
  // Read the file back; every chunk should verify, none should differ.
  DataVerifier vf = new DataVerifier();
  FileInputStream fin = new FileInputStream(fn);
  VerifyOutput vfout = vf.verifyFile(byteAm, new DataInputStream(fin));
  LOG.info(vfout);
  fin.close();
  assertEquals(byteAm, vfout.getBytesRead());
  assertEquals(0, vfout.getChunksDifferent());
}
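For readers who want to try the round trip outside of JUnit, here is a minimal, self-contained sketch of the same write-then-verify flow. It is an illustration, not code from any of the projects above: the class name VerifyRoundTrip is invented, the seeded Random and the temp file stand in for the test fixtures rnd and getTestFile(), and the file is placed in the org.apache.hadoop.fs.slive package on the assumption that the SLive helper classes are package-private, as TestSlive (which lives in that package) suggests.

package org.apache.hadoop.fs.slive;

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.Random;

public class VerifyRoundTrip {
  public static void main(String[] args) throws Exception {
    long byteAm = 100;                               // bytes to write and verify
    Random rnd = new Random(1L);                     // stand-in for the test's rnd
    File fn = File.createTempFile("slive", ".dat");  // stand-in for getTestFile()
    fn.deleteOnExit();

    // Write byteAm bytes of verifiable SLive data, exactly as the test does.
    DataWriter writer = new DataWriter(rnd);
    try (FileOutputStream out = new FileOutputStream(fn)) {
      GenerateOutput ostat = writer.writeSegment(byteAm, out);
      System.out.println("wrote " + ostat.getBytesWritten() + " bytes");
    }

    // Read the file back and check every chunk against what was written.
    DataVerifier vf = new DataVerifier();
    try (DataInputStream in = new DataInputStream(new FileInputStream(fn))) {
      VerifyOutput vout = vf.verifyFile(byteAm, in);
      System.out.println("read=" + vout.getBytesRead()
          + " chunksSame=" + vout.getChunksSame()
          + " chunksDifferent=" + vout.getChunksDifferent());
    }
  }
}

The try-with-resources blocks replace the explicit close() calls of the test so the streams are released even if verification throws.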
Project: aliyun-oss-hadoop-fs    File: TestSlive.java
(The testDataWriting method in this project is byte-for-byte identical to the hadoop version above, so only testBadChunks is shown.)
@Test
public void testBadChunks() throws Exception {
  File fn = getTestFile();
  int byteAm = 10000;
  FileOutputStream fout = new FileOutputStream(fn);
  byte[] bytes = new byte[byteAm];
  rnd.nextBytes(bytes);
  fout.write(bytes);
  fout.close();
  // Attempt to verify the raw random bytes, which were not produced
  // by DataWriter and therefore contain no valid chunks.
  DataVerifier vf = new DataVerifier();
  VerifyOutput vout = new VerifyOutput(0, 0, 0, 0);
  DataInputStream in = null;
  try {
    in = new DataInputStream(new FileInputStream(fn));
    vout = vf.verifyFile(byteAm, in);
  } catch (Exception e) {
    // Expected: verifying malformed data may throw. The assertion below
    // only requires that no chunk verified successfully.
  } finally {
    if (in != null) in.close();
  }
  assertEquals(0, vout.getChunksSame());
}
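Because the file holds plain random bytes rather than the layout DataWriter emits, verifyFile(...) may either throw partway through or complete with nothing matching; the empty catch block deliberately accepts both outcomes, and the test asserts only that getChunksSame() stayed at zero.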
Byte-for-byte identical copies of both testDataWriting and testBadChunks also appear in TestSlive.java in the big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2, and mapreduce-fork projects.