Java class org.apache.hadoop.fs.slive.DataWriter.GenerateOutput usage examples

Project: hadoop    File: TestSlive.java
@Test
public void testDataWriting() throws Exception {
  long byteAm = 100;
  File fn = getTestFile();
  // Write byteAm bytes of generated, verifiable data to the test file.
  DataWriter writer = new DataWriter(rnd);
  FileOutputStream fs = new FileOutputStream(fn);
  GenerateOutput ostat = writer.writeSegment(byteAm, fs);
  LOG.info(ostat);
  fs.close();
  assertTrue(ostat.getBytesWritten() == byteAm);
  // Read the file back and check every chunk against the expected pattern.
  DataVerifier vf = new DataVerifier();
  FileInputStream fin = new FileInputStream(fn);
  VerifyOutput vfout = vf.verifyFile(byteAm, new DataInputStream(fin));
  LOG.info(vfout);
  fin.close();
  assertEquals(vfout.getBytesRead(), byteAm);
  assertTrue(vfout.getChunksDifferent() == 0);
}
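
The same write-then-verify round trip can also be expressed with try-with-resources so the streams are closed even when an assertion fails. The sketch below is a minimal illustration built on the DataWriter/DataVerifier calls used above, not part of the original test: the temporary file and the fixed Random seed stand in for the test class's getTestFile() helper and shared rnd field.

@Test
public void testDataWritingRoundTrip() throws Exception {
  long byteAm = 100;
  // Placeholders for the original fixture: getTestFile() and the shared rnd field.
  File fn = File.createTempFile("slive", ".data");
  Random rnd = new Random(1L);

  DataWriter writer = new DataWriter(rnd);
  GenerateOutput ostat;
  try (FileOutputStream out = new FileOutputStream(fn)) {
    // Write byteAm bytes of generated, verifiable data.
    ostat = writer.writeSegment(byteAm, out);
  }
  assertEquals(byteAm, ostat.getBytesWritten());

  DataVerifier vf = new DataVerifier();
  VerifyOutput vfout;
  try (DataInputStream in = new DataInputStream(new FileInputStream(fn))) {
    // Re-read the file and compare each chunk against the expected pattern.
    vfout = vf.verifyFile(byteAm, in);
  }
  assertEquals(byteAm, vfout.getBytesRead());
  assertEquals(0, vfout.getChunksDifferent());
}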
The identical testDataWriting() method also appears in TestSlive.java of the following projects: aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2, and mapreduce-fork.