Java class org.apache.hadoop.mapred.pipes.TestPipeApplication.FakeSplit: example usages
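
FakeSplit itself is a trivial helper nested inside TestPipeApplication. For context, here is a minimal sketch of the class, following the shape of the Hadoop test source (illustrative, not a verbatim copy):

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.mapred.InputSplit;

// A no-op InputSplit with zero length and no locations; in the real test it is
// a private nested class, and it exists only to satisfy getRecordReader(),
// which ignores the split entirely.
class FakeSplit implements InputSplit {
  public void write(DataOutput out) throws IOException {
    // nothing to serialize
  }

  public void readFields(DataInput in) throws IOException {
    // nothing to deserialize
  }

  public long getLength() {
    return 0L;
  }

  public String[] getLocations() {
    return new String[0];
  }
}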

Project: hadoop    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      StringUtils.escapeString(input1.getAbsolutePath()) + ","
          + StringUtils.escapeString(input2.getAbsolutePath()));
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
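
The last three assertions are explained by how PipesDummyRecordReader tracks progress: next() does no real I/O and simply stores the key's float value as the current progress. A minimal sketch of that contract, modeled on the reader in the Hadoop source (the class name here is illustrative):

import java.io.IOException;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.RecordReader;

// Sketch of the dummy reader's contract: null key/value factories, a fixed
// position of 0, and a progress value that mirrors the last key passed to next().
class DummyReaderSketch implements RecordReader<FloatWritable, NullWritable> {
  private float progress = 0.0f;

  public FloatWritable createKey() { return null; }   // hence assertNull(createKey())
  public NullWritable createValue() { return null; }  // hence assertNull(createValue())
  public long getPos() throws IOException { return 0; }
  public float getProgress() { return progress; }
  public void close() throws IOException { }

  public boolean next(FloatWritable key, NullWritable value) throws IOException {
    // next(new FloatWritable(2.0f), ...) is why getProgress() then returns 2.0f
    progress = key.get();
    return true;
  }
}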
Project: aliyun-oss-hadoop-fs    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      StringUtils.escapeString(input1.getAbsolutePath()) + ","
          + StringUtils.escapeString(input2.getAbsolutePath()));
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: big-c    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      StringUtils.escapeString(input1.getAbsolutePath()) + ","
          + StringUtils.escapeString(input2.getAbsolutePath()));
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      StringUtils.escapeString(input1.getAbsolutePath()) + ","
          + StringUtils.escapeString(input2.getAbsolutePath()));
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: hadoop-plus    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      input1.getAbsolutePath() + "," + input2.getAbsolutePath());
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: FlexMap    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      StringUtils.escapeString(input1.getAbsolutePath()) + ","
          + StringUtils.escapeString(input2.getAbsolutePath()));
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: hops    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      StringUtils.escapeString(input1.getAbsolutePath()) + ","
          + StringUtils.escapeString(input2.getAbsolutePath()));
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: hadoop-TCP    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      input1.getAbsolutePath() + "," + input2.getAbsolutePath());
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: hardfs    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      input1.getAbsolutePath() + "," + input2.getAbsolutePath());
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}
Project: hadoop-on-lustre2    File: TestPipesNonJavaInputFormat.java
/**
 * Test PipesNonJavaInputFormat.
 */
@Test
public void testFormat() throws IOException {

  PipesNonJavaInputFormat inputFormat = new PipesNonJavaInputFormat();
  JobConf conf = new JobConf();

  Reporter reporter = mock(Reporter.class);
  RecordReader<FloatWritable, NullWritable> reader = inputFormat
      .getRecordReader(new FakeSplit(), conf, reporter);
  assertEquals(0.0f, reader.getProgress(), 0.001);

  // create the input files
  File input1 = new File(workSpace + File.separator + "input1");
  if (!input1.getParentFile().exists()) {
    Assert.assertTrue(input1.getParentFile().mkdirs());
  }

  if (!input1.exists()) {
    Assert.assertTrue(input1.createNewFile());
  }

  File input2 = new File(workSpace + File.separator + "input2");
  if (!input2.exists()) {
    Assert.assertTrue(input2.createNewFile());
  }
  // set data for splits
  conf.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR,
      input1.getAbsolutePath() + "," + input2.getAbsolutePath());
  InputSplit[] splits = inputFormat.getSplits(conf, 2);
  assertEquals(2, splits.length);

  PipesNonJavaInputFormat.PipesDummyRecordReader dummyRecordReader = new PipesNonJavaInputFormat.PipesDummyRecordReader(
      conf, splits[0]);
  // a freshly constructed dummy reader has no key, value, position, or progress
  assertNull(dummyRecordReader.createKey());
  assertNull(dummyRecordReader.createValue());
  assertEquals(0, dummyRecordReader.getPos());
  assertEquals(0.0, dummyRecordReader.getProgress(), 0.001);
  // next() stores the key's value as the reader's progress
  assertTrue(dummyRecordReader.next(new FloatWritable(2.0f), NullWritable.get()));
  assertEquals(2.0, dummyRecordReader.getProgress(), 0.001);
  dummyRecordReader.close();
}