Usage examples of the Java class org.apache.hadoop.mapred.Task.TaskReporter
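Task.TaskReporter is the reporter object a running map or reduce task uses to push progress, status strings, and counter updates back to the MapReduce framework; task-side components receive it wherever a Reporter or Progressable is expected. The snippets below fall into two groups: unit tests that mock TaskReporter with Mockito and verify how often progress() is called, and task-side classes (spill and sort bookkeeping, record readers, native-task bridges) that take a TaskReporter to update counters and status.

The test pattern is the same everywhere. Here is a minimal sketch of it, assuming JUnit 4 and Mockito on the classpath and that the test sits in the org.apache.hadoop.mapred test tree (TaskReporter is a protected inner class of Task); the class and method names are illustrative and not taken from any of the projects listed.

package org.apache.hadoop.mapred;

import static org.mockito.Mockito.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.IFile.Writer;
import org.apache.hadoop.mapred.Task.CombineOutputCollector;
import org.apache.hadoop.mapred.Task.TaskReporter;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.junit.Test;

public class TaskReporterProgressSketch {

  @Test
  public void reportsProgressEveryTwoRecords() throws Throwable {
    // Mock the reporter and the sink writer; only progress() calls matter here.
    TaskReporter reporter = mock(TaskReporter.class);
    @SuppressWarnings("unchecked")
    Writer<String, Integer> writer = mock(Writer.class);

    // Ask the collector to report progress after every 2 collected records.
    Configuration conf = new Configuration();
    conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

    Counters.Counter outCounter = new Counters.Counter();
    CombineOutputCollector<String, Integer> coc =
        new CombineOutputCollector<String, Integer>(outCounter, reporter, conf);
    coc.setWriter(writer);

    coc.collect("key", 1);
    verify(reporter, never()).progress();  // below the threshold
    coc.collect("key", 2);
    verify(reporter, times(1)).progress(); // threshold reached
  }
}

The testDefaultCollect variants below follow the same shape, but leave the threshold unset and loop Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS (or, in the older forks, DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS) times between verifications.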

Project: hadoop    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: aliyun-oss-hadoop-fs    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: aliyun-oss-hadoop-fs    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: big-c    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: big-c    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set("mapred.combine.recordsBeforeProgress", "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop-2.6.0-cdh5.4.3    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop-EAR    File: MapSpillSortCounters.java
public MapSpillSortCounters(TaskReporter taskReporter) {
  this.reporter = taskReporter;
  numSpillsVal = 0;
  mapSpillCPUVal = 0;
  mapSpillWallClockVal = 0;
  mapSpillBytesVal = 0;
  mapMemSortCPUVal = 0;
  mapMemSortWallClockVal = 0;
  mapMergeCPUVal = 0;
  mapMergeWallClockVal = 0;
  mapSpillSingleRecordNum = 0;

  mapSpillJVMCPUVal = 0;
  mapMemSortJVMCPUVal = 0;
  mapMergeJVMCPUVal = 0;
}
Project: hadoop-plus    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop-plus    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: FlexMap    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: FlexMap    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: FlexMap    File: MultiMapTask.java
NewMultiTrackingRecordReader(org.apache.hadoop.mapreduce.InputSplit splits[],
    org.apache.hadoop.mapreduce.InputFormat<K, V> inputFormat,
    TaskReporter reporter,
    org.apache.hadoop.mapreduce.TaskAttemptContext taskContext)
    throws InterruptedException, IOException {
  this.reporter = reporter;
  this.inputRecordCounter = reporter
      .getCounter(TaskCounter.MAP_INPUT_RECORDS);
  this.fileInputByteCounter = reporter
      .getCounter(FileInputFormatCounter.BYTES_READ);
  this.context = taskContext;
  this.splits = splits;
  this.inputFormat = inputFormat;
  this.splitsLength = 0;
}
Project: hops    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hops    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop-TCP    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop-TCP    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop-on-lustre    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set("mapred.combine.recordsBeforeProgress", "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop-on-lustre    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hardfs    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hardfs    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop-on-lustre2    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set(MRJobConfig.COMBINE_RECORDS_BEFORE_PROGRESS, "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hadoop-on-lustre2    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);

  @SuppressWarnings("unchecked")
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hanoi-hadoop-2.0.0-cdh    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set("mapred.combine.recordsBeforeProgress", "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hanoi-hadoop-2.0.0-cdh    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: mammoth    File: MemoryElement.java
public void initialize(JobConf job, TaskReporter reporter, TaskAttemptID taskId)
    throws ClassNotFoundException, IOException {
  this.reporter = reporter;
  this.taskId = taskId;
  mapOutputByteCounter = reporter.getCounter(MAP_OUTPUT_BYTES);
  mapOutputRecordCounter = reporter.getCounter(MAP_OUTPUT_RECORDS);
  this.job = job;

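  // Set up the in-memory sort: sorter implementation, partition index array,
  // key comparator, and key/value serializers opened on the in-memory buffer.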
  sorter = ReflectionUtils.newInstance(
      job.getClass("map.sort.class", QuickSort.class, IndexedSorter.class), job);
  partitions = job.getNumReduceTasks();
  if (partitionInd == null || partitions * 2 != partitionInd.length) {
    partitionInd = new int[partitions * 2];
  }
  comparator = job.getOutputKeyComparator();
  keyClass = (Class)job.getMapOutputKeyClass();
  valClass = (Class)job.getMapOutputValueClass();
  serializationFactory = new SerializationFactory(job);
  keySerializer = serializationFactory.getSerializer(keyClass);
  keySerializer.open(bb);
  valSerializer = serializationFactory.getSerializer(valClass);
  valSerializer.open(bb);
  reset();
}
Project: mammoth    File: DefaultJvmMemoryManager.java
MapSpiller(JobConf job, TaskAttemptID tid, TaskReporter rep) throws ClassNotFoundException {
  reporter = rep;
  conf = job;
  this.taskId = tid;
  mapOutputFile.setConf(conf);
  mapOutputByteCounter = reporter.getCounter(MAP_OUTPUT_BYTES);
  Counters.Counter combineInputCounter =
      reporter.getCounter(COMBINE_INPUT_RECORDS);
  combineOutputCounter = reporter.getCounter(COMBINE_OUTPUT_RECORDS);
  fileOutputByteCounter = reporter.getCounter(MAP_OUTPUT_MATERIALIZED_BYTES);
  // combiner
  combinerRunner = CombinerRunner.create(conf, taskId,
                                         combineInputCounter,
                                         reporter, null);
  if (combinerRunner != null) {
    combineCollector = new CombineOutputCollector(combineOutputCounter, reporter, conf);
  } else {
    combineCollector = null;
  }
  indexCacheList = new ArrayList<SpillRecord>();
  spilledRecordsCounter = reporter.getCounter(Counter.SPILLED_RECORDS);
}
Project: hortonworks-extension    File: TestCombineOutputCollector.java
@Test
public void testCustomCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();
  conf.set("mapred.combine.recordsBeforeProgress", "2");

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 1);
  verify(mockTaskReporter, never()).progress();

  coc.collect("dummy", 2);
  verify(mockTaskReporter, times(1)).progress();
}
Project: hortonworks-extension    File: TestCombineOutputCollector.java
@Test
public void testDefaultCollect() throws Throwable {
  //mock creation
  TaskReporter mockTaskReporter = mock(TaskReporter.class);
  Counters.Counter outCounter = new Counters.Counter();
  Writer<String, Integer> mockWriter = mock(Writer.class);

  Configuration conf = new Configuration();

  coc = new CombineOutputCollector<String, Integer>(outCounter, mockTaskReporter, conf);
  coc.setWriter(mockWriter);
  verify(mockTaskReporter, never()).progress();

  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(1)).progress();
  for(int i = 0; i < Task.DEFAULT_MR_COMBINE_RECORDS_BEFORE_PROGRESS; i++) {
    coc.collect("dummy", i);
  }
  verify(mockTaskReporter, times(2)).progress();
}
Project: hadoop    File: TestCombineFileRecordReader.java
@SuppressWarnings("unchecked")
@Test
public void testProgressIsReportedIfInputASeriesOfEmptyFiles() throws IOException, InterruptedException {
  JobConf conf = new JobConf();
  Path[] paths = new Path[3];
  File[] files = new File[3];
  long[] fileLength = new long[3];

  try {
    for(int i=0;i<3;i++){
      File dir = new File(outDir.toString());
      dir.mkdir();
      files[i] = new File(dir,"testfile"+i);
      FileWriter fileWriter = new FileWriter(files[i]);
      fileWriter.flush();
      fileWriter.close();
      fileLength[i] = i;
      paths[i] = new Path(outDir+"/testfile"+i);
    }

    CombineFileSplit combineFileSplit = new CombineFileSplit(paths, fileLength);
    TaskAttemptID taskAttemptID = Mockito.mock(TaskAttemptID.class);
    TaskReporter reporter = Mockito.mock(TaskReporter.class);
    TaskAttemptContextImpl taskAttemptContext =
      new TaskAttemptContextImpl(conf, taskAttemptID,reporter);

    CombineFileRecordReader cfrr = new CombineFileRecordReader(combineFileSplit,
      taskAttemptContext, TextRecordReaderWrapper.class);

    cfrr.initialize(combineFileSplit,taskAttemptContext);

    verify(reporter).progress();
    Assert.assertFalse(cfrr.nextKeyValue());
    verify(reporter, times(3)).progress();
  } finally {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }
}
Project: aliyun-oss-hadoop-fs    File: TaskContext.java
public TaskContext(JobConf conf, Class<?> iKClass, Class<?> iVClass,
    Class<?> oKClass, Class<?> oVClass, TaskReporter reporter,
    TaskAttemptID id) {
  this.conf = conf;
  this.iKClass = iKClass;
  this.iVClass = iVClass;
  this.oKClass = oKClass;
  this.oVClass = oVClass;
  this.reporter = reporter;
  this.taskAttemptID = id;
}
Project: aliyun-oss-hadoop-fs    File: NativeRuntime.java
/**
 * Get the status report from native space
 */
public static void reportStatus(TaskReporter reporter) throws IOException {
  assertNativeLibraryLoaded();
  synchronized (reporter) {
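    // The native side serializes the status report as: progress (float), a
    // status string (Text), a counter count (int), then (group, name, amount)
    // triples that are replayed onto the reporter below.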
    final byte[] statusBytes = JNIUpdateStatus();
    final DataInputBuffer ib = new DataInputBuffer();
    ib.reset(statusBytes, statusBytes.length);
    final FloatWritable progress = new FloatWritable();
    progress.readFields(ib);
    reporter.setProgress(progress.get());
    final Text status = new Text();
    status.readFields(ib);
    if (status.getLength() > 0) {
      reporter.setStatus(status.toString());
    }
    final IntWritable numCounters = new IntWritable();
    numCounters.readFields(ib);
    if (numCounters.get() == 0) {
      return;
    }
    final Text group = new Text();
    final Text name = new Text();
    final LongWritable amount = new LongWritable();
    for (int i = 0; i < numCounters.get(); i++) {
      group.readFields(ib);
      name.readFields(ib);
      amount.readFields(ib);
      reporter.incrCounter(group.toString(), name.toString(), amount.get());
    }
  }
}
Project: aliyun-oss-hadoop-fs    File: TestCombineFileRecordReader.java
@SuppressWarnings("unchecked")
@Test
public void testProgressIsReportedIfInputASeriesOfEmptyFiles() throws IOException, InterruptedException {
  JobConf conf = new JobConf();
  Path[] paths = new Path[3];
  File[] files = new File[3];
  long[] fileLength = new long[3];

  try {
    for(int i=0;i<3;i++){
      File dir = new File(outDir.toString());
      dir.mkdir();
      files[i] = new File(dir,"testfile"+i);
      FileWriter fileWriter = new FileWriter(files[i]);
      fileWriter.flush();
      fileWriter.close();
      fileLength[i] = i;
      paths[i] = new Path(outDir+"/testfile"+i);
    }

    CombineFileSplit combineFileSplit = new CombineFileSplit(paths, fileLength);
    TaskAttemptID taskAttemptID = Mockito.mock(TaskAttemptID.class);
    TaskReporter reporter = Mockito.mock(TaskReporter.class);
    TaskAttemptContextImpl taskAttemptContext =
      new TaskAttemptContextImpl(conf, taskAttemptID,reporter);

    CombineFileRecordReader cfrr = new CombineFileRecordReader(combineFileSplit,
      taskAttemptContext, TextRecordReaderWrapper.class);

    cfrr.initialize(combineFileSplit,taskAttemptContext);

    verify(reporter).progress();
    Assert.assertFalse(cfrr.nextKeyValue());
    verify(reporter, times(3)).progress();
  } finally {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }
}
Project: big-c    File: TestCombineFileRecordReader.java
@SuppressWarnings("unchecked")
@Test
public void testProgressIsReportedIfInputASeriesOfEmptyFiles() throws IOException, InterruptedException {
  JobConf conf = new JobConf();
  Path[] paths = new Path[3];
  File[] files = new File[3];
  long[] fileLength = new long[3];

  try {
    for(int i=0;i<3;i++){
      File dir = new File(outDir.toString());
      dir.mkdir();
      files[i] = new File(dir,"testfile"+i);
      FileWriter fileWriter = new FileWriter(files[i]);
      fileWriter.flush();
      fileWriter.close();
      fileLength[i] = i;
      paths[i] = new Path(outDir+"/testfile"+i);
    }

    CombineFileSplit combineFileSplit = new CombineFileSplit(paths, fileLength);
    TaskAttemptID taskAttemptID = Mockito.mock(TaskAttemptID.class);
    TaskReporter reporter = Mockito.mock(TaskReporter.class);
    TaskAttemptContextImpl taskAttemptContext =
      new TaskAttemptContextImpl(conf, taskAttemptID,reporter);

    CombineFileRecordReader cfrr = new CombineFileRecordReader(combineFileSplit,
      taskAttemptContext, TextRecordReaderWrapper.class);

    cfrr.initialize(combineFileSplit,taskAttemptContext);

    verify(reporter).progress();
    Assert.assertFalse(cfrr.nextKeyValue());
    verify(reporter, times(3)).progress();
  } finally {
    FileUtil.fullyDelete(new File(outDir.toString()));
  }
}
Project: hadoop-2.6.0-cdh5.4.3    File: TaskContext.java
public TaskContext(JobConf conf, Class<?> iKClass, Class<?> iVClass,
    Class<?> oKClass, Class<?> oVClass, TaskReporter reporter,
    TaskAttemptID id) {
  this.conf = conf;
  this.iKClass = iKClass;
  this.iVClass = iVClass;
  this.oKClass = oKClass;
  this.oVClass = oVClass;
  this.reporter = reporter;
  this.taskAttemptID = id;
}