Java class org.apache.hadoop.mapred.IFile example source code

Project: hadoop    File: TestMerger.java   (the same method appears verbatim in aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, FlexMap, and hops)
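// Reads every key/value pair from an on-disk, IFile-format map output back into string lists.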
private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path,
    List<String> keys, List<String> values) throws IOException {
  FSDataInputStream in = CryptoUtils.wrapIfNecessary(conf, fs.open(path));

  IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in,
      fs.getFileStatus(path).getLen(), null, null);
  DataInputBuffer keyBuff = new DataInputBuffer();
  DataInputBuffer valueBuff = new DataInputBuffer();
  Text key = new Text();
  Text value = new Text();
  while (reader.nextRawKey(keyBuff)) {
    key.readFields(keyBuff);
    keys.add(key.toString());
    reader.nextRawValue(valueBuff);
    value.readFields(valueBuff);
    values.add(value.toString());
  }
}
Project: hadoop-plus    File: TestMerger.java   (the same method appears verbatim in hadoop-TCP, hardfs, and hadoop-on-lustre2)
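// Same read helper, but built on the older IFile.Reader(conf, fs, path, codec, counter) constructor, which opens the file itself.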
private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path,
    List<String> keys, List<String> values) throws IOException {
  IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, fs,
      path, null, null);
  DataInputBuffer keyBuff = new DataInputBuffer();
  DataInputBuffer valueBuff = new DataInputBuffer();
  Text key = new Text();
  Text value = new Text();
  while (reader.nextRawKey(keyBuff)) {
    key.readFields(keyBuff);
    keys.add(key.toString());
    reader.nextRawValue(valueBuff);
    value.readFields(valueBuff);
    values.add(value.toString());
  }
}
Project: hadoop    File: InMemoryWriter.java   (identical in aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, hadoop-on-lustre2, and mapreduce-fork)
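// Terminates the in-memory IFile stream: a record whose key and value lengths are both EOF_MARKER (-1) marks the end of the data.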
public void close() throws IOException {
  // Write EOF_MARKER for key/value length
  WritableUtils.writeVInt(out, IFile.EOF_MARKER);
  WritableUtils.writeVInt(out, IFile.EOF_MARKER);

  // Close the stream 
  out.close();
  out = null;
}
Project: hadoop    File: TestMerger.java   (identical in aliyun-oss-hadoop-fs, big-c, hadoop-2.6.0-cdh5.4.3, hadoop-plus, FlexMap, hops, hadoop-TCP, hardfs, and hadoop-on-lustre2)
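// Serializes a map of strings into IFile format in memory and returns the resulting bytes.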
private byte[] writeMapOutput(Configuration conf, Map<String, String> keysToValues)
    throws IOException {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  FSDataOutputStream fsdos = new FSDataOutputStream(baos, null);
  IFile.Writer<Text, Text> writer = new IFile.Writer<Text, Text>(conf, fsdos,
      Text.class, Text.class, null, null);
  for (String key : keysToValues.keySet()) {
    String value = keysToValues.get(key);
    writer.append(new Text(key), new Text(value));
  }
  writer.close();
  return baos.toByteArray();
}
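Taken together, writeMapOutput and readOnDiskMapOutput above show the whole IFile round trip: append key/value pairs with IFile.Writer, then stream them back with IFile.Reader. The following is a minimal, self-contained sketch of that round trip against the local file system. It relies only on the constructors already used in the snippets above; the class name IFileRoundTrip, the path /tmp/ifile-roundtrip.out, and the sample data are illustrative assumptions, not code from any of the listed projects.

import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.IFile;

public class IFileRoundTrip {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);           // the local FS is enough for a demo
    Path path = new Path("/tmp/ifile-roundtrip.out");    // illustrative path

    // Pairs to write; IFile expects keys in sorted order if the file is later merged.
    Map<String, String> data = new LinkedHashMap<>();
    data.put("apple", "1");
    data.put("banana", "2");
    data.put("cherry", "3");

    // Write the pairs in IFile format (no compression codec, no spill counter).
    FSDataOutputStream out = fs.create(path, true);
    IFile.Writer<Text, Text> writer = new IFile.Writer<Text, Text>(conf, out,
        Text.class, Text.class, null, null);
    for (Map.Entry<String, String> e : data.entrySet()) {
      writer.append(new Text(e.getKey()), new Text(e.getValue()));
    }
    writer.close();                                      // writes the EOF_MARKER records

    // Read the pairs back, exactly as readOnDiskMapOutput above does.
    FSDataInputStream in = fs.open(path);
    IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in,
        fs.getFileStatus(path).getLen(), null, null);
    DataInputBuffer keyBuff = new DataInputBuffer();
    DataInputBuffer valueBuff = new DataInputBuffer();
    Text key = new Text();
    Text value = new Text();
    while (reader.nextRawKey(keyBuff)) {
      key.readFields(keyBuff);
      reader.nextRawValue(valueBuff);
      value.readFields(valueBuff);
      System.out.println(key + " = " + value);
    }
    reader.close();
    fs.delete(path, false);
  }
}

In the newer snippets the input stream is additionally passed through CryptoUtils.wrapIfNecessary, which only wraps the stream when encrypted intermediate data is enabled in the job configuration; the sketch above omits that step.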