Java 类com.google.common.io.LimitInputStream 实例源码

项目:feedlack    文件:CompileTask.java   
/**
 * Gets the default externs set.
 *
 * Adapted from {@link CommandLineRunner}.
 */
private List<JSSourceFile> getDefaultExterns() {
  try {
    InputStream externsResource =
        Compiler.class.getResourceAsStream("/externs.zip");
    ZipInputStream zipStream = new ZipInputStream(externsResource);
    List<JSSourceFile> result = Lists.newLinkedList();

    ZipEntry entry = zipStream.getNextEntry();
    while (entry != null) {
      // Bound each entry's stream to its declared size so reading one
      // extern cannot run into the next zip entry.
      InputStream bounded = new LimitInputStream(zipStream, entry.getSize());
      result.add(JSSourceFile.fromInputStream(entry.getName(), bounded));
      entry = zipStream.getNextEntry();
    }

    return result;
  } catch (IOException e) {
    throw new BuildException(e);
  }
}
项目:hadoop-2.6.0-cdh5.4.3    文件:PBImageTextWriter.java   
/**
 * Streams every INODE section of the image through the configured codec
 * and writes the contained inodes to the output.
 *
 * @param conf     Hadoop configuration used to resolve the codec
 * @param summary  image file summary (supplies the codec name)
 * @param fin      open stream positioned over the raw image file
 * @param sections all sections of the image; non-INODE ones are skipped
 * @throws IOException on read or decompression failure
 */
private void output(Configuration conf, FileSummary summary,
    FileInputStream fin, ArrayList<FileSummary.Section> sections)
    throws IOException {
  long begin = Time.monotonicNow();
  for (FileSummary.Section section : sections) {
    // Only the INODE section is relevant here.
    if (SectionName.fromString(section.getName()) != SectionName.INODE) {
      continue;
    }
    fin.getChannel().position(section.getOffset());
    // Cap reads at the section length so decompression stops at the
    // section boundary.
    InputStream sectionStream = FSImageUtil.wrapInputStreamForCompression(
        conf, summary.getCodec(),
        new BufferedInputStream(
            new LimitInputStream(fin, section.getLength())));
    outputINodes(sectionStream);
  }
  LOG.debug("Time to output inodes: {}ms", Time.monotonicNow() - begin);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:PBImageTextWriter.java   
/** Load the directories in the INode section. */
private void loadDirectories(
    FileInputStream fin, List<FileSummary.Section> sections,
    FileSummary summary, Configuration conf)
    throws IOException {
  LOG.info("Loading directories");
  long begin = Time.monotonicNow();
  for (FileSummary.Section section : sections) {
    // Directories live in the INODE section; skip everything else.
    if (SectionName.fromString(section.getName()) != SectionName.INODE) {
      continue;
    }
    fin.getChannel().position(section.getOffset());
    // Limit the stream to the section length so the codec cannot read
    // past the section boundary.
    InputStream sectionStream = FSImageUtil.wrapInputStreamForCompression(
        conf, summary.getCodec(),
        new BufferedInputStream(
            new LimitInputStream(fin, section.getLength())));
    loadDirectoriesInINodeSection(sectionStream);
  }
  LOG.info("Finished loading directories in {}ms",
      Time.monotonicNow() - begin);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:PBImageTextWriter.java   
/**
 * Builds the namespace from the INODE_DIR section of the image.
 *
 * @param fin      open stream positioned over the raw image file
 * @param sections all sections of the image; non-INODE_DIR ones are skipped
 * @param summary  image file summary (supplies the codec name)
 * @param conf     Hadoop configuration used to resolve the codec
 * @throws IOException on read or decompression failure
 */
private void loadINodeDirSection(
    FileInputStream fin, List<FileSummary.Section> sections,
    FileSummary summary, Configuration conf)
    throws IOException {
  LOG.info("Loading INode directory section.");
  long begin = Time.monotonicNow();
  for (FileSummary.Section section : sections) {
    if (SectionName.fromString(section.getName())
        != SectionName.INODE_DIR) {
      continue;
    }
    fin.getChannel().position(section.getOffset());
    // Limit the stream to the section length so decompression stops at
    // the section boundary.
    InputStream sectionStream = FSImageUtil.wrapInputStreamForCompression(
        conf, summary.getCodec(),
        new BufferedInputStream(
            new LimitInputStream(fin, section.getLength())));
    buildNamespace(sectionStream);
  }
  LOG.info("Finished loading INode directory section in {}ms",
      Time.monotonicNow() - begin);
}
项目:hadoop-on-lustre2    文件:FileDistributionCalculator.java   
/**
 * Walks the INODE sections of an FSImage and feeds each decoded section
 * stream to {@code run}, emitting the distribution via {@code output}.
 *
 * @param file the image file to analyze
 * @throws IOException if the file is not a recognized FSImage or a read fails
 */
void visit(RandomAccessFile file) throws IOException {
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }

  FileSummary summary = FSImageUtil.loadSummary(file);
  FileInputStream in = null;
  try {
    in = new FileInputStream(file.getFD());
    for (FileSummary.Section section : summary.getSectionsList()) {
      // Only INODE sections contribute to the file distribution.
      if (SectionName.fromString(section.getName()) != SectionName.INODE) {
        continue;
      }

      in.getChannel().position(section.getOffset());
      // Bound the stream to the section length before decompressing.
      InputStream decoded = FSImageUtil.wrapInputStreamForCompression(
          conf, summary.getCodec(),
          new BufferedInputStream(
              new LimitInputStream(in, section.getLength())));
      run(decoded);
      output();
    }
  } finally {
    IOUtils.cleanup(null, in);
  }
}
项目:jclouds-examples    文件:HdfsPayloadSlicer.java   
/**
 * Builds a lazily-opened payload over the byte range
 * [offset, offset + length) of the given HDFS input stream.
 *
 * The seek is deferred until the payload is first read, so constructing
 * the slice itself performs no I/O.
 */
protected Payload doSlice(final FSDataInputStream inputStream,
      final long offset, final long length) {
   return new InputStreamSupplierPayload(new InputSupplier<InputStream>() {
      public InputStream getInput() throws IOException {
         if (offset > 0) {
            try {
               inputStream.seek(offset);
            } catch (IOException e) {
               // A failed seek leaves the stream unusable; close it before
               // propagating so the caller does not leak the handle.
               Closeables.closeQuietly(inputStream);
               throw e;
            }
         }
         // Cap reads at `length` bytes so the consumer cannot run past
         // the end of the slice.
         return new LimitInputStream(inputStream, length);
      }
   });
}
项目:feedlack    文件:CommandLineRunner.java   
/**
 * @return a mutable list
 * @throws IOException
 */
private List<JSSourceFile> getDefaultExterns() throws IOException {
  ZipInputStream zip = new ZipInputStream(
      CommandLineRunner.class.getResourceAsStream("/externs.zip"));
  List<JSSourceFile> externs = Lists.newLinkedList();
  ZipEntry entry;
  while ((entry = zip.getNextEntry()) != null) {
    // Bound each entry's stream to its declared size so one extern
    // cannot read into the next zip entry.
    externs.add(JSSourceFile.fromInputStream(
        entry.getName(), new LimitInputStream(zip, entry.getSize())));
  }
  return externs;
}
项目:htmlcompressor    文件:ClosureJavaScriptCompressor.java   
/**
 * Reads the bundled externs.zip and returns one JSSourceFile per entry.
 *
 * @return a mutable list of extern source files
 * @throws IOException if the archive cannot be read
 */
private List<JSSourceFile> getDefaultExterns() throws IOException {
    InputStream resource =
            ClosureJavaScriptCompressor.class.getResourceAsStream("/externs.zip");
    ZipInputStream zip = new ZipInputStream(resource);
    List<JSSourceFile> externs = Lists.newLinkedList();
    ZipEntry entry;
    while ((entry = zip.getNextEntry()) != null) {
        // Limit each entry to its declared size so adjacent entries
        // stay isolated.
        externs.add(JSSourceFile.fromInputStream(
                entry.getName(), new LimitInputStream(zip, entry.getSize())));
    }
    return externs;
}
项目:js-symbolic-executor    文件:CommandLineRunner.java   
/**
 * @return a mutable list
 * @throws IOException
 */
public static List<JSSourceFile> getDefaultExterns() throws IOException {
  ZipInputStream zip = new ZipInputStream(
      CommandLineRunner.class.getResourceAsStream("/externs.zip"));
  Map<String, JSSourceFile> externsMap = Maps.newHashMap();
  ZipEntry entry;
  while ((entry = zip.getNextEntry()) != null) {
    InputStream entryStream = new LimitInputStream(zip, entry.getSize());
    externsMap.put(entry.getName(),
        JSSourceFile.fromInputStream(
            // Give the files an odd prefix, so that they do not conflict
            // with the user's files.
            "externs.zip//" + entry.getName(),
            entryStream));
  }

  Preconditions.checkState(
      externsMap.keySet().equals(Sets.newHashSet(DEFAULT_EXTERNS_NAMES)),
      "Externs zip must match our hard-coded list of externs.");

  // Order matters, so the resources must be added to the result list
  // in the expected order.
  List<JSSourceFile> externs = Lists.newArrayList();
  for (String name : DEFAULT_EXTERNS_NAMES) {
    externs.add(externsMap.get(name));
  }

  return externs;
}
项目:astor    文件:CommandLineRunner.java   
/**
 * @return a mutable list
 * @throws IOException
 */
public static List<SourceFile> getDefaultExterns() throws IOException {
  InputStream input = CommandLineRunner.class.getResourceAsStream(
      "/externs.zip");
  if (input == null) {
    // In some environments, the externs.zip is relative to this class.
    input = CommandLineRunner.class.getResourceAsStream("externs.zip");
  }
  Preconditions.checkNotNull(input);

  ZipInputStream zip = new ZipInputStream(input);
  Map<String, SourceFile> externsMap = Maps.newHashMap();
  ZipEntry entry;
  while ((entry = zip.getNextEntry()) != null) {
    // Bound each entry to its declared size; buffering smooths the
    // many small reads done by the parser.
    BufferedInputStream entryStream = new BufferedInputStream(
        new LimitInputStream(zip, entry.getSize()));
    externsMap.put(entry.getName(),
        SourceFile.fromInputStream(
            // Give the files an odd prefix, so that they do not conflict
            // with the user's files.
            "externs.zip//" + entry.getName(),
            entryStream));
  }

  Preconditions.checkState(
      externsMap.keySet().equals(Sets.newHashSet(DEFAULT_EXTERNS_NAMES)),
      "Externs zip must match our hard-coded list of externs.");

  // Order matters, so the resources must be added to the result list
  // in the expected order.
  List<SourceFile> externs = Lists.newArrayList();
  for (String name : DEFAULT_EXTERNS_NAMES) {
    externs.add(externsMap.get(name));
  }

  return externs;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:PBImageTextWriter.java   
/**
 * Walks an FSImage file: loads the string table, then the directories and
 * the INode directory section, syncs the metadata map, and finally writes
 * the text output.
 *
 * @param file the image file to process
 * @throws IOException if the file is not a recognized FSImage or a read fails
 */
public void visit(RandomAccessFile file) throws IOException {
  Configuration conf = new Configuration();
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }

  FileSummary summary = FSImageUtil.loadSummary(file);

  try (FileInputStream fin = new FileInputStream(file.getFD())) {
    InputStream is;
    ArrayList<FileSummary.Section> sections =
        Lists.newArrayList(summary.getSectionsList());
    // Process sections in SectionName ordinal order; sections with an
    // unrecognized (null) name sort first.
    Collections.sort(sections,
        new Comparator<FileSummary.Section>() {
          @Override
          public int compare(FsImageProto.FileSummary.Section s1,
              FsImageProto.FileSummary.Section s2) {
            FSImageFormatProtobuf.SectionName n1 =
                FSImageFormatProtobuf.SectionName.fromString(s1.getName());
            FSImageFormatProtobuf.SectionName n2 =
                FSImageFormatProtobuf.SectionName.fromString(s2.getName());
            if (n1 == null) {
              return n2 == null ? 0 : -1;
            } else if (n2 == null) {
              // BUG FIX: the original returned -1 here too, making the
              // comparator asymmetric (compare(a,b) and compare(b,a)
              // could both be negative), which violates the Comparator
              // contract and can make the sort result undefined. A null
              // n2 must sort before a non-null n1.
              return 1;
            } else {
              return n1.ordinal() - n2.ordinal();
            }
          }
        });

    for (FileSummary.Section section : sections) {
      fin.getChannel().position(section.getOffset());
      // Bound the stream to the section length before decompressing.
      is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(new LimitInputStream(
              fin, section.getLength())));
      switch (SectionName.fromString(section.getName())) {
      case STRING_TABLE:
        stringTable = FSImageLoader.loadStringTable(is);
        break;
      default:
        break;
      }
    }

    loadDirectories(fin, sections, summary, conf);
    loadINodeDirSection(fin, sections, summary, conf);
    metadataMap.sync();
    output(conf, summary, fin, sections);
  }
}
项目:apex-malhar    文件:AbstractFileOutputOperatorTest.java   
/**
 * Validates a Snappy-compressed file written by the operator under test.
 * Each byte range [startOffset, offset) from {@code offsets} is decompressed
 * independently and every decoded line is compared against the expected
 * window value sequence.
 *
 * @param file         the compressed file to verify
 * @param offsets      cumulative end offsets of each compressed block;
 *                     a 0 entry means the block was not yet written
 * @param startVal     value expected in the first window's records
 * @param totalWindows expected number of complete windows in the file
 * @param totalRecords records per window (window boundary detector)
 * @throws IOException if closing the streams fails
 */
private void checkSnappyFile(File file, List<Long> offsets, int startVal, int totalWindows, int totalRecords) throws IOException
{
  FileInputStream fis;
  InputStream gss = null;
  Configuration conf = new Configuration();
  CompressionCodec codec = (CompressionCodec)ReflectionUtils.newInstance(SnappyCodec.class, conf);
  CompressionInputStream snappyIs = null;

  BufferedReader br = null;

  int numWindows = 0;
  try {
    fis = new FileInputStream(file);
    gss = fis;

    long startOffset = 0;
    for (long offset : offsets) {
      // Skip initial case in case file is not yet created
      if (offset == 0) {
        continue;
      }
      // Each compressed block spans [startOffset, offset); the limit
      // stream keeps the codec from reading into the next block.
      long limit = offset - startOffset;
      LimitInputStream lis = new LimitInputStream(gss, limit);

      snappyIs = codec.createInputStream(lis);
      br = new BufferedReader(new InputStreamReader(snappyIs));
      String eline = "" + (startVal + numWindows * 2);
      int count = 0;
      String line;
      while ((line = br.readLine()) != null) {
        Assert.assertEquals("File line", eline, line);
        ++count;
        if ((count % totalRecords) == 0) {
          ++numWindows;
          eline = "" + (startVal + numWindows * 2);
        }
      }
      startOffset = offset;
    }
  } catch (Exception e) {
    // BUG FIX: the original called e.printStackTrace() and fell through to
    // the final assertion, turning an I/O or codec failure into a
    // misleading count mismatch (or even a spurious pass). Fail the test
    // with the real cause instead.
    throw new AssertionError(e);
  } finally {
    // Closing the outermost reader closes the whole chain down to the
    // FileInputStream; fall back to the inner streams when the failure
    // happened before the reader was constructed.
    if (br != null) {
      br.close();
    } else {
      if (snappyIs != null) {
        snappyIs.close();
      } else if (gss != null) {
        gss.close();
      }
    }
  }
  Assert.assertEquals("Total", totalWindows, numWindows);
}
项目:hadoop-on-lustre2    文件:PBImageXmlWriter.java   
/**
 * Dumps the whole FSImage as XML: sorts the sections by SectionName
 * ordinal, then dispatches each section stream to its dump method.
 *
 * @param file the image file to dump
 * @throws IOException if the file is not a recognized FSImage or a read fails
 */
public void visit(RandomAccessFile file) throws IOException {
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }

  FileSummary summary = FSImageUtil.loadSummary(file);
  FileInputStream fin = null;
  try {
    fin = new FileInputStream(file.getFD());
    out.print("<?xml version=\"1.0\"?>\n<fsimage>");

    ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary
        .getSectionsList());
    // Sections must be processed in SectionName ordinal order; null
    // (unrecognized) names sort first.
    Collections.sort(sections, new Comparator<FileSummary.Section>() {
      @Override
      public int compare(FileSummary.Section s1, FileSummary.Section s2) {
        SectionName n1 = SectionName.fromString(s1.getName());
        SectionName n2 = SectionName.fromString(s2.getName());
        if (n1 == null) {
          return n2 == null ? 0 : -1;
        } else if (n2 == null) {
          // BUG FIX: the original returned -1 here too, so both
          // compare(a,b) and compare(b,a) could be negative — an
          // asymmetric comparator violates the Comparator contract and
          // can yield an undefined sort order. A null n2 sorts before a
          // non-null n1, so return a positive value.
          return 1;
        } else {
          return n1.ordinal() - n2.ordinal();
        }
      }
    });

    for (FileSummary.Section s : sections) {
      fin.getChannel().position(s.getOffset());
      // Bound the stream to the section length before decompressing.
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(new LimitInputStream(
              fin, s.getLength())));

      switch (SectionName.fromString(s.getName())) {
      case NS_INFO:
        dumpNameSection(is);
        break;
      case STRING_TABLE:
        loadStringTable(is);
        break;
      case INODE:
        dumpINodeSection(is);
        break;
      case INODE_REFERENCE:
        dumpINodeReferenceSection(is);
        break;
      case INODE_DIR:
        dumpINodeDirectorySection(is);
        break;
      case FILES_UNDERCONSTRUCTION:
        dumpFileUnderConstructionSection(is);
        break;
      case SNAPSHOT:
        dumpSnapshotSection(is);
        break;
      case SNAPSHOT_DIFF:
        dumpSnapshotDiffSection(is);
        break;
      case SECRET_MANAGER:
        dumpSecretManagerSection(is);
        break;
      case CACHE_MANAGER:
        dumpCacheManagerSection(is);
        break;
      default:
        break;
      }
    }
    out.print("</fsimage>\n");
  } finally {
    IOUtils.cleanup(null, fin);
  }
}
项目:hadoop-on-lustre2    文件:LsrPBImage.java   
/**
 * Loads the sections needed for an ls -R style listing (string table,
 * inodes, inode references, inode directories) in SectionName ordinal
 * order, then lists the tree from the root inode.
 *
 * @param file the image file to list
 * @throws IOException if the file is not a recognized FSImage or a read fails
 */
public void visit(RandomAccessFile file) throws IOException {
  if (!FSImageUtil.checkFileFormat(file)) {
    throw new IOException("Unrecognized FSImage");
  }

  FileSummary summary = FSImageUtil.loadSummary(file);
  FileInputStream fin = null;
  try {
    fin = new FileInputStream(file.getFD());

    ArrayList<FileSummary.Section> sections = Lists.newArrayList(summary
        .getSectionsList());
    // Sections must be processed in SectionName ordinal order; null
    // (unrecognized) names sort first.
    Collections.sort(sections, new Comparator<FileSummary.Section>() {
      @Override
      public int compare(FileSummary.Section s1, FileSummary.Section s2) {
        SectionName n1 = SectionName.fromString(s1.getName());
        SectionName n2 = SectionName.fromString(s2.getName());
        if (n1 == null) {
          return n2 == null ? 0 : -1;
        } else if (n2 == null) {
          // BUG FIX: the original returned -1 here too, making the
          // comparator asymmetric (both compare(a,b) and compare(b,a)
          // negative), which violates the Comparator contract and can
          // make the sort order undefined. A null n2 sorts before a
          // non-null n1, so return a positive value.
          return 1;
        } else {
          return n1.ordinal() - n2.ordinal();
        }
      }
    });

    for (FileSummary.Section s : sections) {
      fin.getChannel().position(s.getOffset());
      // Bound the stream to the section length before decompressing.
      InputStream is = FSImageUtil.wrapInputStreamForCompression(conf,
          summary.getCodec(), new BufferedInputStream(new LimitInputStream(
              fin, s.getLength())));

      switch (SectionName.fromString(s.getName())) {
      case STRING_TABLE:
        loadStringTable(is);
        break;
      case INODE:
        loadINodeSection(is);
        break;
      case INODE_REFERENCE:
        loadINodeReferenceSection(is);
        break;
      case INODE_DIR:
        loadINodeDirectorySection(is);
        break;
      default:
        break;
      }
    }
    list("", INodeId.ROOT_INODE_ID);
  } finally {
    IOUtils.cleanup(null, fin);
  }
}