Java 类 org.codehaus.jackson.JsonFactory 实例源码

项目:hadoop    文件:RumenToSLSConverter.java   
/**
 * Converts a Rumen trace read from {@code inputFile} into SLS job
 * definitions and writes them, one pretty-printed JSON object per record,
 * to {@code outputFile}.
 *
 * @param inputFile  path of the Rumen trace to read (UTF-8)
 * @param outputFile path of the SLS load file to create (UTF-8)
 * @throws IOException on any read or write failure
 */
private static void generateSLSLoadFile(String inputFile, String outputFile)
        throws IOException {
  // Use explicit UTF-8 instead of FileReader/FileWriter, which silently
  // pick up the platform default charset; try-with-resources guarantees
  // both streams are closed even when conversion fails part-way through.
  try (Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(inputFile), "UTF-8")) {
    try (Writer output = new java.io.OutputStreamWriter(
        new java.io.FileOutputStream(outputFile), "UTF-8")) {
      ObjectMapper mapper = new ObjectMapper();
      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
      Iterator<Map> i = mapper.readValues(
              new JsonFactory().createJsonParser(input), Map.class);
      while (i.hasNext()) {
        Map m = i.next();
        output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
      }
    }
  }
}
项目:hadoop-oss    文件:Display.java   
/**
 * Opens the Avro data file behind {@code status} and prepares a JSON
 * encoder that renders one datum per line into {@code output}.
 *
 * @param status file status of the Avro file to read
 * @throws IOException if the Avro container file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  // Open through FileContext so any configured filesystem can back it.
  FileContext fileContext = FileContext.getFileContext(new Configuration());
  GenericDatumReader<Object> datumReader = new GenericDatumReader<Object>();
  fileReader = DataFileReader.openReader(
      new AvroFSInput(fileContext, status.getPath()), datumReader);
  Schema avroSchema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(avroSchema);
  output = new ByteArrayOutputStream();
  JsonGenerator jsonGenerator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate successive root-level values with the platform line
  // separator so each record lands on its own line.
  MinimalPrettyPrinter linePrinter = new MinimalPrettyPrinter();
  linePrinter.setRootValueSeparator(System.getProperty("line.separator"));
  jsonGenerator.setPrettyPrinter(linePrinter);
  encoder = EncoderFactory.get().jsonEncoder(avroSchema, jsonGenerator);
}
项目:hadoop    文件:StatePool.java   
/**
 * Serializes this StatePool as pretty-printed JSON to the given stream.
 *
 * @param out destination stream; NOTE(review): the cast below assumes
 *        callers always pass a DataOutputStream — confirm with callers.
 * @throws IOException if serialization fails
 */
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  // Allow Jackson to serialize fields regardless of their visibility.
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // Define a module reserved for custom state serializers.
  SimpleModule module = new SimpleModule("State Serializer",
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen =
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  try {
    jGen.useDefaultPrettyPrinter();
    jGen.writeObject(this);
  } finally {
    // Close even when serialization throws, so buffered output is
    // flushed and internal buffers are released.
    jGen.close();
  }
}
项目:hadoop    文件:TestHistograms.java   
/**
 * Regenerates the "gold" expected-output files for the histogram tests:
 * every argument whose file name starts with "input" is converted to a CDF
 * and written next to it as "gold&lt;suffix&gt;".
 *
 * @param args paths of candidate input files
 * @throws IOException on any read/write failure
 */
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);

  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (fileName.startsWith("input")) {
      LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
      String testName = fileName.substring("input".length());
      Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);

      ObjectMapper mapper = new ObjectMapper();
      JsonFactory factory = mapper.getJsonFactory();
      FSDataOutputStream ostream = lfs.create(goldFilePath, true);
      JsonGenerator gen = factory.createJsonGenerator(ostream,
          JsonEncoding.UTF8);
      try {
        gen.useDefaultPrettyPrinter();
        gen.writeObject(newResult);
      } finally {
        // Closing the generator also closes the underlying stream, and
        // now happens even when serialization throws.
        gen.close();
      }
    } else {
      System.err.println("Input file not started with \"input\". File "+fileName+" skipped.");
    }
  }
}
项目:hadoop    文件:SLSUtils.java   
/**
 * Parses the SLS trace file and returns every host name that appears as a
 * task's "container.host" attribute.
 *
 * @param jobTrace path of the SLS job trace file
 * @return the distinct container host names found in the trace
 * @throws IOException if the trace cannot be read or parsed
 */
public static Set<String> parseNodesFromSLSTrace(String jobTrace)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  // Read as explicit UTF-8; FileReader would silently use the platform
  // default charset.
  Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(jobTrace), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      List tasks = (List) jsonE.get("job.tasks");
      for (Object o : tasks) {
        Map jsonTask = (Map) o;
        String hostname = jsonTask.get("container.host").toString();
        nodeSet.add(hostname);
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:hadoop    文件:SLSUtils.java   
/**
 * Parses the input node file and returns each host name, prefixed with its
 * rack, in the form "/rack/node".
 *
 * @param nodeFile path of the node description file
 * @return the distinct "/rack/node" strings found in the file
 * @throws IOException if the file cannot be read or parsed
 */
public static Set<String> parseNodesFromNodeFile(String nodeFile)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  // Read as explicit UTF-8; FileReader would silently use the platform
  // default charset.
  Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(nodeFile), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List tasks = (List) jsonE.get("nodes");
      for (Object o : tasks) {
        Map jsonNode = (Map) o;
        nodeSet.add(rack + "/" + jsonNode.get("node"));
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:hadoop    文件:Display.java   
/**
 * Opens the Avro data file referenced by {@code status} and sets up a JSON
 * encoder that writes one datum per line into {@code output}.
 *
 * @param status file status of the Avro file to display
 * @throws IOException if the Avro container file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate successive root-level values with the platform line
  // separator so each record lands on its own line.
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
项目:personium-core    文件:BarFileUtils.java   
/**
 * Reads a JSON file from a bar-file entry and maps it onto the given class.
 *
 * @param <T> type the JSON content is mapped to
 * @param inStream InputStream positioned at the bar-file entry
 * @param entryName name of the entry (used to derive the file name shown
 *        in error messages)
 * @param clazz target class for the mapping
 * @return the object read from the JSON file
 * @throws IOException if reading the JSON file fails
 */
public static <T> T readJsonEntry(
        InputStream inStream, String entryName, Class<T> clazz) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory f = new JsonFactory();
    JsonParser jp = f.createJsonParser(inStream);
    JsonToken token = jp.nextToken(); // JSON root element ("{")
    // Strip any directory prefix so error messages show just the file name.
    Pattern formatPattern = Pattern.compile(".*/+(.*)");
    Matcher formatMatcher = formatPattern.matcher(entryName);
    String jsonName = formatMatcher.replaceAll("$1");
    T json = null;
    if (token == JsonToken.START_OBJECT) {
        try {
            json = mapper.readValue(jp, clazz);
        } catch (UnrecognizedPropertyException ex) {
            throw PersoniumCoreException.BarInstall.JSON_FILE_FORMAT_ERROR.params(jsonName);
        }
    } else {
        throw PersoniumCoreException.BarInstall.JSON_FILE_FORMAT_ERROR.params(jsonName);
    }
    return json;
}
项目:personium-core    文件:EventResource.java   
/**
 * Parses the request body and returns the resulting Event object.
 *
 * @param reader HTTP input stream reader
 * @return the parsed Event object
 */
protected JSONEvent getRequestBody(final Reader reader) {
    JSONEvent event = null;
    ObjectMapper mapper = new ObjectMapper();
    JsonFactory f = new JsonFactory();
    try {
        JsonParser jp = f.createJsonParser(reader);
        JsonToken token = jp.nextToken(); // JSON root element ("{")
        if (token == JsonToken.START_OBJECT) {
            event = mapper.readValue(jp, JSONEvent.class);
        } else {
            throw PersoniumCoreException.Event.JSON_PARSE_ERROR;
        }
    } catch (IOException e) {
        // NOTE(review): the original IOException is discarded here; the
        // PersoniumCoreException constant API does not carry a cause.
        throw PersoniumCoreException.Event.JSON_PARSE_ERROR;
    }
    return event;
}
项目:personium-core    文件:JSONManifestTest.java   
/**
 * Verifies that true is returned when the schema value in manifest.json is
 * in URL form.
 * @throws IOException IOException
 */
@SuppressWarnings("unchecked")
@Test
public void manifest_jsonのschema値がURL形式である場合trueが返却されること() throws IOException {
    JsonFactory f = new JsonFactory();
    // Minimal manifest whose "schema" value is a well-formed URL.
    JSONObject json = new JSONObject();
    json.put("bar_version", "1");
    json.put("box_version", "1");
    json.put("DefaultPath", "boxName");
    json.put("schema", "http://app1.example.com/");
    JsonParser jp = f.createJsonParser(json.toJSONString());
    ObjectMapper mapper = new ObjectMapper();
    jp.nextToken(); // advance to the JSON root element

    JSONManifest manifest = mapper.readValue(jp, JSONManifest.class);

    assertTrue(manifest.checkSchema());
}
项目:personium-core    文件:JSONManifestTest.java   
/**
 * Verifies that false is returned when the schema value in manifest.json
 * is not in URL form.
 * @throws IOException IOException
 */
@SuppressWarnings("unchecked")
@Test
public void manifest_jsonのschema値がURL形式でない場合falseが返却されること() throws IOException {
    JsonFactory f = new JsonFactory();
    // Manifest whose "schema" value is a plain string, not a URL.
    JSONObject json = new JSONObject();
    json.put("bar_version", "1");
    json.put("box_version", "1");
    json.put("DefaultPath", "boxName");
    json.put("schema", "test");
    JsonParser jp = f.createJsonParser(json.toJSONString());
    ObjectMapper mapper = new ObjectMapper();
    jp.nextToken(); // advance to the JSON root element

    JSONManifest manifest = mapper.readValue(jp, JSONManifest.class);

    assertFalse(manifest.checkSchema());
}
项目:personium-core    文件:JSONManifestTest.java   
/**
 * Verifies that false is returned when the schema value in manifest.json
 * is null.
 * @throws IOException IOException
 */
@SuppressWarnings("unchecked")
@Test
public void manifest_jsonのschema値がnull場合falseが返却されること() throws IOException {
    JsonFactory f = new JsonFactory();
    // Manifest whose "schema" key is present but explicitly null.
    JSONObject json = new JSONObject();
    json.put("bar_version", "1");
    json.put("box_version", "1");
    json.put("DefaultPath", "boxName");
    json.put("schema", null);
    JsonParser jp = f.createJsonParser(json.toJSONString());
    ObjectMapper mapper = new ObjectMapper();
    jp.nextToken(); // advance to the JSON root element

    JSONManifest manifest = mapper.readValue(jp, JSONManifest.class);

    assertFalse(manifest.checkSchema());
}
项目:personium-core    文件:JSONManifestTest.java   
/**
 * Verifies that false is returned when the schema key is absent from
 * manifest.json.
 * @throws IOException IOException
 */
@SuppressWarnings("unchecked")
@Test
public void manifest_jsonのschemaの指定がない場合falseが返却されること() throws IOException {
    JsonFactory f = new JsonFactory();
    // Manifest that omits the "schema" key entirely.
    JSONObject json = new JSONObject();
    json.put("bar_version", "1");
    json.put("box_version", "1");
    json.put("DefaultPath", "boxName");
    JsonParser jp = f.createJsonParser(json.toJSONString());
    ObjectMapper mapper = new ObjectMapper();
    jp.nextToken(); // advance to the JSON root element

    JSONManifest manifest = mapper.readValue(jp, JSONManifest.class);

    assertFalse(manifest.checkSchema());
}
项目:personium-core    文件:BarFileValidateTest.java   
/**
 * Verifies that an exception is thrown when bar_version is not specified.
 */
@Test
@SuppressWarnings({"unchecked" })
public void bar_versionを指定しない場合に例外がスローされる() {
    JsonFactory f = new JsonFactory();
    // Manifest JSON deliberately missing the "bar_version" key.
    JSONObject json = new JSONObject();
    json.put("box_version", "1");
    json.put("DefaultPath", "boxName");
    json.put("schema", "http://app1.example.com");

    try {
        JsonParser jp = f.createJsonParser(json.toJSONString());
        ObjectMapper mapper = new ObjectMapper();
        jp.nextToken(); // advance to the JSON root element

        TestBarRunner testBarRunner = new TestBarRunner();
        testBarRunner.manifestJsonValidate(jp, mapper);
    } catch (PersoniumCoreException dce) {
        // Expect a 400 with the manifest-validation error code.
        assertEquals(400, dce.getStatus());
        assertEquals("PR400-BI-0006", dce.getCode());
        return;
    } catch (Exception ex) {
        fail("Unexpected exception");
    }
    fail("PersoniumCoreExceptionが返却されない");
}
项目:personium-core    文件:BarFileValidateTest.java   
/**
 * Verifies that an exception is thrown when box_version is not specified.
 */
@Test
@SuppressWarnings({"unchecked" })
public void box_versionを指定しない場合に例外がスローされる() {
    JsonFactory f = new JsonFactory();
    // Manifest JSON deliberately missing the "box_version" key.
    JSONObject json = new JSONObject();
    json.put("bar_version", "1");
    json.put("DefaultPath", "boxName");
    json.put("schema", "http://app1.example.com");

    try {
        JsonParser jp = f.createJsonParser(json.toJSONString());
        ObjectMapper mapper = new ObjectMapper();
        jp.nextToken(); // advance to the JSON root element

        TestBarRunner testBarRunner = new TestBarRunner();
        testBarRunner.manifestJsonValidate(jp, mapper);
    } catch (PersoniumCoreException dce) {
        // Expect a 400 with the manifest-validation error code.
        assertEquals(400, dce.getStatus());
        assertEquals("PR400-BI-0006", dce.getCode());
        return;
    } catch (Exception ex) {
        fail("Unexpected exception");
    }
    fail("PersoniumCoreExceptionが返却されない");
}
项目:personium-core    文件:BarFileValidateTest.java   
/**
 * Verifies that an exception is thrown when DefaultPath is not specified.
 */
@Test
@SuppressWarnings({"unchecked" })
public void DefaultPathを指定しない場合に例外がスローされる() {
    JsonFactory f = new JsonFactory();
    // Manifest JSON deliberately missing the "DefaultPath" key.
    JSONObject json = new JSONObject();
    json.put("bar_version", "1");
    json.put("box_version", "1");
    json.put("schema", "http://app1.example.com");

    try {
        JsonParser jp = f.createJsonParser(json.toJSONString());
        ObjectMapper mapper = new ObjectMapper();
        jp.nextToken(); // advance to the JSON root element

        TestBarRunner testBarRunner = new TestBarRunner();
        testBarRunner.manifestJsonValidate(jp, mapper);
    } catch (PersoniumCoreException dce) {
        // Expect a 400 with the manifest-validation error code.
        assertEquals(400, dce.getStatus());
        assertEquals("PR400-BI-0006", dce.getCode());
        return;
    } catch (Exception ex) {
        fail("Unexpected exception");
    }
    fail("PersoniumCoreExceptionが返却されない");
}
项目:defense-solutions-proofs-of-concept    文件:QueryReportProcessor.java   
/**
 * Converts a GeoEvent geometry into an Esri core geometry projected from
 * {@code srIn} into the buffer spatial reference {@code srBuffer}.
 *
 * @param geo the incoming GeoEvent geometry
 * @return the equivalent core geometry in the buffer spatial reference
 * @throws Exception if the JSON round-trip or projection fails
 */
private com.esri.core.geometry.Geometry constructGeometry(com.esri.ges.spatial.Geometry geo) throws Exception
{
    try {
        // Round-trip through the Esri JSON representation to bridge the
        // two geometry class hierarchies.
        String jsonIn = geo.toJson();
        JsonFactory jf = new JsonFactory();
        JsonParser jp = jf.createJsonParser(jsonIn);
        MapGeometry mgeo = GeometryEngine.jsonToGeometry(jp);
        com.esri.core.geometry.Geometry geoIn = mgeo.getGeometry();
        return GeometryEngine.project(geoIn, srIn, srBuffer);
    }
    catch (Exception e)
    {
        // Pass the throwable to the logger; the original
        // LOG.error(e.getStackTrace()) only logged the array's
        // toString(), which carries no stack information.
        LOG.error(e.getMessage(), e);
        throw e;
    }
}
项目:aliyun-oss-hadoop-fs    文件:StatePool.java   
/**
 * Serializes this StatePool as pretty-printed JSON to the given stream.
 * NOTE(review): the cast below assumes {@code out} is always a
 * DataOutputStream — confirm with callers.
 *
 * @param out destination stream
 * @throws IOException if serialization fails
 */
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  // allow Jackson to serialize fields regardless of their visibility
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
项目:aliyun-oss-hadoop-fs    文件:SLSUtils.java   
/**
 * Scans an SLS trace and collects every host name referenced by a task's
 * "container.host" attribute.
 *
 * @param jobTrace path of the SLS job trace file (UTF-8)
 * @return the distinct container host names found in the trace
 * @throws IOException if reading or parsing the trace fails
 */
public static Set<String> parseNodesFromSLSTrace(String jobTrace)
        throws IOException {
  Set<String> hosts = new HashSet<String>();
  ObjectMapper objectMapper = new ObjectMapper();
  JsonFactory jsonFactory = new JsonFactory();
  Reader traceReader =
      new InputStreamReader(new FileInputStream(jobTrace), "UTF-8");
  try {
    // Each top-level JSON object in the trace describes one job.
    Iterator<Map> jobs = objectMapper.readValues(
        jsonFactory.createJsonParser(traceReader), Map.class);
    while (jobs.hasNext()) {
      Map job = jobs.next();
      List taskList = (List) job.get("job.tasks");
      for (Object entry : taskList) {
        Map task = (Map) entry;
        hosts.add(task.get("container.host").toString());
      }
    }
  } finally {
    traceReader.close();
  }
  return hosts;
}
项目:aliyun-oss-hadoop-fs    文件:SLSUtils.java   
/**
 * Parses the input node file and returns each host name, prefixed with
 * its rack, in the form "/rack/node".
 *
 * @param nodeFile path of the node description file (UTF-8)
 * @return the distinct "/rack/node" strings found in the file
 * @throws IOException if the file cannot be read or parsed
 */
public static Set<String> parseNodesFromNodeFile(String nodeFile)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  Reader input =
      new InputStreamReader(new FileInputStream(nodeFile), "UTF-8");
  try {
    // Each top-level JSON object describes one rack with its nodes.
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List tasks = (List) jsonE.get("nodes");
      for (Object o : tasks) {
        Map jsonNode = (Map) o;
        nodeSet.add(rack + "/" + jsonNode.get("node"));
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:aliyun-oss-hadoop-fs    文件:RumenToSLSConverter.java   
/**
 * Reads a Rumen trace and writes the equivalent SLS jobs, one
 * pretty-printed JSON object per record, to the output file.
 *
 * @param inputFile  Rumen trace to read (UTF-8)
 * @param outputFile SLS load file to write (UTF-8)
 * @throws IOException on read or write failure
 */
private static void generateSLSLoadFile(String inputFile, String outputFile)
        throws IOException {
  try (Reader input =
      new InputStreamReader(new FileInputStream(inputFile), "UTF-8")) {
    try (Writer output =
        new OutputStreamWriter(new FileOutputStream(outputFile), "UTF-8")) {
      ObjectMapper jsonMapper = new ObjectMapper();
      ObjectWriter prettyWriter = jsonMapper.writerWithDefaultPrettyPrinter();
      Iterator<Map> records = jsonMapper.readValues(
          new JsonFactory().createJsonParser(input), Map.class);
      while (records.hasNext()) {
        Map record = records.next();
        output.write(prettyWriter.writeValueAsString(createSLSJob(record))
            + EOL);
      }
    }
  }
}
项目:aliyun-oss-hadoop-fs    文件:Display.java   
/**
 * Opens the Avro data file referenced by {@code status} and sets up a JSON
 * encoder that writes one datum per line into {@code output}.
 *
 * @param status file status of the Avro file to display
 * @throws IOException if the Avro container file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate successive root-level values with the platform line
  // separator so each record lands on its own line.
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
项目:big-c    文件:StatePool.java   
/**
 * Serializes this StatePool as pretty-printed JSON to the given stream.
 * NOTE(review): the cast below assumes {@code out} is always a
 * DataOutputStream — confirm with callers.
 *
 * @param out destination stream
 * @throws IOException if serialization fails
 */
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  // allow Jackson to serialize fields regardless of their visibility
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
项目:big-c    文件:SLSUtils.java   
/**
 * Parses the SLS trace file and returns every host name that appears as a
 * task's "container.host" attribute.
 *
 * @param jobTrace path of the SLS job trace file
 * @return the distinct container host names found in the trace
 * @throws IOException if the trace cannot be read or parsed
 */
public static Set<String> parseNodesFromSLSTrace(String jobTrace)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  // Read as explicit UTF-8; FileReader would silently use the platform
  // default charset.
  Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(jobTrace), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      List tasks = (List) jsonE.get("job.tasks");
      for (Object o : tasks) {
        Map jsonTask = (Map) o;
        String hostname = jsonTask.get("container.host").toString();
        nodeSet.add(hostname);
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:big-c    文件:SLSUtils.java   
/**
 * Parses the input node file and returns each host name, prefixed with its
 * rack, in the form "/rack/node".
 *
 * @param nodeFile path of the node description file
 * @return the distinct "/rack/node" strings found in the file
 * @throws IOException if the file cannot be read or parsed
 */
public static Set<String> parseNodesFromNodeFile(String nodeFile)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  // Read as explicit UTF-8; FileReader would silently use the platform
  // default charset.
  Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(nodeFile), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List tasks = (List) jsonE.get("nodes");
      for (Object o : tasks) {
        Map jsonNode = (Map) o;
        nodeSet.add(rack + "/" + jsonNode.get("node"));
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:big-c    文件:RumenToSLSConverter.java   
/**
 * Converts a Rumen trace read from {@code inputFile} into SLS job
 * definitions and writes them, one pretty-printed JSON object per record,
 * to {@code outputFile}.
 *
 * @param inputFile  path of the Rumen trace to read (UTF-8)
 * @param outputFile path of the SLS load file to create (UTF-8)
 * @throws IOException on any read or write failure
 */
private static void generateSLSLoadFile(String inputFile, String outputFile)
        throws IOException {
  // Use explicit UTF-8 instead of FileReader/FileWriter, which silently
  // pick up the platform default charset; try-with-resources guarantees
  // both streams are closed even when conversion fails part-way through.
  try (Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(inputFile), "UTF-8")) {
    try (Writer output = new java.io.OutputStreamWriter(
        new java.io.FileOutputStream(outputFile), "UTF-8")) {
      ObjectMapper mapper = new ObjectMapper();
      ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
      Iterator<Map> i = mapper.readValues(
              new JsonFactory().createJsonParser(input), Map.class);
      while (i.hasNext()) {
        Map m = i.next();
        output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
      }
    }
  }
}
项目:big-c    文件:Display.java   
/**
 * Opens the Avro data file referenced by {@code status} and sets up a JSON
 * encoder that writes one datum per line into {@code output}.
 *
 * @param status file status of the Avro file to display
 * @throws IOException if the Avro container file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate successive root-level values with the platform line
  // separator so each record lands on its own line.
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
项目:hadoop-2.6.0-cdh5.4.3    文件:StatePool.java   
/**
 * Serializes this StatePool as pretty-printed JSON to the given stream.
 * NOTE(review): the cast below assumes {@code out} is always a
 * DataOutputStream — confirm with callers.
 *
 * @param out destination stream
 * @throws IOException if serialization fails
 */
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  // allow Jackson to serialize fields regardless of their visibility
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
项目:hadoop-2.6.0-cdh5.4.3    文件:SLSUtils.java   
/**
 * Parses the SLS trace file and returns every host name that appears as a
 * task's "container.host" attribute.
 *
 * @param jobTrace path of the SLS job trace file
 * @return the distinct container host names found in the trace
 * @throws IOException if the trace cannot be read or parsed
 */
public static Set<String> parseNodesFromSLSTrace(String jobTrace)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  // Read as explicit UTF-8; FileReader would silently use the platform
  // default charset.
  Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(jobTrace), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      List tasks = (List) jsonE.get("job.tasks");
      for (Object o : tasks) {
        Map jsonTask = (Map) o;
        String hostname = jsonTask.get("container.host").toString();
        nodeSet.add(hostname);
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:SLSUtils.java   
/**
 * Parses the input node file and returns each host name, prefixed with its
 * rack, in the form "/rack/node".
 *
 * @param nodeFile path of the node description file
 * @return the distinct "/rack/node" strings found in the file
 * @throws IOException if the file cannot be read or parsed
 */
public static Set<String> parseNodesFromNodeFile(String nodeFile)
        throws IOException {
  Set<String> nodeSet = new HashSet<String>();
  JsonFactory jsonF = new JsonFactory();
  ObjectMapper mapper = new ObjectMapper();
  // Read as explicit UTF-8; FileReader would silently use the platform
  // default charset.
  Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(nodeFile), "UTF-8");
  try {
    Iterator<Map> i = mapper.readValues(
            jsonF.createJsonParser(input), Map.class);
    while (i.hasNext()) {
      Map jsonE = i.next();
      String rack = "/" + jsonE.get("rack");
      List tasks = (List) jsonE.get("nodes");
      for (Object o : tasks) {
        Map jsonNode = (Map) o;
        nodeSet.add(rack + "/" + jsonNode.get("node"));
      }
    }
  } finally {
    input.close();
  }
  return nodeSet;
}
项目:hadoop-2.6.0-cdh5.4.3    文件:RumenToSLSConverter.java   
/**
 * Converts a Rumen trace read from {@code inputFile} into SLS job
 * definitions and writes them, one pretty-printed JSON object per record,
 * to {@code outputFile}.
 *
 * @param inputFile  path of the Rumen trace to read (UTF-8)
 * @param outputFile path of the SLS load file to create (UTF-8)
 * @throws IOException on any read or write failure
 */
private static void generateSLSLoadFile(String inputFile, String outputFile)
        throws IOException {
  // Use explicit UTF-8 instead of FileReader/FileWriter, which silently
  // pick up the platform default charset; try-with-resources guarantees
  // both streams are closed even when conversion fails part-way through.
  try (Reader input = new java.io.InputStreamReader(
      new java.io.FileInputStream(inputFile), "UTF-8")) {
    try (Writer output = new java.io.OutputStreamWriter(
        new java.io.FileOutputStream(outputFile), "UTF-8")) {
      ObjectMapper mapper = new ObjectMapper();
      // defaultPrettyPrintingWriter() is the pre-1.9 Jackson spelling of
      // writerWithDefaultPrettyPrinter(); kept for this Jackson version.
      ObjectWriter writer = mapper.defaultPrettyPrintingWriter();
      Iterator<Map> i = mapper.readValues(
              new JsonFactory().createJsonParser(input), Map.class);
      while (i.hasNext()) {
        Map m = i.next();
        output.write(writer.writeValueAsString(createSLSJob(m)) + EOL);
      }
    }
  }
}
项目:hadoop-2.6.0-cdh5.4.3    文件:Display.java   
/**
 * Opens the Avro data file referenced by {@code status} and sets up a JSON
 * encoder that writes one datum per line into {@code output}.
 *
 * @param status file status of the Avro file to display
 * @throws IOException if the Avro container file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate successive root-level values with the platform line
  // separator so each record lands on its own line.
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
项目:hadoop-EAR    文件:CoronaSerializer.java   
/**
 * This is a helper method which creates a JsonGenerator instance, for writing
 * the state of the ClusterManager to the state file. The JsonGenerator
 * instance writes to a compressed file if we have the compression flag
 * turned on.
 *
 * @param conf The CoronaConf instance to be used
 * @return The JsonGenerator instance to be used
 * @throws IOException
 */
public static JsonGenerator createJsonGenerator(CoronaConf conf)
  throws IOException {
  OutputStream outputStream = new FileOutputStream(conf.getCMStateFile());
  try {
    if (conf.getCMCompressStateFlag()) {
      outputStream = new GZIPOutputStream(outputStream);
    }
    ObjectMapper mapper = new ObjectMapper();
    JsonGenerator jsonGenerator =
      new JsonFactory().createJsonGenerator(outputStream, JsonEncoding.UTF8);
    jsonGenerator.setCodec(mapper);
    if (!conf.getCMCompressStateFlag()) {
      jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter());
    }
    return jsonGenerator;
  } catch (IOException e) {
    // Don't leak the open file handle when GZIP wrapping or generator
    // construction fails; rethrow the original failure.
    outputStream.close();
    throw e;
  }
}
项目:hadoop-EAR    文件:JsonUtils.java   
/**
 * This is a helper method which creates a JsonGenerator instance, for writing
 * the state of the ClusterManager to the state file. The JsonGenerator
 * instance writes to a compressed file if we have the compression flag
 * turned on.
 *
 * NOTE(review): if GZIP wrapping or generator construction throws, the
 * opened FileOutputStream is never closed — consider closing it on failure.
 *
 * @param conf The CoronaConf instance to be used
 * @return The JsonGenerator instance to be used
 * @throws IOException
 */
public static JsonGenerator createJsonGenerator(CoronaConf conf)
  throws IOException {
  OutputStream outputStream = new FileOutputStream(conf.getCMStateFile());
  if (conf.getCMCompressStateFlag()) {
    outputStream = new GZIPOutputStream(outputStream);
  }
  ObjectMapper mapper = new ObjectMapper();
  JsonGenerator jsonGenerator =
    new JsonFactory().createJsonGenerator(outputStream, JsonEncoding.UTF8);
  jsonGenerator.setCodec(mapper);
  // Pretty-print only when the output is uncompressed (human-readable).
  if (!conf.getCMCompressStateFlag()) {
    jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter());
  }
  return jsonGenerator;
}
项目:hadoop-EAR    文件:Configuration.java   
/**
 *  Writes out all the parameters and their properties (final and resource) to
 *  the given {@link Writer}
 *  The format of the output would be 
 *  { "properties" : [ {key1,value1,key1.isFinal,key1.resource}, {key2,value2,
 *  key2.isFinal,key2.resource}... ] } 
 *  It does not output the parameters of the configuration object which is 
 *  loaded from an input stream.
 * @param config the configuration to dump
 * @param out the Writer to write to
 * @throws IOException
 */
public static void dumpConfiguration(Configuration config,
    Writer out) throws IOException {
  JsonFactory dumpFactory = new JsonFactory();
  JsonGenerator dumpGenerator = dumpFactory.createJsonGenerator(out);
  dumpGenerator.writeStartObject();
  dumpGenerator.writeFieldName("properties");
  dumpGenerator.writeStartArray();
  dumpGenerator.flush();
  // Lock the configuration so the property set cannot change while we
  // iterate over it.
  synchronized (config) {
    for (Map.Entry<Object,Object> item: config.getProps().entrySet()) {
      dumpGenerator.writeStartObject();
      dumpGenerator.writeStringField("key", (String) item.getKey());
      // Resolve through get() so variable expansion is applied.
      dumpGenerator.writeStringField("value",
                                     config.get((String) item.getKey()));
      dumpGenerator.writeBooleanField("isFinal",
                                      config.finalParameters.contains(item.getKey()));
      dumpGenerator.writeStringField("resource",
                                     config.updatingResource.get(item.getKey()));
      dumpGenerator.writeEndObject();
    }
  }
  dumpGenerator.writeEndArray();
  dumpGenerator.writeEndObject();
  // Flush rather than close: the caller owns the Writer.
  dumpGenerator.flush();
}
项目:teiid-webui    文件:DataVirtUploadServlet.java   
/**
 * Writes the response values back to the http response.  This allows the
 * calling code to parse the response values for display to the user.
 *
 * @param responseMap the response params to write to the http response
 * @param response the http response
 * @throws IOException if the JSON cannot be written
 */
private static void writeToResponse(Map<String, String> responseMap, HttpServletResponse response) throws IOException {
    // Note: setting the content-type to text/html because otherwise IE
    // prompts the user to download the result rather than handing it off
    // to the GWT form response handler.
    // See JIRA issue https://issues.jboss.org/browse/SRAMPUI-103
    // NOTE(review): "UTF8" is not the IANA-registered charset label
    // ("UTF-8" is) — confirm clients tolerate this before changing it.
    response.setContentType("text/html; charset=UTF8"); //$NON-NLS-1$
    JsonFactory f = new JsonFactory();
    JsonGenerator g = f.createJsonGenerator(response.getOutputStream(), JsonEncoding.UTF8);
    try {
        g.useDefaultPrettyPrinter();
        g.writeStartObject();
        for (java.util.Map.Entry<String, String> entry : responseMap.entrySet()) {
            String key = entry.getKey();
            String val = entry.getValue();
            g.writeStringField(key, val);
        }
        g.writeEndObject();
        g.flush();
    } finally {
        // Close the generator even when writing fails part-way.
        g.close();
    }
}
项目:hadoop-plus    文件:StatePool.java   
/**
 * Serializes this StatePool as pretty-printed JSON to the given stream.
 * NOTE(review): the cast below assumes {@code out} is always a
 * DataOutputStream — confirm with callers.
 *
 * @param out destination stream
 * @throws IOException if serialization fails
 */
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  // allow Jackson to serialize fields regardless of their visibility
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
项目:hadoop-plus    文件:Display.java   
/**
 * Opens the local Avro data file behind {@code status} and sets up a JSON
 * encoder that writes one datum per line into {@code output}.
 * Unlike the FileContext-based variants, this opens the file directly via
 * {@link java.io.File} from the path's URI.
 *
 * @param status file status of the Avro file to display
 * @throws IOException if the Avro container file cannot be opened
 */
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  fileReader =
    DataFileReader.openReader(new File(status.getPath().toUri()), reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  // Separate successive root-level values with the platform line
  // separator so each record lands on its own line.
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
项目:Rosetta.NetAppStoragePlugin    文件:CDMIConnector.java   
/**
 * Extracts the value of {@code jsonKey} from a successful CDMI response's
 * JSON body; on a failed response the body is dumped to stderr instead.
 *
 * @param response the HTTP response whose entity holds the JSON body
 * @param jsonKey the field name to look up
 * @return the field's text value, or null if absent or on failure
 * @throws JsonParseException if the body is not valid JSON
 * @throws IllegalStateException if the entity content cannot be obtained
 * @throws IOException on read failure
 */
private String getSpecificJSONValue(HttpResponse response, String jsonKey) throws JsonParseException, IllegalStateException, IOException {
    InputStream content = response.getEntity().getContent();
    if (isSuccessfulResponse(response)) {
        JsonFactory f = new JsonFactory();
        JsonParser jp = f.createJsonParser(content);
        JsonToken token;
        // Stop on END_OBJECT *or* end of input (null); the original loop
        // spun forever on truncated input because nextToken() returns
        // null at end-of-input, which never equals END_OBJECT.
        while ((token = jp.nextToken()) != null
                && token != JsonToken.END_OBJECT) {
            if (jsonKey.equals(jp.getCurrentName())) {
                jp.nextToken();
                return jp.getText();
            }
        }
    } else {
        String string = IOUtils.toString(content);
        System.err.println(string);
    }
    return null;
}
项目:ODFExplorer    文件:StylesJSONWriter.java   
/**
 * Open a JSON output file
 * The file output may be derived from many input sources
 *  
 * @param file the file to write the JSON style tree into
 * @throws XMLStreamException
 * @throws IOException
 */
public void open(File file) throws XMLStreamException, IOException {

       // Mapped convention
    // NOTE(review): FileWriter uses the platform default charset, so the
    // emitted JSON's encoding varies by platform — confirm whether UTF-8
    // should be forced here.
    JsonFactory f = new JsonFactory();
    try {
        generator = f.createJsonGenerator(new FileWriter(file));
        ObjectMapper mapper = new ObjectMapper();
        mapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
        generator.setCodec(mapper);
        generator.useDefaultPrettyPrinter();

        // Root object of the styles document, with a child array that
        // subsequent writes append to.
        rootNode = mapper.createObjectNode();
        rootNode.put("name", "odfestyles");
        rootArray = rootNode.putArray(CHILDREN_TAG);

    } catch (JsonGenerationException e) {
        // NOTE(review): this swallows the failure after printing it,
        // leaving the writer half-initialized; consider propagating.
        // TODO Auto-generated catch block
        e.printStackTrace();
    } 
}