Example source code for the Java class org.codehaus.jackson.JsonEncoding

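All of the examples below share the same basic pattern: obtain a JsonFactory (either directly or from an ObjectMapper), then call createJsonGenerator(outputStream, JsonEncoding.UTF8) so the generator writes UTF-8 encoded JSON to the target stream. As a minimal, self-contained sketch of that pattern (the class name, field names, and values are illustrative only, not taken from any of the projects listed below):

import java.io.ByteArrayOutputStream;

import org.codehaus.jackson.JsonEncoding;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerator;

public class JsonEncodingSketch {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Ask the factory for a generator that encodes its output as UTF-8.
        JsonGenerator generator =
            new JsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        generator.writeStartObject();
        generator.writeStringField("message", "hello");  // hypothetical field
        generator.writeNumberField("count", 1);          // hypothetical field
        generator.writeEndObject();
        generator.close();  // flushes buffered output and closes the target stream
        System.out.println(out.toString("UTF-8"));       // prints {"message":"hello","count":1}
    }
}

The same factory call accepts any OutputStream, which is why the snippets below hand it a ByteArrayOutputStream, an HDFS output stream, a compressed stream, or a servlet response stream interchangeably.
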
Project: hadoop-oss    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: lams    File: MappingJacksonHttpMessageConverter.java
@Override
protected void writeInternal(Object object, HttpOutputMessage outputMessage)
        throws IOException, HttpMessageNotWritableException {

    JsonEncoding encoding = getJsonEncoding(outputMessage.getHeaders().getContentType());
    JsonGenerator jsonGenerator =
            this.objectMapper.getJsonFactory().createJsonGenerator(outputMessage.getBody(), encoding);

    // A workaround for JsonGenerators not applying serialization features
    // https://github.com/FasterXML/jackson-databind/issues/12
    if (this.objectMapper.getSerializationConfig().isEnabled(SerializationConfig.Feature.INDENT_OUTPUT)) {
        jsonGenerator.useDefaultPrettyPrinter();
    }

    try {
        if (this.jsonPrefix != null) {
            jsonGenerator.writeRaw(this.jsonPrefix);
        }
        this.objectMapper.writeValue(jsonGenerator, object);
    }
    catch (JsonProcessingException ex) {
        throw new HttpMessageNotWritableException("Could not write JSON: " + ex.getMessage(), ex);
    }
}
Project: hadoop    File: JsonObjectMapperWriter.java
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  mapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);

  // define a module
  SimpleModule module = new SimpleModule("Default Serializer",  
                                         new Version(0, 1, 1, "FINAL"));
  // add various serializers to the module
  //   add default (all-pass) serializer for all rumen specific data types
  module.addSerializer(DataType.class, new DefaultRumenSerializer());
  //   add a serializer to use object.toString() while serializing
  module.addSerializer(ID.class, new ObjectStringSerializer<ID>());

  // register the module with the object-mapper
  mapper.registerModule(module);

  mapper.getJsonFactory();
  writer = mapper.getJsonFactory().createJsonGenerator(
      output, JsonEncoding.UTF8);
  if (prettyPrint) {
    writer.useDefaultPrettyPrinter();
  }
}
Project: hadoop    File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
Project: hadoop    File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();

  return outGen;
}
Project: hadoop    File: TestHistograms.java
public static void main(String[] args) throws IOException {
  final Configuration conf = new Configuration();
  final FileSystem lfs = FileSystem.getLocal(conf);

  for (String arg : args) {
    Path filePath = new Path(arg).makeQualified(lfs);
    String fileName = filePath.getName();
    if (fileName.startsWith("input")) {
      LoggedDiscreteCDF newResult = histogramFileToCDF(filePath, lfs);
      String testName = fileName.substring("input".length());
      Path goldFilePath = new Path(filePath.getParent(), "gold"+testName);

      ObjectMapper mapper = new ObjectMapper();
      JsonFactory factory = mapper.getJsonFactory();
      FSDataOutputStream ostream = lfs.create(goldFilePath, true);
      JsonGenerator gen = factory.createJsonGenerator(ostream,
          JsonEncoding.UTF8);
      gen.useDefaultPrettyPrinter();

      gen.writeObject(newResult);

      gen.close();
    } else {
      System.err.println("Input file not started with \"input\". File "+fileName+" skipped.");
    }
  }
}
Project: hadoop    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: aliyun-oss-hadoop-fs    File: JsonObjectMapperWriter.java
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  mapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);

  // define a module
  SimpleModule module = new SimpleModule("Default Serializer",  
                                         new Version(0, 1, 1, "FINAL"));
  // add various serializers to the module
  //   add default (all-pass) serializer for all rumen specific data types
  module.addSerializer(DataType.class, new DefaultRumenSerializer());
  //   add a serializer to use object.toString() while serializing
  module.addSerializer(ID.class, new ObjectStringSerializer<ID>());

  // register the module with the object-mapper
  mapper.registerModule(module);

  mapper.getJsonFactory();
  writer = mapper.getJsonFactory().createJsonGenerator(
      output, JsonEncoding.UTF8);
  if (prettyPrint) {
    writer.useDefaultPrettyPrinter();
  }
}
Project: aliyun-oss-hadoop-fs    File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
Project: aliyun-oss-hadoop-fs    File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();

  return outGen;
}
Project: aliyun-oss-hadoop-fs    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: nenlvse1.0    File: CustomMappingJacksonJsonpView.java
protected JsonEncoding getJsonEncoding(MediaType contentType) {
    if (contentType != null && contentType.getCharSet() != null) {
        Charset charset = contentType.getCharSet();
        for (JsonEncoding encoding : JsonEncoding.values()) {
            if (charset.name().equals(encoding.getJavaName())) {
                return encoding;
            }
        }
    }

    return JsonEncoding.UTF8;
}
Project: big-c    File: JsonObjectMapperWriter.java
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  mapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);

  // define a module
  SimpleModule module = new SimpleModule("Default Serializer",  
                                         new Version(0, 1, 1, "FINAL"));
  // add various serializers to the module
  //   add default (all-pass) serializer for all rumen specific data types
  module.addSerializer(DataType.class, new DefaultRumenSerializer());
  //   add a serializer to use object.toString() while serializing
  module.addSerializer(ID.class, new ObjectStringSerializer<ID>());

  // register the module with the object-mapper
  mapper.registerModule(module);

  mapper.getJsonFactory();
  writer = mapper.getJsonFactory().createJsonGenerator(
      output, JsonEncoding.UTF8);
  if (prettyPrint) {
    writer.useDefaultPrettyPrinter();
  }
}
Project: big-c    File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
Project: big-c    File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();

  return outGen;
}
Project: big-c    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: hadoop-2.6.0-cdh5.4.3    File: JsonObjectMapperWriter.java
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  mapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);

  // define a module
  SimpleModule module = new SimpleModule("Default Serializer",  
                                         new Version(0, 1, 1, "FINAL"));
  // add various serializers to the module
  //   add default (all-pass) serializer for all rumen specific data types
  module.addSerializer(DataType.class, new DefaultRumenSerializer());
  //   add a serializer to use object.toString() while serializing
  module.addSerializer(ID.class, new ObjectStringSerializer<ID>());

  // register the module with the object-mapper
  mapper.registerModule(module);

  mapper.getJsonFactory();
  writer = mapper.getJsonFactory().createJsonGenerator(
      output, JsonEncoding.UTF8);
  if (prettyPrint) {
    writer.useDefaultPrettyPrinter();
  }
}
Project: hadoop-2.6.0-cdh5.4.3    File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
Project: hadoop-2.6.0-cdh5.4.3    File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();

  return outGen;
}
Project: hadoop-2.6.0-cdh5.4.3    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: hadoop-EAR    File: CoronaSerializer.java
/**
 * This is a helper method which creates a JsonGenerator instance, for writing
 * the state of the ClusterManager to the state file. The JsonGenerator
 * instance writes to a compressed file if we have the compression flag
 * turned on.
 *
 * @param conf The CoronaConf instance to be used
 * @return The JsonGenerator instance to be used
 * @throws IOException
 */
public static JsonGenerator createJsonGenerator(CoronaConf conf)
  throws IOException {
  OutputStream outputStream = new FileOutputStream(conf.getCMStateFile());
  if (conf.getCMCompressStateFlag()) {
    outputStream = new GZIPOutputStream(outputStream);
  }
  ObjectMapper mapper = new ObjectMapper();
  JsonGenerator jsonGenerator =
    new JsonFactory().createJsonGenerator(outputStream, JsonEncoding.UTF8);
  jsonGenerator.setCodec(mapper);
  if (!conf.getCMCompressStateFlag()) {
    jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter());
  }
  return jsonGenerator;
}
Project: hadoop-EAR    File: JsonUtils.java
/**
 * This is a helper method which creates a JsonGenerator instance, for writing
 * the state of the ClusterManager to the state file. The JsonGenerator
 * instance writes to a compressed file if we have the compression flag
 * turned on.
 *
 * @param conf The CoronaConf instance to be used
 * @return The JsonGenerator instance to be used
 * @throws IOException
 */
public static JsonGenerator createJsonGenerator(CoronaConf conf)
  throws IOException {
  OutputStream outputStream = new FileOutputStream(conf.getCMStateFile());
  if (conf.getCMCompressStateFlag()) {
    outputStream = new GZIPOutputStream(outputStream);
  }
  ObjectMapper mapper = new ObjectMapper();
  JsonGenerator jsonGenerator =
    new JsonFactory().createJsonGenerator(outputStream, JsonEncoding.UTF8);
  jsonGenerator.setCodec(mapper);
  if (!conf.getCMCompressStateFlag()) {
    jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter());
  }
  return jsonGenerator;
}
Project: teiid-webui    File: DataVirtUploadServlet.java
/**
 * Writes the response values back to the http response.  This allows the calling code to
 * parse the response values for display to the user.
 *
 * @param responseMap the response params to write to the http response
 * @param response the http response
 * @throws IOException
 */
private static void writeToResponse(Map<String, String> responseMap, HttpServletResponse response) throws IOException {
       // Note: setting the content-type to text/html because otherwise IE prompt the user to download
       // the result rather than handing it off to the GWT form response handler.
       // See JIRA issue https://issues.jboss.org/browse/SRAMPUI-103
    response.setContentType("text/html; charset=UTF8"); //$NON-NLS-1$
       JsonFactory f = new JsonFactory();
       JsonGenerator g = f.createJsonGenerator(response.getOutputStream(), JsonEncoding.UTF8);
       g.useDefaultPrettyPrinter();
       g.writeStartObject();
       for (java.util.Map.Entry<String, String> entry : responseMap.entrySet()) {
           String key = entry.getKey();
           String val = entry.getValue();
           g.writeStringField(key, val);
       }
       g.writeEndObject();
       g.flush();
       g.close();
}
Project: hadoop-plus    File: JsonObjectMapperWriter.java
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  mapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);

  // define a module
  SimpleModule module = new SimpleModule("Default Serializer",  
                                         new Version(0, 1, 1, "FINAL"));
  // add various serializers to the module
  //   add default (all-pass) serializer for all rumen specific data types
  module.addSerializer(DataType.class, new DefaultRumenSerializer());
  //   add a serializer to use object.toString() while serializing
  module.addSerializer(ID.class, new ObjectStringSerializer<ID>());

  // register the module with the object-mapper
  mapper.registerModule(module);

  mapper.getJsonFactory();
  writer = mapper.getJsonFactory().createJsonGenerator(
      output, JsonEncoding.UTF8);
  if (prettyPrint) {
    writer.useDefaultPrettyPrinter();
  }
}
Project: hadoop-plus    File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
Project: hadoop-plus    File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();

  return outGen;
}
Project: hadoop-plus    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  fileReader =
    DataFileReader.openReader(new File(status.getPath().toUri()), reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: ShoppingMall    File: MappingJacksonHttpMessageConverter.java
@Override
protected void writeInternal(Object object, HttpOutputMessage outputMessage)
        throws IOException, HttpMessageNotWritableException {

    JsonEncoding encoding = getJsonEncoding(outputMessage.getHeaders().getContentType());
    JsonGenerator jsonGenerator =
            this.objectMapper.getJsonFactory().createJsonGenerator(outputMessage.getBody(), encoding);
    try {
        if (this.prefixJson) {
            jsonGenerator.writeRaw("{} && ");
        }
        this.objectMapper.writeValue(jsonGenerator, object);
    }
    catch (IOException ex) {
        throw new HttpMessageNotWritableException("Could not write JSON: " + ex.getMessage(), ex);
    }
}
Project: BaijiSerializer4J    File: SpecificJsonWriter.java
/**
 * The only public write interface
 * @param schema the object schema
 * @param obj the object
 * @param os the final output stream
 */
public void write(Schema schema, T obj, OutputStream os) {
    if (schema instanceof RecordSchema) {
        if (os != null) {
            try {
                RecordSchema recordSchema = (RecordSchema) schema;
                JsonGenerator g = FACTORY.createJsonGenerator(os, JsonEncoding.UTF8);
                writeRecord(recordSchema, obj, g);
                g.flush();
            } catch (IOException e) {
                throw new BaijiRuntimeException("Serialize process failed.", e);
            }
        } else {
            throw new BaijiRuntimeException("Output stream can't be null");
        }
    } else {
        throw new BaijiRuntimeException("schema must be RecordSchema");
    }
}
Project: appverse-server    File: CustomMappingJacksonHttpMessageConverter.java
@Override
protected void writeInternal(Object o, HttpOutputMessage outputMessage)
        throws IOException, HttpMessageNotWritableException {
    JsonEncoding encoding = getEncoding(outputMessage.getHeaders()
            .getContentType());
    JsonGenerator jsonGenerator = this.objectMapper.getJsonFactory()
            .createJsonGenerator(outputMessage.getBody(), encoding);
    if (this.prefixJson) {
        jsonGenerator.writeRaw("{} && ");
    }
    this.objectMapper.writeValue(jsonGenerator, o);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    JsonGenerator baosjsonGenerator = this.objectMapper.getJsonFactory()
            .createJsonGenerator(baos, encoding);
    this.objectMapper.writeValue(baosjsonGenerator, o);
    logger.debug("Middleware returns " + o.getClass().getName() + " "
            + baos.toString());
}
Project: hops    File: JsonObjectMapperWriter.java
public JsonObjectMapperWriter(OutputStream output, boolean prettyPrint) throws IOException {
  ObjectMapper mapper = new ObjectMapper();
  mapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);

  // define a module
  SimpleModule module = new SimpleModule("Default Serializer",  
                                         new Version(0, 1, 1, "FINAL"));
  // add various serializers to the module
  //   add default (all-pass) serializer for all rumen specific data types
  module.addSerializer(DataType.class, new DefaultRumenSerializer());
  //   add a serializer to use object.toString() while serializing
  module.addSerializer(ID.class, new ObjectStringSerializer<ID>());

  // register the module with the object-mapper
  mapper.registerModule(module);

  mapper.getJsonFactory();
  writer = mapper.getJsonFactory().createJsonGenerator(
      output, JsonEncoding.UTF8);
  if (prettyPrint) {
    writer.useDefaultPrettyPrinter();
  }
}
Project: hops    File: StatePool.java
private void write(DataOutput out) throws IOException {
  // This is just a JSON experiment
  System.out.println("Dumping the StatePool's in JSON format.");
  ObjectMapper outMapper = new ObjectMapper();
  outMapper.configure(
      SerializationConfig.Feature.CAN_OVERRIDE_ACCESS_MODIFIERS, true);
  // define a module
  SimpleModule module = new SimpleModule("State Serializer",  
      new Version(0, 1, 1, "FINAL"));
  // add the state serializer
  //module.addSerializer(State.class, new StateSerializer());

  // register the module with the object-mapper
  outMapper.registerModule(module);

  JsonFactory outFactory = outMapper.getJsonFactory();
  JsonGenerator jGen = 
    outFactory.createJsonGenerator((DataOutputStream)out, JsonEncoding.UTF8);
  jGen.useDefaultPrettyPrinter();

  jGen.writeObject(this);
  jGen.close();
}
Project: hops    File: Anonymizer.java
private JsonGenerator createJsonGenerator(Configuration conf, Path path) 
throws IOException {
  FileSystem outFS = path.getFileSystem(conf);
  CompressionCodec codec =
    new CompressionCodecFactory(conf).getCodec(path);
  OutputStream output;
  Compressor compressor = null;
  if (codec != null) {
    compressor = CodecPool.getCompressor(codec);
    output = codec.createOutputStream(outFS.create(path), compressor);
  } else {
    output = outFS.create(path);
  }

  JsonGenerator outGen = outFactory.createJsonGenerator(output, 
                                                        JsonEncoding.UTF8);
  outGen.useDefaultPrettyPrinter();

  return outGen;
}
Project: hops    File: Display.java
public AvroFileInputStream(FileStatus status) throws IOException {
  pos = 0;
  buffer = new byte[0];
  GenericDatumReader<Object> reader = new GenericDatumReader<Object>();
  FileContext fc = FileContext.getFileContext(new Configuration());
  fileReader =
    DataFileReader.openReader(new AvroFSInput(fc, status.getPath()),reader);
  Schema schema = fileReader.getSchema();
  writer = new GenericDatumWriter<Object>(schema);
  output = new ByteArrayOutputStream();
  JsonGenerator generator =
    new JsonFactory().createJsonGenerator(output, JsonEncoding.UTF8);
  MinimalPrettyPrinter prettyPrinter = new MinimalPrettyPrinter();
  prettyPrinter.setRootValueSeparator(System.getProperty("line.separator"));
  generator.setPrettyPrinter(prettyPrinter);
  encoder = EncoderFactory.get().jsonEncoder(schema, generator);
}
Project: FoxBPM    File: TaskCommandDefinitionsGenerator.java
public void generate(ZipOutputStream out) {
    try {
        log.debug("开始处理taskCommandDefinition.data文件。。。");
        ProcessEngineConfigurationImpl processEngineConfigurationImpl = FoxBpmUtil.getProcessEngine().getProcessEngineConfiguration();
        List<TaskCommandDefinition> list = processEngineConfigurationImpl.getTaskCommandDefinitions();
        ObjectMapper objectMapper = new ObjectMapper();
        JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        String tmpEntryName = "cache/taskCommandDefinition.data";
        ZipEntry zipEntry = new ZipEntry(tmpEntryName);
        zipEntry.setMethod(ZipEntry.DEFLATED); // set the compression method for this entry
        out.putNextEntry(zipEntry);
        jsonGenerator.writeObject(list);
        out.closeEntry();
        log.debug("处理taskCommandDefinition.data文件完毕");
    } catch (Exception ex) {
        log.error("解析taskCommandDefinition.data文件失败!生成zip文件失败!");
        throw new FoxBPMException("解析taskCommandDefinition.data文件失败", ex);
    }
}
Project: FoxBPM    File: GroupDefinitionsCacheGenerator.java
public void generate(ZipOutputStream out) {
    log.debug("开始处理GroupDefinitions.data...");
    try{
        List<GroupDefinition> groupDefinitions = FoxBpmUtil.getProcessEngine().getIdentityService().getAllGroupDefinitions();
        Map<String,Object> resultMap = new HashMap<String, Object>();
        resultMap.put("data", groupDefinitions);
        ObjectMapper objectMapper = new ObjectMapper();
        JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        String tmpEntryName = "cache/allGroupDefinitions.data";
        ZipEntry zipEntry = new ZipEntry(tmpEntryName);
        zipEntry.setMethod(ZipEntry.DEFLATED); // set the compression method for this entry
        out.putNextEntry(zipEntry);
        jsonGenerator.writeObject(resultMap);
        out.closeEntry();
        log.debug("处理GroupDefinitions.data文件完毕");
    }catch(Exception ex){
        log.error("解析GroupDefinitions.data文件失败!生成zip文件失败!");
        throw new FoxBPMException("解析GroupDefinitions.data文件失败",ex);
    }
}
Project: FoxBPM    File: DataObjectCacheGenerator.java
public void generate(ZipOutputStream out) {
    log.debug("开始处理bizData.data...");
    try{
        List<Map<String,Object>> list = FoxBpmUtil.getProcessEngine().getModelService().getAllBizObjects();
        ObjectMapper objectMapper = new ObjectMapper();
        JsonGenerator jsonGenerator = objectMapper.getJsonFactory().createJsonGenerator(out, JsonEncoding.UTF8);
        String tmpEntryName = "cache/bizData.data";
        ZipEntry zipEntry = new ZipEntry(tmpEntryName);
        zipEntry.setMethod(ZipEntry.DEFLATED); // set the compression method for this entry
        out.putNextEntry(zipEntry);
        jsonGenerator.writeObject(list);
        out.closeEntry();
        log.debug("处理bizData.data文件完毕");
    }catch(Exception ex){
        log.error("解析bizData.data文件失败!生成zip文件失败!");
        throw new FoxBPMException("解析bizData.data文件失败",ex);
    }
}
Project: JsonResponse    File: JsonResponseAwareJsonMessageConverter.java
protected void writeJson(ResponseWrapper response, HttpOutputMessage outputMessage)
        throws IOException, HttpMessageNotWritableException {

    JsonEncoding encoding = getJsonEncoding(outputMessage.getHeaders().getContentType());

    ObjectMapper mapper = new ObjectMapper();

    //Add support for jackson mixins
    JsonMixin[] jsonMixins = response.getJsonResponse().mixins();
    for(int i=0;i<jsonMixins.length;i++) {
        mapper.getSerializationConfig()
            .addMixInAnnotations(jsonMixins[i].target(), jsonMixins[i].mixin());
    }

    JsonGenerator jsonGenerator =
            mapper.getJsonFactory().createJsonGenerator(outputMessage.getBody(), encoding);
    try {
        mapper.writeValue(jsonGenerator, response.getOriginalResponse());
    }
    catch (IOException ex) {
        throw new HttpMessageNotWritableException("Could not write JSON: " + ex.getMessage(), ex);
    }
}
Project: baiji4j    File: SpecificJsonWriter.java
/**
 * The only public write interface
 * @param schema the object schema
 * @param obj the object
 * @param os the final output stream
 */
public void write(Schema schema, T obj, OutputStream os) {
    if (schema instanceof RecordSchema) {
        if (os != null) {
            try {
                RecordSchema recordSchema = (RecordSchema) schema;
                JsonGenerator g = FACTORY.createJsonGenerator(os, JsonEncoding.UTF8);
                writeRecord(recordSchema, obj, g);
                g.flush();
            } catch (IOException e) {
                throw new BaijiRuntimeException("Serialize process failed.", e);
            }
        } else {
            throw new BaijiRuntimeException("Output stream can't be null");
        }
    } else {
        throw new BaijiRuntimeException("schema must be RecordSchema");
    }
}