private int traceBeforeNegotiate() { int beforeNumSubjectCreds = 0; // Traces all credentials too. if (subject != null) { log.debug("[" + getName() + "] AUTH_NEGOTIATE as subject " + subject.toString()); beforeNumSubjectCreds = subject.getPrivateCredentials().size(); } if (negotiationToken != null && negotiationToken.length > 0) { try { OutputStream os = new ByteArrayOutputStream(); HexDump.dump(negotiationToken, 0, os, 0); log.debug("[" + getName() + "] AUTH_NEGOTIATE Process token from acceptor==>\n" + os.toString()); } catch (IOException e) {} } return beforeNumSubjectCreds; }
private void traceAfterNegotiate( int beforeNumSubjectCreds ) { if (subject != null) { int afterNumSubjectCreds = subject.getPrivateCredentials().size(); if (afterNumSubjectCreds > beforeNumSubjectCreds) { log.debug("[" + getName() + "] AUTH_NEGOTIATE have extra credentials."); // Traces all credentials too. log.debug("[" + getName() + "] AUTH_NEGOTIATE updated subject=" + subject.toString()); } } if (negotiationToken != null && negotiationToken.length > 0) { try { OutputStream os = new ByteArrayOutputStream(); HexDump.dump(negotiationToken, 0, os, 0); log.debug("[" + getName() + "] AUTH_NEGOTIATE Send token to acceptor==>\n" + os.toString()); } catch (IOException e) {} } }
private static <T> T unmarshal(Class<T> clazz, byte[] in) throws IOException { if (LOGGER.isLoggable(Level.FINEST)) { System.out.println(clazz.toString() + ";"); HexDump.dump(in, 0, System.out, 0); } Codec<T> codec = Codecs.create(clazz); T gs = null; byte[] readback = null; try { gs = Codecs.decode(codec, in); } catch (DecodingException ex) { ex.printStackTrace(); } //try { // readback = Codecs.encode(gs, codec); // HexDump.dump(readback, 0, System.out, 0); //} catch (Exception ex) { // ex.printStackTrace(); //} return gs; }
@Test public void testGetLong() throws IOException { byte[] array = new byte[] { (byte)0x91, (byte)0xE9, 0x1D, (byte)0x98, 0x39, 0x01, 0x00, 0x00 }; HexDump.dump(array, 0, System.out, 0); int offset = 0; // System.out.println(((long) (array[offset + 7] & 0xff) << 56)); // System.out.println(((long) (array[offset + 6] & 0xff) << 48)); // System.out.println(((long) (array[offset + 5] & 0xff) << 40)); // System.out.println(((long) (array[offset + 4] & 0xff) << 32)); // System.out.println(((long) (array[offset + 3] & 0xff) << 24)); // System.out.println(((long) (array[offset + 2] & 0xff) << 16)); // System.out.println(((long) (array[offset + 1] & 0xff) << 8)); // System.out.println(((long) (array[offset + 0] & 0xff) << 0)); // System.out.println(((long) (array[offset + 5] & 0xff) << 40) | // ((long) (array[offset + 4] & 0xff) << 32)); // System.out.println(BlockHeader.getLong(array, offset)); assertEquals(1346876860817L, LittleEndian.getLong(array, offset)); }
public static void printJavaSerializeBinarySize(Object target) throws Exception{ try { byte[] bytes = MinimumMarshaller.marshal(target); System.out.println(target.getClass().getSimpleName() + " binaly size is " + bytes.length); ByteArrayOutputStream os = new ByteArrayOutputStream(); HexDump.dump(bytes, 0, os, 0); System.out.println(os.toString()); Object o = MinimumMarshaller.unmarshal(bytes); // Verify correct unmarshalling among before and after if( ! o.equals(target) ) { throw new RuntimeException("Different! " + target ); } } finally { } }
/**
 * Marshals {@code target}, prints its binary size and a hex dump, and
 * returns the unmarshalled copy so callers can compare round-trip results.
 *
 * @param target the object to serialize
 * @return the object reconstructed from its serialized bytes
 */
@SuppressWarnings("unchecked") // unmarshal returns Object; the caller supplied a T
public static <T> T testAndPrintHex(T target) throws Exception {
    byte[] bytes = MinimumMarshaller.marshal(target);
    System.out.println(target.getClass().getSimpleName() + " binary size is " + bytes.length);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    HexDump.dump(bytes, 0, os, 0);
    System.out.println(os.toString());
    System.out.println("");
    return (T) MinimumMarshaller.unmarshal(bytes);
}
/**
 * Logs the outgoing request headers and, when a body is present, a hex dump
 * of it, then delegates to the rest of the interceptor chain.
 */
@Override
public ClientHttpResponse intercept(HttpRequest request, byte[] body,
        ClientHttpRequestExecution execution) throws IOException {
    logger.info("Sending headers: " + request.getHeaders());
    if (body.length == 0) {
        logger.info("Sending empty body to [{}]!", request.getURI());
    } else {
        ByteArrayOutputStream hexBuffer = new ByteArrayOutputStream();
        HexDump.dump(body, 0, hexBuffer, 0);
        logger.info("Sending to [{}]: \n{}", request.getURI(),
                hexBuffer.toString(Charsets.UTF_8.name()).trim());
    }
    return execution.execute(request, body);
}
public static String dumpEvent(Event event, int maxBytes) { StringBuilder buffer = new StringBuilder(); if (event == null || event.getBody() == null) { buffer.append("null"); } else if (event.getBody().length == 0) { // do nothing... in this case, HexDump.dump() will throw an exception } else { byte[] body = event.getBody(); byte[] data = Arrays.copyOf(body, Math.min(body.length, maxBytes)); ByteArrayOutputStream out = new ByteArrayOutputStream(); try { HexDump.dump(data, 0, out, 0); String hexDump = new String(out.toByteArray()); // remove offset since it's not relevant for such a small dataset if (hexDump.startsWith(HEXDUMP_OFFSET)) { hexDump = hexDump.substring(HEXDUMP_OFFSET.length()); } buffer.append(hexDump); } catch (Exception e) { if (LOGGER.isInfoEnabled()) { LOGGER.info("Exception while dumping event", e); } buffer.append("...Exception while dumping: ").append(e.getMessage()); } String result = buffer.toString(); if (result.endsWith(EOL) && buffer.length() > EOL.length()) { buffer.delete(buffer.length() - EOL.length(), buffer.length()).toString(); } } return "{ headers:" + event.getHeaders() + " body:" + buffer + " }"; }
public static <T extends TLObject> void dump(T object, byte[] serialized) { try { String path = getFilePath(object.getClass()); FileUtils.writeStringToFile(new File(dumpDir + path + ".json"), toJson(object), Charset.forName("UTF-8")); FileUtils.writeStringToFile(new File(dumpDir + path + ".dump"), StreamUtils.toHexString(serialized), Charset.forName("UTF-8")); HexDump.dump(serialized, 0, new FileOutputStream(dumpDir + path + ".dump2"), 0); // More friendly dump } catch (IOException e) { e.printStackTrace(); } }
/**
 * Interprets the message to create a string representation
 *
 * @param message
 *            The message to interpret
 * @param displayHex
 *            Whether to display BytesMessages in hexdump style, ignored for simple text messages
 * @return String representation of the message
 */
private String interpretMessage(Message message, boolean displayHex) throws IOException {
    byte[] msgData = message.getData();
    if (!displayHex) {
        // NOTE(review): uses the platform default charset, as the original did —
        // confirm whether the payload encoding should be pinned (e.g. UTF-8).
        return new String(msgData);
    }
    // Only allocate the dump buffer when a hex dump was actually requested
    // (the original created it unconditionally).
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    HexDump.dump(msgData, 0, out, 0);
    return new String(out.toByteArray());
}
/**
 * Best-effort hex dump of the given bytes.
 *
 * @param bytes the data to dump
 * @return the formatted hex dump, or "" if dumping failed
 */
public static String hexDump(byte[] bytes) {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    try {
        HexDump.dump(bytes, 0, buf, 0);
        return buf.toString();
    } catch (Exception ignored) {
        // Deliberately best-effort: a failed dump (e.g. empty input, which
        // HexDump rejects) must never break the caller.
    }
    return "";
}
/**
 * Hex-dumps a MySQL wire packet to the "MySQL.Packet" logger at TRACE level.
 * The dump is skipped entirely (and nothing is allocated) unless tracing is
 * enabled; dump failures are silently ignored.
 */
public static final void dump(byte[] packet) {
    Logger packetLogger = Logger.getLogger("MySQL.Packet");
    if (!packetLogger.isTraceEnabled()) {
        return;
    }
    try {
        ByteArrayOutputStream hexBuffer = new ByteArrayOutputStream();
        HexDump.dump(packet, 0, hexBuffer, 0);
        packetLogger.trace("Dumping packet\n" + hexBuffer.toString());
    } catch (IOException e) {
        return;
    }
}
/**
 * Hex-dumps a packet to standard error. Dump failures are silently ignored.
 */
public static final void dump_stderr(byte[] packet) {
    try {
        ByteArrayOutputStream hexBuffer = new ByteArrayOutputStream();
        HexDump.dump(packet, 0, hexBuffer, 0);
        System.err.println("Dumping packet\n" + hexBuffer.toString());
    } catch (IOException e) {
        return;
    }
}
/**
 * Reads the archive header block from INDEX_URL and decodes its layout.
 *
 * @param client the HTTP client to fetch with
 * @return a pair of (blockSize, start offset of the data region, i.e.
 *         HEADER_BLOCK_SIZE + blockSize * indexBlockCount)
 * @throws IOException on network or protocol failure
 */
public static Pair<Long, Long> readStartPos(CloseableHttpClient client) throws IOException {
    log.info("Reading header from " + INDEX_URL);
    HttpGet httpGet = new HttpGet(INDEX_URL);
    try (CloseableHttpResponse response = client.execute(httpGet)) {
        HttpEntity entity = Utils.checkAndFetch(response, INDEX_URL);
        try (InputStream stream = entity.getContent()) {
            try {
                // try with the first few bytes initially
                byte[] header = new byte[HEADER_BLOCK_SIZE];
                IOUtils.read(stream, header);
                HexDump.dump(header, 0, System.out, 0);
                // Header layout: two little-endian unsigned 32-bit integers at
                // offsets 0 and 4.
                long blockSize = LittleEndian.getUInt(header, 0);
                long indexBlockCount = LittleEndian.getUInt(header, 4);
                log.info("Header: blockSize " + blockSize + ", indexBlockCount: " + indexBlockCount);
                return ImmutablePair.of(blockSize, HEADER_BLOCK_SIZE + (blockSize * indexBlockCount));
            } finally {
                // always abort reading here inside the finally block of the InputStream as
                // otherwise HttpClient tries to read the stream fully, which is at least 270GB...
                httpGet.abort();
            }
        }
    }
}
public static void main(String... args) throws Exception { try (final ZMQ.Context context = ZMQ.context(1)) { try (final ZMQ.Socket socket = context.socket(ZMQ.REP)) { socket.bind("tcp://127.0.0.1:5555"); while (!Thread.currentThread ().isInterrupted()) { byte[] request = socket.recv(0); HexDump.dump(request, 0, System.out, 0); String response = "World"; socket.send(response.getBytes(), 0); Thread.sleep(1000); // Do some 'work' } } } }
/**
 * Hex-dumps the readable bytes of {@code buf} to the trace log, tagged with
 * {@code type} and the remote peer address. No-op for empty buffers or when
 * tracing is disabled.
 *
 * @param type a label describing the direction/kind of data
 * @param buf  the Netty buffer to dump (read without consuming)
 * @param ctx  channel context, used for the remote address
 */
private void logBytes(String type, ByteBuf buf, ChannelHandlerContext ctx) throws IOException {
    // Guard on trace level like the sibling traceData() method, so the copy,
    // dump, and formatting work is skipped when tracing is off.
    if (logger.isTraceEnabled() && buf.readableBytes() > 0) {
        try (ByteArrayOutputStream stream = new ByteArrayOutputStream()) {
            byte[] bytes = new byte[buf.readableBytes()];
            // getBytes (not readBytes) leaves the buffer's reader index intact.
            buf.getBytes(0, bytes, 0, bytes.length);
            HexDump.dump(bytes, 0, stream, 0);
            stream.flush();
            logger.trace(String.format("%s %s [%s]:%n%s%n", type, buf,
                    ctx.channel().remoteAddress().toString(),
                    stream.toString(StandardCharsets.UTF_8.name())));
        }
    }
}
/**
 * Hex-dumps {@code b} to the trace log, tagged with {@code msg} and the
 * remote peer address. No-op for empty arrays or when tracing is disabled.
 */
private void traceData(String msg, byte[] b, ChannelHandlerContext ctx) throws Exception {
    if (!logger.isTraceEnabled() || b.length == 0) {
        return;
    }
    try (ByteArrayOutputStream hexOut = new ByteArrayOutputStream()) {
        HexDump.dump(b, 0, hexOut, 0);
        hexOut.flush();
        String remote = ctx.channel().remoteAddress().toString();
        logger.trace(String.format("%s [%s]:%n%s%n", msg, remote,
                hexOut.toString(StandardCharsets.UTF_8.name())));
    }
}
/**
 * Logs a hex dump of the outgoing request body (or notes that it is empty),
 * then delegates to the rest of the interceptor chain.
 */
@Override
public ClientHttpResponse intercept(HttpRequest request, byte[] body,
        ClientHttpRequestExecution execution) throws IOException {
    if (body.length == 0) {
        logger.info("Sending empty body to [{}]!", request.getURI());
    } else {
        ByteArrayOutputStream hexBuffer = new ByteArrayOutputStream();
        HexDump.dump(body, 0, hexBuffer, 0);
        logger.info("Sending to [{}]: \n{}", request.getURI(),
                hexBuffer.toString(Charsets.UTF_8.name()).trim());
    }
    return execution.execute(request, body);
}
/**
 * Walk-through of Avro basics: how primitives are encoded, the ways a
 * generated record's fields can be read, and the single-object byte format.
 * Output goes to stdout; intended to be read alongside its console output.
 */
public static void main(String[] args) throws Exception {
    // This is how a float is encoded to be used by Avro.
    System.out.println("\n> 3.5f encoded in Avro:");
    byte[] number = AvroUtils.convetIntToByteArrayInLE(Float.floatToIntBits(MyConstants.MY_FLOAT));
    System.out.println(String.format("0x%s", new String(Hex.encodeHex(number))));
    // This is how a double is encoded to be used by Avro.
    System.out.println("\n> 20.5d encoded in Avro:");
    number = AvroUtils.convetLongToByteArrayInLE(Double.doubleToLongBits(MyConstants.MY_DOUBLE));
    System.out.println(String.format("0x%s", new String(Hex.encodeHex(number))));
    // Create a sample record to use in our examples.
    MyRecord myRecord = AvroUtils.createMyRecord();
    // We can print using the field position in the schema.
    System.out.println("\n> Getting fields using their position:");
    System.out.println(myRecord.get(0));
    System.out.println(myRecord.get(1));
    // Using the field name in the schema.
    System.out.println("\n> Getting fields using their name:");
    System.out.println(myRecord.get("MyLong"));
    System.out.println(myRecord.get("MyString"));
    // Or using the Java property.
    System.out.println("\n> Getting fields using their Java property:");
    System.out.println(myRecord.getMyLong());
    System.out.println(myRecord.getMyString());
    // This way we can extract the schema. Note that this string is not exactly
    // the same as the one defined in the schema file.
    System.out.println("\n> Getting the field schema:");
    System.out.println(myRecord.getSchema());
    // Extract the byte representation of this single object. There is no schema in this format.
    System.out.println("\n> Dumping the bytes in the single object format:");
    ByteBuffer buffer = myRecord.toByteBuffer();
    HexDump.dump(buffer.array(), 0, System.out, 0);
    // Create a MyRecord from the previous byte buffer and dump some info.
    System.out.println("\n> Getting the object from the single object bytes:");
    MyRecord myRecord2 = MyRecord.fromByteBuffer(buffer);
    System.out.println(myRecord2.getMyLong());
    System.out.println(myRecord2.getMyString());
    // This is how the schema CRC is calculated.
    System.out.println("\n> Schema CRC in LE");
    long l = SchemaNormalization.parsingFingerprint64(myRecord.getSchema());
    HexDump.dump(AvroUtils.convetLongToByteArrayInLE(l), 0, System.out, 0);
    // ZigZag examples: Avro's varint encoding of small signed integers.
    System.out.println("\n> 20 in ZigZag");
    int i = AvroUtils.convertToZigZag(20);
    HexDump.dump(AvroUtils.convetIntToByteArrayInLE(i), 0, System.out, 0);
    System.out.println("\n> 30 in ZigZag");
    i = AvroUtils.convertToZigZag(30);
    HexDump.dump(AvroUtils.convetIntToByteArrayInLE(i), 0, System.out, 0);
    System.out.println("\n> 23 in ZigZag");
    i = AvroUtils.convertToZigZag(23);
    HexDump.dump(AvroUtils.convetIntToByteArrayInLE(i), 0, System.out, 0);
}
/**
 * Prints the raw RDB bytes as a formatted hex dump to stdout.
 * (Original comment, translated from Chinese: "Show the RDB byte content,
 * formatted".)
 */
@Test
public void showRDBDumpData() throws Exception {
    HexDump.dump(rdbData, 0, System.out, 0);
}
@Test
public void testOldToNew() throws Exception {
    // Forcibly rewrite the contents of HandlerRegistry so the data is read
    // back with a different class definition.
    // MinimumMarshaller writes only the class ID into the binary, so swapping
    // the registry entry makes it restore into a different class.
    // Forbidden outside tests (HandlerRegistry's internal maps are not
    // thread-safe).
    Field f_handlerMap = HandlerRegistry.class.getDeclaredField("handlerMap");
    f_handlerMap.setAccessible(true);
    ShortHMap handlerMap = (ShortHMap) f_handlerMap.get(null);
    Field f_classIdRMap = HandlerRegistry.class.getDeclaredField("classIdRMap");
    f_classIdRMap.setAccessible(true);
    Map<Class<?>, Short> classIdRMap = (Map<Class<?>, Short>) f_classIdRMap.get(null);
    Field f_classMap = HandlerRegistry.class.getDeclaredField("classMap");
    f_classMap.setAccessible(true);
    ShortCMap classMap = (ShortCMap) f_classMap.get(null);
    // Prepare the class mapping: register the OLD class under classId.
    handlerMap.put(classId , new ObjectHandler());
    classIdRMap.put(OldClassModifyBean.class, classId);
    classMap.put(classId, OldClassModifyBean.class);
    OldClassModifyBean old = new OldClassModifyBean();
    old.setProp1("prop1prop1");
    // Marshal using the old class definition.
    byte[] bytes = MinimumMarshaller.marshal(old);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    HexDump.dump(bytes, 0, os, 0);
    System.out.println(os.toString());
    System.out.println("");
    // Forcibly switch the mapping so classId now resolves to the NEW class.
    classIdRMap.remove(OldClassModifyBean.class);
    classIdRMap.put(NewClassModifyBean.class, classId);
    classMap.put(classId, NewClassModifyBean.class);
    // Unmarshal with the new class definition.
    NewClassModifyBean newBean = (NewClassModifyBean) MinimumMarshaller.unmarshal(bytes);
    // Check contents: prop1 survives; prop2 (new-only field) stays null.
    Assert.assertEquals("prop1prop1", newBean.getProp1());
    Assert.assertNull(newBean.getProp2());
    // Clean up the registry so other tests are unaffected.
    handlerMap.remove(classId);
    classIdRMap.remove(NewClassModifyBean.class);
    classMap.remove(classId);
}
@Test
public void testNewToOld() throws Exception {
    // Forcibly rewrite the contents of HandlerRegistry so the data is read
    // back with a different class definition.
    // MinimumMarshaller writes only the class ID into the binary, so swapping
    // the registry entry makes it restore into a different class.
    // Forbidden outside tests (HandlerRegistry's internal maps are not
    // thread-safe).
    Field f_handlerMap = HandlerRegistry.class.getDeclaredField("handlerMap");
    f_handlerMap.setAccessible(true);
    ShortHMap handlerMap = (ShortHMap) f_handlerMap.get(null);
    Field f_classIdRMap = HandlerRegistry.class.getDeclaredField("classIdRMap");
    f_classIdRMap.setAccessible(true);
    Map<Class<?>, Short> classIdRMap = (Map<Class<?>, Short>) f_classIdRMap.get(null);
    Field f_classMap = HandlerRegistry.class.getDeclaredField("classMap");
    f_classMap.setAccessible(true);
    ShortCMap classMap = (ShortCMap) f_classMap.get(null);
    // Prepare the class mapping: register the NEW class under classId.
    handlerMap.put(classId , new ObjectHandler());
    classIdRMap.put(NewClassModifyBean.class, classId);
    classMap.put(classId, NewClassModifyBean.class);
    NewClassModifyBean newBean = new NewClassModifyBean();
    newBean.setProp1("prop1prop1");
    newBean.setProp2("prop2prop2");
    // Marshal using the new class definition.
    byte[] bytes = MinimumMarshaller.marshal(newBean);
    // Log the serialized form.
    System.out.println(newBean.getClass().getSimpleName() + " binary size is " + bytes.length);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    HexDump.dump(bytes, 0, os, 0);
    System.out.println(os.toString());
    System.out.println("");
    // Forcibly switch the mapping so classId now resolves to the OLD class.
    classIdRMap.remove(NewClassModifyBean.class);
    classIdRMap.put(OldClassModifyBean.class, classId);
    classMap.put(classId, OldClassModifyBean.class);
    // Unmarshal with the old class definition.
    OldClassModifyBean oldBean = (OldClassModifyBean) MinimumMarshaller.unmarshal(bytes);
    // Check contents: prop1 survives; prop2 has no slot in the old class.
    Assert.assertEquals("prop1prop1", oldBean.getProp1());
    // Clean up the registry so other tests are unaffected.
    handlerMap.remove(classId);
    classIdRMap.remove(OldClassModifyBean.class);
    classMap.remove(classId);
}
@Test
public void testNestOldToNew() throws Exception {
    // Forcibly rewrite the contents of HandlerRegistry so the data is read
    // back with a different class definition.
    // MinimumMarshaller writes only the class ID into the binary, so swapping
    // the registry entry makes it restore into a different class.
    // Forbidden outside tests (HandlerRegistry's internal maps are not
    // thread-safe).
    Field f_handlerMap = HandlerRegistry.class.getDeclaredField("handlerMap");
    f_handlerMap.setAccessible(true);
    ShortHMap handlerMap = (ShortHMap) f_handlerMap.get(null);
    Field f_classIdRMap = HandlerRegistry.class.getDeclaredField("classIdRMap");
    f_classIdRMap.setAccessible(true);
    Map<Class<?>, Short> classIdRMap = (Map<Class<?>, Short>) f_classIdRMap.get(null);
    Field f_classMap = HandlerRegistry.class.getDeclaredField("classMap");
    f_classMap.setAccessible(true);
    ShortCMap classMap = (ShortCMap) f_classMap.get(null);
    // Prepare the class mapping: parent bean plus the OLD nested class.
    handlerMap.put(parentClassId , new ObjectHandler());
    classIdRMap.put(ParentBean.class, parentClassId);
    classMap.put(parentClassId, ParentBean.class);
    handlerMap.put(classId , new ObjectHandler());
    classIdRMap.put(OldClassModifyBean.class, classId);
    classMap.put(classId, OldClassModifyBean.class);
    ParentBean old = new ParentBean();
    old.setProp1("prop1");
    OldClassModifyBean oldNest = new OldClassModifyBean();
    oldNest.setProp1("prop1prop1");
    old.setProp2(oldNest);
    old.setProp3("prop3");
    // Marshal using the old nested class definition.
    byte[] bytes = MinimumMarshaller.marshal(old);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    HexDump.dump(bytes, 0, os, 0);
    System.out.println(os.toString());
    System.out.println("");
    // Forcibly switch the mapping so classId now resolves to the NEW class.
    classIdRMap.remove(OldClassModifyBean.class);
    classIdRMap.put(NewClassModifyBean.class, classId);
    classMap.put(classId, NewClassModifyBean.class);
    // Unmarshal with the new nested class definition.
    ParentBean newBean = (ParentBean) MinimumMarshaller.unmarshal(bytes);
    // Check contents: parent fields and nested prop1 survive; the nested
    // prop2 (new-only field) stays null.
    Assert.assertEquals("prop1", newBean.getProp1());
    NewClassModifyBean nestBean = (NewClassModifyBean) newBean.getProp2();
    Assert.assertEquals("prop1prop1", nestBean.getProp1());
    Assert.assertNull(nestBean.getProp2());
    Assert.assertEquals("prop3", newBean.getProp3());
    // Clean up the registry so other tests are unaffected.
    handlerMap.remove(parentClassId);
    classIdRMap.remove(ParentBean.class);
    classMap.remove(parentClassId);
    handlerMap.remove(classId);
    classIdRMap.remove(NewClassModifyBean.class);
    classMap.remove(classId);
}
@Test
public void testNestNewToOld() throws Exception {
    // Forcibly rewrite the contents of HandlerRegistry so the data is read
    // back with a different class definition.
    // MinimumMarshaller writes only the class ID into the binary, so swapping
    // the registry entry makes it restore into a different class.
    // Forbidden outside tests (HandlerRegistry's internal maps are not
    // thread-safe).
    Field f_handlerMap = HandlerRegistry.class.getDeclaredField("handlerMap");
    f_handlerMap.setAccessible(true);
    ShortHMap handlerMap = (ShortHMap) f_handlerMap.get(null);
    Field f_classIdRMap = HandlerRegistry.class.getDeclaredField("classIdRMap");
    f_classIdRMap.setAccessible(true);
    Map<Class<?>, Short> classIdRMap = (Map<Class<?>, Short>) f_classIdRMap.get(null);
    Field f_classMap = HandlerRegistry.class.getDeclaredField("classMap");
    f_classMap.setAccessible(true);
    ShortCMap classMap = (ShortCMap) f_classMap.get(null);
    // Prepare the class mapping: parent bean plus the NEW nested class.
    handlerMap.put(parentClassId , new ObjectHandler());
    classIdRMap.put(ParentBean.class, parentClassId);
    classMap.put(parentClassId, ParentBean.class);
    handlerMap.put(classId , new ObjectHandler());
    classIdRMap.put(NewClassModifyBean.class, classId);
    classMap.put(classId, NewClassModifyBean.class);
    ParentBean newBean = new ParentBean();
    newBean.setProp1("prop1");
    NewClassModifyBean nestBean = new NewClassModifyBean();
    nestBean.setProp1("prop1prop1");
    nestBean.setProp2("prop2prop2");
    newBean.setProp2(nestBean);
    newBean.setProp3("prop3");
    // Marshal using the new nested class definition.
    byte[] bytes = MinimumMarshaller.marshal(newBean);
    // Log the serialized form.
    System.out.println(newBean.getClass().getSimpleName() + " binary size is " + bytes.length);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    HexDump.dump(bytes, 0, os, 0);
    System.out.println(os.toString());
    System.out.println("");
    // Forcibly switch the mapping so classId now resolves to the OLD class.
    classIdRMap.remove(NewClassModifyBean.class);
    classIdRMap.put(OldClassModifyBean.class, classId);
    classMap.put(classId, OldClassModifyBean.class);
    // Unmarshal with the old nested class definition.
    ParentBean oldBean = (ParentBean) MinimumMarshaller.unmarshal(bytes);
    // Check contents: parent fields and nested prop1 survive; nested prop2
    // has no slot in the old class.
    Assert.assertEquals("prop1", oldBean.getProp1());
    OldClassModifyBean nestOldBean = (OldClassModifyBean) oldBean.getProp2();
    Assert.assertEquals("prop1prop1", nestOldBean.getProp1());
    Assert.assertEquals("prop3", oldBean.getProp3());
    // Clean up the registry so other tests are unaffected.
    handlerMap.remove(parentClassId);
    classIdRMap.remove(ParentBean.class);
    classMap.remove(parentClassId);
    handlerMap.remove(classId);
    classIdRMap.remove(OldClassModifyBean.class);
    classMap.remove(classId);
}
/**
 * Writes a hex dump of the serialized bytes to the info log.
 */
private static void dumpBytes(byte[] bytes) throws Exception {
    ByteArrayOutputStream hexBuffer = new ByteArrayOutputStream();
    HexDump.dump(bytes, 0, hexBuffer, 0);
    String formatted = hexBuffer.toString(Charsets.UTF_8.name()).trim();
    logger.info("Serialized object to: \n{}", formatted);
}
/**
 * Extracts the Type1 PFB file from the given AFP outline font.
 * @param file the AFP file to read from
 * @param targetDir the target directory where the PFB file is to be placed.
 * @throws IOException if an I/O error occurs
 */
public void extract(File file, File targetDir) throws IOException {
    InputStream in = new java.io.FileInputStream(file);
    try {
        MODCAParser parser = new MODCAParser(in);
        ByteArrayOutputStream baout = new ByteArrayOutputStream();
        UnparsedStructuredField strucField;
        // Concatenate the payloads of all structured fields with type ID
        // 0xD3EE89 (presumably the font-data fields — confirm against the
        // MO:DCA/AFP spec) into one contiguous buffer.
        while ((strucField = parser.readNextStructuredField()) != null) {
            if (strucField.getSfTypeID() == 0xD3EE89) {
                byte[] sfData = strucField.getData();
                println(strucField.toString());
                HexDump.dump(sfData, 0, printStream, 0);
                baout.write(sfData);
            }
        }
        ByteArrayInputStream bin = new ByteArrayInputStream(baout.toByteArray());
        DataInputStream din = new DataInputStream(bin);
        // Payload header: 4-byte unsigned length, 4-byte checksum (skipped),
        // then a length-prefixed identifier (TID) naming the embedded file.
        long len = din.readInt() & 0xFFFFFFFFL;
        println("Length: " + len);
        din.skip(4); //checksum
        int tidLen = din.readUnsignedShort() - 2;
        byte[] tid = new byte[tidLen];
        din.readFully(tid);
        // Decode the TID both as Latin-1 and as EBCDIC (Cp1146) and keep
        // whichever decoding yields more US-ASCII characters.
        String filename = new String(tid, "ISO-8859-1");
        int asciiCount1 = countUSAsciiCharacters(filename);
        String filenameEBCDIC = new String(tid, "Cp1146");
        int asciiCount2 = countUSAsciiCharacters(filenameEBCDIC);
        println("TID: " + filename + " " + filenameEBCDIC);
        if (asciiCount2 > asciiCount1) {
            //Haven't found an indicator if the name is encoded in EBCDIC or not
            //so we use a trick.
            filename = filenameEBCDIC;
        }
        if (!filename.toLowerCase().endsWith(".pfb")) {
            filename = filename + ".pfb";
        }
        println("Output filename: " + filename);
        // The remainder of the payload after the TID is the PFB body itself.
        File out = new File(targetDir, filename);
        OutputStream fout = new java.io.FileOutputStream(out);
        try {
            IOUtils.copyLarge(din, fout);
        } finally {
            IOUtils.closeQuietly(fout);
        }
    } finally {
        IOUtils.closeQuietly(in);
    }
}
/**
 * Demonstrates the Avro object-container file format: writes records via
 * DataFileWriter, appends pre-encoded raw bytes, hex-dumps the resulting
 * container, and saves it to AVRO_FILE_PATH.
 */
public static void main(String[] args) throws Exception {
    DatumWriter<MyRecord> dw = new SpecificDatumWriter<>();
    dw.setSchema(MyRecord.getClassSchema());
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataFileWriter<MyRecord> fw = new DataFileWriter<>(dw);
    fw.create(MyRecord.getClassSchema(), buf, MyConstants.MY_SYNC);
    fw.append(AvroUtils.createMyRecord());
    fw.append(AvroUtils.createMyRecord());
    // Force a block boundary (sync marker) between the records.
    fw.fSync();
    fw.append(AvroUtils.createMyRecord());
    // Will it work? No, it doesn't work because it adds a signature and schema CRC in the beginning.
    //fw.appendEncoded(schema.AvroUtils.createMyRecord().toByteBuffer());
    // Remove the first 10 bytes. They are the object header.
    byte[] arr = AvroUtils.createMyRecord().toByteBuffer().array();
    arr = Arrays.copyOfRange(arr, MyConstants.HEADER_SIZE, arr.length);
    // Append the raw bytes.
    fw.appendEncoded(ByteBuffer.wrap(arr));
    // Close the file. It will add a sync block.
    fw.close();
    System.out.println("\n> Dump the file content.");
    HexDump.dump(buf.toByteArray(), 0, System.out, 0);
    Files.write(Paths.get(MyConstants.AVRO_FILE_PATH), buf.toByteArray(), StandardOpenOption.TRUNCATE_EXISTING);
}
/**
 * Contrasts Avro's JSON and binary encodings by serializing the same record
 * twice with each encoder and hex-dumping the resulting bytes.
 */
public static void main(String[] args) throws Exception {
    MyRecord myRecord = AvroUtils.createMyRecord();
    DatumWriter<MyRecord> dw = new SpecificDatumWriter<>();
    dw.setSchema(myRecord.getSchema());
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonEncoder json = EncoderFactory.get().jsonEncoder(myRecord.getSchema(), out);
    dw.write(myRecord, json);
    dw.write(myRecord, json);
    // NOTE(review): JsonEncoder buffers internally; without json.flush() the
    // dump below may miss (or entirely lack) the JSON output — confirm.
    System.out.println("\n> Dump the two objects. They will be serialized in sequence as JSON.");
    HexDump.dump(out.toByteArray(), 0, System.out, 0);
    // Empty the buffer.
    out.reset();
    // directBinaryEncoder writes straight to the stream, so no flush concern.
    BinaryEncoder bin = EncoderFactory.get().directBinaryEncoder(out, null);
    dw.write(myRecord, bin);
    dw.write(myRecord, bin);
    System.out.println("\n> Dump the two objects. They will be serialized in sequence as BINARY.");
    HexDump.dump(out.toByteArray(), 0, System.out, 0);
}
/**
 * Writes a hex dump of a serialized message to the info log, tagged with the
 * serializer's format name.
 */
private static void dumpToLog(MessageSerDe serDe, byte[] data) throws IOException {
    ByteArrayOutputStream hexBuffer = new ByteArrayOutputStream();
    HexDump.dump(data, 0, hexBuffer, 0);
    String formatted = hexBuffer.toString(Charsets.UTF_8.name()).trim();
    logger.info("Serialized object using [{}] to: \n{}", serDe.getMessageFormatName(), formatted);
}