/**
 * Writes {@code stream} as a WAVE file to {@code out}.
 *
 * @param stream   the audio data to write (converted to little-endian if needed)
 * @param fileType must be {@code Type.WAVE}; checked by {@link #checkFormat}
 * @param out      destination stream; wrapped so the RIFF writer cannot close it
 * @return the number of bytes written
 * @throws IOException if writing fails
 * @throws NullPointerException if any argument is null
 */
@Override
public int write(AudioInputStream stream, Type fileType, OutputStream out)
        throws IOException {
    Objects.requireNonNull(stream);
    Objects.requireNonNull(fileType);
    Objects.requireNonNull(out);
    checkFormat(fileType, stream);
    // WAVE data is little-endian; convert big-endian input first.
    if (stream.getFormat().isBigEndian())
        stream = toLittleEndian(stream);
    // FIX: the writer was previously leaked if write(stream, writer) threw;
    // try-with-resources guarantees it is closed on every path.
    try (RIFFWriter writer = new RIFFWriter(new NoCloseOutputStream(out), "WAVE")) {
        write(stream, writer);
        return (int) writer.getFilePointer();
    }
}
/**
 * Writes {@code stream} as a WAVE file to the file {@code out}.
 *
 * @param stream   the audio data to write (converted to little-endian if needed)
 * @param fileType must be {@code Type.WAVE}; checked by {@link #checkFormat}
 * @param out      destination file
 * @return the number of bytes written
 * @throws IOException if writing fails
 * @throws NullPointerException if any argument is null
 */
@Override
public int write(AudioInputStream stream, Type fileType, File out)
        throws IOException {
    Objects.requireNonNull(stream);
    Objects.requireNonNull(fileType);
    Objects.requireNonNull(out);
    checkFormat(fileType, stream);
    // WAVE data is little-endian; convert big-endian input first.
    if (stream.getFormat().isBigEndian())
        stream = toLittleEndian(stream);
    // FIX: the writer was previously leaked if write(stream, writer) threw;
    // try-with-resources guarantees it is closed on every path.
    try (RIFFWriter writer = new RIFFWriter(out, "WAVE")) {
        write(stream, writer);
        return (int) writer.getFilePointer();
    }
}
@Override public Type[] getAudioFileTypes(AudioInputStream stream) { Type[] filetypes = new Type[types.length]; System.arraycopy(types, 0, filetypes, 0, types.length); // make sure we can write this stream AudioFormat format = stream.getFormat(); AudioFormat.Encoding encoding = format.getEncoding(); if (AudioFormat.Encoding.ALAW.equals(encoding) || AudioFormat.Encoding.ULAW.equals(encoding) || AudioFormat.Encoding.PCM_SIGNED.equals(encoding) || AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) || AudioFormat.Encoding.PCM_FLOAT.equals(encoding)) { return filetypes; } return new Type[0]; }
@Override public int write(AudioInputStream stream, Type fileType, OutputStream out) throws IOException { Objects.requireNonNull(stream); Objects.requireNonNull(fileType); Objects.requireNonNull(out); // we must know the total data length to calculate the file length //$$fb 2001-07-13: fix for bug 4351296: do not throw an exception //if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { // throw new IOException("stream length not specified"); //} // throws IllegalArgumentException if not supported AuFileFormat auFileFormat = (AuFileFormat)getAudioFileFormat(fileType, stream); return writeAuFile(stream, auFileFormat, out); }
/**
 * Flushes the captured samples to {@code file} as a mono 16-bit WAVE
 * file, then delegates to the superclass stop.
 *
 * @throws RenderCommandException if the file cannot be written
 */
@Override
public void stop() throws RenderCommandException {
    // Convert the captured float samples to 16-bit little-endian PCM.
    final int sampleCount = buffer.size();
    byte[] pcm = new byte[sampleCount * 2];
    for (int i = 0; i < sampleCount; i++) {
        int sample = (int) (buffer.get(i) * Short.MAX_VALUE);
        pcm[2 * i] = (byte) sample;            // low byte first
        pcm[2 * i + 1] = (byte) (sample >> 8); // then high byte
    }
    AudioInputStream in = new AudioInputStream(
            new ByteArrayInputStream(pcm),
            new AudioFormat(sRate, 16, 1, true, false),
            sampleCount);
    try (FileOutputStream out = new FileOutputStream(file)) {
        AudioSystem.write(in, Type.WAVE, out);
    } catch (IOException e) {
        throw new RenderCommandException(e);
    }
    super.stop();
}
/**
 * Builds a MIDI-backed audio source: acquires a sequencer, loads the
 * sequence from {@code data}, and installs an end-of-track listener that
 * rewinds (and, when looping, restarts) playback.
 *
 * @param data stream containing standard MIDI file data
 * @throws MidiUnavailableException if no sequencer is available
 */
MidiAudio(InputStream data) throws MidiUnavailableException {
    // Order matters: the sequencer must exist before the sequence is set,
    // and both before the listener is attached.
    getSequencer();
    setSequence(data);
    sequencer.addMetaEventListener(new MetaEventListener() {
        public void meta(MetaMessage msg) {
            if (msg.getType() == 47) {
                // Meta event type 47 signals end of track: re-arm the
                // sequencer with the same sequence and rewind to tick 0.
                try {
                    sequencer.setSequence(sequence);
                } catch (InvalidMidiDataException e) {
                    ErrorHandler.alert(e);
                }
                sequencer.setTickPosition(0);
                if (loop) { // End of track
                    sequencer.start();
                }
            }
        }
    });
    // Describe the rendered audio as nominal CD-quality stereo. The frame
    // count is the sequence duration in seconds plus 4 — presumably slack
    // for note decay; TODO confirm against callers.
    AudioFormat base = new AudioFormat(44100, 16, 2, true, false);
    format = new AudioFileFormat(new Type("MIDI", "mid"), base,
            (int) (base.getFrameRate()
                    * (sequence.getMicrosecondLength() / 1000000 + 4))).getFormat();
}
/**
 * Glitches an image by reinterpreting its encoded bytes as ULAW audio,
 * round-tripping them through the WAVE writer, and blending the mangled
 * raster back over the original image.
 *
 * @param inputImageBytes the encoded source image bytes
 * @return the glitched image bytes, or {@code null} if the blend
 *         parameter is out of range
 * @throws Exception if image decoding/encoding or audio writing fails
 */
@Override
public byte[] glitchPixels(byte[] inputImageBytes) throws Exception {
    // Blend strength comes from the "bitRateBlend" parameter
    // (expected 1..9 -> 0.1..0.9); anything else aborts the glitch.
    int audioBitRate = ((Integer) getPixelGlitchParameters().get("bitRateBlend")).intValue();
    float bitRateBlend = (float) audioBitRate / 10;
    if (bitRateBlend < 0.1F || bitRateBlend > 0.9F) {
        return null;
    }
    BufferedImage inputImage = ImageUtil.getImageFromBytes(inputImageBytes);
    // Treat the encoded image bytes as an audio stream with randomized rates;
    // the WAVE round trip is what produces the glitch artifacts.
    InputStream imageInputStream = new ByteArrayInputStream(inputImageBytes);
    AudioInputStream distortionAudioStream = new AudioInputStream(imageInputStream,
            new AudioFormat(AudioFormat.Encoding.ULAW,
                    ThreadLocalRandom.current().nextInt(8000, 20000), 8, 5, 9,
                    ThreadLocalRandom.current().nextInt(8000, 20000), true),
            inputImageBytes.length);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    AudioSystem.write(distortionAudioStream, Type.WAVE, outputStream);
    BufferedImage outputImage = new BufferedImage(inputImage.getWidth(),
            inputImage.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
    byte[] imageData = ((DataBufferByte) outputImage.getRaster().getDataBuffer()).getData();
    // FIX: the WAVE output (input length plus header) can exceed the ABGR
    // raster buffer; clamp the copy length to avoid
    // ArrayIndexOutOfBoundsException on images whose encoded size is close
    // to (or larger than) width*height*4.
    byte[] wavBytes = outputStream.toByteArray();
    System.arraycopy(wavBytes, 0, imageData, 0,
            Math.min(wavBytes.length, imageData.length));
    int[] abgrOffsets = {3, 2, 1, 0};
    DataBuffer outputBuffer = new DataBufferByte(imageData, imageData.length);
    WritableRaster raster = Raster.createInterleavedRaster(outputBuffer,
            inputImage.getWidth(), inputImage.getHeight(),
            4 * inputImage.getWidth(), 4, abgrOffsets, null);
    ColorModel colorModel = new ComponentColorModel(
            ColorSpace.getInstance(ColorSpace.CS_sRGB), true, false,
            Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
    BufferedImage rasterizedImage = new BufferedImage(colorModel, raster,
            colorModel.isAlphaPremultiplied(), null);
    rasterizedImage = resizeImage(rasterizedImage,
            inputImage.getWidth() * 4, inputImage.getHeight() * 4);
    // Blend the original image back over the glitched raster.
    Graphics2D g2d = rasterizedImage.createGraphics();
    g2d.setComposite(AlphaComposite.SrcOver.derive(bitRateBlend));
    g2d.drawImage(inputImage, 0, 0, null);
    g2d.dispose();
    rasterizedImage = rasterizedImage.getSubimage(0, 0,
            inputImage.getWidth(), inputImage.getHeight());
    return ImageUtil.getImageBytes(rasterizedImage);
}
/**
 * Validates that this writer can handle the request: only WAVE files
 * containing PCM_FLOAT data are supported.
 *
 * @throws IllegalArgumentException if the type or encoding is unsupported
 */
private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type)) {
        throw new IllegalArgumentException("File type " + type + " not supported.");
    }
    AudioFormat format = stream.getFormat();
    if (!format.getEncoding().equals(Encoding.PCM_FLOAT)) {
        throw new IllegalArgumentException("File format " + format + " not supported.");
    }
}
/**
 * Writes {@code stream} as a WAVE file to {@code out}.
 *
 * @param stream   the audio data to write (converted to little-endian if needed)
 * @param fileType must be {@code Type.WAVE}; checked by {@link #checkFormat}
 * @param out      destination stream; wrapped so the RIFF writer cannot close it
 * @return the number of bytes written
 * @throws IOException if writing fails
 */
public int write(AudioInputStream stream, Type fileType, OutputStream out)
        throws IOException {
    checkFormat(fileType, stream);
    // WAVE data is little-endian; convert big-endian input first.
    if (stream.getFormat().isBigEndian())
        stream = toLittleEndian(stream);
    // FIX: the writer was previously leaked if write(stream, writer) threw;
    // try-with-resources guarantees it is closed on every path.
    try (RIFFWriter writer = new RIFFWriter(new NoCloseOutputStream(out), "WAVE")) {
        write(stream, writer);
        return (int) writer.getFilePointer();
    }
}
/**
 * Writes {@code stream} as a WAVE file to the file {@code out}.
 *
 * @param stream   the audio data to write (converted to little-endian if needed)
 * @param fileType must be {@code Type.WAVE}; checked by {@link #checkFormat}
 * @param out      destination file
 * @return the number of bytes written
 * @throws IOException if writing fails
 */
public int write(AudioInputStream stream, Type fileType, File out)
        throws IOException {
    checkFormat(fileType, stream);
    // WAVE data is little-endian; convert big-endian input first.
    if (stream.getFormat().isBigEndian())
        stream = toLittleEndian(stream);
    // FIX: the writer was previously leaked if write(stream, writer) threw;
    // try-with-resources guarantees it is closed on every path.
    try (RIFFWriter writer = new RIFFWriter(out, "WAVE")) {
        write(stream, writer);
        return (int) writer.getFilePointer();
    }
}
/**
 * Reports which file types this writer supports for {@code stream}:
 * only PCM_FLOAT streams are writable, and only as WAVE.
 */
@Override
public Type[] getAudioFileTypes(AudioInputStream stream) {
    if (stream.getFormat().getEncoding().equals(Encoding.PCM_FLOAT)) {
        return new Type[] { Type.WAVE };
    }
    return new Type[0];
}
@Override public int write(AudioInputStream stream, Type fileType, File out) throws IOException { Objects.requireNonNull(stream); Objects.requireNonNull(fileType); Objects.requireNonNull(out); // throws IllegalArgumentException if not supported AuFileFormat auFileFormat = (AuFileFormat)getAudioFileFormat(fileType, stream); // first write the file without worrying about length fields FileOutputStream fos = new FileOutputStream( out ); // throws IOException BufferedOutputStream bos = new BufferedOutputStream( fos, bisBufferSize ); int bytesWritten = writeAuFile(stream, auFileFormat, bos ); bos.close(); // now, if length fields were not specified, calculate them, // open as a random access file, write the appropriate fields, // close again.... if( auFileFormat.getByteLength()== AudioSystem.NOT_SPECIFIED ) { // $$kk: 10.22.99: jan: please either implement this or throw an exception! // $$fb: 2001-07-13: done. Fixes Bug 4479981 RandomAccessFile raf=new RandomAccessFile(out, "rw"); if (raf.length()<=0x7FFFFFFFl) { // skip AU magic and data offset field raf.skipBytes(8); raf.writeInt(bytesWritten-AuFileFormat.AU_HEADERSIZE); // that's all } raf.close(); } return bytesWritten; }
/** * Returns the AudioFileFormat describing the file that will be written from this AudioInputStream. * Throws IllegalArgumentException if not supported. */ private AudioFileFormat getAudioFileFormat(Type type, AudioInputStream stream) { if (!isFileTypeSupported(type, stream)) { throw new IllegalArgumentException("File type " + type + " not supported."); } AudioFormat streamFormat = stream.getFormat(); AudioFormat.Encoding encoding = streamFormat.getEncoding(); if (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) { encoding = AudioFormat.Encoding.PCM_SIGNED; } // We always write big endian au files, this is by far the standard AudioFormat format = new AudioFormat(encoding, streamFormat.getSampleRate(), streamFormat.getSampleSizeInBits(), streamFormat.getChannels(), streamFormat.getFrameSize(), streamFormat.getFrameRate(), true); int fileSize; if (stream.getFrameLength() != AudioSystem.NOT_SPECIFIED) { fileSize = (int)stream.getFrameLength()*streamFormat.getFrameSize() + AuFileFormat.AU_HEADERSIZE; } else { fileSize = AudioSystem.NOT_SPECIFIED; } return new AuFileFormat(Type.AU, fileSize, format, (int) stream.getFrameLength()); }
/**
 * Exercises every (type, format) pair through AudioSystem and through
 * each installed AudioFileWriter, then removes the scratch file.
 */
public static void main(final String[] args) throws Exception {
    for (final AudioFileFormat.Type type : types) {
        for (final AudioFormat format : formats) {
            testAS(type, format);
            for (final AudioFileWriter writer : load(AudioFileWriter.class)) {
                testAFW(writer, type, format);
            }
        }
    }
    Files.delete(Paths.get(FILE.getAbsolutePath()));
}
/**
 * Exercises every (type, format) pair through AudioSystem and through
 * each installed AudioFileWriter.
 */
public static void main(final String[] args) throws Exception {
    for (final AudioFileFormat.Type fileType : types) {
        for (final AudioFormat audioFormat : formats) {
            testAS(fileType, audioFormat);
            for (final AudioFileWriter fileWriter : load(AudioFileWriter.class)) {
                testAFW(fileWriter, fileType, audioFormat);
            }
        }
    }
}
/**
 * Reports the file types this writer supports for {@code stream}:
 * float PCM streams can be written as WAVE, nothing else.
 */
public Type[] getAudioFileTypes(AudioInputStream stream) {
    AudioFormat.Encoding encoding = stream.getFormat().getEncoding();
    if (encoding.equals(AudioFloatConverter.PCM_FLOAT)) {
        return new Type[] { Type.WAVE };
    }
    return new Type[0];
}
/**
 * Validates that this writer can handle the request: only WAVE files
 * containing float PCM data are supported.
 *
 * @throws IllegalArgumentException if the type or encoding is unsupported
 */
private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type)) {
        throw new IllegalArgumentException("File type " + type + " not supported.");
    }
    AudioFormat streamFormat = stream.getFormat();
    if (!streamFormat.getEncoding().equals(AudioFloatConverter.PCM_FLOAT)) {
        throw new IllegalArgumentException("File format " + streamFormat + " not supported.");
    }
}
@Override public int write(AudioInputStream ais, Type type, File out) throws IOException { // TODO Auto-generated method stub return 0; }
@Override public int write(AudioInputStream ais, Type type, OutputStream os) throws IOException { // TODO Auto-generated method stub return 0; }
/** * Bake a path to a wav file * @param patch the patch to bake * @param wav the wav file to write * @param channels how many channels (1 for mono, 2 for stereo, can be more than 2 channels) * @param sampleRate sample rate used by Pd * @param time baking duration in seconds * @throws IOException */ public static void bake(File patch, File wav, int channels, int sampleRate, float time) throws IOException { // disable Pd : does nothing if Pd alreay initialized. PdConfiguration.disabled = true; // Pause audio. // Does nothing in headless mode but required to // have Pd static code executed (load library) Pd.audio.pause(); int handle = PdBase.openPatch(patch); PdBase.openAudio(0, channels, sampleRate); PdBase.computeAudio(true); int frames = (int)(time * sampleRate); int samples = frames * channels; short [] data = new short[samples]; int ticks = frames / PdBase.blockSize(); PdBase.process(ticks, new short[]{}, data); PdBase.closePatch(handle); // save byte [] buf = new byte[data.length * 2]; for(int i=0 ; i<data.length ; i++){ buf[i*2+0] = (byte)(data[i] & 0xFF); buf[i*2+1] = (byte)((data[i] >> 8) & 0xFF); } ByteArrayInputStream stream = new ByteArrayInputStream(buf); AudioFormat format = new AudioFormat(sampleRate, 16, channels, true, false); AudioInputStream audioStream = new AudioInputStream(stream, format, data.length); AudioSystem.write(audioStream, Type.WAVE, wav); // resume audio Pd.audio.resume(); }