/**
 * Creates a new Sound instance by the specified file path. Loads the sound
 * data into a byte array and also retrieves information about the format of
 * the sound file.
 *
 * Note that the constructor is private. In order to load files use the static
 * methods {@link #find(String)} or {@link #load(String)} methods depending on
 * whether you already loaded the sound or not.
 *
 * @param is
 *          The input stream to load the sound from.
 * @param name
 *          The name under which this sound is registered.
 */
private Sound(InputStream is, String name) {
  this.name = name;
  try {
    AudioInputStream in = AudioSystem.getAudioInputStream(is);
    // NOTE(review): getAudioInputStream is documented to throw rather than
    // return null, so this guard is likely defensive only — confirm before removing.
    if (in != null) {
      final AudioFormat baseFormat = in.getFormat();
      final AudioFormat decodedFormat = this.getOutFormat(baseFormat);
      // Get AudioInputStream that will be decoded by underlying VorbisSPI
      in = AudioSystem.getAudioInputStream(decodedFormat, in);
      this.stream = in;
      // Read the entire decoded stream into memory up front.
      this.streamData = StreamUtilities.getBytes(this.stream);
      this.format = this.stream.getFormat();
    }
  } catch (final UnsupportedAudioFileException | IOException e) {
    // Loading failures are logged, leaving this Sound with empty data.
    log.log(Level.SEVERE, e.getMessage(), e);
  }
}
@Override public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) { AudioFileFormat.Type[] filetypes = new AudioFileFormat.Type[types.length]; System.arraycopy(types, 0, filetypes, 0, types.length); // make sure we can write this stream AudioFormat format = stream.getFormat(); AudioFormat.Encoding encoding = format.getEncoding(); if( AudioFormat.Encoding.ALAW.equals(encoding) || AudioFormat.Encoding.ULAW.equals(encoding) || AudioFormat.Encoding.PCM_SIGNED.equals(encoding) || AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) ) { return filetypes; } return new AudioFileFormat.Type[0]; }
@Override public AudioInputStream getAudioInputStream(final InputStream stream) throws UnsupportedAudioFileException, IOException { final StandardFileFormat format = getAudioFileFormat(stream); final AudioFormat af = format.getFormat(); final long length = format.getLongFrameLength(); // we've got everything, the stream is supported and it is at the // beginning of the header, so find the data chunk again and return an // AudioInputStream final RIFFReader riffiterator = new RIFFReader(stream); while (riffiterator.hasNextChunk()) { RIFFReader chunk = riffiterator.nextChunk(); if (chunk.getFormat().equals("data")) { return new AudioInputStream(chunk, af, length); } } throw new UnsupportedAudioFileException(); }
/**
 * This method is a replacement for
 * AudioSystem.getAudioInputStream(AudioFormat, AudioInputStream), which is
 * used for audio format conversion at the stream level. This method includes
 * a workaround for converting from an mp3 AudioInputStream when the sketch
 * is running in an applet. The workaround was developed by the Tritonus team
 * and originally comes from the package javazoom.jlgui.basicplayer
 *
 * @param targetFormat
 *          the AudioFormat to convert the stream to
 * @param sourceStream
 *          the stream containing the unconverted audio
 * @return an AudioInputStream in the target format
 * @throws IllegalArgumentException if neither the installed providers nor the
 *           mpeg codec workaround can perform the conversion
 */
AudioInputStream getAudioInputStream(AudioFormat targetFormat, AudioInputStream sourceStream) {
    try {
        return AudioSystem.getAudioInputStream(targetFormat, sourceStream);
    } catch (IllegalArgumentException iae) {
        debug("Using AppletMpegSPIWorkaround to get codec");
        try {
            // Probe for the codec class first so we can fail with a clear message.
            Class.forName("javazoom.spi.mpeg.sampled.convert.MpegFormatConversionProvider");
            return new javazoom.spi.mpeg.sampled.convert.MpegFormatConversionProvider()
                    .getAudioInputStream(targetFormat, sourceStream);
        } catch (ClassNotFoundException cnfe) {
            // Preserve the cause so the missing-class detail is not lost.
            throw new IllegalArgumentException("Mpeg codec not properly installed", cnfe);
        }
    }
}
/**
 * WAV files only.
 *
 * Loads the audio resource into a {@link Clip}, registers it under the given
 * name, and sets loop points to cover the whole clip. Failures are reported
 * on stdout and swallowed (best-effort loading, as before).
 *
 * @param name
 *          Name to store sound as
 * @param file
 *          Sound file (classpath resource path)
 */
public static void loadSound(String name, String file) {
    System.out.print("Loading sound file: \"" + file + "\" into clip: \"" + name + "\", ");
    // try-with-resources closes both streams even when getClip/open fails
    // (the original leaked them on any exception after opening).
    try (BufferedInputStream in =
                 new BufferedInputStream(SoundPlayer.class.getResourceAsStream(file));
         AudioInputStream ain = AudioSystem.getAudioInputStream(in)) {
        Clip c = AudioSystem.getClip();
        c.open(ain); // Clip.open reads all data, so the streams may be closed afterwards
        c.setLoopPoints(0, -1); // -1 == loop to the last frame
        clips.put(name, c);
        System.out.println("Done.");
    } catch (Exception e) {
        System.out.println("Failed. (" + e.getMessage() + ")");
    }
}
/**
 * Verifies the frame length after the stream was saved/read to/from file.
 * Writer/type combinations that are legitimately unsupported are skipped.
 */
private static void testAfterSaveToFile(final AudioFileWriter afw,
                                        final AudioFileFormat.Type type,
                                        AudioInputStream ais) throws IOException {
    final File temp = File.createTempFile("sound", ".tmp");
    try {
        afw.write(ais, type, temp);
        ais = AudioSystem.getAudioInputStream(temp);
        final long frames = ais.getFrameLength();
        ais.close();
        validate(frames);
    } catch (final IllegalArgumentException | UnsupportedAudioFileException ignored) {
        // This writer cannot handle the type/stream pair — not a failure.
    } finally {
        Files.delete(temp.toPath());
    }
}
private void readStream(AudioInputStream as, long byteLen) throws IOException { // arrays "only" max. 2GB int intLen; if (byteLen > 2147483647) { intLen = 2147483647; } else { intLen = (int) byteLen; } loadedAudio = new byte[intLen]; loadedAudioByteLength = 0; // this loop may throw an IOException while (true) { int bytesRead = as.read(loadedAudio, loadedAudioByteLength, intLen - loadedAudioByteLength); if (bytesRead <= 0) { as.close(); break; } loadedAudioByteLength += bytesRead; } }
public void write(AudioInputStream stream, RIFFWriter writer) throws IOException { RIFFWriter fmt_chunk = writer.writeChunk("fmt "); AudioFormat format = stream.getFormat(); fmt_chunk.writeUnsignedShort(3); // WAVE_FORMAT_IEEE_FLOAT fmt_chunk.writeUnsignedShort(format.getChannels()); fmt_chunk.writeUnsignedInt((int) format.getSampleRate()); fmt_chunk.writeUnsignedInt(((int) format.getFrameRate()) * format.getFrameSize()); fmt_chunk.writeUnsignedShort(format.getFrameSize()); fmt_chunk.writeUnsignedShort(format.getSampleSizeInBits()); fmt_chunk.close(); RIFFWriter data_chunk = writer.writeChunk("data"); byte[] buff = new byte[1024]; int len; while ((len = stream.read(buff, 0, buff.length)) != -1) data_chunk.write(buff, 0, len); data_chunk.close(); }
public static void main(String[] params) throws Exception { AudioInputStream is = AudioSystem.getAudioInputStream(new ByteArrayInputStream(new byte[] { (byte)0x2E, (byte)0x73, (byte)0x6E, (byte)0x64, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x18, (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0xFF, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x03, (byte)0x00, (byte)0x00, (byte)0x1F, (byte)0x40, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x00, })); if (is.getFrameLength() != AudioSystem.NOT_SPECIFIED) { System.out.println("frame length should be NOT_SPECIFIED, but is: "+is.getFrameLength()); failed=true; } //assertTrue(is.getFrameLength() == AudioSystem.NOT_SPECIFIED); //assertTrue(is.read(new byte[8]) == 8); //assertTrue(is.read(new byte[2]) == -1); if (failed) throw new Exception("Test FAILED!"); System.out.println("Test Passed."); }
/** * Obtains an audio stream from the File provided. The File must * point to valid audio file data. * @param file the File for which the <code>AudioInputStream</code> should be * constructed * @return an <code>AudioInputStream</code> object based on the audio file data pointed * to by the File * @throws UnsupportedAudioFileException if the File does not point to valid audio * file data recognized by the system * @throws IOException if an I/O exception occurs */ public AudioInputStream getAudioInputStream(File file) throws UnsupportedAudioFileException, IOException { FileInputStream fis = new FileInputStream(file); // throws IOException AudioFileFormat fileFormat = null; // part of fix for 4325421 try { fileFormat = getCOMM(fis, false); } finally { if (fileFormat == null) { fis.close(); } } return new AudioInputStream(fis, fileFormat.getFormat(), fileFormat.getFrameLength()); }
/**
 * Converts the source stream to the given target encoding, keeping every
 * other format parameter (rate, size, channels, endianness) unchanged.
 *
 * @throws IllegalArgumentException if the conversion is not supported
 */
public AudioInputStream getAudioInputStream(AudioFormat.Encoding targetEncoding,
                                            AudioInputStream sourceStream) {
    if (!isConversionSupported(targetEncoding, sourceStream.getFormat())) {
        throw new IllegalArgumentException("Unsupported conversion: "
                + sourceStream.getFormat().toString() + " to " + targetEncoding.toString());
    }
    final AudioFormat src = sourceStream.getFormat();
    // Same format, only the encoding swapped out.
    final AudioFormat target = new AudioFormat(targetEncoding,
            src.getSampleRate(),
            src.getSampleSizeInBits(),
            src.getChannels(),
            src.getFrameSize(),
            src.getFrameRate(),
            src.isBigEndian());
    return getAudioInputStream(target, sourceStream);
}
private void readStream(AudioInputStream as) throws IOException { DirectBAOS baos = new DirectBAOS(); byte buffer[] = new byte[16384]; int bytesRead = 0; int totalBytesRead = 0; // this loop may throw an IOException while( true ) { bytesRead = as.read(buffer, 0, buffer.length); if (bytesRead <= 0) { as.close(); break; } totalBytesRead += bytesRead; baos.write(buffer, 0, bytesRead); } loadedAudio = baos.getInternalBuffer(); loadedAudioByteLength = totalBytesRead; }
public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) { AudioFileFormat.Type[] filetypes = new AudioFileFormat.Type[types.length]; System.arraycopy(types, 0, filetypes, 0, types.length); // make sure we can write this stream AudioFormat format = stream.getFormat(); AudioFormat.Encoding encoding = format.getEncoding(); if( (AudioFormat.Encoding.ALAW.equals(encoding)) || (AudioFormat.Encoding.ULAW.equals(encoding)) || (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) ) { return filetypes; } return new AudioFileFormat.Type[0]; }
public static AudioInputStream getPCMConvertedAudioInputStream(AudioInputStream ais) { // we can't open the device for non-PCM playback, so we have // convert any other encodings to PCM here (at least we try!) AudioFormat af = ais.getFormat(); if( (!af.getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) && (!af.getEncoding().equals(AudioFormat.Encoding.PCM_UNSIGNED))) { try { AudioFormat newFormat = new AudioFormat( AudioFormat.Encoding.PCM_SIGNED, af.getSampleRate(), 16, af.getChannels(), af.getChannels() * 2, af.getSampleRate(), Platform.isBigEndian()); ais = AudioSystem.getAudioInputStream(newFormat, ais); } catch (Exception e) { if (Printer.err) e.printStackTrace(); ais = null; } } return ais; }
/**
 * Opens an AudioInputStream over the "data" chunk of a RIFF/WAVE stream.
 * The file format is parsed first, then the RIFF container is validated
 * and walked until the sample data chunk is found.
 */
public AudioInputStream getAudioInputStream(InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    final AudioFileFormat fileFormat = getAudioFileFormat(stream);
    final RIFFReader riff = new RIFFReader(stream);
    if (!riff.getFormat().equals("RIFF")) {
        throw new UnsupportedAudioFileException();
    }
    if (!riff.getType().equals("WAVE")) {
        throw new UnsupportedAudioFileException();
    }
    while (riff.hasNextChunk()) {
        final RIFFReader chunk = riff.nextChunk();
        if (chunk.getFormat().equals("data")) {
            // Frame length here is the chunk's byte size, as in the original.
            return new AudioInputStream(chunk, fileFormat.getFormat(), chunk.getSize());
        }
    }
    throw new UnsupportedAudioFileException();
}
/**
 * Wraps a float stream as an AudioInputStream in the target format,
 * inserting a channel mixer and/or resampler stage when the channel
 * count or sample rate differ from the source.
 *
 * @throws IllegalArgumentException if the conversion is not supported
 */
public AudioInputStream getAudioInputStream(AudioFormat targetFormat,
        AudioFloatInputStream sourceStream) {
    if (!isConversionSupported(targetFormat, sourceStream.getFormat())) {
        throw new IllegalArgumentException("Unsupported conversion: "
                + sourceStream.getFormat().toString() + " to "
                + targetFormat.toString());
    }
    // Stage 1: channel mixing, if the channel counts differ.
    if (targetFormat.getChannels() != sourceStream.getFormat().getChannels()) {
        sourceStream = new AudioFloatInputStreamChannelMixer(sourceStream,
                targetFormat.getChannels());
    }
    // Stage 2: resampling, if the rates differ beyond float noise.
    if (Math.abs(targetFormat.getSampleRate()
            - sourceStream.getFormat().getSampleRate()) > 0.000001) {
        sourceStream = new AudioFloatInputStreamResampler(sourceStream, targetFormat);
    }
    return new AudioInputStream(
            new AudioFloatFormatConverterInputStream(targetFormat, sourceStream),
            targetFormat, sourceStream.getFrameLength());
}
/** * Saves the double array as an audio file (using .wav or .au format). * * @param filename * the name of the audio file * @param samples * the array of samples * @throws IllegalArgumentException * if unable to save {@code filename} * @throws IllegalArgumentException * if {@code samples} is {@code null} */ public static void save(String filename, double[] samples) { if (samples == null) { throw new IllegalArgumentException("samples[] is null"); } // assumes 44,100 samples per second // use 16-bit audio, mono, signed PCM, little Endian AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false); byte[] data = new byte[2 * samples.length]; for (int i = 0; i < samples.length; i++) { int temp = (short) (samples[i] * MAX_16_BIT); data[2 * i + 0] = (byte) temp; data[2 * i + 1] = (byte) (temp >> 8); } // now save the file try { ByteArrayInputStream bais = new ByteArrayInputStream(data); AudioInputStream ais = new AudioInputStream(bais, format, samples.length); if (filename.endsWith(".wav") || filename.endsWith(".WAV")) { AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename)); } else if (filename.endsWith(".au") || filename.endsWith(".AU")) { AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename)); } else { throw new IllegalArgumentException("unsupported audio format: '" + filename + "'"); } } catch (IOException ioe) { throw new IllegalArgumentException("unable to save file '" + filename + "'", ioe); } }
/** * Plays an audio file (in .wav, .mid, or .au format) in a background * thread. * * @param filename * the name of the audio file * @throws IllegalArgumentException * if unable to play {@code filename} * @throws IllegalArgumentException * if {@code filename} is {@code null} */ public static synchronized void play(final String filename) { if (filename == null) throw new IllegalArgumentException(); InputStream is = StdAudio.class.getResourceAsStream(filename); if (is == null) { throw new IllegalArgumentException("could not read '" + filename + "'"); } // code adapted from: // http://stackoverflow.com/questions/26305/how-can-i-play-sound-in-java try { // check if file format is supported // (if not, will throw an UnsupportedAudioFileException) @SuppressWarnings("unused") AudioInputStream ais = AudioSystem.getAudioInputStream(is); new Thread(new Runnable() { @Override public void run() { stream(filename); } }).start(); } // let's try Applet.newAudioClip() instead catch (UnsupportedAudioFileException e) { playApplet(filename); return; } // something else went wrong catch (IOException ioe) { throw new IllegalArgumentException("could not play '" + filename + "'", ioe); } }
/**
 * Closes the weakly-referenced backing stream, if it is still reachable.
 */
@Override
public void close() throws IOException {
    final AudioInputStream target = weak_stream_link.get();
    if (target == null) {
        return; // already collected — nothing left to release
    }
    target.close();
}
/**
 * Indicates whether an audio file of the type specified can be written from
 * the audio input stream indicated.
 *
 * @param fileType file type for which write capabilities are queried
 * @param stream for which file writing support is queried
 * @return {@code true} if the file type is supported for this audio input
 *         stream, otherwise {@code false}
 * @throws NullPointerException if {@code fileType} or {@code stream} are
 *         {@code null}
 */
public boolean isFileTypeSupported(Type fileType, AudioInputStream stream) {
    Objects.requireNonNull(fileType);
    // The javadoc promises NPE for a null stream, but only fileType was
    // checked — enforce the documented contract explicitly.
    Objects.requireNonNull(stream);

    for (Type supported : getAudioFileTypes(stream)) {
        if (fileType.equals(supported)) {
            return true;
        }
    }
    return false;
}
/**
 * Builds an AudioFloatInputStream over a byte array. When a direct
 * float converter exists for the format it is used; otherwise the bytes
 * are wrapped in an AudioInputStream and decoded from there.
 */
public static AudioFloatInputStream getInputStream(AudioFormat format,
        byte[] buffer, int offset, int len) {
    final AudioFloatConverter converter = AudioFloatConverter.getConverter(format);
    if (converter != null) {
        // Fast path: convert straight out of the array.
        return new BytaArrayAudioFloatInputStream(converter, buffer, offset, len);
    }
    // Fallback path: frame length is unknown when the frame size is.
    final InputStream byteStream = new ByteArrayInputStream(buffer, offset, len);
    final long frames = format.getFrameSize() == AudioSystem.NOT_SPECIFIED
            ? AudioSystem.NOT_SPECIFIED
            : len / format.getFrameSize();
    return getInputStream(new AudioInputStream(byteStream, format, frames));
}
/**
 * Wires up a recording stream over an AudioInputStream and its output line.
 *
 * @param sys          owning system, used for debug logging
 * @param metaData     metadata describing the audio source
 * @param stream       the stream audio is read from
 * @param sdl          the line audio is written to; its format drives all buffer sizing
 * @param inBufferSize buffer size in sample frames
 * @param msLen        stream length in milliseconds, used to place the default loop end
 */
JSBaseAudioRecordingStream(JSMinim sys, AudioMetaData metaData, AudioInputStream stream, SourceDataLine sdl, int inBufferSize, int msLen) {
  system = sys;
  meta = metaData;
  format = sdl.getFormat();
  bufferSize = inBufferSize;
  // allocate reading data
  buffer = new FloatSampleBuffer( format.getChannels(), bufferSize, format.getSampleRate() );
  system.debug( "JSBaseAudioRecordingStream :: FloatSampleBuffer has " + buffer.getSampleCount() + " samples." );
  // raw byte buffer sized to hold exactly one float-buffer's worth of frames
  rawBytes = new byte[buffer.getByteArrayBufferSize( format )];
  system.debug( "JSBaseAudioRecordingStream :: rawBytes has length " + rawBytes.length );
  // scratch buffer for skipping: 10 seconds worth, frame-aligned
  skipBytes = new byte[ (int)AudioUtils.millis2BytesFrameAligned( 10000, format ) ];
  system.debug( "JSBaseAudioRecordingStream :: skipBytes has length " + skipBytes.length );
  finished = false;
  line = sdl;
  ais = stream;
  // playback state: stopped, not looping, positioned at the start
  loop = false;
  play = false;
  numLoops = 0;
  loopBegin = 0;
  // default loop end is the end of the stream (msLen converted to bytes)
  loopEnd = (int)AudioUtils.millis2BytesFrameAligned( msLen, format );
  silence = new float[bufferSize];
  iothread = null;
  totalBytesRead = 0;
  bytesWritten = 0;
  shouldRead = true;
}
/**
 * Starts the alarm horn (looping clip) if it is not already running and the
 * bell is enabled in the preferences, then refreshes the bell icon.
 *
 * @throws UnsupportedAudioFileException if the sound file format is not recognized
 * @throws IOException on I/O failure while reading the sound file
 * @throws LineUnavailableException if no audio line can be obtained
 */
private void enableHorn () throws UnsupportedAudioFileException, IOException, LineUnavailableException
{
    // Only (re)start when no clip is playing and the preference allows it.
    if ( ( this.clip == null || !this.clip.isRunning () ) && Activator.getDefault ().getPreferenceStore ().getBoolean ( PreferenceConstants.BELL_ACTIVATED_KEY ) )
    {
        final AudioInputStream sound = AudioSystem.getAudioInputStream ( this.soundFile );
        final DataLine.Info info = new DataLine.Info ( Clip.class, sound.getFormat () );
        this.clip = (Clip)AudioSystem.getLine ( info );
        this.clip.open ( sound );
        // Loop until explicitly stopped elsewhere.
        this.clip.loop ( Clip.LOOP_CONTINUOUSLY );
    }
    // Update the icon regardless of whether the clip was (re)started.
    if ( !this.bellIcon.isDisposed () )
    {
        this.bellIcon.setImage ( getBellIcon () );
    }
}
/**
 * Transfers pending control changes into the audio-thread state.
 *
 * Fields suffixed "_sg" are change flags set by the control side; for each
 * flagged group the public value is copied into its underscore-prefixed
 * working copy (and the flag cleared), otherwise the working copy is copied
 * back so both sides stay in sync. Order matters: flags are consumed before
 * the stream is (re)built below.
 */
@Override
protected void processControlLogic() {
    // Gain values are always latched unconditionally.
    _rightgain = rightgain;
    _leftgain = leftgain;
    _eff1gain = eff1gain;
    _eff2gain = eff2gain;
    if (active_sg) {
        _active = active;
        active_sg = false;
    } else {
        active = _active;
    }
    if (frameposition_sg) {
        _frameposition = frameposition;
        frameposition_sg = false;
        // A seek invalidates the current stream; it is rebuilt below.
        afis = null;
    } else {
        frameposition = _frameposition;
    }
    if (loop_sg) {
        _loopcount = loopcount;
        _loopstart = loopstart;
        _loopend = loopend;
    }
    if (afis == null) {
        // (Re)create the float stream over the raw data; length unknown.
        afis = AudioFloatInputStream.getInputStream(new AudioInputStream(
                datastream, format, AudioSystem.NOT_SPECIFIED));
        // Resample only when the rates differ beyond float tolerance.
        if (Math.abs(format.getSampleRate() - outputformat.getSampleRate()) > 0.000001)
            afis = new AudioFloatInputStreamResampler(afis, outputformat);
    }
}
/**
 * Normalises the given WAV stream to 16 kHz mono PCM and forwards it.
 * Conversion problems are logged and the audio is dropped (no exception
 * reaches the caller).
 */
public final void sendAudio(InputStream wavStream) {
    final AudioInputStream pcmStream;
    try {
        pcmStream = adjustAudioEncoding(wavStream);
    } catch (UnsupportedAudioFileException | IOException ex) {
        log.error("Problem adjusting audio", ex);
        return;
    }
    send16khzMonoPcmAudio(pcmStream);
}
/**
 * Normalises arbitrary WAV input step by step: decode, resample to 16 kHz,
 * downmix to mono, convert to PCM, and finally skip the RIFF header so
 * only raw sample data remains.
 */
private static AudioInputStream adjustAudioEncoding(InputStream sourceWavStream)
        throws UnsupportedAudioFileException, IOException {
    AudioInputStream audio = getAudioInputStream(sourceWavStream);
    audio = to16khz(audio);
    audio = toMono(audio);
    audio = toPcm(audio);
    skipRiffHeader(audio);
    return audio;
}
private void jButton16ActionPerformed( java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton16ActionPerformed System.out.print("\7"); //Вот это вот издает звук Clip clip = null; try { clip = AudioSystem.getClip(); } catch (LineUnavailableException ex) { ex.printStackTrace(); } byte[] buf = new byte[1024]; for (int j = 0; j < buf.length; j++) { buf[j] = (byte) j; } AudioFormat af = new AudioFormat( 11025f, 8, // sample size in bits 2, // channels true, // signed false // bigendian ); try { byte[] b = buf; AudioInputStream ais = new AudioInputStream(new ByteArrayInputStream(b), af, 512); clip.open(ais); } catch (Exception e) { e.printStackTrace(); } }
/**
 * Builds an AudioClip from the stream, trying in order: a sampled Clip
 * (for small data), a SourceDataLine, and finally a MIDI sequencer.
 * The stream is buffered and marked so the MIDI fallback can re-read it
 * from the start after sampled-audio parsing fails.
 *
 * @param in the stream containing sampled audio or MIDI data
 * @throws IOException if no playback mechanism could be created
 */
public JavaSoundAudioClip(InputStream in) throws IOException {
    if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip.<init>");
    // Mark limit must cover everything getAudioInputStream may read,
    // so the MIDI path below can reset to the beginning.
    BufferedInputStream bis = new BufferedInputStream(in, STREAM_BUFFER_SIZE);
    bis.mark(STREAM_BUFFER_SIZE);
    boolean success = false;
    try {
        AudioInputStream as = AudioSystem.getAudioInputStream(bis);
        // load the stream data into memory
        success = loadAudioData(as);
        if (success) {
            success = false;
            // Small data fits in a Clip; anything larger streams via a line.
            if (loadedAudioByteLength < CLIP_THRESHOLD) {
                success = createClip();
            }
            if (!success) {
                success = createSourceDataLine();
            }
        }
    } catch (UnsupportedAudioFileException e) {
        // not an audio file — try MIDI on the same (reset) stream
        try {
            MidiFileFormat mff = MidiSystem.getMidiFileFormat(bis);
            success = createSequencer(bis);
        } catch (InvalidMidiDataException e1) {
            success = false;
        }
    }
    if (!success) {
        throw new IOException("Unable to create AudioClip from input stream");
    }
}
public static synchronized void playWav(final InputStream url) { //Plays wav, from Inputstream! new Thread(new Runnable() { public void run() { try { Clip clip = AudioSystem.getClip(); AudioInputStream inputStream = AudioSystem.getAudioInputStream(url); clip.open(inputStream); sounds.put(sounds.size() + 1,clip); clip.start(); } catch (Exception e) { e.printStackTrace(); } } },"SoundHandler").start(); }
/**
 * Plays a WAV file on a background "SoundHandler" thread, registering the
 * clip in the sounds map. Any playback failure is printed and swallowed.
 */
public static synchronized void playWav(File file) {
    final Runnable playback = new Runnable() {
        public void run() {
            try {
                Clip clip = AudioSystem.getClip();
                AudioInputStream audio = AudioSystem.getAudioInputStream(file);
                clip.open(audio);
                sounds.put(sounds.size() + 1, clip);
                clip.start();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
    new Thread(playback, "SoundHandler").start();
}
/**
 * Reports the bytes available on the wrapped stream, or 0 when the
 * stream reference has been cleared.
 */
@Override
public int available() throws IOException {
    // Snapshot the field once — it may be nulled out concurrently.
    final AudioInputStream snapshot = stream;
    return snapshot == null ? 0 : snapshot.available();
}
/**
 * Opens an AudioInputStream over the URL's content. On any failure the
 * underlying URL stream is closed before the error propagates; on success
 * ownership passes to the returned stream.
 */
@Override
public final AudioInputStream getAudioInputStream(final URL url)
        throws UnsupportedAudioFileException, IOException {
    final InputStream raw = url.openStream();
    try {
        return getAudioInputStream(new BufferedInputStream(raw));
    } catch (final Throwable t) {
        closeSilently(raw); // failure path only — don't leak the connection
        throw t;
    }
}
/**
 * Loads the classpath audio resource {@code s}, decodes it to 16-bit
 * signed little-endian PCM, and caches the resulting Clip under key
 * {@code n}. A second call with the same key is a no-op; failures are
 * printed and swallowed.
 */
public static void load(String s, String n) {
    if (clips.get(n) != null) {
        return; // already cached
    }
    try {
        AudioInputStream encoded = AudioSystem.getAudioInputStream(
                JukeBox.class.getResourceAsStream(s));
        AudioFormat srcFormat = encoded.getFormat();
        AudioFormat pcmFormat = new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                srcFormat.getSampleRate(),
                16,
                srcFormat.getChannels(),
                srcFormat.getChannels() * 2, // 2 bytes per sample per channel
                srcFormat.getSampleRate(),
                false); // little endian
        AudioInputStream decoded = AudioSystem.getAudioInputStream(pcmFormat, encoded);
        Clip c = AudioSystem.getClip();
        c.open(decoded);
        clips.put(n, c);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Loads the audio file into a Clip. Clip.open reads the data up front,
 * so the stream is closed immediately afterwards via try-with-resources
 * (the original never closed it — a file-handle leak per clip).
 * Failures are printed and swallowed, leaving {@code clip} null.
 */
public SFXClip(String fileName) {
    File file = new File(fileName);
    try (AudioInputStream stream = AudioSystem.getAudioInputStream(file)) {
        this.clip = AudioSystem.getClip();
        this.clip.open(stream);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/** Stop recording and save if requested.
 *
 *  Shows the save dialog; on accept the captured bytes are written out as a
 *  WAV file and a Record entry is added, otherwise the UI falls back to the
 *  transmission state. Finally, any still-held key is released.
 */
public void stopRecord() {
  RecordDialog dialog = new RecordDialog(window, 420, 150);
  if (dialog.getAcceptRecord()) {
    // Snapshot the chronometer into the new record entry.
    Record newRecord = new Record(dialog.getName(), References.CHRONOMETER.getMinute(), References.CHRONOMETER.getSecond(), References.CHRONOMETER.getHundredths());
    References.RECORD_PANEL.getRecordModel().addElement(newRecord);
    References.RECORD_PANEL.setUIStatus("stop");
    References.CHRONOMETER.stop();
    // Wrap the captured bytes as an audio stream; length is in frames,
    // hence the division by the frame size.
    recordData = byteArrayOutputStream.toByteArray();
    recordIS = new ByteArrayInputStream(recordData);
    recordAIS = new AudioInputStream(recordIS, audioFormat, recordData.length / audioFormat.getFrameSize());
    File wavFile = new File(newRecord.getRelativePath());
    try {
      AudioSystem.write(recordAIS, fileType, wavFile);
    } catch (Exception e) {
      // Best effort: a failed save leaves the record entry in place.
      e.printStackTrace();
    }
  } else {
    if (transmissionON) {
      References.RECORD_PANEL.setUIStatus("transmissionON");
    }
  }
  // Release a key that is still held down so the UI state stays consistent.
  if (References.KEYLISTENER_PANEL.isKeyIsDown()) {
    References.KEYLISTENER_PANEL.setKeyIsDown();
    References.KEYLISTENER_PANEL.getKeyReleasedAction().actionPerformed(null);
  }
}
private boolean loadAudioData(AudioInputStream as) throws IOException, UnsupportedAudioFileException { if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip->openAsClip()"); // first possibly convert this stream to PCM as = Toolkit.getPCMConvertedAudioInputStream(as); if (as == null) { return false; } loadedAudioFormat = as.getFormat(); long frameLen = as.getFrameLength(); int frameSize = loadedAudioFormat.getFrameSize(); long byteLen = AudioSystem.NOT_SPECIFIED; if (frameLen != AudioSystem.NOT_SPECIFIED && frameLen > 0 && frameSize != AudioSystem.NOT_SPECIFIED && frameSize > 0) { byteLen = frameLen * frameSize; } if (byteLen != AudioSystem.NOT_SPECIFIED) { // if the stream length is known, it can be efficiently loaded into memory readStream(as, byteLen); } else { // otherwise we use a ByteArrayOutputStream to load it into memory readStream(as); } // if everything went fine, we have now the audio data in // loadedAudio, and the byte length in loadedAudioByteLength return true; }
/**
 * Writes the stream as a WAVE file to the output stream, converting to
 * little-endian first when necessary.
 *
 * @return the number of bytes written
 */
public int write(AudioInputStream stream, Type fileType, OutputStream out)
        throws IOException {
    checkFormat(fileType, stream);
    AudioInputStream source = stream;
    if (source.getFormat().isBigEndian()) {
        source = toLittleEndian(source);
    }
    // NoCloseOutputStream keeps the caller's stream open after writer.close().
    final RIFFWriter writer = new RIFFWriter(new NoCloseOutputStream(out), "WAVE");
    write(source, writer);
    final int bytesWritten = (int) writer.getFilePointer();
    writer.close();
    return bytesWritten;
}
/**
 * Rejects anything this writer cannot handle: the type must be WAVE and
 * the stream's encoding must be PCM_FLOAT.
 *
 * @throws IllegalArgumentException on an unsupported type or encoding
 */
private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type)) {
        throw new IllegalArgumentException("File type " + type + " not supported.");
    }
    final AudioFormat fmt = stream.getFormat();
    if (!fmt.getEncoding().equals(Encoding.PCM_FLOAT)) {
        throw new IllegalArgumentException("File format " + fmt + " not supported.");
    }
}