private static void init() { try { // 44,100 samples per second, 16-bit audio, mono, signed PCM, little // Endian AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE); // the internal buffer is a fraction of the actual buffer size, this // choice is arbitrary // it gets divided because we can't expect the buffered data to line // up exactly with when // the sound card decides to push out its samples. buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3]; } catch (LineUnavailableException e) { System.out.println(e.getMessage()); } // no sound gets made before this call line.start(); }
/**
 * WAV files only.
 *
 * <p>Loads the given classpath resource into a {@link Clip} and registers it
 * in {@code clips} under {@code name}. Failures are reported on stdout and
 * swallowed (best-effort loading).
 *
 * @param name
 *            Name to store sound as
 * @param file
 *            Sound file
 */
public static void loadSound(String name, String file) {
    System.out.print("Loading sound file: \"" + file + "\" into clip: \"" + name + "\", ");
    // BUG FIX: the streams were only closed on the success path; a failure
    // in getAudioInputStream/open leaked them. try-with-resources closes
    // both streams on every path.
    try (BufferedInputStream in =
                 new BufferedInputStream(SoundPlayer.class.getResourceAsStream(file));
         AudioInputStream ain = AudioSystem.getAudioInputStream(in)) {
        Clip c = AudioSystem.getClip();
        c.open(ain);
        // Loop points 0 .. -1 mean "loop over the whole clip".
        c.setLoopPoints(0, -1);
        clips.put(name, c);
        System.out.println("Done.");
    } catch (Exception e) {
        System.out.println("Failed. (" + e.getMessage() + ")");
    }
}
public static AudioInputStream getPCMConvertedAudioInputStream(AudioInputStream ais) { // we can't open the device for non-PCM playback, so we have // convert any other encodings to PCM here (at least we try!) AudioFormat af = ais.getFormat(); if( (!af.getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) && (!af.getEncoding().equals(AudioFormat.Encoding.PCM_UNSIGNED))) { try { AudioFormat newFormat = new AudioFormat( AudioFormat.Encoding.PCM_SIGNED, af.getSampleRate(), 16, af.getChannels(), af.getChannels() * 2, af.getSampleRate(), Platform.isBigEndian()); ais = AudioSystem.getAudioInputStream(newFormat, ais); } catch (Exception e) { if (Printer.err) e.printStackTrace(); ais = null; } } return ais; }
public static void test2(AudioFormat inFormat1, AudioFormat inFormat2, AudioFormat outFormat) throws Exception { AudioInputStream inStream1 = new AudioInputStream(in, inFormat1, -1); System.out.println("Input Format1: " + printFormat(inStream1.getFormat())); // get a converted stream AudioInputStream stream1 = AudioSystem.getAudioInputStream(outFormat, inStream1); System.out.println("Output Format 1: " + printFormat(stream1.getFormat())); AudioInputStream inStream2 = new AudioInputStream(in, inFormat2, -1); System.out.println("Input Format1: " + printFormat(inStream2.getFormat())); // get a converted stream in big endian ulaw AudioInputStream stream2 = AudioSystem.getAudioInputStream(outFormat, inStream2); System.out.println("Output Format 2: " + printFormat(stream2.getFormat())); compareStreams(stream1, stream2); }
private boolean createSourceDataLine() { if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip.createSourceDataLine()"); try { DataLine.Info info = new DataLine.Info(SourceDataLine.class, loadedAudioFormat); if (!(AudioSystem.isLineSupported(info)) ) { if (DEBUG || Printer.err)Printer.err("Line not supported: "+loadedAudioFormat); // fail silently return false; } SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info); datapusher = new DataPusher(source, loadedAudioFormat, loadedAudio, loadedAudioByteLength); } catch (Exception e) { if (DEBUG || Printer.err)e.printStackTrace(); // fail silently return false; } if (datapusher==null) { // fail silently return false; } if (DEBUG || Printer.debug)Printer.debug("Created SourceDataLine."); return true; }
/** * Queues song for the audio player * @param player main instance of the AudioPlayer * @param event event triggered when command is sent * @param audioLink URL linking to audio file * @throws IOException thrown if connection could not be made * @throws UnsupportedAudioFileException thrown if audio file linked * is not playable */ private synchronized void queueSong(AudioPlayer player, MessageReceivedEvent event, String audioLink) throws IOException, UnsupportedAudioFileException { //Connection to server for music file //might be rejected because of no user agent URLConnection conn = new URL(audioLink.trim()).openConnection(); conn.setRequestProperty("User-Agent", rexCord.USER_AGENT); AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(conn.getInputStream()); player.queue(audioInputStream); String message = String.format( "Song is now queued! Your song is #%d on the queue.", player.getPlaylistSize()); rexCord.sendMessage(event.getChannel(), message); //Start playing music if there is nothing in the playlist. if (player.getPlaylistSize() == 0) { player.provide(); } }
/**
 * This method is a replacement for
 * AudioSystem.getAudioInputStream(AudioFormat, AudioInputStream), which is
 * used for audio format conversion at the stream level. This method includes
 * a workaround for converting from an mp3 AudioInputStream when the sketch
 * is running in an applet. The workaround was developed by the Tritonus team
 * and originally comes from the package javazoom.jlgui.basicplayer
 *
 * @param targetFormat
 *            the AudioFormat to convert the stream to
 * @param sourceStream
 *            the stream containing the unconverted audio
 * @return an AudioInputStream in the target format
 */
AudioInputStream getAudioInputStream(AudioFormat targetFormat, AudioInputStream sourceStream) {
    try {
        return AudioSystem.getAudioInputStream(targetFormat, sourceStream);
    } catch (IllegalArgumentException iae) {
        debug("Using AppletMpegSPIWorkaround to get codec");
        try {
            Class.forName("javazoom.spi.mpeg.sampled.convert.MpegFormatConversionProvider");
            return new javazoom.spi.mpeg.sampled.convert.MpegFormatConversionProvider()
                    .getAudioInputStream(targetFormat, sourceStream);
        } catch (ClassNotFoundException cnfe) {
            // BUG FIX: preserve the cause instead of discarding it, so the
            // original classloading failure is visible in stack traces.
            throw new IllegalArgumentException("Mpeg codec not properly installed", cnfe);
        }
    }
}
/**
 * Opens the underlying clip for this resource, installs a listener that
 * fires {@code REACHED_END} when playback stops at the end of the clip,
 * extracts the controls, and finally fires an {@code OPENED} event.
 *
 * @throws AudioException if the resource cannot be opened
 */
@Override
public void open() throws AudioException {
    try {
        this.audioInputStream = Audio.getAudioInputStream(this.resource);
        this.clip = AudioSystem.getClip();
        this.clip.open(this.audioInputStream);
        this.clip.addLineListener(event -> {
            // A STOP event at (or past) the end of the clip means playback
            // finished rather than being paused mid-stream.
            if (event.getType().equals(LineEvent.Type.STOP)
                    && this.clip.getMicrosecondPosition() >= this.clip.getMicrosecondLength()) {
                this.trigger(AudioEvent.Type.REACHED_END);
            }
        });
        this.controls = AbstractAudio.extractControls(this.clip, this.controls);
        this.open = true;
        this.trigger(AudioEvent.Type.OPENED);
    } catch (Exception exception) {
        throw new AudioException(exception);
    }
}
/**
 * Returns true if at least one soundcard is correctly installed
 * on the system.
 */
public static boolean isSoundcardInstalled() {
    boolean installed = false;
    try {
        if (AudioSystem.getMixerInfo().length > 0) {
            installed = AudioSystem.getSourceDataLine(null) != null;
        }
    } catch (Exception e) {
        System.err.println("Exception occured: "+e);
    }
    if (!installed) {
        System.err.println("Soundcard does not exist or sound drivers not installed!");
        System.err.println("This test requires sound drivers for execution.");
    }
    return installed;
}
/**
 * Creates a WaveData container from the specified url
 *
 * @param path
 *            URL to file
 * @return WaveData containing data, or null if a failure occured
 */
public static WaveData create(URL path) {
    try {
        AudioInputStream stream =
                AudioSystem.getAudioInputStream(new BufferedInputStream(path.openStream()));
        return create(stream);
    } catch (Exception e) {
        org.lwjgl.LWJGLUtil.log("Unable to create from: " + path);
        e.printStackTrace();
        return null;
    }
}
/**
 * Plays the given classpath audio resource once, if sound is enabled.
 * Errors are printed and otherwise ignored (best-effort playback).
 *
 * @param audioFileName resource path of the audio file to play
 */
private synchronized void playSound(final String audioFileName) {
    if (!isSoundEnabled) {
        return;
    }
    try {
        InputStream inputStream = MainWindow.class.getResourceAsStream(audioFileName);
        if (inputStream != null) {
            // BUG FIX: the Clip used to be acquired before the resource check,
            // so a missing resource leaked an opened-but-unused (never closed)
            // Clip. Acquire it only once the resource is known to exist.
            Clip clip = AudioSystem.getClip();
            AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(inputStream);
            clip.open(audioInputStream);
            clip.start();
        } else {
            System.out.println("Input stream not valid");
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/** * Play a sound at a given frequency freq during duration (in seconds) with volume as strenght * <br/><br/> * <code>SoundGenerator.playSound(440.0,1.0,0.5,SoundGenerator.FADE_LINEAR,SoundGenerator.WAVE_SIN);</code><br/> * Available fades : FADE_NONE, FADE_LINEAR, FADE_QUADRATIC<br/> * Available waves : WAVE_SIN, WAVE_SQUARE, WAVE_TRIANGLE, WAVE_SAWTOOTH<br/> */ public static void playSound(double freq,double duration,double volume,byte fade,byte wave){ double[] soundData = generateSoundData(freq,duration,volume,fade,wave); byte[] freqdata = new byte[soundData.length]; for(int i = 0;i < soundData.length;i++) { freqdata[i] = (byte)soundData[i]; } // Play it try { final AudioFormat af = new AudioFormat(SAMPLE_RATE, 8, 1, true, true); SourceDataLine line = AudioSystem.getSourceDataLine(af); line.open(af, SAMPLE_RATE); line.start(); line.write(freqdata, 0, freqdata.length); line.drain(); line.close(); }catch(LineUnavailableException e) { e.printStackTrace(); } }
/**
 * Re-creates the audio buffers and the SourceDataLine for the current
 * sample rate and buffer size, closing any previously opened line.
 */
public void restartSDL() {
    AudioFormat format = new AudioFormat(sys.getSampleRate(), 16, 2, true, false);
    bufptr = 0;
    // Stereo interleaved: 2 ints per frame for the configured buffer length.
    int sampleCount = (int) ((sys.getSampleRate() / 1000.0) * sys.getBufferSize()) * 2;
    audioints = new int[sampleCount];
    if (scope != null) {
        scope.setAudio(audioints);
    }
    // 2 bytes per 16-bit sample.
    audiobuffer = new byte[audioints.length * 2];
    try {
        if (sdl != null) {
            sdl.close();
        }
        sdl = AudioSystem.getSourceDataLine(format);
        sdl.open(format, audiobuffer.length * 3);
        sdl.start();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    }
}
public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { //$$fb the following check must come first ! Otherwise // the next frame length check may throw an IOException and // interrupt iterating File Writers. (see bug 4351296) // throws IllegalArgumentException if not supported AiffFileFormat aiffFileFormat = (AiffFileFormat)getAudioFileFormat(fileType, stream); // we must know the total data length to calculate the file length if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { throw new IOException("stream length not specified"); } int bytesWritten = writeAiffFile(stream, aiffFileFormat, out); return bytesWritten; }
/**
 * Verifies the frame length after the stream was saved/read to/from file.
 *
 * @param afw  writer used to save the stream
 * @param type audio file type to write
 * @param ais  stream to save and re-read
 * @throws IOException if temp-file handling fails
 */
private static void testAfterSaveToFile(final AudioFileWriter afw,
                                        final AudioFileFormat.Type type,
                                        AudioInputStream ais) throws IOException {
    final File temp = File.createTempFile("sound", ".tmp");
    try {
        afw.write(ais, type, temp);
        // BUG FIX: the re-read stream was not closed when validate() threw;
        // close it before validating, in a finally block.
        final long frameLength;
        AudioInputStream reread = AudioSystem.getAudioInputStream(temp);
        try {
            frameLength = reread.getFrameLength();
        } finally {
            reread.close();
        }
        validate(frameLength);
    } catch (IllegalArgumentException | UnsupportedAudioFileException ignored) {
        // unsupported type/format combinations are not failures for this test
    } finally {
        // IDIOM: File.toPath() replaces Paths.get(temp.getAbsolutePath()).
        Files.delete(temp.toPath());
    }
}
public static void main(String[] args) throws Exception { // 1st checks Encoding.PCM_FLOAT is available pcmFloatEnc = Encoding.PCM_FLOAT; Encoding[] encodings = AudioSystem.getTargetEncodings(pcmFloatEnc); out("conversion from PCM_FLOAT to " + encodings.length + " encodings:"); for (Encoding e: encodings) { out(" - " + e); } if (encodings.length == 0) { testFailed = true; } test(Encoding.PCM_SIGNED); test(Encoding.PCM_UNSIGNED); if (testFailed) { throw new Exception("test failed"); } out("test passed."); }
/** Creates a new instance of test. Opens the microphone input as the target line. * To start the reporting, {@link #start} the thread. * @throws LineUnavailableException if microphone input is not available */ public VirtualDrummerMicrophoneInput () throws LineUnavailableException{ // getAudioInfo(); // prints lots of useless information format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,sampleRate,8,1,1,sampleRate,false); DataLine.Info dlinfo = new DataLine.Info(TargetDataLine.class, format); if ( AudioSystem.isLineSupported(dlinfo) ){ targetDataLine = (TargetDataLine)AudioSystem.getLine(dlinfo); } targetDataLine.open(format,bufferSize); bufferSize=targetDataLine.getBufferSize(); gui = new DrumSoundDetectorDemo(); gui.setVirtualDrummerMicrophoneInput(this); }
@Override public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { Objects.requireNonNull(stream); Objects.requireNonNull(fileType); Objects.requireNonNull(out); //$$fb the following check must come first ! Otherwise // the next frame length check may throw an IOException and // interrupt iterating File Writers. (see bug 4351296) // throws IllegalArgumentException if not supported AiffFileFormat aiffFileFormat = (AiffFileFormat)getAudioFileFormat(fileType, stream); // we must know the total data length to calculate the file length if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { throw new IOException("stream length not specified"); } return writeAiffFile(stream, aiffFileFormat, out); }
/** * Returns all available mixers. * * @return A List of available Mixers */ public List<String> getMixers() { List<String> mixers = new ArrayList<>(); // Obtains an array of mixer info objects that represents the set of // audio mixers that are currently installed on the system. Mixer.Info[] mixerInfos = AudioSystem.getMixerInfo(); if (mixerInfos != null) Arrays.stream(mixerInfos).forEach(mInfo -> { // line info Line.Info lineInfo = new Line.Info(SourceDataLine.class); Mixer mixer = AudioSystem.getMixer(mInfo); // if line supported if (mixer.isLineSupported(lineInfo)) mixers.add(mInfo.getName()); }); return mixers; }
/**
 * Interactive entry point: records microphone input to /tmp/out.sw between
 * two ENTER key presses, then exits.
 */
public static void main(String[] args) throws Exception {
    AudioFormat format = ManualTestEchoCancel.getFormat();
    final Mixer mixer = AudioSystem.getMixer(null);
    Mic mic = new Mic(mixer, format, ManualTestEchoCancel.frameSamples);
    mic.start();
    try (Scanner console = new Scanner(System.in)) {
        System.out.println("Press ENTER to start recording");
        console.nextLine();
        try (FileOutputStream recording = new FileOutputStream("/tmp/out.sw")) {
            mic.setRecord(recording);
            System.out.println("Press ENTER to stop recording");
            console.nextLine();
            // Detach the sink before the stream is closed.
            mic.setRecord(null);
        }
    }
    System.exit(0);
}
/**
 * Entry point: fails if AudioSystem reports the same audio file type twice.
 */
public static void main(String[] args) throws Exception {
    boolean foundDuplicates = false;
    AudioFileFormat.Type[] aTypes = AudioSystem.getAudioFileTypes();
    // PERF/IDIOM FIX: the original compared every ordered pair (i, j) with
    // i != j — checking each pair twice — and kept scanning after a duplicate
    // was found. Scan unordered pairs once and stop at the first duplicate.
    outer:
    for (int i = 0; i < aTypes.length; i++) {
        for (int j = i + 1; j < aTypes.length; j++) {
            if (aTypes[i].equals(aTypes[j])) {
                foundDuplicates = true;
                break outer;
            }
        }
    }
    if (foundDuplicates) {
        throw new Exception("Test failed");
    } else {
        System.out.println("Test passed");
    }
}
/**
 * Builds a single-instrument soundbank backed by the given audio file, or
 * returns {@code null} if the file is not a readable, supported audio file.
 */
public Soundbank getSoundbank(File file) throws InvalidMidiDataException, IOException {
    try {
        // Probe that the file is a supported audio file; the stream itself
        // is not needed afterwards.
        AudioSystem.getAudioInputStream(file).close();

        ModelByteBufferWavetable osc = new ModelByteBufferWavetable(
                new ModelByteBuffer(file, 0, file.length()), -4800);
        ModelPerformer performer = new ModelPerformer();
        performer.getOscillators().add(osc);

        SimpleInstrument ins = new SimpleInstrument();
        ins.add(performer);
        SimpleSoundbank sbk = new SimpleSoundbank();
        sbk.addInstrument(ins);
        return sbk;
    } catch (UnsupportedAudioFileException | IOException probeFailure) {
        // Not usable as audio: signal "no soundbank" rather than propagate.
        return null;
    }
}
/**
 * Entry point: runs the test against the default device and every installed
 * mixer, passing as long as at least one run succeeded without crashing.
 */
public static void main(String[] args) throws Exception {
    if (!isSoundcardInstalled()) {
        return;
    }
    bais.mark(0);
    run(null);
    for (Mixer.Info info : AudioSystem.getMixerInfo()) {
        try {
            run(AudioSystem.getMixer(info));
        } catch (Exception ignored) {
            // some mixers cannot be opened on this system; skip them
        }
    }
    if (success > 0) {
        out("No crash -> Test passed");
    } else {
        System.err.println("Test could not execute: please install an audio device");
    }
}
/**
 * Creates a WaveData container from the specified inputstream
 *
 * @param is InputStream to read from
 * @return WaveData containing data, or null if a failure occured
 */
public static WaveData create(InputStream is) {
    try {
        AudioInputStream audio = AudioSystem.getAudioInputStream(is);
        return create(audio);
    } catch (Exception e) {
        org.lwjgl.LWJGLUtil.log("Unable to create from inputstream");
        e.printStackTrace();
        return null;
    }
}
/**
 * Opens this clip from the given stream, reading all audio data into memory
 * first. When the stream length is known the buffer is allocated exactly;
 * otherwise the data is accumulated incrementally.
 *
 * @param stream source audio; must be in a format a converter exists for
 * @throws LineUnavailableException if the underlying line cannot be opened
 * @throws IOException              if reading the stream fails
 * @throws IllegalStateException    if the clip is already open
 * @throws IllegalArgumentException if no converter exists for the format
 */
@Override
public void open(AudioInputStream stream) throws LineUnavailableException, IOException {
    if (isOpen()) {
        // BUG FIX: corrected the typo "frame lengh" -> "frame length" in the
        // user-facing error message.
        throw new IllegalStateException("Clip is already open with format "
                + getFormat() + " and frame length of " + getFrameLength());
    }
    if (AudioFloatConverter.getConverter(stream.getFormat()) == null)
        throw new IllegalArgumentException("Invalid format : " + stream.getFormat().toString());
    if (stream.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
        // Known length: allocate the exact byte count and fill it.
        byte[] data = new byte[(int) stream.getFrameLength() * stream.getFormat().getFrameSize()];
        int readsize = 512 * stream.getFormat().getFrameSize();
        int len = 0;
        while (len != data.length) {
            if (readsize > data.length - len)
                readsize = data.length - len;
            int ret = stream.read(data, len, readsize);
            if (ret == -1)
                break; // premature end of stream: open with what was read
            if (ret == 0)
                Thread.yield(); // avoid a tight spin on a stalled stream
            len += ret;
        }
        open(stream.getFormat(), data, 0, len);
    } else {
        // Unknown length: buffer incrementally until end of stream.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] b = new byte[512 * stream.getFormat().getFrameSize()];
        int r = 0;
        while ((r = stream.read(b)) != -1) {
            if (r == 0)
                Thread.yield();
            baos.write(b, 0, r);
        }
        open(stream.getFormat(), baos.toByteArray(), 0, baos.size());
    }
}
/**
 * Creates a AiffData container from the specified url
 *
 * @param path URL to file
 * @return AiffData containing data, or null if a failure occured
 */
public static AiffData create(URL path) {
    try {
        BufferedInputStream input = new BufferedInputStream(path.openStream());
        return create(AudioSystem.getAudioInputStream(input));
    } catch (Exception e) {
        org.lwjgl.LWJGLUtil.log("Unable to create from: " + path);
        e.printStackTrace();
        return null;
    }
}
public static void main(String[] args) throws Exception { boolean res=true; Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo(); System.out.println(mixerInfo.length+" mixers on system."); if (mixerInfo.length == 0) { System.out.println("Cannot execute test. Not Failed!"); } else { for (int i = 0; i < mixerInfo.length; i++) { Mixer mixer = AudioSystem.getMixer(mixerInfo[i]); System.out.println(); System.out.println(mixer+":"); showMixerLines(mixer.getSourceLineInfo()); showMixerLines(mixer.getTargetLineInfo()); } res=ok16 && ok32; } if (res) { System.out.println("Test passed"); } else { System.out.println("Test failed"); throw new Exception("Test failed"); } //ystem.exit(res?0:1); }
public static void main(String args[]) throws Exception { boolean res = true; try { AudioInputStream ais = new AudioInputStream( new ByteArrayInputStream(new byte[2000]), new AudioFormat(8000.0f, 8, 1, false, false), 2000); // AudioFormat format = ais.getFormat(); DataLine.Info info = new DataLine.Info(Clip.class, format, ((int) ais.getFrameLength() * format .getFrameSize())); Clip clip = (Clip) AudioSystem.getLine(info); clip.open(); FloatControl rateControl = (FloatControl) clip.getControl( FloatControl.Type.SAMPLE_RATE); int c = 0; while (c++ < 10) { clip.stop(); clip.setFramePosition(0); clip.start(); for (float frq = 22000; frq < 44100; frq = frq + 100) { try { Thread.currentThread().sleep(20); } catch (Exception e) { break; } rateControl.setValue(frq); } } } catch (Exception ex) { ex.printStackTrace(); res = ex.getMessage().indexOf( "This method should not have been invoked!") < 0; } if (res) { System.out.println("Test passed"); } else { System.out.println("Test failed"); throw new Exception("Test failed"); } }
/**
 * Plays the given classpath audio resource once. The clip closes itself
 * when playback stops; failures are logged and swallowed.
 *
 * @param filename classpath location of the sound resource
 */
public static void playSound(String filename) {
    URL resource = ClassLoader.getSystemClassLoader().getResource(filename);
    try {
        final Clip clip = (Clip) AudioSystem.getLine(new Line.Info(Clip.class));
        clip.addLineListener(event -> {
            if (event.getType() == LineEvent.Type.STOP) {
                clip.close();
            }
        });
        // BUG FIX: the AudioInputStream was never closed. Clip.open loads
        // the audio data during open(), so the stream can be closed as soon
        // as open() returns — try-with-resources guarantees it.
        try (AudioInputStream ais = AudioSystem.getAudioInputStream(resource)) {
            clip.open(ais);
        }
        clip.start();
    } catch (Exception e) {
        logger.error("Failed to play sound " + filename, e);
    }
}
/**
 * Loads the death-jingle resource and either loops or stops it.
 *
 * @param tempo currently unused; kept for interface compatibility
 * @param flag  true to loop the clip, false to stop it
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws LineUnavailableException     declared for interface compatibility
 * @throws IOException                  declared for interface compatibility
 * @throws InterruptedException         declared for interface compatibility
 */
public void playAudio(int tempo, boolean flag) throws UnsupportedAudioFileException,
        LineUnavailableException, IOException, InterruptedException {
    // BUG FIX: removed an unused Clip obtained from AudioSystem.getClip()
    // that was never opened or closed (a leaked line), and a duplicate
    // lookup of the same resource URL.
    URL url = getClass().getResource("/audio/smb_die.wav");
    System.out.println(url);
    this.audio = Applet.newAudioClip(url);
    if (flag) {
        audio.loop();
    } else {
        audio.stop();
    }
}
/**
 * Creates a AiffData container from the specified bytes
 *
 * @param buffer array of bytes containing the complete Aiff file
 * @return AiffData containing data, or null if a failure occured
 */
public static AiffData create(byte[] buffer) {
    try {
        InputStream source = new BufferedInputStream(new ByteArrayInputStream(buffer));
        return create(AudioSystem.getAudioInputStream(source));
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
/**
 * Glitches an image by round-tripping its raw bytes through a lossy WAV
 * encoding, rasterizing the result, and alpha-blending the original image
 * back on top.
 *
 * <p>The "bitRateBlend" glitch parameter (an int) is scaled to a 0.1–0.9
 * alpha; values outside that range abort the glitch and return null.
 *
 * @param inputImageBytes encoded bytes of the source image
 * @return encoded bytes of the glitched image, or null if the blend
 *         parameter is out of range
 * @throws Exception propagated from image/audio processing
 */
@Override public byte[] glitchPixels(byte[] inputImageBytes) throws Exception {
    int audioBitRate = ((Integer) getPixelGlitchParameters().get("bitRateBlend")).intValue();
    // Scale the parameter to a fractional blend factor; only 0.1–0.9 is valid.
    float bitRateBlend = (float) audioBitRate / 10;
    if(bitRateBlend < 0.1F || bitRateBlend > 0.9F) {
        return null;
    }
    BufferedImage inputImage = ImageUtil.getImageFromBytes(inputImageBytes);
    InputStream imageInputStream = new ByteArrayInputStream(inputImageBytes);
    // Reinterpret the raw image bytes as ULAW audio with randomized sample
    // rates, so the WAV round-trip below mangles the data nondeterministically.
    AudioInputStream distortionAudioStream = new AudioInputStream(imageInputStream,
            new AudioFormat(AudioFormat.Encoding.ULAW,
                    ThreadLocalRandom.current().nextInt(8000, 20000), 8, 5, 9,
                    ThreadLocalRandom.current().nextInt(8000, 20000), true),
            inputImageBytes.length);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    AudioSystem.write(distortionAudioStream, Type.WAVE, outputStream);
    // Dump the mangled bytes straight into the pixel buffer of an ABGR image.
    BufferedImage outputImage = new BufferedImage(inputImage.getWidth(), inputImage.getHeight(),
            BufferedImage.TYPE_4BYTE_ABGR);
    byte[] imageData = ((DataBufferByte) outputImage.getRaster().getDataBuffer()).getData();
    // NOTE(review): assumes the WAV output is no larger than the pixel buffer
    // (width * height * 4 bytes); a larger output would throw here — confirm.
    System.arraycopy(outputStream.toByteArray(),0,imageData,0,outputStream.toByteArray().length);
    int[] abgrOffsets = {3, 2, 1, 0};
    DataBuffer outputBuffer = new DataBufferByte(imageData, imageData.length);
    // Rebuild a raster/color model over the glitched bytes (4 bytes per pixel).
    WritableRaster raster = Raster.createInterleavedRaster(outputBuffer,
            inputImage.getWidth(), inputImage.getHeight(), 4 * inputImage.getWidth(), 4,
            abgrOffsets, null);
    ColorModel colorModel = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_sRGB),
            true, false, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
    BufferedImage rasterizedImage = new BufferedImage(colorModel, raster,
            colorModel.isAlphaPremultiplied(), null);
    // Upscale 4x, blend the original image over the glitch at the configured
    // alpha, then crop back to the original dimensions.
    rasterizedImage = resizeImage(rasterizedImage, inputImage.getWidth() * 4,
            inputImage.getHeight() * 4);
    Graphics2D g2d = rasterizedImage.createGraphics();
    g2d.setComposite(AlphaComposite.SrcOver.derive(bitRateBlend));
    g2d.drawImage(inputImage, 0, 0, null);
    g2d.dispose();
    rasterizedImage = rasterizedImage.getSubimage(0, 0,
            inputImage.getWidth(), inputImage.getHeight());
    return ImageUtil.getImageBytes(rasterizedImage);
}
/** * Returns AudioFileFormat from URL. */ public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException { if (TDebug.TraceAudioFileReader) { TDebug.out("MpegAudioFileReader.getAudioFileFormat(URL): begin"); } long lFileLengthInBytes = AudioSystem.NOT_SPECIFIED; URLConnection conn = url.openConnection(); // Tell shoucast server (if any) that SPI support shoutcast stream. conn.setRequestProperty("Icy-Metadata", "1"); InputStream inputStream = conn.getInputStream(); AudioFileFormat audioFileFormat = null; try { audioFileFormat = getAudioFileFormat(inputStream, lFileLengthInBytes); } finally { inputStream.close(); } if (TDebug.TraceAudioFileReader) { TDebug.out("MpegAudioFileReader.getAudioFileFormat(URL): end"); } return audioFileFormat; }
/**
 * Returns the current stream position in bytes, taken from the decoder's
 * property map (mp3 first, then ogg), or AudioSystem.NOT_SPECIFIED when
 * no position is available.
 *
 * @return byte position, or AudioSystem.NOT_SPECIFIED
 */
public int getPositionByte() {
    if (audioProperties == null) {
        return AudioSystem.NOT_SPECIFIED;
    }
    if (audioProperties.containsKey("mp3.position.byte")) {
        return ((Integer) audioProperties.get("mp3.position.byte")).intValue();
    }
    if (audioProperties.containsKey("ogg.position.byte")) {
        return ((Integer) audioProperties.get("ogg.position.byte")).intValue();
    }
    return AudioSystem.NOT_SPECIFIED;
}
private void jButton16ActionPerformed( java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton16ActionPerformed System.out.print("\7"); //Вот это вот издает звук Clip clip = null; try { clip = AudioSystem.getClip(); } catch (LineUnavailableException ex) { ex.printStackTrace(); } byte[] buf = new byte[1024]; for (int j = 0; j < buf.length; j++) { buf[j] = (byte) j; } AudioFormat af = new AudioFormat( 11025f, 8, // sample size in bits 2, // channels true, // signed false // bigendian ); try { byte[] b = buf; AudioInputStream ais = new AudioInputStream(new ByteArrayInputStream(b), af, 512); clip.open(ais); } catch (Exception e) { e.printStackTrace(); } }
/**
 * Gets the Mixer to use. Depends upon selectedMixerIndex being defined.
 *
 * <p>Returns null for "default", the last installed mixer for "last", and
 * otherwise the mixer at the numeric index held in selectedMixerIndex.
 *
 * @see #newProperties
 */
private Mixer getSelectedMixer() {
    if (selectedMixerIndex.equals("default")) {
        return null;
    }
    Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
    int index = selectedMixerIndex.equals("last")
            ? mixerInfo.length - 1
            : Integer.parseInt(selectedMixerIndex);
    return AudioSystem.getMixer(mixerInfo[index]);
}
/**
 * Creates a WaveData container from the specified inputstream
 *
 * @param is
 *            InputStream to read from
 * @return WaveData containing data, or null if a failure occured
 */
public static WaveData create(InputStream is) {
    WaveData result = null;
    try {
        result = create(AudioSystem.getAudioInputStream(is));
    } catch (Exception e) {
        org.lwjgl.LWJGLUtil.log("Unable to create from inputstream");
        e.printStackTrace();
    }
    return result;
}