private static void init() { try { // 44,100 samples per second, 16-bit audio, mono, signed PCM, little // Endian AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE); // the internal buffer is a fraction of the actual buffer size, this // choice is arbitrary // it gets divided because we can't expect the buffered data to line // up exactly with when // the sound card decides to push out its samples. buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3]; } catch (LineUnavailableException e) { System.out.println(e.getMessage()); } // no sound gets made before this call line.start(); }
/** Creates a new instance of test. Opens the microphone input as the target line. * To start the reporting, {@link #start} the thread. * @throws LineUnavailableException if microphone input is not available */ public VirtualDrummerMicrophoneInput () throws LineUnavailableException{ // getAudioInfo(); // prints lots of useless information format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,sampleRate,8,1,1,sampleRate,false); DataLine.Info dlinfo = new DataLine.Info(TargetDataLine.class, format); if ( AudioSystem.isLineSupported(dlinfo) ){ targetDataLine = (TargetDataLine)AudioSystem.getLine(dlinfo); } targetDataLine.open(format,bufferSize); bufferSize=targetDataLine.getBufferSize(); gui = new DrumSoundDetectorDemo(); gui.setVirtualDrummerMicrophoneInput(this); }
/**
 * Manual test: loops a raw 16-bit mono sound file ("remote.sw") through a
 * JitterResampler and plays the resampled output on the default mixer.
 *
 * @param args command-line options parsed into {@link AbstractRcomArgs}
 * @throws Exception on any audio or I/O error
 */
public static void main(String[] args) throws Exception {
    AbstractRcomArgs a = new AbstractRcomArgs();
    UtilCli.parse(a, args, true);
    File folder = new File("/home/rizsi/tmp/video");
    byte[] data = UtilFile.loadFile(new File(folder, "remote.sw"));
    AudioFormat format = ManualTestEchoCancel.getFormat();
    final Mixer mixer = AudioSystem.getMixer(null);
    DataLine.Info info2 = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine s = (SourceDataLine) mixer.getLine(info2);
    // 2 bytes per 16-bit sample
    s.open(format, framesamples * 2);
    s.start();
    try (LoopInputStream lis = new LoopInputStream(data)) {
        try (JitterResampler rs = new JitterResampler(a, 8000, framesamples, 2)) {
            new FeedThread(lis, rs).start();
            final byte[] buffer = new byte[framesamples * 2]; // stray ';;' removed
            // Pump resampled audio to the sound card forever.
            while (true) {
                rs.readOutput(buffer);
                s.write(buffer, 0, buffer.length);
            }
        }
    }
}
private boolean createSourceDataLine() { if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip.createSourceDataLine()"); try { DataLine.Info info = new DataLine.Info(SourceDataLine.class, loadedAudioFormat); if (!(AudioSystem.isLineSupported(info)) ) { if (DEBUG || Printer.err)Printer.err("Line not supported: "+loadedAudioFormat); // fail silently return false; } SourceDataLine source = (SourceDataLine) AudioSystem.getLine(info); datapusher = new DataPusher(source, loadedAudioFormat, loadedAudio, loadedAudioByteLength); } catch (Exception e) { if (DEBUG || Printer.err)e.printStackTrace(); // fail silently return false; } if (datapusher==null) { // fail silently return false; } if (DEBUG || Printer.debug)Printer.debug("Created SourceDataLine."); return true; }
/**
 * Loads the sound file at the given index into a Clip. On any error the
 * clip is set to null and the failure is logged via Tools.err.
 *
 * @param objPjuggleMasterPro application context providing the code base path
 * @param bytPsoundFileIndex  index into Constants.strS_FILE_SOUND_NAME_A
 */
public ExtendedClip(JuggleMasterPro objPjuggleMasterPro, byte bytPsoundFileIndex) {
    this.bytGsoundFileIndex = bytPsoundFileIndex;
    try {
        final AudioInputStream objLaudioInputStream =
            AudioSystem.getAudioInputStream(new File(Strings.doConcat(
                objPjuggleMasterPro.strS_CODE_BASE,
                Constants.strS_FILE_NAME_A[Constants.intS_FILE_FOLDER_SOUNDS],
                objPjuggleMasterPro.chrGpathSeparator,
                Constants.strS_FILE_SOUND_NAME_A[bytPsoundFileIndex])));
        try {
            final AudioFormat objLaudioFormat = objLaudioInputStream.getFormat();
            final DataLine.Info objLdataLineInfo =
                new DataLine.Info(Clip.class, objLaudioFormat,
                    (int) objLaudioInputStream.getFrameLength() * objLaudioFormat.getFrameSize());
            this.objGclip = (Clip) AudioSystem.getLine(objLdataLineInfo);
            this.objGclip.open(objLaudioInputStream);
        } finally {
            // Previously never closed, leaking a file handle; Clip.open()
            // has fully read the stream's data by this point.
            objLaudioInputStream.close();
        }
    } catch (final Throwable objPthrowable) {
        Tools.err("Error while initializing sound : ",
            Constants.strS_FILE_SOUND_NAME_A[bytPsoundFileIndex]);
        this.objGclip = null;
    }
}
@Override public int getMaxLines(Line.Info info) { Line.Info fullInfo = getLineInfo(info); // if it's not supported at all, return 0. if (fullInfo == null) { return 0; } if (fullInfo instanceof DataLine.Info) { // DirectAudioDevices should mix ! return getMaxSimulLines(); } return 0; }
@Override public void startPlayback(final ISyncAudioSource resampler) { final AudioFormat format = StreamSourceAudio.getFormat(); new Thread("Audio output") { private byte[] buffer; public void run() { try { DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); try(SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info)) { line.open(format, StreamSourceAudio.requestBufferSize); line.start(); buffer=new byte[line.getBufferSize()]; while(!resampler.isClosed()) { resampler.readOutput(buffer); line.write(buffer, 0, buffer.length); } } } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } }; }.start(); }
/**
 * Regression check: opens a Clip on the given data, waits for the START
 * event via the shared 'go' latch, then spins until the clip reports
 * neither running nor active. A hang in either loop is the failure mode.
 *
 * @param format format of the PCM data
 * @param data   raw audio bytes to load into the clip
 * @throws Exception if the line cannot be obtained or opened
 */
private static void test(final AudioFormat format, final byte[] data)
        throws Exception {
    final Line.Info info = new DataLine.Info(Clip.class, format);
    final Clip clip = (Clip) AudioSystem.getLine(info);
    go = new CountDownLatch(1);
    // Release the latch as soon as playback actually starts.
    clip.addLineListener(event -> {
        if (event.getType().equals(LineEvent.Type.START)) {
            go.countDown();
        }
    });
    clip.open(format, data, 0, data.length);
    clip.start();
    go.await();
    while (clip.isRunning()) {
        // This loop should not hang
    }
    while (clip.isActive()) {
        // This loop should not hang
    }
    clip.close();
}
/**
 * Opens the line without an explicit format and checks that the format it
 * then reports matches the expected one, updating the global passed/failed
 * counters. Exceptions are logged but deliberately not counted as failures.
 */
private static void doLine2(DataLine line, AudioFormat format) {
    try {
        System.out.println(" - call to open()");
        line.open();
        try {
            AudioFormat actual = line.getFormat();
            System.out.println(" - line has format: " + actual);
            if (actual.matches(format)) {
                passed++;
            } else {
                System.out.println("## Error: expected this format: " + format);
                failed++;
            }
        } finally {
            line.close();
            System.out.println(" - closed");
        }
    } catch (Throwable t) {
        System.out.println(" - Caught exception. Not failed.");
        System.out.println(" - " + t.toString());
    }
}
/**
 * Runs the doLine1 check on a Clip obtained from the given mixer.
 * Null mixers and unsupported lines are skipped; exceptions are logged
 * but deliberately not counted as failures.
 */
private static void doMixerClip(Mixer mixer, AudioFormat format) {
    if (mixer == null) {
        return;
    }
    try {
        System.out.println("Clip from mixer " + mixer + ":");
        System.out.println(" " + mixer.getMixerInfo());
        DataLine.Info info = new DataLine.Info(Clip.class, format);
        if (!mixer.isLineSupported(info)) {
            System.out.println(" - Line not supported");
            return;
        }
        Clip clip = (Clip) mixer.getLine(info);
        doLine1(clip, format);
    } catch (Throwable t) {
        System.out.println(" - Caught exception. Not failed.");
        System.out.println(" - " + t.toString());
    }
}
/**
 * Runs the doLine1 and doLine2 checks on a SourceDataLine obtained from
 * the given mixer. Null mixers and unsupported lines are skipped;
 * exceptions are logged but deliberately not counted as failures.
 */
private static void doMixerSDL(Mixer mixer, AudioFormat format) {
    if (mixer == null) {
        return;
    }
    try {
        System.out.println("SDL from mixer " + mixer + ":");
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        if (!mixer.isLineSupported(info)) {
            System.out.println(" - Line not supported");
            return;
        }
        SourceDataLine sdl = (SourceDataLine) mixer.getLine(info);
        doLine1(sdl, format);
        doLine2(sdl, format);
    } catch (Throwable t) {
        System.out.println(" - Caught exception. Not failed.");
        System.out.println(" - " + t.toString());
    }
}
/**
 * Runs the doLine1 and doLine2 checks on a TargetDataLine obtained from
 * the given mixer. Null mixers and unsupported lines are skipped;
 * exceptions are logged but deliberately not counted as failures.
 */
private static void doMixerTDL(Mixer mixer, AudioFormat format) {
    if (mixer == null) {
        return;
    }
    try {
        System.out.println("TDL from mixer " + mixer + ":");
        DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
        if (!mixer.isLineSupported(info)) {
            System.out.println(" - Line not supported");
            return;
        }
        TargetDataLine tdl = (TargetDataLine) mixer.getLine(info);
        doLine1(tdl, format);
        doLine2(tdl, format);
    } catch (Throwable t) {
        System.out.println(" - Caught exception. Not failed.");
        System.out.println(" - " + t.toString());
    }
}
/**
 * For every DataLine.Info advertised by the mixer, prints the line and its
 * simultaneous-line limit, then for each supported format asks
 * getOtherEndianOrSign for the opposite-endianness/sign variant and (when
 * one exists) verifies via checkFormat that it is also listed.
 *
 * @param mixer the mixer whose lines are inspected
 * @param infos line infos to check (non-DataLine entries are ignored)
 */
public static void checkLines(Mixer mixer, Line.Info[] infos) {
    for (int i = 0; i<infos.length; i++) {
        try {
            if (infos[i] instanceof DataLine.Info) {
                DataLine.Info info = (DataLine.Info) infos[i];
                System.out.println(" Line "+info+" (max. "+mixer.getMaxLines(info)+" simultaneously): ");
                AudioFormat[] formats = info.getFormats();
                for (int f = 0; f < formats.length; f++) {
                    try {
                        AudioFormat otherEndianOrSign = getOtherEndianOrSign(formats[f]);
                        if (otherEndianOrSign != null) {
                            checkFormat(formats, otherEndianOrSign);
                        }
                    } catch (Exception e1) {
                        // per-format failures are reported but do not stop the scan
                        out(" Unexpected exception when getting a format: "+e1);
                    }
                }
            }
        } catch (Exception e) {
            // per-line failures are reported but do not stop the scan
            out("  Unexpected exception when getting a line: "+e);
        }
    }
}
void playRecorded(AudioFormat format, byte[] data) throws Exception { //SourceDataLine line = AudioSystem.getSourceDataLine(format); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); SourceDataLine line = (SourceDataLine)AudioSystem.getLine(info); line.open(); line.start(); int remaining = data.length; while (remaining > 0) { int avail = line.available(); if (avail > 0) { if (avail > remaining) avail = remaining; int written = line.write(data, data.length - remaining, avail); remaining -= written; log("Playing: " + written + " bytes written"); } else { delay(100); } } line.drain(); line.stop(); }
/**
 * Starts the alarm horn: when no clip is already running and the bell is
 * enabled in the preference store, loads the sound file and loops it
 * continuously. Always refreshes the bell icon if it is still displayed.
 *
 * @throws UnsupportedAudioFileException if the sound file format is unknown
 * @throws IOException on read errors
 * @throws LineUnavailableException if no clip line is available
 */
private void enableHorn() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    if ((this.clip == null || !this.clip.isRunning())
            && Activator.getDefault().getPreferenceStore().getBoolean(PreferenceConstants.BELL_ACTIVATED_KEY)) {
        // try-with-resources: the stream was previously never closed;
        // Clip.open() has read all of its data once it returns.
        try (AudioInputStream sound = AudioSystem.getAudioInputStream(this.soundFile)) {
            final DataLine.Info info = new DataLine.Info(Clip.class, sound.getFormat());
            this.clip = (Clip) AudioSystem.getLine(info);
            this.clip.open(sound);
            this.clip.loop(Clip.LOOP_CONTINUOUSLY);
        }
    }
    if (!this.bellIcon.isDisposed()) {
        this.bellIcon.setImage(getBellIcon());
    }
}
protected DataLine.Info getSourceLineInfo() { AudioFormat fmt = getAudioFormat(); //DataLine.Info info = new DataLine.Info(SourceDataLine.class, fmt, 4000); DataLine.Info info = new DataLine.Info(SourceDataLine.class, fmt); return info; }
/**
 * Constructs a Microphone with the given InputStream.
 *
 * Chooses the capture format: if the desired format is not supported by
 * the audio system, falls back to the mixer's native format (typically a
 * higher sample rate) and records in doConversion whether converting from
 * native to desired is possible.
 */
@Override
public void initialize() {
    super.initialize();
    audioList = new LinkedBlockingQueue<Data>();
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, desiredFormat);
    /*
     * If we cannot get an audio line that matches the desired
     * characteristics, shoot for one that matches almost everything we
     * want, but has a higher sample rate.
     */
    if (!AudioSystem.isLineSupported(info)) {
        logger.info(desiredFormat + " not supported");
        AudioFormat nativeFormat = DataUtil.getNativeAudioFormat(desiredFormat, getSelectedMixer());
        if (nativeFormat == null) {
            // no usable fallback; finalFormat is left unchanged
            logger.severe("couldn't find suitable target audio format");
        } else {
            finalFormat = nativeFormat;
            /* convert from native to the desired format if supported */
            doConversion = AudioSystem.isConversionSupported(desiredFormat, nativeFormat);
            if (doConversion) {
                logger.info("Converting from " + finalFormat.getSampleRate() + "Hz to "
                        + desiredFormat.getSampleRate() + "Hz");
            } else {
                logger.info("Using native format: Cannot convert from "
                        + finalFormat.getSampleRate() + "Hz to "
                        + desiredFormat.getSampleRate() + "Hz");
            }
        }
    } else {
        logger.info("Desired format: " + desiredFormat + " supported.");
        finalFormat = desiredFormat;
    }
}
void getAudioInfo (){ Mixer.Info[] mixerInfos = AudioSystem.getMixerInfo(); log.info(mixerInfos.length + " mixers"); for ( int i = 0 ; i < mixerInfos.length ; i++ ){ Mixer mixer = AudioSystem.getMixer(mixerInfos[i]); System.out.println("Mixer " + mixer); // target data lines Line.Info[] lineInfos = mixer.getTargetLineInfo(); System.out.println("\t" + lineInfos.length + " lineInfos"); for ( int j = 0 ; j < lineInfos.length ; j++ ){ if ( lineInfos[j] instanceof DataLine.Info ){ AudioFormat[] formats = ( (DataLine.Info)lineInfos[j] ).getFormats(); System.out.println("\t\t\t" + formats.length + " formats"); for ( int k = 0 ; k < formats.length ; k++ ){ System.out.println("\t\tFormat " + formats[k]); } } Line line = null; try{ line = mixer.getLine(lineInfos[j]); System.out.println("\tLine " + line); } catch ( LineUnavailableException e ){ e.printStackTrace(); } } } }
/**
 * Initializes the local mixer (microphone): builds the audio format from
 * the References constants, then obtains and opens a TargetDataLine.
 * Failures are reported via a stack trace and leave the line unopened.
 */
public void initializeMixer() {
    audioFormat = new AudioFormat(
            References.SAMPLE_RATE,
            References.SAMPLE_SIZE_IN_BITS,
            References.CHANNELS,
            References.SIGNED,
            References.BIG_ENDIAN);
    dataLineInfo = new DataLine.Info(TargetDataLine.class, audioFormat);
    try {
        targetDataLine = (TargetDataLine) AudioSystem.getLine(dataLineInfo);
        targetDataLine.open(audioFormat);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Opens the playback side: wraps the current play buffer in an
 * AudioInputStream and obtains, opens, and starts a SourceDataLine for it.
 * A failure to get the line is reported via a stack trace.
 */
public void openMixer() {
    InputStream stream = new ByteArrayInputStream(playbuffer);
    audioInputStream = new AudioInputStream(
            stream, audioFormat, playbuffer.length / audioFormat.getFrameSize());
    DataLine.Info lineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
    try {
        sourceDataLine = (SourceDataLine) AudioSystem.getLine(lineInfo);
        sourceDataLine.open(audioFormat);
        sourceDataLine.start();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    }
}
/**
 * Tries to obtain an auto-closing Clip for the loaded audio. Fails
 * silently (returns false) when the format is unsupported, the returned
 * line is not an AutoClosingClip, or any exception occurs, so the caller
 * can fall back to a SourceDataLine.
 *
 * @return true when the clip was obtained and configured
 */
private boolean createClip() {
    if (DEBUG || Printer.debug)Printer.debug("JavaSoundAudioClip.createClip()");
    try {
        DataLine.Info info = new DataLine.Info(Clip.class, loadedAudioFormat);
        if (!(AudioSystem.isLineSupported(info)) ) {
            if (DEBUG || Printer.err)Printer.err("Clip not supported: "+loadedAudioFormat);
            // fail silently
            return false;
        }
        Object line = AudioSystem.getLine(info);
        if (!(line instanceof AutoClosingClip)) {
            // NOTE(review): this message appends the 'clip' field, which at
            // this point still holds the previous value — confirm intended.
            if (DEBUG || Printer.err)Printer.err("Clip is not auto closing!"+clip);
            // fail -> will try with SourceDataLine
            return false;
        }
        clip = (AutoClosingClip) line;
        clip.setAutoClosing(true);
        if (DEBUG || Printer.debug) clip.addLineListener(this);
    } catch (Exception e) {
        if (DEBUG || Printer.err)e.printStackTrace();
        // fail silently
        return false;
    }
    if (clip==null) {
        // fail silently
        return false;
    }
    if (DEBUG || Printer.debug)Printer.debug("Loaded clip.");
    return true;
}
/**
 * Creates a software-mixed data line bound to the given mixer, sharing
 * the mixer's control mutex and exposing the standard audio controls,
 * then computes the initial volume.
 */
SoftMixingDataLine(SoftMixingMixer mixer, DataLine.Info info) {
    this.mixer = mixer;
    this.info = info;
    this.control_mutex = mixer.control_mutex;
    controls = new Control[] {
            gain_control,
            mute_control,
            balance_control,
            pan_control,
            reverbsend_control,
            chorussend_control,
            apply_reverb };
    calcVolume();
}
/**
 * Hands out a software-mixed line for the requested info. Only
 * SourceDataLine and Clip lines are supported.
 *
 * @throws IllegalArgumentException if the line type is unsupported
 */
public Line getLine(Line.Info info) throws LineUnavailableException {
    if (!isLineSupported(info))
        throw new IllegalArgumentException("Line unsupported: " + info);
    Class<?> lineClass = info.getLineClass();
    if (lineClass == SourceDataLine.class)
        return new SoftMixingSourceDataLine(this, (DataLine.Info) info);
    if (lineClass == Clip.class)
        return new SoftMixingClip(this, (DataLine.Info) info);
    throw new IllegalArgumentException("Line unsupported: " + info);
}
/**
 * Creates a stopper that stops the given line after delayMS milliseconds,
 * running on a daemon thread. Blocks until the worker thread signals
 * readiness on readyEvent.
 *
 * @param line    the line to stop later
 * @param delayMS delay before stopping, in milliseconds
 */
public AsyncLineStopper(DataLine line, long delayMS) {
    this.line = line;
    this.delayMS = delayMS;
    thread = new Thread(this);
    thread.setDaemon(true);
    // starts the thread and waits until it becomes ready
    // NOTE(review): wait() is not in a condition loop, so a spurious wakeup
    // could return early — confirm whether a ready flag should be checked.
    synchronized (readyEvent) {
        thread.start();
        try {
            readyEvent.wait();
        } catch (InterruptedException ex) {
            // Previously swallowed; restore the interrupt status so callers
            // can still observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
}
/**
 * Builds the direct audio device mixer: creates the source line infos
 * (a SourceDataLine info plus a Clip variant sharing the same formats)
 * and the target line info, leaving empty arrays when a direction is not
 * supported by the device.
 */
DirectAudioDevice(DirectAudioDeviceProvider.DirectAudioDeviceInfo portMixerInfo) {
    // pass in Line.Info, mixer, controls
    super(portMixerInfo, // Mixer.Info
          null, // Control[]
          null, // Line.Info[] sourceLineInfo
          null); // Line.Info[] targetLineInfo
    if (Printer.trace) Printer.trace(">> DirectAudioDevice: constructor");
    // source lines
    DirectDLI srcLineInfo = createDataLineInfo(true);
    if (srcLineInfo != null) {
        sourceLineInfo = new Line.Info[2];
        // SourcedataLine
        sourceLineInfo[0] = srcLineInfo;
        // Clip: same formats as the SourceDataLine info
        sourceLineInfo[1] = new DirectDLI(Clip.class, srcLineInfo.getFormats(),
                srcLineInfo.getHardwareFormats(),
                32, // arbitrary minimum buffer size
                AudioSystem.NOT_SPECIFIED);
    } else {
        sourceLineInfo = new Line.Info[0];
    }
    // TargetDataLine
    DataLine.Info dstLineInfo = createDataLineInfo(false);
    if (dstLineInfo != null) {
        targetLineInfo = new Line.Info[1];
        targetLineInfo[0] = dstLineInfo;
    } else {
        targetLineInfo = new Line.Info[0];
    }
    if (Printer.trace) Printer.trace("<< DirectAudioDevice: constructor completed");
}
/**
 * Common constructor for direct data lines.
 *
 * @param mixerIndex index of the owning mixer in the native layer
 * @param deviceID   native device identifier
 * @param isSource   true for playback (source), false for capture (target)
 */
protected DirectDL(DataLine.Info info,
                   DirectAudioDevice mixer,
                   AudioFormat format,
                   int bufferSize,
                   int mixerIndex,
                   int deviceID,
                   boolean isSource) {
    super(info, mixer, null, format, bufferSize);
    if (Printer.trace) Printer.trace("DirectDL CONSTRUCTOR: info: " + info);
    this.mixerIndex = mixerIndex;
    this.deviceID = deviceID;
    this.waitTime = 10; // 10 milliseconds default wait time
    this.isSource = isSource;
}
/**
 * Source (playback) variant of DirectDL: forwards the mixer's native
 * index and device ID to the shared constructor with isSource = true.
 */
private DirectSDL(DataLine.Info info, AudioFormat format, int bufferSize, DirectAudioDevice mixer) {
    super(info, mixer, format, bufferSize, mixer.getMixerIndex(), mixer.getDeviceID(), true);
    if (Printer.trace) Printer.trace("DirectSDL CONSTRUCTOR: completed");
}
/**
 * Target (capture) variant of DirectDL: forwards the mixer's native
 * index and device ID to the shared constructor with isSource = false.
 */
private DirectTDL(DataLine.Info info, AudioFormat format, int bufferSize, DirectAudioDevice mixer) {
    super(info, mixer, format, bufferSize, mixer.getMixerIndex(), mixer.getDeviceID(), false);
    if (Printer.trace) Printer.trace("DirectTDL CONSTRUCTOR: completed");
}
/**
 * Opens a microphone capture line on the given mixer.
 *
 * @param frameSamples samples per frame; the line buffer is sized at two
 *                     bytes per sample (16-bit audio)
 * @throws LineUnavailableException if the target line cannot be opened
 */
public Mic(Mixer mixer, AudioFormat format, int frameSamples) throws LineUnavailableException {
    super();
    DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
    t = (TargetDataLine) mixer.getLine(info);
    t.open(format, frameSamples * 2);
    System.out.println("Bytebuffer t: " + t.getBufferSize());
}
/**
 * Returns a software-mixed line for the requested info; this mixer can
 * only create SourceDataLine and Clip lines.
 *
 * @throws IllegalArgumentException if the requested line type is unsupported
 */
@Override
public Line getLine(Line.Info info) throws LineUnavailableException {
    if (!isLineSupported(info)) {
        throw new IllegalArgumentException("Line unsupported: " + info);
    }
    if (info.getLineClass() == SourceDataLine.class) {
        return new SoftMixingSourceDataLine(this, (DataLine.Info) info);
    }
    if (info.getLineClass() == Clip.class) {
        return new SoftMixingClip(this, (DataLine.Info) info);
    }
    throw new IllegalArgumentException("Line unsupported: " + info);
}
public static void main(String[] args) throws Exception { AbstractRcomArgs a=new AbstractRcomArgs(); UtilCli.parse(a, args, true); File folder=new File("/home/rizsi/tmp/video"); byte[] data=UtilFile.loadFile(new File(folder, "remote.sw")); AudioFormat format=ManualTestEchoCancel.getFormat(); final Mixer mixer = AudioSystem.getMixer(null); DataLine.Info info2= new DataLine.Info(SourceDataLine.class, format); SourceDataLine s=(SourceDataLine) mixer.getLine(info2); s.open(format, framesamples*2); s.start(); try(LoopInputStream lis=new LoopInputStream(data)) { try(SpeexResampler resampler=new SpeexResampler(a, framesamples, new ResampledReceiver(s))) { final byte[] buffer=new byte[framesamples*2];; while(true) { UtilStream.readFully(buffer, lis, buffer.length); feed(resampler, buffer); } } // byte[] buffer=new byte[framesamples*2]; // while(true) // { // UtilStream.readFully(buffer, resampled, buffer.length); // } } }
public static void main(String args[]) throws Exception { boolean res = true; try { AudioInputStream ais = new AudioInputStream( new ByteArrayInputStream(new byte[2000]), new AudioFormat(8000.0f, 8, 1, false, false), 2000); // AudioFormat format = ais.getFormat(); DataLine.Info info = new DataLine.Info(Clip.class, format, ((int) ais.getFrameLength() * format .getFrameSize())); Clip clip = (Clip) AudioSystem.getLine(info); clip.open(); FloatControl rateControl = (FloatControl) clip.getControl( FloatControl.Type.SAMPLE_RATE); int c = 0; while (c++ < 10) { clip.stop(); clip.setFramePosition(0); clip.start(); for (float frq = 22000; frq < 44100; frq = frq + 100) { try { Thread.currentThread().sleep(20); } catch (Exception e) { break; } rateControl.setValue(frq); } } } catch (Exception ex) { ex.printStackTrace(); res = ex.getMessage().indexOf( "This method should not have been invoked!") < 0; } if (res) { System.out.println("Test passed"); } else { System.out.println("Test failed"); throw new Exception("Test failed"); } }