/**
 * Try to get a line for output to the speaker. The line is not yet opened with a specific format.
 *
 * @return a line object, or null if the line could not be created.
 */
public SourceDataLine getSpeakerOutputLine() {
    Object selected = cbSpeakerOutput.getSelectedItem();
    assert selected instanceof Mixer.Info;
    Mixer mixer = AudioSystem.getMixer((Mixer.Info) selected);
    // The first source line of the chosen mixer is the one we hand out.
    Line.Info[] available = mixer.getSourceLineInfo();
    assert available.length > 0 : "Strange, there are no more source lines for mixer: " + mixer;
    try {
        return (SourceDataLine) mixer.getLine(available[0]);
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        return null;
    }
}
private static void init() { try { // 44,100 samples per second, 16-bit audio, mono, signed PCM, little Endian AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE); // the internal buffer is a fraction of the actual buffer size, this choice is arbitrary // it gets divided because we can't expect the buffered data to line up exactly with when // the sound card decides to push out its samples. buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE/3]; } catch (LineUnavailableException e) { System.out.println(e.getMessage()); } // no sound gets made before this call line.start(); }
inFormat); if (!AudioSystem.isLineSupported(info)) { throw new LineUnavailableException( "Unsupported format: " + audioFormat);
thread.start(); } catch (LineUnavailableException e) { e.printStackTrace(); throw new InternalError(e.getMessage());
getMixer(); } catch (LineUnavailableException e) { throw new IOException(e.toString());
/**
 * Stops any current playback and (re)starts the clip, applying pan and
 * volume controls when the underlying line supports them.
 *
 * @throws InternalError if the audio line is unavailable; the original
 *         {@link LineUnavailableException} is attached as the cause.
 */
public synchronized void play() {
    stop();
    if (clip == null) {
        try {
            clip = createClip();
            // samples.clone() keeps the internal array safe from the clip mutating it.
            clip.open(getAudioFormat(), samples.clone(), 0, samples.length);
            if (clip.isControlSupported(FloatControl.Type.PAN)) {
                FloatControl control = (FloatControl) clip.getControl(FloatControl.Type.PAN);
                control.setValue(pan);
            }
            if (clip.isControlSupported(FloatControl.Type.VOLUME)) {
                FloatControl control = (FloatControl) clip.getControl(FloatControl.Type.VOLUME);
                // volume is stored on a 0..64 scale; the control expects 0..1.
                control.setValue(volume / 64f);
            }
            clip.start();
        } catch (LineUnavailableException e) {
            e.printStackTrace();
            // Preserve the original exception as the cause instead of
            // discarding it (previously only the message was forwarded).
            throw new InternalError(e.getMessage(), e);
        }
    }
}
LOG.log(Level.WARNING, e.toString(), e);
/**
 * Try to get a line for output to the expert. The line is not yet opened with a specific format.
 *
 * @return a line object, or null if the line could not be created.
 */
public SourceDataLine getExpertOutputLine() {
    Object selected = cbExpertOutput.getSelectedItem();
    assert selected instanceof Mixer.Info;
    Mixer mixer = AudioSystem.getMixer((Mixer.Info) selected);
    // The first source line of the chosen mixer is the one we hand out.
    Line.Info[] available = mixer.getSourceLineInfo();
    assert available.length > 0 : "Strange, there are no more source lines for mixer: " + mixer;
    try {
        return (SourceDataLine) mixer.getLine(available[0]);
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        return null;
    }
}
e.printStackTrace(); throw new InternalError(e.getMessage());
logger.severe("microphone unavailable " + e.getMessage());
/**
 * Creates {@code size} clips on the given mixer, all opened with the same
 * sound data, so multiple instances of the sound can play concurrently.
 *
 * @param mixer     the mixer to obtain the clip lines from
 * @param soundData the format and sample bytes each clip is opened with
 * @param size      number of clip clones to create
 * @throws LineUnavailableException if the format is unsupported or any
 *         clip cannot be obtained/opened; clips opened before the failure
 *         are closed so no lines leak.
 */
private ClipClones(Mixer mixer, SoundData soundData, int size) throws LineUnavailableException {
    idx = 0;
    clips = new Clip[size];
    DataLine.Info info = new DataLine.Info(Clip.class, soundData.format);
    if (!AudioSystem.isLineSupported(info)) {
        throw new LineUnavailableException("Required data line is not supported by the audio system");
    }
    for (int i = 0; i < size; i++) {
        try {
            clips[i] = (Clip) mixer.getLine(info);
            clips[i].open(soundData.format, soundData.byteData, 0, soundData.length);
        } catch (LineUnavailableException e) {
            // Release every clip opened so far; previously a partial failure
            // leaked all the already-opened lines.
            for (int j = 0; j < i; j++) {
                clips[j].close();
            }
            throw e;
        }
    }
}
m_clip.open(audioInputStream); } catch (LineUnavailableException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace();
audioLine.open(finalFormat, audioBufferSize); } catch (LineUnavailableException e) { logger.severe("Can't open microphone " + e.getMessage()); return false;
/** * @return the sourceDataLine */ public synchronized SourceDataLine getSourceDataLine() throws LineUnavailableException{ if(output==null) throw new LineUnavailableException(); if(sourceDataLine==null){ setSourceDataLine((SourceDataLine) AudioSystem.getLine(output.info)); sourceDataLine.open(format); sourceDataLine.start(); }//end if(sourceDataLine==null) return sourceDataLine; }//end getSourceDataLine()
m_clip.open(audioInputStream); } catch (LineUnavailableException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace();
/**
 * Open the current line with the configured format. A failure is reported
 * on stderr and otherwise swallowed, leaving the line unopened.
 */
public void openLine() {
    try {
        this.line.open(format);
    } catch (LineUnavailableException e) {
        // Diagnostics belong on stderr, not stdout.
        System.err.println("Unable to open line:" + e.getMessage());
    }
}
throw new LineUnavailableException();
} catch (LineUnavailableException e) { System.err.println("unable to get a recording line"); e.printStackTrace();
/**
 * Get a connection to the speaker, if available.
 *
 * @param audioFormat The java audio format that we will use.
 * @return A new {@code AudioFrame}, or null if the audio line could not be obtained.
 */
public static AudioFrame make(final AudioFormat audioFormat) { try { return new AudioFrame(audioFormat); } catch (LineUnavailableException e) { log.error("Could not get audio data line: {}", e.getMessage()); return null; } }
throw new LineUnavailableException("Audio encoding " + audioFormat.getEncoding() + " is not supported");