Refine search
public JavaSoundAudioRecorder (int samplingRate, boolean isMono) { try { AudioFormat format = new AudioFormat(Encoding.PCM_SIGNED, samplingRate, 16, isMono ? 1 : 2, isMono ? 2 : 4, samplingRate, false); line = AudioSystem.getTargetDataLine(format); line.open(format, buffer.length); line.start(); } catch (Exception ex) { throw new GdxRuntimeException("Error creating JavaSoundAudioRecorder.", ex); } }
/**
 * Capture loop: drains the microphone line into {@code out} until {@code done}
 * is set by another thread (or the line reports end of stream), then wakes
 * whatever thread is blocked waiting on {@code lock}.
 */
@Override public void run() {
	// Chunk buffer sized to the line's own buffer.
	byte[] data = new byte[microphone.getBufferSize()];
	out = new ByteArrayOutputStream();
	try {
		// Discard anything buffered before capture officially begins.
		microphone.flush();
		microphone.start();
		while (!done) {
			int numBytesRead = microphone.read(data, 0, data.length);
			if (numBytesRead != -1) {
				out.write(data, 0, numBytesRead);
			} else {
				// Treated as end of stream; stop capturing.
				break;
			}
		}
		microphone.stop();
		// flush() is a no-op for ByteArrayOutputStream; kept for the OutputStream contract.
		out.flush();
	} catch (IOException e) {
		e.printStackTrace();
	}
	// Signal the thread that is waiting for recording to finish.
	synchronized (lock) {
		lock.notify();
	}
}
}
public static void main(String[] args) throws Exception { if (args.length > 0) { for (int file = 0; file < args.length; file++) { AudioInputStream ais = AudioSystem.getAudioInputStream(new File(args[file])); if (!ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) { ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais); if (ais.getFormat().getChannels() > 1) { throw new IllegalArgumentException("Can only deal with mono audio signals"); int samplingRate = (int) ais.getFormat().getSampleRate(); DoubleDataSource signal = new AudioDoubleDataSource(ais); int framelength = (int) (0.01 /* seconds */* samplingRate); AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 1, 2, 44100.0F, false); DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat); AudioInputStream input = null; try { TargetDataLine mic = (TargetDataLine) AudioSystem.getLine(info); mic.open(audioFormat); mic.start(); input = new AudioInputStream(mic); } catch (LineUnavailableException e) {
@Override public void run() { Buffer buf = new Buffer(); AudioFormat lineFormat = line.getFormat(); buf.format = AudioFormatKeys.fromAudioFormat(lineFormat); int bufferSize = lineFormat.getFrameSize() * (int) lineFormat.getSampleRate(); if (((int) lineFormat.getSampleRate() & 1) == 0) { bufferSize /= 2; Rational sampleRate = Rational.valueOf(lineFormat.getSampleRate()); Rational frameRate = Rational.valueOf(lineFormat.getFrameRate()); int count = line.read(bdat, 0, bdat.length); if (count > 0) { buf.sampleCount = count / (lineFormat.getSampleSizeInBits() / 8 * lineFormat.getChannels());
File outputFile = new File(strFilename); AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, rate, 16, channels, 2 * channels, rate, false); DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat); TargetDataLine targetDataLine = null; try { targetDataLine = (TargetDataLine) AudioSystem.getLine(info); targetDataLine.open(audioFormat); } catch (LineUnavailableException e) { out("unable to get a recording line");
TargetDataLine audioLine = getAudioLine(); if (audioLine != null) { if (!audioLine.isOpen()) { logger.info("open"); try { audioLine.open(finalFormat, audioBufferSize); } catch (LineUnavailableException e) { logger.severe("Can't open microphone " + e.getMessage()); audioStream = AudioSystem.getAudioInputStream (desiredFormat, audioStream); assert (audioStream != null); (audioStream.getFormat().getSampleSizeInBits() / 8) * (int) (sec * audioStream.getFormat().getSampleRate()) * desiredFormat.getChannels();
DataLine.Info lineInfo = new DataLine.Info( TargetDataLine.class, new AudioFormat(44100, 16, 2, true, true)); line = (TargetDataLine) AudioSystem.getLine(lineInfo); AudioFormat lineFormat = line.getFormat(); Buffer buf = new Buffer(); buf.format = AudioFormatKeys.fromAudioFormat(lineFormat); buf.sampleDuration = new Rational(1, (long) lineFormat.getSampleRate()); buf.data = new byte[(int) (lineFormat.getFrameSize() * lineFormat.getSampleRate())]; writer = new AVIWriter(file); writer.addTrack(buf.format); line.open(); line.start(); buf.length = line.read((byte[]) buf.data, 0, ((byte[]) buf.data).length); buf.sampleCount = buf.length / lineFormat.getFrameSize(); writer.write(0, buf); } finally { if (line != null) { line.close();
AudioFormat audioFormat = new AudioFormat(44100.0F, 16, 2, true, false); line.open(audioFormat, line.getBufferSize()); line.start(); byte[] audioBytes = new byte[ line.getBufferSize() / 2 ]; // best size? int numBytesRead = 0; numBytesRead = line.read(audioBytes, 0, audioBytes.length);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, inFormat); if (!AudioSystem.isLineSupported(info)) { downsample = true; inFormat = new AudioFormat(44100.0f, // sample rate 16, // sample size 1, // channels (1 == mono) true, // signed false); // little endian info = new DataLine.Info(TargetDataLine.class, inFormat); if (!AudioSystem.isLineSupported(info)) { throw new LineUnavailableException( "Unsupported format: " + audioFormat); microphone = (TargetDataLine) AudioSystem.getLine(info); microphone.open(audioFormat, microphone.getBufferSize());
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); line = (TargetDataLine) AudioSystem.getLine(info); line.open(format, line.getBufferSize()); } catch (LineUnavailableException ex) { shutDown(res.getString("Unable_to_open_the") + ex); int frameSizeInBytes = format.getFrameSize(); int bufferLengthInFrames = line.getBufferSize() / 8; int bufferLengthInBytes = bufferLengthInFrames * frameSizeInBytes; byte[] data = new byte[bufferLengthInBytes]; int numBytesRead; line.start(); audioMeter.start(); if ((numBytesRead = line.read(data, 0, bufferLengthInBytes)) == -1) { break; line.stop(); line.close(); line = null; .getFrameRate()); duration = milliseconds / 1000.0;
AudioFormat format = m_line.getFormat(); int nFrameSize = format.getFrameSize(); long totalBytesToRead = (long) (millis * format.getFrameRate() * nFrameSize / 1000); if (totalBytesToRead % nFrameSize != 0) { totalBytesToRead += nFrameSize - totalBytesToRead % nFrameSize; out("BufferingRecorder.run(): trying to read: " + bytesToRead); int nBytesRead = m_line.read(abBuffer, 0, bytesToRead); totalBytes += nBytesRead; if (totalBytesToRead > 0 && totalBytes >= totalBytesToRead) { / format.getFrameSize()); try { AudioSystem.write(audioInputStream, m_targetType, m_file); } catch (IOException e) { e.printStackTrace();
AudioFormat audioFormat = new AudioFormat(16000, 16, 1, true, false); DataLine.Info targetInfo = new Info( TargetDataLine.class, if (!AudioSystem.isLineSupported(targetInfo)) { System.out.println("Microphone not supported"); System.exit(0); TargetDataLine targetDataLine = (TargetDataLine) AudioSystem.getLine(targetInfo); targetDataLine.open(audioFormat); targetDataLine.start(); System.out.println("Start speaking"); long startTime = System.currentTimeMillis(); if (estimatedTime > 60000) { // 60 seconds System.out.println("Stop speaking."); targetDataLine.stop(); targetDataLine.close(); break;
DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat, nBufferSize); try { if (strMixerName != null) { return null; Mixer mixer = AudioSystem.getMixer(mixerInfo); targetDataLine = (TargetDataLine) mixer.getLine(info); } else { out("AudioCommon.getTargetDataLine(): using default mixer"); targetDataLine = (TargetDataLine) AudioSystem.getLine(info); out("AudioCommon.getTargetDataLine(): opening line..."); targetDataLine.open(audioFormat, nBufferSize); if (DEBUG) { out("AudioCommon.getTargetDataLine(): opened line");
TargetDataLine getTargetDataLine(AudioFormat format, int bufferSize) DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); if (AudioSystem.isLineSupported(info)) line = (TargetDataLine)AudioSystem.getLine(info); line.open(format, bufferSize * format.getFrameSize()); debug("TargetDataLine buffer size is " + line.getBufferSize() + "\n" + "TargetDataLine format is " + line.getFormat().toString() + "\n" + "TargetDataLine info is " + line.getLineInfo().toString()); error("Unable to return a TargetDataLine: unsupported format - " + format.toString());
if (targetDataLine.isOpen()) { Test.output("targetDataLine was open, with format: " + targetDataLine.getFormat()); targetDataLine.close(); if (audioDescriptor instanceof Mixer.Info) { Mixer.Info mixerInfo = (Mixer.Info) audioDescriptor; Mixer mixer = AudioSystem.getMixer(mixerInfo); Line.Info[] lineInfos = mixer.getTargetLineInfo(); assert lineInfos.length > 0 : "Strange, there is no more line info for mixer: " + mixer; } else { assert ((String) audioDescriptor).equals("AudioSystem"); Line.Info lineInfo = new DataLine.Info(TargetDataLine.class, getAudioFormat()); targetDataLine = (TargetDataLine) AudioSystem.getLine(lineInfo); targetDataLine.open(getAudioFormat()); Test.output("Target line opened:"); Test.output("Format requested: " + getAudioFormat()); Test.output("Format opened : " + targetDataLine.getFormat()); return targetDataLine;
/**
 * Opens a mono capture line for the system microphone and wraps it in an
 * {@link AudioInputStream} for reading.
 *
 * @param sampleRate sample rate in Hz
 * @param sampleSize sample size in bits
 * @param signed     whether samples are signed
 * @param bigEndian  whether samples are big-endian
 * @throws IllegalStateException if the capture line is unavailable
 */
public Microphone(
		float sampleRate, int sampleSize, boolean signed, boolean bigEndian) {
	// Single-channel (mono) format built from the caller's parameters.
	AudioFormat format = new AudioFormat(sampleRate, sampleSize, 1, signed, bigEndian);
	try {
		line = AudioSystem.getTargetDataLine(format);
		// Open with the explicit format: the no-arg open() opens the line in
		// its default format, which need not match the one requested above.
		line.open(format);
	} catch (LineUnavailableException e) {
		throw new IllegalStateException(e);
	}
	inputStream = new AudioInputStream(line);
}
private byte[] record() throws LineUnavailableException { AudioFormat format = AudioUtil.getAudioFormat(audioConf); DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); // Checks if system supports the data line if (!AudioSystem.isLineSupported(info)) { LOGGER.error("Line not supported"); System.exit(0); } microphone = (TargetDataLine) AudioSystem.getLine(info); microphone.open(format); microphone.start(); LOGGER.info("Listening, tap enter to stop ..."); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); int numBytesRead; byte[] data = new byte[microphone.getBufferSize() / 5]; // Begin audio capture. microphone.start(); // Here, stopped is a global boolean set by another thread. while (!stopped) { // Read the next chunk of data from the TargetDataLine. numBytesRead = microphone.read(data, 0, data.length); // Save this chunk of data. byteArrayOutputStream.write(data, 0, numBytesRead); } return byteArrayOutputStream.toByteArray(); }
/**
 * Reads one buffer's worth of audio from the line, converts it to floats,
 * and copies the result into {@code buffer}, one array per channel.
 *
 * @param buffer destination; its channel count is adjusted to match the line
 * @return the number of samples written per channel (the buffer's size)
 */
public int read(MultiChannelBuffer buffer) {
	// create our converter object
	int numChannels = line.getFormat().getChannels();
	int numSamples = buffer.getBufferSize();
	float sampleRate = line.getFormat().getSampleRate();
	FloatSampleBuffer convert = new FloatSampleBuffer( numChannels, numSamples, sampleRate );
	// allocate enough bytes for the size of this buffer
	byte[] bytes = new byte[ convert.getByteArrayBufferSize(line.getFormat()) ];
	// read the bytes
	// NOTE(review): the byte count returned by read() is ignored; a short
	// read would leave the tail of `bytes` zeroed, converting to silence.
	line.read(bytes, 0, bytes.length);
	// convert the bytes
	convert.setSamplesFromBytes(bytes, 0, line.getFormat(), 0, numSamples);
	// copy the converted floats into the MultiChannelBuffer
	// make sure it has the correct number of channels first
	buffer.setChannelCount(numChannels);
	for(int i = 0; i < convert.getChannelCount(); i++) {
		buffer.setChannel(i, convert.getChannel(i));
	}
	return numSamples;
}
}