// Persist the interpolated audio, then report elapsed wall-clock time
// relative to the duration of the input audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// frames / sampleRate = seconds; * 1000 converts to milliseconds.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fix: "interpolatin" -> "interpolation" in the status message.
System.out.println("LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Persist the interpolated audio, then report elapsed wall-clock time
// relative to the duration of the input audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// frames / sampleRate = seconds; * 1000 converts to milliseconds.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fix: "interpolatin" -> "interpolation" in the status message.
System.out.println("Pitch-synchronous LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Persist the interpolated audio, then report elapsed wall-clock time
// relative to the duration of the input audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// frames / sampleRate = seconds; * 1000 converts to milliseconds.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fix: "interpolatin" -> "interpolation" in the status message.
System.out.println("LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Persist the interpolated audio, then report elapsed wall-clock time
// relative to the duration of the input audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// frames / sampleRate = seconds; * 1000 converts to milliseconds.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fix: "interpolatin" -> "interpolation" in the status message.
System.out.println("LPCC-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Persist the interpolated audio, then report elapsed wall-clock time
// relative to the duration of the input audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// frames / sampleRate = seconds; * 1000 converts to milliseconds.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fix: "interpolatin" -> "interpolation" in the status message.
System.out.println("LPCC-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Persist the interpolated audio, then report elapsed wall-clock time
// relative to the duration of the input audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// frames / sampleRate = seconds; * 1000 converts to milliseconds.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fix: "interpolatin" -> "interpolation" in the status message.
System.out.println("Pitch-synchronous LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
/**
 * Returns the frame length reported by the underlying {@code infoFormat},
 * wrapped in a {@code Bytes} value object.
 *
 * @return the frame length as a {@code Bytes} instance; never {@code null}
 */
@Nonnull
public Bytes getFrameLength() {
    return new Bytes(this.infoFormat.getFrameLength());
}
/**
 * Obtains an {@link AudioInputStream} for MP4 audio from the given stream.
 * The stream is probed for its file format, rewound, and wrapped in an
 * {@code MP4AudioInputStream}.
 *
 * @param in the input stream to read from; wrapped in a {@link BufferedInputStream}
 *           if it does not support mark/reset
 * @return an MP4 audio input stream positioned at the start of the data
 * @throws UnsupportedAudioFileException if the stream does not contain MP4 audio
 * @throws IOException if an I/O error occurs while probing or rewinding
 */
@Override
public AudioInputStream getAudioInputStream(InputStream in) throws UnsupportedAudioFileException, IOException {
    // Ensure mark/reset support, and set the mark BEFORE entering the try block:
    // the original called reset() in the catch even when mark() had not yet been
    // reached, which could mask the real failure with an invalid-mark IOException.
    if (!in.markSupported()) {
        in = new BufferedInputStream(in);
    }
    in.mark(1000);
    try {
        final AudioFileFormat aff = getAudioFileFormat(in, AudioSystem.NOT_SPECIFIED);
        in.reset();
        return new MP4AudioInputStream(in, aff.getFormat(), aff.getFrameLength());
    } catch (UnsupportedAudioFileException | IOException e) {
        // Multi-catch replaces two identical catch blocks; rewind so the
        // caller can hand the stream to another reader.
        in.reset();
        throw e;
    }
}
/**
 * Obtains an {@link AudioInputStream} for MP4 audio from the given stream.
 * The stream is probed for its file format, rewound, and wrapped in an
 * {@code MP4AudioInputStream}.
 *
 * @param in the input stream to read from; wrapped in a {@link BufferedInputStream}
 *           if it does not support mark/reset
 * @return an MP4 audio input stream positioned at the start of the data
 * @throws UnsupportedAudioFileException if the stream does not contain MP4 audio
 * @throws IOException if an I/O error occurs while probing or rewinding
 */
@Override
public AudioInputStream getAudioInputStream(InputStream in) throws UnsupportedAudioFileException, IOException {
    // Ensure mark/reset support, and set the mark BEFORE entering the try block:
    // the original called reset() in the catch even when mark() had not yet been
    // reached, which could mask the real failure with an invalid-mark IOException.
    if (!in.markSupported()) {
        in = new BufferedInputStream(in);
    }
    in.mark(1000);
    try {
        final AudioFileFormat aff = getAudioFileFormat(in, AudioSystem.NOT_SPECIFIED);
        in.reset();
        return new MP4AudioInputStream(in, aff.getFormat(), aff.getFrameLength());
    } catch (UnsupportedAudioFileException | IOException e) {
        // Multi-catch replaces two identical catch blocks; rewind so the
        // caller can hand the stream to another reader.
        in.reset();
        throw e;
    }
}
/**
 * Return the AudioInputStream from the given InputStream.
 *
 * @param inputStream the stream to probe; wrapped in a {@link BufferedInputStream}
 *                    if it does not support mark/reset
 * @param medialength the media length hint passed through to the format probe
 * @param totalms     the total duration hint, in milliseconds
 * @return an {@link AudioInputStream} positioned at the start of the audio data
 * @throws UnsupportedAudioFileException if the stream format is not recognized
 * @throws IOException if an I/O error occurs while probing or rewinding
 */
public AudioInputStream getAudioInputStream(InputStream inputStream, int medialength, int totalms) throws UnsupportedAudioFileException, IOException {
    if (TDebug.TraceAudioFileReader)
        // Fix: trace message previously read "inputStreamint medialength" (missing comma).
        TDebug.out("getAudioInputStream(InputStream inputStream, int medialength, int totalms)");
    // Ensure mark/reset support and set the mark before probing so the catch
    // blocks can always rewind to a valid mark.
    if (!inputStream.markSupported())
        inputStream = new BufferedInputStream(inputStream);
    inputStream.mark(MARK_LIMIT);
    try {
        AudioFileFormat audioFileFormat = getAudioFileFormat(inputStream, medialength, totalms);
        inputStream.reset();
        return new AudioInputStream(inputStream, audioFileFormat.getFormat(), audioFileFormat.getFrameLength());
    } catch (UnsupportedAudioFileException | IOException e) {
        // Multi-catch replaces two identical catch blocks; rewind so the
        // caller can retry with another reader.
        inputStream.reset();
        throw e;
    }
}
// NOTE(review): fragment — this line is the tail of a larger method
// (TAudioFileReader.getAudioInputStream(InputStream, long)); the opening of
// the statement lies outside this view. It passes the probed format and frame
// length onward, then traces the method exit when debug tracing is enabled.
audioFileFormat.getFormat(), audioFileFormat.getFrameLength()); if (TDebug.TraceAudioFileReader) { TDebug.out("TAudioFileReader.getAudioInputStream(InputStream, long): end");
/**
 * Opens an {@link AudioInputStream} backed by the Core Audio native layer
 * for the given stream.
 *
 * @param stream the stream containing encoded audio
 * @return a {@code CAAudioInputStream} exposing the decoded audio
 * @throws UnsupportedAudioFileException if the native library is missing or
 *                                       the format is not recognized
 * @throws IOException if an I/O error occurs while probing the stream
 */
@Override
public AudioInputStream getAudioInputStream(final InputStream stream) throws UnsupportedAudioFileException, IOException {
    // Fail fast when the native Core Audio bridge is unavailable.
    if (!nativeLibraryLoaded) {
        throw new UnsupportedAudioFileException("Native library casampledsp not loaded.");
    }
    // Probe the container format first, then hand the raw stream to the
    // native-backed peer stream (hint 0 = no file-type hint).
    final AudioFileFormat probedFormat = getAudioFileFormat(stream);
    final CAStreamInputStream nativeStream = new CAStreamInputStream(stream, 0);
    return new CAAudioInputStream(nativeStream, probedFormat.getFormat(), probedFormat.getFrameLength());
}
// NOTE(review): fragment — this line is the tail of a larger method
// (TAudioFileReader.getAudioInputStream(InputStream, long)); the opening of
// the statement lies outside this view. It passes the probed format and frame
// length onward, then traces the method exit when debug tracing is enabled.
audioFileFormat.getFormat(), audioFileFormat.getFrameLength()); if (TDebug.TraceAudioFileReader) { TDebug.out("TAudioFileReader.getAudioInputStream(InputStream, long): end");
// NOTE(review): fragment of a larger method — wraps the concatenated
// per-segment streams in a single AudioInputStream using the shared
// format and frame length from the probed AudioFileFormat.
return new AudioInputStream(sequenceInputStream, audioFileFormat.getFormat(), audioFileFormat.getFrameLength());
/**
 * Opens an {@link AudioInputStream} backed by the Core Audio native layer
 * for the given URL. Local files are handed to the native layer directly;
 * remote resources are buffered and probed with a content-type hint.
 *
 * @param url the resource to open
 * @return a {@code CAAudioInputStream} exposing the decoded audio
 * @throws UnsupportedAudioFileException if the native library is missing or
 *                                       the format is not recognized
 * @throws IOException if an I/O error occurs while opening or probing
 */
@Override
public AudioInputStream getAudioInputStream(final URL url) throws UnsupportedAudioFileException, IOException {
    // Fail fast when the native Core Audio bridge is unavailable.
    if (!nativeLibraryLoaded) {
        throw new UnsupportedAudioFileException("Native library casampledsp not loaded.");
    }
    final AudioFileFormat probedFormat;
    final CANativePeerInputStream peerStream;
    if (isFile(url)) {
        // Local file: the native layer can open and seek the file itself.
        probedFormat = getAudioFileFormat(url);
        peerStream = new CAURLInputStream(url);
    } else {
        // Remote resource: derive a file-type hint from the HTTP content type,
        // buffer the byte stream, and probe it before handing it to the peer.
        final URLConnection connection = url.openConnection();
        final String mimeType = connection.getContentType();
        final InputStream bufferedStream = buffer(url.openStream());
        final Integer typeHint = toFileTypeHint(mimeType);
        probedFormat = getAudioFileFormat(bufferedStream, typeHint);
        peerStream = new CAStreamInputStream(bufferedStream, typeHint);
    }
    return new CAAudioInputStream(peerStream, probedFormat.getFormat(), probedFormat.getFrameLength());
}
/**
 * Populates the given track's metadata (title, sample rate, total samples,
 * channels, codec, bitrate) from the audio file on disk.
 *
 * <p>Read failures are logged and the track is returned with whatever
 * metadata was set before the failure (best-effort behavior preserved).
 *
 * @param track the track whose data should be filled in
 * @return the same track instance, for chaining
 */
public Track readSingle(Track track) {
    TrackData trackData = track.getTrackData();
    File file = trackData.getFile();
    // Fall back to the file name (without extension) as the title.
    String title = Util.removeExt(file.getName());
    trackData.setTagFieldValues(FieldKey.TITLE, title);
    try {
        AudioFileFormat format = AudioSystem.getAudioFileFormat(file);
        trackData.setStartPosition(0);
        AudioFormat audioFormat = format.getFormat();
        trackData.setSampleRate((int) audioFormat.getSampleRate());
        int frameLength = format.getFrameLength();
        trackData.setTotalSamples(frameLength);
        trackData.setChannels(audioFormat.getChannels());
        trackData.setCodec(Util.getFileExt(file).toUpperCase());
        if (frameLength > 0) {
            // Fix: the original used integer division (byteLength / frameLength),
            // truncating bytes-per-frame before scaling and corrupting the
            // computed bitrate. Compute in floating point, truncate once at the end.
            trackData.setBitrate((int) ((double) format.getByteLength() / frameLength * audioFormat.getSampleRate() / 100));
        }
    } catch (Exception e) {
        // Best-effort: unreadable files are reported but do not abort the scan.
        System.out.println("Couldn't read file: " + trackData.getFile());
    }
    return track;
}
/**
 * Opens an {@link AudioInputStream} backed by the QuickTime native layer
 * for the given stream.
 *
 * @param stream the stream containing encoded audio
 * @return a {@code QTAudioInputStream} exposing the decoded audio
 * @throws UnsupportedAudioFileException if the native library is missing or
 *                                       the format is not recognized
 * @throws IOException if an I/O error occurs while probing the stream
 */
@Override
public AudioInputStream getAudioInputStream(final InputStream stream) throws UnsupportedAudioFileException, IOException {
    // Fail fast when the native QuickTime bridge is unavailable.
    if (!nativeLibraryLoaded) {
        throw new UnsupportedAudioFileException("Native library qtsampledsp not loaded.");
    }
    // Probe the container format first, then wrap the raw stream in the
    // native-backed peer stream.
    final AudioFileFormat probedFormat = getAudioFileFormat(stream);
    final QTStreamInputStream nativeStream = new QTStreamInputStream(stream);
    return new QTAudioInputStream(nativeStream, probedFormat.getFormat(), probedFormat.getFrameLength());
}
/**
 * Opens an {@link AudioInputStream} backed by the QuickTime native layer
 * for the given QuickTime URL.
 *
 * @param url the QuickTime URL to open
 * @return a {@code QTAudioInputStream} exposing the decoded audio
 * @throws UnsupportedAudioFileException if the native library is missing or
 *                                       the format is not recognized
 * @throws IOException if an I/O error occurs while opening or probing
 */
public AudioInputStream getAudioInputStream(final QTURL url) throws UnsupportedAudioFileException, IOException {
    // Fail fast when the native QuickTime bridge is unavailable.
    if (!nativeLibraryLoaded) {
        throw new UnsupportedAudioFileException("Native library qtsampledsp not loaded.");
    }
    final AudioFileFormat probedFormat = getAudioFileFormat(url);
    final QTFileInputStream nativeStream = new QTFileInputStream(url);
    return new QTAudioInputStream(nativeStream, probedFormat.getFormat(), probedFormat.getFrameLength());
}
// NOTE(review): fragment — tail of a larger method; locals such as `format`,
// `audioFileFormat`, and `sequenceInputStream` are defined outside this view.
// The frame length defaults to the media length; it is only taken from the
// AudioFileFormat when the format declares a real (positive, specified) frame
// size, mirroring how AudioInputStream interprets byte length otherwise.
int frameLength = medialength; // if frameSize not specified, use byte length, see AudioInputStream
if (!(format.getFrameSize() == AudioSystem.NOT_SPECIFIED || format.getFrameSize() <= 0))
    frameLength = audioFileFormat.getFrameLength();
return new AudioInputStream(sequenceInputStream, format, frameLength);
/**
 * Derives a {@code StreamInfo} from the given input stream. An
 * {@link AudioInputStream} already carries its format and frame length and
 * is used directly; any other stream is probed via
 * {@link AudioSystem#getAudioFileFormat(InputStream)}.
 *
 * @param is the stream to inspect
 * @return the stream's format information
 * @throws SoundTransformException wrapping any probe failure with the
 *         {@code WRONG_TYPE} error code
 */
@Override
public StreamInfo getStreamInfo(final InputStream is) throws SoundTransformException {
    // An AudioInputStream already knows its format; avoid re-probing.
    if (is instanceof AudioInputStream) {
        final AudioInputStream ais = (AudioInputStream) is;
        return this.fromAudioFormat(ais.getFormat(), ais.getFrameLength());
    }
    try {
        final AudioFileFormat aff = AudioSystem.getAudioFileFormat(is);
        return this.fromAudioFormat(aff.getFormat(), aff.getFrameLength());
    } catch (final UnsupportedAudioFileException | IOException e) {
        // Multi-catch replaces two identical catch blocks; the cause is
        // preserved so callers can diagnose the unreadable stream.
        throw new SoundTransformException(AudioFormatParserErrorCode.WRONG_TYPE, e, is);
    }
}
}