// Reports the current playback position converted from the frame position.
// NOTE(review): DataLine.getMicrosecondPosition() is specified to return
// MICROseconds, but this delegates to a helper named convertFramesToMilliseconds.
// Either the helper is misnamed or this returns a value 1000x too small —
// confirm against the helper's body before relying on the units.
@Override public long getMicrosecondPosition () { return convertFramesToMilliseconds(getFramePosition()); }
// Seeks by updating the cached frame position; takes effect on the next read.
// NOTE(review): the parameter is named "milliseconds" but the Clip/DataLine
// contract for setMicrosecondPosition is MICROseconds — confirm the unit used
// by convertMillisecondsToFrames (possible 1000x error or misnaming).
@Override public void setMicrosecondPosition (long milliseconds) { framePosition = convertMillisecondsToFrames(milliseconds); }
// Total media length converted from the frame length.
// NOTE(review): same micro- vs. milli-second concern as getMicrosecondPosition —
// the helper's name says milliseconds while the interface promises microseconds.
@Override public long getMicrosecondLength () { return convertFramesToMilliseconds(getFrameLength()); }
// Clip.loop(count): arms looping for the requested count, rewinds the input
// stream, and (re)starts playback.
// NOTE(review): Clip.loop(n) is specified as "play once, then loop n more
// times"; whether countDown implements that correctly depends on the playback
// loop, which is not visible in this chunk — confirm.
@Override public void loop (int count) {
    //PlayN.log().debug("loop(" + count + ") - framePosition: " + framePosition);
    loopCount = count; // remembered total (Clip.LOOP_CONTINUOUSLY is -1)
    countDown = count; // remaining iterations, presumably decremented on each wrap — confirm
    active = true;
    inputStream.reset(); // rewind — assumes the stream's mark is at the start; TODO confirm
    start();
}
@Override public void open (AudioFormat format, byte[] data, int offset, int bufferSize) throws LineUnavailableException { byte[] input = new byte[bufferSize]; for (int ii = 0; ii < input.length; ii++) { input[ii] = data[offset + ii]; } ByteArrayInputStream inputStream = new ByteArrayInputStream(input); try { AudioInputStream ais1 = AudioSystem.getAudioInputStream(inputStream); AudioInputStream ais2 = AudioSystem.getAudioInputStream(format, ais1); open(ais2); } catch (UnsupportedAudioFileException uafe) { throw new IllegalArgumentException(uafe); } catch (IOException ioe) { throw new IllegalArgumentException(ioe); } // TODO - throw IAE for invalid frame size, format. }
// Background loader: opens the resource's audio stream, decodes it to signed
// PCM if needed, opens a Clip on it, and reports success/failure via the
// dispatch callbacks.
public void run () {
    try {
        AudioInputStream ais = rsrc.openAudioStream();
        Clip clip = AudioSystem.getClip();
        if (music) {
            // Music tracks are wrapped in BigClip (project class) — presumably
            // to sidestep Clip's in-memory size limits; confirm BigClip's docs.
            clip = new BigClip(clip);
        }
        AudioFormat baseFormat = ais.getFormat();
        // NOTE(review): Encoding compared with != (reference identity). The
        // built-in constants are shared instances so this usually works, but
        // equals() would be safer if a provider supplies its own Encoding.
        if (baseFormat.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
            AudioFormat decodedFormat = new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                baseFormat.getSampleRate(),
                16, // we have to force sample size to 16
                baseFormat.getChannels(),
                baseFormat.getChannels()*2, // frame size = channels * 2 bytes (16-bit samples)
                baseFormat.getSampleRate(),
                false // little-endian (the original comment said "big endian", but false selects little-endian)
            );
            ais = AudioSystem.getAudioInputStream(decodedFormat, ais);
        }
        clip.open(ais);
        dispatchLoaded(sound, clip);
    } catch (Exception e) {
        dispatchLoadError(sound, e);
    }
} });
// Fragment of a larger method (its declaration is outside this view).
setLoopPoints(0, audioData.length); // loop region spans the whole buffer — NOTE(review): audioData.length is presumably a byte count while Clip loop points are frame indices; confirm setLoopPoints' units
dataLine = AudioSystem.getSourceDataLine(afTemp); // acquire an output line for the (presumably converted) format afTemp
dataLine.open();
byte[] tempData;
// Fragment (truncated in this view): mono sources are expanded to stereo.
if (format.getChannels() < 2) {
    tempData = convertMonoToStereo(data, bytesRead);
    framesRead = bytesRead / format.getFrameSize(); // frame count from the ORIGINAL mono frame size
    bytesRead *= 2; // presumably doubled because each mono sample is duplicated — confirm convertMonoToStereo
// Clip.loop(count): arms looping for the requested count, rewinds the input
// stream, and (re)starts playback.
// NOTE(review): Clip.loop(n) is specified as "play once, then loop n more
// times"; whether countDown implements that correctly depends on the playback
// loop, which is not visible in this chunk — confirm.
@Override public void loop (int count) {
    //PlayN.log().debug("loop(" + count + ") - framePosition: " + framePosition);
    loopCount = count; // remembered total (Clip.LOOP_CONTINUOUSLY is -1)
    countDown = count; // remaining iterations, presumably decremented on each wrap — confirm
    active = true;
    inputStream.reset(); // rewind — assumes the stream's mark is at the start; TODO confirm
    start();
}
@Override public void open (AudioFormat format, byte[] data, int offset, int bufferSize) throws LineUnavailableException { byte[] input = new byte[bufferSize]; for (int ii = 0; ii < input.length; ii++) { input[ii] = data[offset + ii]; } ByteArrayInputStream inputStream = new ByteArrayInputStream(input); try { AudioInputStream ais1 = AudioSystem.getAudioInputStream(inputStream); AudioInputStream ais2 = AudioSystem.getAudioInputStream(format, ais1); open(ais2); } catch (UnsupportedAudioFileException uafe) { throw new IllegalArgumentException(uafe); } catch (IOException ioe) { throw new IllegalArgumentException(ioe); } // TODO - throw IAE for invalid frame size, format. }
// Background loader: opens the resource's audio stream, decodes it to signed
// PCM if needed, opens a Clip on it, and reports success/failure via the
// dispatch callbacks.
public void run () {
    try {
        AudioInputStream ais = rsrc.openAudioStream();
        Clip clip = AudioSystem.getClip();
        if (music) {
            // Music tracks are wrapped in BigClip (project class) — presumably
            // to sidestep Clip's in-memory size limits; confirm BigClip's docs.
            clip = new BigClip(clip);
        }
        AudioFormat baseFormat = ais.getFormat();
        // NOTE(review): Encoding compared with != (reference identity). The
        // built-in constants are shared instances so this usually works, but
        // equals() would be safer if a provider supplies its own Encoding.
        if (baseFormat.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
            AudioFormat decodedFormat = new AudioFormat(
                AudioFormat.Encoding.PCM_SIGNED,
                baseFormat.getSampleRate(),
                16, // we have to force sample size to 16
                baseFormat.getChannels(),
                baseFormat.getChannels()*2, // frame size = channels * 2 bytes (16-bit samples)
                baseFormat.getSampleRate(),
                false // little-endian (the original comment said "big endian", but false selects little-endian)
            );
            ais = AudioSystem.getAudioInputStream(decodedFormat, ais);
        }
        clip.open(ais);
        dispatchLoaded(sound, clip);
    } catch (Exception e) {
        dispatchLoadError(sound, e);
    }
} });
// Fragment of a larger method (its declaration is outside this view).
setLoopPoints(0, audioData.length); // loop region spans the whole buffer — NOTE(review): audioData.length is presumably a byte count while Clip loop points are frame indices; confirm setLoopPoints' units
dataLine = AudioSystem.getSourceDataLine(afTemp); // acquire an output line for the (presumably converted) format afTemp
dataLine.open();
byte[] tempData;
// Fragment (truncated in this view): mono sources are expanded to stereo.
if (format.getChannels() < 2) {
    tempData = convertMonoToStereo(data, bytesRead);
    framesRead = bytesRead / format.getFrameSize(); // frame count from the ORIGINAL mono frame size
    bytesRead *= 2; // presumably doubled because each mono sample is duplicated — confirm convertMonoToStereo
// Reports the current playback position converted from the frame position.
// NOTE(review): DataLine.getMicrosecondPosition() is specified to return
// MICROseconds, but this delegates to a helper named convertFramesToMilliseconds.
// Either the helper is misnamed or this returns a value 1000x too small —
// confirm against the helper's body before relying on the units.
@Override public long getMicrosecondPosition () { return convertFramesToMilliseconds(getFramePosition()); }
// Total media length converted from the frame length.
// NOTE(review): same micro- vs. milli-second concern as getMicrosecondPosition —
// the helper's name says milliseconds while the interface promises microseconds.
@Override public long getMicrosecondLength () { return convertFramesToMilliseconds(getFrameLength()); }
// Seeks by updating the cached frame position; takes effect on the next read.
// NOTE(review): the parameter is named "milliseconds" but the Clip/DataLine
// contract for setMicrosecondPosition is MICROseconds — confirm the unit used
// by convertMillisecondsToFrames (possible 1000x error or misnaming).
@Override public void setMicrosecondPosition (long milliseconds) { framePosition = convertMillisecondsToFrames(milliseconds); }
// Clip.loop(count): arms looping for the requested count, rewinds the input
// stream, and (re)starts playback.
// NOTE(review): Clip.loop(n) is specified as "play once, then loop n more
// times"; whether countDown implements that correctly depends on the playback
// loop, which is not visible in this chunk — confirm.
@Override public void loop (int count) {
    //PlayN.log().debug("loop(" + count + ") - framePosition: " + framePosition);
    loopCount = count; // remembered total (Clip.LOOP_CONTINUOUSLY is -1)
    countDown = count; // remaining iterations, presumably decremented on each wrap — confirm
    active = true;
    inputStream.reset(); // rewind — assumes the stream's mark is at the start; TODO confirm
    start();
}
@Override public void open (AudioFormat format, byte[] data, int offset, int bufferSize) throws LineUnavailableException { byte[] input = new byte[bufferSize]; for (int ii = 0; ii < input.length; ii++) { input[ii] = data[offset + ii]; } ByteArrayInputStream inputStream = new ByteArrayInputStream(input); try { AudioInputStream ais1 = AudioSystem.getAudioInputStream(inputStream); AudioInputStream ais2 = AudioSystem.getAudioInputStream(format, ais1); open(ais2); } catch (UnsupportedAudioFileException uafe) { throw new IllegalArgumentException(uafe); } catch (IOException ioe) { throw new IllegalArgumentException(ioe); } // TODO - throw IAE for invalid frame size, format. }
), ais); clip = new BigClip(); } else { DataLine.Info info = new DataLine.Info(Clip.class, format);
// Fragment of a larger method (its declaration is outside this view).
setLoopPoints(0, audioData.length); // loop region spans the whole buffer — NOTE(review): audioData.length is presumably a byte count while Clip loop points are frame indices; confirm setLoopPoints' units
dataLine = AudioSystem.getSourceDataLine(afTemp); // acquire an output line for the (presumably converted) format afTemp
dataLine.open();