/**
 * Create a new audio player in a separate thread for playing audio.
 *
 * @param as
 *            The audio stream to play.
 * @return The audio player created.
 */
public static AudioPlayer createAudioPlayer(final AudioStream as) {
	final AudioPlayer player = new AudioPlayer(as);
	final Thread playbackThread = new Thread(player);
	playbackThread.start();
	return player;
}
/**
 * Create a new audio player in a separate thread for playing audio.
 * The returned player is already running on its own thread.
 *
 * @param as
 *            The audio stream to play.
 * @return The audio player created.
 */
public static AudioPlayer createAudioPlayer(final AudioStream as) {
	final AudioPlayer audioPlayer = new AudioPlayer(as);
	// Kick off playback asynchronously before handing the player back.
	new Thread(audioPlayer).start();
	return audioPlayer;
}
/**
 * Create a new audio player in a separate thread for playing audio. To find
 * out device names, use {@link AudioUtils#getDevices()}.
 *
 * @param as
 *            The audio stream to play.
 * @param device
 *            The name of the device to use.
 * @return The audio player created.
 */
public static AudioPlayer createAudioPlayer(final AudioStream as, final String device) {
	final AudioPlayer player = new AudioPlayer(as, device);
	final Thread playbackThread = new Thread(player);
	playbackThread.start();
	return player;
}
/**
 * Create a new audio player in a separate thread for playing audio on a
 * specific output device. To find out device names, use
 * {@link AudioUtils#getDevices()}.
 *
 * @param as
 *            The audio stream to play.
 * @param device
 *            The name of the device to use.
 * @return The audio player created.
 */
public static AudioPlayer createAudioPlayer(final AudioStream as, final String device) {
	final AudioPlayer audioPlayer = new AudioPlayer(as, device);
	// Start playback asynchronously; callers get back a running player.
	new Thread(audioPlayer).start();
	return audioPlayer;
}
/**
 * Plays a sound through the audio API. Note that {@code run()} is invoked
 * directly (not on a new thread), so this call blocks until playback ends.
 */
protected void playNormalSound( AudioStream s )
{
	final AudioPlayer player = new AudioPlayer( s );
	player.run();
}
/**
 * Plays a sound through the audio API, synchronously on the calling
 * thread (the player's {@code run()} is called directly).
 */
protected void playNormalSound( AudioStream s )
{
	new AudioPlayer( s ).run();
}
public static void main(String[] args) { try { // final Video<MBFImage> video = new VideoCapture(320, 240); final JavaSoundAudioGrabber audio = new JavaSoundAudioGrabber(new AudioFormat(16, 44.1, 2)); audio.setMaxBufferSize(1024); new Thread(audio).start(); Thread.sleep(100); // final VideoDisplay<MBFImage> display = // VideoDisplay.createVideoDisplay(video, audio); new Thread(new AudioPlayer(audio)).start(); } catch (final Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
public static void main(String[] args) { try { // final Video<MBFImage> video = new VideoCapture(320, 240); final JavaSoundAudioGrabber audio = new JavaSoundAudioGrabber(new AudioFormat(16, 44.1, 2)); audio.setMaxBufferSize(1024); new Thread(audio).start(); Thread.sleep(100); // final VideoDisplay<MBFImage> display = // VideoDisplay.createVideoDisplay(video, audio); new Thread(new AudioPlayer(audio)).start(); } catch (final Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } }
/**
 * Plays a processed sound through the audio API: the stream is first run
 * through a volume-adjust processor (gain 0.4), then played synchronously
 * on the calling thread.
 */
protected void playProcessedSound( AudioStream s )
{
	final VolumeAdjustProcessor volumeProcessor = new VolumeAdjustProcessor( 0.4f, s );
	final AudioPlayer player = new AudioPlayer( volumeProcessor );
	player.run();
}
/**
 * Plays a processed sound through the audio API. The source stream is
 * chained through a {@link VolumeAdjustProcessor} (factor 0.4) before
 * blocking playback on the current thread.
 */
protected void playProcessedSound( AudioStream s )
{
	new AudioPlayer( new VolumeAdjustProcessor( 0.4f, s ) ).run();
}
/**
 * Construct a video display with the given video and audio.
 *
 * @param v
 *            The video
 * @param a
 *            The audio
 * @param screen
 *            The frame to draw into.
 */
public VideoDisplay(final Video<T> v, final AudioStream a, final ImageComponent screen)
{
	this.video = v;
	this.screen = screen;

	if (a == null)
	{
		// No audio: fall back to a basic time keeper. countFrames() == -1
		// marks a live (unbounded) video source.
		this.timeKeeper = new BasicVideoTimeKeeper(this.video.countFrames() == -1);
	}
	else
	{
		// The audio player doubles as the synchronisation time keeper.
		this.audioPlayer = new AudioPlayer(a);
		this.timeKeeper = this.audioPlayer;
	}

	this.videoDisplayListeners = new ArrayList<VideoDisplayListener<T>>();
	this.stateListeners = new ArrayList<VideoDisplayStateListener>();
	this.positionListeners = new ArrayList<VideoPositionListener>();
}
/**
 * Construct a video display with the given video and audio.
 *
 * @param v
 *            The video
 * @param a
 *            The audio
 * @param screen
 *            The frame to draw into.
 */
public VideoDisplay(final Video<T> v, final AudioStream a, final ImageComponent screen)
{
	this.video = v;

	if (a != null)
	{
		// With audio, the audio player also acts as the synchronisation
		// time keeper.
		this.audioPlayer = new AudioPlayer(a);
		this.timeKeeper = this.audioPlayer;
	}
	else
	{
		// Without audio, use a basic time keeper; the flag indicates a
		// live video (length unknown, countFrames() == -1).
		this.timeKeeper = new BasicVideoTimeKeeper(this.video.countFrames() == -1);
	}

	this.screen = screen;
	this.videoDisplayListeners = new ArrayList<VideoDisplayListener<T>>();
	this.stateListeners = new ArrayList<VideoDisplayStateListener>();
	this.positionListeners = new ArrayList<VideoPositionListener>();
}
// NOTE(review): fragment of a larger method not visible in this chunk;
// 'audio' is presumably an AudioStream declared earlier — confirm in context.
final AudioPlayer ap = new AudioPlayer( audio );
// NOTE(review): statement from a method whose surrounding code is not shown;
// wraps the 'audio' stream (declared elsewhere) in a player.
final AudioPlayer ap = new AudioPlayer( audio );
/** * Main * @param args CLAs */ public static void main( String[] args ) { double fc = 1000; // mid-point 1000Hz double q = 1600; // HPF @ 200Hz, LPF @ 1800Hz XuggleAudio s = new XuggleAudio( new File("videoplayback.mp4") ); EQFilter lpf = new EQFilter( s, EQType.LPF, fc+q/2 ); EQFilter hpf = new EQFilter( lpf, EQType.HPF, fc-q/2 ); // AudioSpectragram as = new AudioSpectragram( hpf ); // as.addListener( new SpectragramCompleteListener() // { // @Override // public void spectragramComplete( AudioSpectragram as ) // { // DisplayUtilities.display( as.getLastGeneratedView() ); // } // } ); // as.processStream(); AudioPlayer ap = new AudioPlayer( hpf ); ap.run(); } }
/** * Main * @param args CLAs */ public static void main( String[] args ) { double fc = 1000; // mid-point 1000Hz double q = 1600; // HPF @ 200Hz, LPF @ 1800Hz XuggleAudio s = new XuggleAudio( new File("videoplayback.mp4") ); EQFilter lpf = new EQFilter( s, EQType.LPF, fc+q/2 ); EQFilter hpf = new EQFilter( lpf, EQType.HPF, fc-q/2 ); // AudioSpectragram as = new AudioSpectragram( hpf ); // as.addListener( new SpectragramCompleteListener() // { // @Override // public void spectragramComplete( AudioSpectragram as ) // { // DisplayUtilities.display( as.getLastGeneratedView() ); // } // } ); // as.processStream(); AudioPlayer ap = new AudioPlayer( hpf ); ap.run(); } }
// NOTE(review): fragment — the anonymous AudioEventListener body continues
// past this chunk; 'aw' and 'xa' are declared in code not visible here.
final JFrame f = aw.showWindow( "Audio" ); final AudioPlayer ap = new AudioPlayer( xa ); ap.addAudioEventListener( new AudioEventListener()
// NOTE(review): incomplete fragment (listener body cut off below); shows a
// window, wraps stream 'xa' in a player, and registers an event listener.
final JFrame f = aw.showWindow( "Audio" ); final AudioPlayer ap = new AudioPlayer( xa ); ap.addAudioEventListener( new AudioEventListener()
// NOTE(review): fragment from an unseen method; 'mixer' is presumably an
// audio-mixer stream declared earlier. Playback starts on its own thread.
final AudioPlayer ap = new AudioPlayer( mixer ); new Thread( ap ).start();
// NOTE(review): statement pair from a method not fully visible here; wraps
// the 'mixer' stream in a player and starts it asynchronously.
final AudioPlayer ap = new AudioPlayer( mixer ); new Thread( ap ).start();