/**
 * Chainable constructor.
 *
 * @param as The audio stream to process
 * @param af The format to process.
 */
protected BeatDetector( final AudioStream as, final AudioFormat af )
{
	super( as );

	// Reset the detector state.
	this.beatTrigger = false;
	this.prevBeatPulse = false;
	this.filter1Out = 0.0f;
	this.filter2Out = 0.0f;
	this.peakEnv = 0.0f;

	// Remember the format and propagate its sample rate (in Hz).
	this.format = af;
	this.setSampleRate( (float)(af.getSampleRateKHz()*1000f) );
}
/**
 * Chainable constructor.
 *
 * @param as The audio stream to process
 * @param af The format to process.
 */
protected BeatDetector( final AudioStream as, final AudioFormat af )
{
	super( as );

	// Store the format and push its sample rate (converted to Hz) upstream.
	this.format = af;
	this.setSampleRate( (float)(af.getSampleRateKHz()*1000f) );

	// Start from a clean detection state.
	this.filter1Out = 0.0f;
	this.filter2Out = 0.0f;
	this.peakEnv = 0.0f;
	this.beatTrigger = false;
	this.prevBeatPulse = false;
}
/** * Setup the filters based on the settings of this class. */ private void setupFilters() { this.filters = new ArrayList<FeedForwardCombFilter>(); for( int i = this.startOfRange; i < this.endOfRange; i++ ) { // Get the frequency of the given note final double f = WesternScaleNote.createNote( i ).frequency; // Add a feed-forward comb filter for that frequency this.filters.add( new FeedForwardCombFilter( f, this.format.getSampleRateKHz()*1000d, 1f ) ); } }
/** * Setup the filters based on the settings of this class. */ private void setupFilters() { this.filters = new ArrayList<FeedForwardCombFilter>(); for( int i = this.startOfRange; i < this.endOfRange; i++ ) { // Get the frequency of the given note final double f = WesternScaleNote.createNote( i ).frequency; // Add a feed-forward comb filter for that frequency this.filters.add( new FeedForwardCombFilter( f, this.format.getSampleRateKHz()*1000d, 1f ) ); } }
@Override public SampleChunk getSampleChunk( final int length, final double time, final double freq, final int gain, final AudioFormat format ) { // Work out how many samples per frequency wave final double samplesPerWave = format.getSampleRateKHz()*1000d/freq; // Phase offset in samples. (f*t)-floor(f*t) is the part number // of waves at this point (assuming the first wave starts at a // phase of zero). final double p = 2*Math.PI*((freq*time)-Math.floor(freq*time)); // Create an appropriate sample buffer final SampleBuffer sb = SampleBufferFactory.createSampleBuffer( format, length ); // Fill it with sin waves final double z = 2*Math.PI/samplesPerWave; for( int i = 0; i < length; i++ ) sb.set( i, (float)(Math.sin( i*z+p )*gain) ); return sb.getSampleChunk(); }
/**
 * {@inheritDoc}
 */
@Override
public String toString()
{
	// Build a human-readable summary of the format, e.g.
	// "[Audio: 44.1KHz, 16bit, 2 channels, signed, little-endian]"
	final StringBuilder s = new StringBuilder( "[Audio: " );
	s.append( getSampleRateKHz() ).append( "KHz, " );
	s.append( getNBits() ).append( "bit, " );
	s.append( getNumChannels() ).append( " channel" );
	if( getNumChannels() > 1 )
		s.append( "s" );
	s.append( ", " ).append( isSigned ? "signed" : "unsigned" );
	s.append( ", " ).append( isBigEndian ? "big-endian" : "little-endian" );
	s.append( "]" );
	return s.toString();
}
/**
 * {@inheritDoc}
 */
@Override
public String toString()
{
	// Name the variable parts first to keep the final concatenation readable.
	final String plural = getNumChannels() > 1 ? "s" : "";
	final String signedness = isSigned ? "signed" : "unsigned";
	final String endianness = isBigEndian ? "big-endian" : "little-endian";

	return "[Audio: " + getSampleRateKHz() + "KHz, " + getNBits() + "bit, "
			+ getNumChannels() + " channel" + plural + ", " + signedness
			+ ", " + endianness + "]";
}
@Override public SampleChunk getSampleChunk( final int length, final double time, final double freq, final int gain, final AudioFormat format ) { final SampleBuffer sb = SampleBufferFactory.createSampleBuffer( format, length ); final double samplesPerWave = format.getSampleRateKHz()*1000d/freq; // phase offset in samples final int p = (int)( samplesPerWave * ((freq*time)-Math.floor(freq*time))); for( int i = 0; i < length; i++ ) { final int x = (i+p) % (int)samplesPerWave; sb.set( i, (float)(x*(gain/samplesPerWave)) ); } return sb.getSampleChunk(); }
@Override public SampleChunk getSampleChunk( final int length, final double time, final double freq, final int gain, final AudioFormat format ) { final SampleBuffer sb = SampleBufferFactory.createSampleBuffer( format, length ); final double samplesPerWave = format.getSampleRateKHz()*1000d/freq; // phase offset in samples final int p = (int)( samplesPerWave * ((freq*time)-Math.floor(freq*time))); for( int i = 0; i < length; i++ ) { final int x = (i+p) % (int)samplesPerWave; sb.set( i, (float)(x*(gain/samplesPerWave)) ); } return sb.getSampleChunk(); }
/**
 * Updates the sample rate in the source processor's format to match the
 * output format.
 *
 * @param ap The source processor
 * @param output The output format
 * @return The fixed source format
 */
private static AudioFormat getFormatSR( final AudioProcessor ap, final AudioFormat output )
{
	// With no source processor there is nothing to fix up.
	if( ap == null )
		return output;

	// Clone so the processor's own format object is left untouched.
	final AudioFormat fixed = ap.getFormat().clone();
	fixed.setSampleRateKHz( output.getSampleRateKHz() );
	return fixed;
}
/**
 * Updates the sample rate in the source processor's format to match the
 * output format.
 *
 * @param ap The source processor
 * @param output The output format
 * @return The fixed source format
 */
private static AudioFormat getFormatSR( final AudioProcessor ap, final AudioFormat output )
{
	if( ap != null )
	{
		// Work on a clone of the source format, overriding only its rate.
		final AudioFormat f = ap.getFormat().clone();
		f.setSampleRateKHz( output.getSampleRateKHz() );
		return f;
	}

	// No processor: the output format is the best we can do.
	return output;
}
/**
 * Construct with given stream and window parameters. Note that the window
 * parameters are given in milliseconds and converted into a number of
 * samples by the method.
 *
 * @param stream The audio stream
 * @param windowSizeMillis The window size in milliseconds
 * @param overlapMillis The overlap between windows in milliseconds
 */
public EffectiveSoundPressure( final AudioStream stream,
		final int windowSizeMillis, final int overlapMillis )
{
	// Window size in samples: sampleRate(kHz) * millis gives the number of
	// sample frames; multiplied by the channel count because the samples
	// are interleaved across channels.
	super( stream, (int) (stream.getFormat().getSampleRateKHz() * windowSizeMillis
			* stream.getFormat().getNumChannels()) );
	// NOTE(review): the "overlap" is passed directly as the window step.
	// For a true overlap the step would normally be windowSize - overlap —
	// confirm against setWindowStep's semantics before changing.
	this.setWindowStep( (int) (stream.getFormat().getSampleRateKHz() * overlapMillis
			* stream.getFormat().getNumChannels()) );
}
/**
 * Construct with given stream and window parameters. Note that the window
 * parameters are given in milliseconds and converted into a number of
 * samples by the method.
 *
 * @param stream The audio stream
 * @param windowSizeMillis The window size in milliseconds
 * @param overlapMillis The overlap between windows in milliseconds
 */
public EffectiveSoundPressure( final AudioStream stream,
		final int windowSizeMillis, final int overlapMillis )
{
	// Millis-to-samples conversion: sampleRate(kHz) * millis = sample
	// frames, times the channel count as the data is interleaved.
	super( stream, (int) (stream.getFormat().getSampleRateKHz() * windowSizeMillis
			* stream.getFormat().getNumChannels()) );
	// NOTE(review): overlapMillis is used as the window step directly;
	// a conventional overlap would imply step = windowSize - overlap.
	// Verify the intended semantics of setWindowStep.
	this.setWindowStep( (int) (stream.getFormat().getSampleRateKHz() * overlapMillis
			* stream.getFormat().getNumChannels()) );
}
/**
 * Process the given sample buffer.
 *
 * @param sb The sample buffer
 * @return The sample buffer
 */
public SampleBuffer process( final SampleBuffer sb )
{
	final double[][] channels = sb.asDoubleChannelArray();
	final double sampleRateHz = sb.getFormat().getSampleRateKHz()*1000d;

	// Process each channel independently, keeping the per-channel results.
	this.lastCalculatedFeature = new double[channels.length][];
	for( int c = 0; c < sb.getFormat().getNumChannels(); c++ )
		this.lastCalculatedFeature[c] = this.process( channels[c], sampleRateHz );

	return sb;
}
/**
 * {@inheritDoc}
 */
@Override
public AudioFormat clone()
{
	// Copy the primary properties via the constructor and the remaining
	// flags via their setters.
	final AudioFormat copy = new AudioFormat( getNBits(),
			getSampleRateKHz(), getNumChannels() );
	copy.setBigEndian( isBigEndian );
	copy.setSigned( isSigned );
	return copy;
}
/**
 * {@inheritDoc}
 */
@Override
public AudioFormat clone()
{
	final AudioFormat af = new AudioFormat( getNBits(), getSampleRateKHz(), getNumChannels() );
	// The endianness and signedness flags are not constructor parameters,
	// so they must be copied separately.
	af.setSigned( isSigned );
	af.setBigEndian( isBigEndian );
	return af;
}
/**
 * Process the given sample buffer.
 *
 * @param sb The sample buffer
 * @return The sample buffer
 */
public SampleBuffer process( final SampleBuffer sb )
{
	// Split the buffer into per-channel sample arrays.
	final double[][] chanSamples = sb.asDoubleChannelArray();
	this.lastCalculatedFeature = new double[chanSamples.length][];

	// Run the per-channel processor over each channel, passing the sample
	// rate in Hz.
	final int nChannels = sb.getFormat().getNumChannels();
	for( int c = 0; c < nChannels; c++ )
	{
		this.lastCalculatedFeature[c] = this.process( chanSamples[c],
				sb.getFormat().getSampleRateKHz()*1000d );
	}

	return sb;
}
/** * Get a Java Sound API AudioFormat object using this object's * properties. * * @return The Java Sound API Audio Format object. */ public javax.sound.sampled.AudioFormat getJavaAudioFormat() { // Convert the OpenIMAJ audio format to a Java Sound audio format object return new javax.sound.sampled.AudioFormat( (int)this.getSampleRateKHz() * 1000, this.getNBits(), this.getNumChannels(), this.isSigned(), this.isBigEndian() ); } }
/** * Get a Java Sound API AudioFormat object using this object's * properties. * * @return The Java Sound API Audio Format object. */ public javax.sound.sampled.AudioFormat getJavaAudioFormat() { // Convert the OpenIMAJ audio format to a Java Sound audio format object return new javax.sound.sampled.AudioFormat( (int)this.getSampleRateKHz() * 1000, this.getNBits(), this.getNumChannels(), this.isSigned(), this.isBigEndian() ); } }
/** * {@inheritDoc} * @see org.openimaj.audio.AudioStream#nextSampleChunk() */ @Override public SampleChunk nextSampleChunk() { final Oscillator o = this.oscillator; if( !this.noteOn ) return null; // o = Oscillator.NONE; final SampleChunk x = o.getSampleChunk( this.sampleChunkLength, this.currentTime, this.frequency, this.gain, this.format ); this.applyADSREnvelope( x.getSampleBuffer() ); this.currentTime += x.getSampleBuffer().size() / (this.format.getSampleRateKHz()*1000d); this.currentTimeMS = this.currentTime * 1000d; return x; }