/**
 * Lazily creates the reusable frame used to carry audio across the bridge.
 * The frame's buffer is sized to the maximum chunk size of {@code format}.
 *
 * @return The shared bridge frame, created on first call.
 */
private MutableAudioFrame getBridgeFrame() {
  if (bridgeFrame != null) {
    return bridgeFrame;
  }

  bridgeFrame = new MutableAudioFrame();
  bridgeFrame.setBuffer(ByteBuffer.allocate(format.maximumChunkSize()));
  return bridgeFrame;
}
/**
 * @param context Processing context to determine the destination buffer from.
 * @param encoder Encoder to encode the chunk with.
 */
public BufferingPostProcessor(AudioProcessingContext context, AudioChunkEncoder encoder) {
  this.context = context;
  this.encoder = encoder;

  // Direct buffer sized for the largest chunk the output format can produce.
  this.outputBuffer = ByteBuffer.allocateDirect(context.outputFormat.maximumChunkSize());

  // Reusable frame handed to consumers; tagged with the configured output format.
  this.offeredFrame = new MutableAudioFrame();
  this.offeredFrame.setFormat(context.outputFormat);
}
/**
 * @param context Configuration and output information for processing
 * @param inputFrequency Sample rate of the opus track
 * @param inputChannels Number of channels in the opus track
 */
public OpusPacketRouter(AudioProcessingContext context, int inputFrequency, int inputChannels) {
  this.context = context;
  this.inputFrequency = inputFrequency;
  this.inputChannels = inputChannels;

  // Scratch space for reading packet header bytes; no frames seen yet.
  this.headerBytes = new byte[2];
  this.lastFrameSize = 0;

  // Reusable frame offered downstream, at full volume and the configured output format.
  this.offeredFrame = new MutableAudioFrame();
  this.offeredFrame.setVolume(100);
  this.offeredFrame.setFormat(context.outputFormat);
}
/**
 * Drains the player's current track, folding every provided chunk of raw sample
 * data into a CRC32 checksum.
 *
 * @param player Player whose currently playing track is consumed.
 * @return CRC32 checksum over all audio bytes the player provided.
 * @throws Exception If providing a frame fails or times out unexpectedly.
 */
public static long consumeTrack(AudioPlayer player) throws Exception {
  // 960 * 2 * 2 bytes — presumably 960 16-bit stereo samples (one 20ms chunk at 48kHz).
  ByteBuffer chunkBuffer = ByteBuffer.allocate(960 * 2 * 2);

  MutableAudioFrame targetFrame = new MutableAudioFrame();
  targetFrame.setBuffer(chunkBuffer);

  CRC32 checksum = new CRC32();

  // Keep pulling frames (waiting up to 10s each) until the track finishes.
  while (player.getPlayingTrack() != null && player.provide(targetFrame, 10, TimeUnit.SECONDS)) {
    chunkBuffer.flip();
    checksum.update(chunkBuffer.array(), chunkBuffer.position(), chunkBuffer.remaining());
  }

  return checksum.getValue();
}
}