ExoPlayer channel panning
I am trying to follow this post here: Set app to output ALL audio to one channel, in order to pan the audio coming out of ExoPlayer to the left or to the right.
Code from MainActivity.java:
StereoVolumeProcessor stereoVolumeProcessor = new StereoVolumeProcessor();
stereoVolumeProcessor.setChannelMap(new int[]{0,1});
stereoVolumeProcessor.setVolume(1, 0);
AudioProcessor.AudioFormat inputAudioFormat = new AudioProcessor.AudioFormat(
44100, 2, C.ENCODING_PCM_16BIT);
try {
stereoVolumeProcessor.configure(inputAudioFormat);
} catch (AudioProcessor.UnhandledAudioFormatException e) {
e.printStackTrace();
}
RenderersFactory factory = new DefaultRenderersFactory(context){
@Override
protected AudioProcessor[] buildAudioProcessors() {
return new AudioProcessor[] {stereoVolumeProcessor};
}
};
LoadControl loadControl = new DefaultLoadControl.Builder()
.setBufferDurationsMs(
DefaultLoadControl.DEFAULT_MIN_BUFFER_MS,
DefaultLoadControl.DEFAULT_MAX_BUFFER_MS,
DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_MS,
DefaultLoadControl.DEFAULT_BUFFER_FOR_PLAYBACK_AFTER_REBUFFER_MS)
.createDefaultLoadControl();
SimpleExoPlayer testPlayer = ExoPlayerFactory.newSimpleInstance(context,factory, new DefaultTrackSelector(context), loadControl);
dataSourceFactory = new DefaultDataSourceFactory(activity.getApplicationContext(),
Util.getUserAgent(activity.getApplicationContext(), "TEST"));
test_filepath = "asset:///test.mp3";
test_file_uri = Uri.parse(test_filepath);
testSource = new ProgressiveMediaSource.Factory(dataSourceFactory)
.createMediaSource(test_file_uri);
testPlayer.prepare(testSource);
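For context, the actual panning is supposed to happen in queueInput() of the processor below: each interleaved 16-bit sample is multiplied by the volume of its channel, so with setVolume(1, 0) I expect the right channel to come out silent. A minimal standalone sketch of that per-sample scaling (plain Java with made-up sample values, only to illustrate what I expect the processor to do):
// Interleaved 16-bit stereo frames: left, right, left, right, ...
short[] input = {1000, 1000, -2000, -2000, 3000, 3000};
float[] volume = {1f, 0f}; // same as setVolume(1, 0): left at full volume, right muted

short[] output = new short[input.length];
for (int i = 0; i < input.length; i++) {
    // The channel index alternates 0 (left) / 1 (right), like ch in queueInput().
    output[i] = (short) (input[i] * volume[i % 2]);
}
// Expected output: {1000, 0, -2000, 0, 3000, 0}, i.e. only the left channel survives.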
Code from StereoVolumeProcessor.java:
public class StereoVolumeProcessor implements AudioProcessor {
private int channelCount;
private int sampleRateHz;
private int[] pendingOutputChannels;
private boolean active;
private int[] outputChannels;
private ByteBuffer buffer;
private ByteBuffer outputBuffer;
private boolean inputEnded;
private float[] volume;
private static final int LEFT_SPEAKER = 0;
private static final int RIGHT_SPEAKER = 1;
public StereoVolumeProcessor() {
buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
}
public void setChannelMap(int[] outputChannels) {
Log.d("audioProcessor", "setChannelMap called");
pendingOutputChannels = outputChannels;
}
//@Override
public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws AudioProcessor.UnhandledAudioFormatException {
Log.d("audioProcessor", "configure called");
if(volume == null){
Log.d("audioProcessor", "volume null error called");
throw new IllegalStateException("volume has not been set! Call setVolume(float left,float right)");
}
boolean outputChannelsChanged = !Arrays.equals(pendingOutputChannels, outputChannels);
outputChannels = pendingOutputChannels;
if (outputChannels == null) {
active = false;
return outputChannelsChanged;
}
if (encoding != C.ENCODING_PCM_16BIT) {
//throw new AudioProcessor.UnhandledAudioFormatException(sampleRateHz, channelCount, encoding);
Log.d("audioProcessor","encoding is not PCM 16_BIT");
}
if (!outputChannelsChanged && this.sampleRateHz == sampleRateHz
&& this.channelCount == channelCount) {
Log.d("audioProcessor","outputchannels not changed ");
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
active = true;
return true;
//return setInputFormat(sampleRateHz, channelCount, encoding);
}
@Override
public AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException {
Log.d("audioProcessor","overriden audioformat configure called");
//return null;
@Nullable int[] outputChannels = pendingOutputChannels;
if (outputChannels == null) {
return AudioFormat.NOT_SET;
}
if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
Log.d("audioprocessor","encoding not correct");
throw new UnhandledAudioFormatException(inputAudioFormat);
}
boolean active = inputAudioFormat.channelCount != outputChannels.length;
for (int i = 0; i < outputChannels.length; i++) {
int channelIndex = outputChannels[i];
if (channelIndex >= inputAudioFormat.channelCount) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
active |= (channelIndex != i);
}
Log.d("audioprocessor", "overriden audioformat - encoding: " + inputAudioFormat.encoding + " channelCount: " + inputAudioFormat.channelCount + " samplerate: " + inputAudioFormat.sampleRate + " outputchannels.length: " + outputChannels.length);
return active
? new AudioFormat(inputAudioFormat.sampleRate, outputChannels.length, C.ENCODING_PCM_16BIT)
: AudioFormat.NOT_SET;
}
@Override
public boolean isActive() {
return active;
}
//@Override
public int getOutputChannelCount() {
Log.d("audioProcessor", "getOutputChannelCount called");
return outputChannels == null ? channelCount : outputChannels.length;
}
//@Override
public int getOutputEncoding() {
Log.d("audioProcessor", "getOutputEncoding called");
return C.ENCODING_PCM_16BIT;
}
/**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)} and is undefined if the instance is not
* active.
*/
//@Override
public int getOutputSampleRateHz() {
Log.d("audioProcessor", "getOutputSampleRateHz called");
return sampleRateHz;
}
@Override
public void queueInput(ByteBuffer inputBuffer) {
Log.d("audioProcessor", "queueInput called");
int position = inputBuffer.position();
int limit = inputBuffer.limit();
int size = limit - position;
if (buffer.capacity() < size) {
buffer = ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
} else {
buffer.clear();
}
if(isActive()){
int ch = 0;
for(int i = position;i<limit;i+=2){
short sample = (short) (inputBuffer.getShort(i)* volume[ch++]);
buffer.putShort(sample);
ch%=channelCount;
}
}else{
throw new IllegalStateException();
}
inputBuffer.position(limit);
buffer.flip();
outputBuffer = buffer;
}
@Override
public void queueEndOfStream() {
Log.d("audioProcessor", "queueEndofStream called");
inputEnded = true;
}
/**
 * Sets the volume of the left and right channels/speakers.
 * Values are in the range 0.0 to 1.0.
 *
 * @param left volume for the left channel
 * @param right volume for the right channel
 */
public void setVolume(float left,float right){
Log.d("audioProcessor", "setVolume called");
volume = new float[]{left,right};
}
public float getLeftVolume(){
Log.d("audioProcessor", "getleftvolume called");
return volume[LEFT_SPEAKER];
}
public float getRightVolume(){
Log.d("audioProcessor", "getrightvolume called");
return volume[RIGHT_SPEAKER];
}
@Override
public ByteBuffer getOutput() {
Log.d("audioProcessor", "getoutput called");
ByteBuffer outputBuffer = this.outputBuffer;
this.outputBuffer = EMPTY_BUFFER;
return outputBuffer;
}
@SuppressWarnings("ReferenceEquality")
@Override
public boolean isEnded() {
Log.d("audioProcessor", "isEnded called");
return inputEnded && outputBuffer == EMPTY_BUFFER;
}
@Override
public void flush() {
Log.d("audioProcessor", "flush called");
outputBuffer = EMPTY_BUFFER;
inputEnded = false;
}
@Override
public void reset() {
Log.d("audioProcessor", "reset called");
flush();
buffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE;
sampleRateHz = Format.NO_VALUE;
outputChannels = null;
active = false;
}
}
I have noticed that queueInput() is never called and the panning does not work. I am not sure what I am doing wrong here.
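If it helps narrow things down, this is a check I could add inside the existing try/catch in MainActivity (same processor and input format as above; the log tag and message are just placeholders) to see what the processor reports back after configuration:
AudioProcessor.AudioFormat outputFormat = stereoVolumeProcessor.configure(inputAudioFormat);
Log.d("audioProcessor", "configure returned sampleRate=" + outputFormat.sampleRate
        + ", channelCount=" + outputFormat.channelCount
        + ", isActive=" + stereoVolumeProcessor.isActive());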