I'm trying to record the audio of a video-call session. The audio is saved to a file, but the sound is distorted. Why is this happening, and how can I fix it?
Source Code:
/**
 * Implements the AudioRecordSamplesReadyCallback interface and writes
 * recorded raw audio samples to an output file.
 *
 * <p>Thread-safety: {@link #start()} / {@link #stop()} are expected on the
 * executor thread, while sample callbacks arrive from the WebRTC audio thread.
 * All shared mutable state ({@code rawAudioFileOutputStream},
 * {@code isRunning}, {@code fileSizeInBytes}) is guarded by {@code lock}.
 */
public class RecordedAudioToFileController implements SamplesReadyCallback, WebRtcAudioRecord.WebRtcAudioRecordSamplesReadyCallback {
  private static final String TAG = "AUDIO RECORDER";
  // 58348800 bytes corresponds to ~10 minutes of 16-bit mono PCM at 48 kHz;
  // the recording is capped at 10x that.
  private static final long MAX_FILE_SIZE_IN_BYTES = 58348800L * 10;
  // Guards rawAudioFileOutputStream, isRunning and fileSizeInBytes.
  private final Object lock = new Object();
  private final ExecutorService executor;
  @Nullable
  private OutputStream rawAudioFileOutputStream;
  private boolean isRunning;
  private long fileSizeInBytes;
  private final String audioPath;

  /**
   * @param executor single-threaded executor used to serialize file writes.
   * @param audioPath absolute path of the raw PCM output file.
   */
  public RecordedAudioToFileController(ExecutorService executor, String audioPath) {
    Log.d(TAG, "ctor");
    this.executor = executor;
    this.audioPath = audioPath;
  }

  /**
   * Enables recording. Should be called on the same executor thread as the
   * one provided at construction.
   *
   * @return always {@code true}.
   */
  public boolean start() {
    Log.d(TAG, "start");
    synchronized (lock) {
      isRunning = true;
    }
    return true;
  }

  /**
   * Disables recording and closes the output file, discarding any queued
   * writes that arrive afterwards. Should be called on the same executor
   * thread as the one provided at construction.
   */
  public void stop() {
    Log.d(TAG, "stop");
    synchronized (lock) {
      isRunning = false;
      if (rawAudioFileOutputStream != null) {
        try {
          rawAudioFileOutputStream.close();
        } catch (IOException e) {
          Log.e(TAG, "Failed to close file with saved input audio: " + e);
        }
        rawAudioFileOutputStream = null;
      }
      fileSizeInBytes = 0;
    }
  }

  /**
   * Opens {@code audioPath} for writing. Must be called with {@code lock}
   * held. On failure the stream stays {@code null} and samples are dropped.
   *
   * @param sampleRate sample rate of the incoming audio; currently unused —
   *     the file name is fixed by {@code audioPath}, so the caller must know
   *     the rate/channel layout to play the raw PCM back correctly.
   * @param channelCount channel count of the incoming audio; currently unused
   *     (see above).
   */
  private void openRawAudioOutputFile(int sampleRate, int channelCount) {
    final File outputFile = new File(audioPath);
    try {
      rawAudioFileOutputStream = new FileOutputStream(outputFile);
    } catch (FileNotFoundException e) {
      Log.e(TAG, "Failed to open audio output file: " + e.getMessage());
    }
    Log.d(TAG, "Opened file for recording: " + audioPath);
  }

  /** Called when new audio samples are ready. */
  @Override
  public void onWebRtcAudioRecordSamplesReady(JavaAudioDeviceModule.AudioSamples samples) {
    Log.d(TAG, "onWebRtcAudioRecordSamplesReady called with JavaAudioDeviceModule.AudioSamples");
    // The native audio layer on Android should use 16-bit PCM format.
    if (samples.getAudioFormat() != AudioFormat.ENCODING_PCM_16BIT) {
      Log.e(TAG, "Invalid audio format");
      return;
    }
    synchronized (lock) {
      // Abort early if stop() has been called.
      if (!isRunning) {
        return;
      }
      // Open the file lazily on the first callback, when the audio
      // parameters are known.
      if (rawAudioFileOutputStream == null) {
        openRawAudioOutputFile(samples.getSampleRate(), samples.getChannelCount());
        fileSizeInBytes = 0;
      }
    }
    // Snapshot the sample buffer on the callback thread BEFORE handing it to
    // the executor: the byte[] backing AudioSamples may be reused/overwritten
    // by the audio layer before the queued write runs, which corrupts
    // (distorts) the recorded audio.
    // NOTE(review): assumes getData() may alias a recycled buffer — confirm
    // the ownership contract of AudioSamples in this WebRTC version.
    final byte[] data = samples.getData().clone();
    // Append the recorded 16-bit audio samples to the open output file.
    executor.execute(() -> {
      synchronized (lock) {
        // stop() may have closed the stream after this task was queued;
        // writing to a closed stream would throw, and the previous
        // unsynchronized access raced with stop().
        if (!isRunning || rawAudioFileOutputStream == null) {
          return;
        }
        try {
          // Enforce the maximum file size; excess samples are dropped.
          if (fileSizeInBytes < MAX_FILE_SIZE_IN_BYTES) {
            rawAudioFileOutputStream.write(data);
            fileSizeInBytes += data.length;
          }
        } catch (IOException e) {
          Log.e(TAG, "Failed to write audio to file: " + e.getMessage());
        }
      }
    });
  }

  /** Called when new audio samples are ready (legacy callback type). */
  @Override
  public void onWebRtcAudioRecordSamplesReady(WebRtcAudioRecord.AudioSamples samples) {
    Log.d(TAG, "onWebRtcAudioRecordSamplesReady called with WebRtcAudioRecord.AudioSamples");
    // Adapt the legacy sample container to the JavaAudioDeviceModule shape
    // and delegate to the primary handler.
    onWebRtcAudioRecordSamplesReady(new JavaAudioDeviceModule.AudioSamples(samples.getAudioFormat(),
        samples.getChannelCount(), samples.getSampleRate(), samples.getData()));
  }
}