Error when instantiating AudioRecord


Breno

Feb 2, 2009, 5:52:18 AM
to android-platform
Hi, I'm using SDK 1.5, built from source code, and I enabled audio streaming and all the media classes (by removing the @hide directive). I built the Eclipse plugin as well.

I'm testing the AudioRecord class in a simple activity, and when I instantiate it, an error appears in logcat:

02-02 10:38:33.939: ERROR/AudioRecord-JNI(200): Error creating
AudioRecord instance: initialization check failed.
02-02 10:38:33.997: ERROR/AudioRecord-Java(200):
[ android.media.AudioRecord ] Error code -8 when initializing native
AudioRecord object.

I looked in the source code, and this error could be caused by a permissions problem on the audio hardware, but I haven't been able to solve it. Does anybody have an idea of what is happening? My code and manifest are below.


package com.ttt;

import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Timer;
import java.util.TimerTask;

import android.app.Activity;
import android.media.AmrInputStream;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
import android.widget.ToggleButton;

public class AudioStreaming extends Activity {

public static final int AUDIO_SAMPLE_FREQUENCY_11025HZ = 11025;
public static final int AMR_BUFFER_SIZE = 140;
public static final int AUDIO_BUFFER_SIZE = 5 * AMR_BUFFER_SIZE;
public static final int AUDIO_TIMER = 200;
public static final int AUDIO_TIMER_DELAY = 5;
public static final String path = "/sdcard/stream.amr";

private Timer mAudioTimer;
public ToggleButton m_bt_button;
public TextView m_tv_text;
public static boolean isRecording = false;
public AudioRecord audioRecord = null;
public MediaRecorder recorder;
public AmrInputStream amrInputStream;
public byte[] audioBuffer = new byte[AUDIO_BUFFER_SIZE];
public byte[] amrAudioBuffer = new byte[AMR_BUFFER_SIZE]; // allocate before AmrInputStream.read() to avoid a NullPointerException
public int offset = 0;
public int bytesRead = 0;
public int bytesConverted = 0;
public InputStream is;
public ByteArrayInputStream bais;
public VoicePlayer player;

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
m_bt_button = (ToggleButton) findViewById(R.id.bt_record);
m_tv_text = (TextView) findViewById(R.id.tv_info);

m_tv_text.setText("Stopped");
m_bt_button.setOnClickListener(mRecord);

try {
// ------- THE PROGRAM CRASHES HERE WITHOUT THROWING AN EXCEPTION
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
AUDIO_SAMPLE_FREQUENCY_11025HZ,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, AUDIO_BUFFER_SIZE);
} catch (IllegalArgumentException e){
Log.e("Audio Stream", "Exception while creating audioRecord: " +
e.getMessage());
}

player = new VoicePlayer(path);

}

public void startAudioTimer() {
if (mAudioTimer == null) {
audioRecord.startRecording();
mAudioTimer = new Timer();
mAudioTimer.scheduleAtFixedRate(new AudioTask(), AUDIO_TIMER_DELAY,
AUDIO_TIMER);
Log.i("Audio Streaming Teste", "Audio Timer Started");
}
}

public void stopAudioTimer() {
if (mAudioTimer != null) {
audioRecord.stop();
mAudioTimer.cancel(); // actually stop the scheduled task, not just drop the reference
mAudioTimer = null;
Log.i("Audio Streaming Teste", "Audio Timer Stopped");
}
}

private class AudioTask extends TimerTask {

public AudioTask() {
super();
}

@Override
public void run() {
bytesRead = audioRecord.read(audioBuffer, offset, AMR_BUFFER_SIZE);
is = new ByteArrayInputStream(audioBuffer, offset,
AMR_BUFFER_SIZE);
amrInputStream = new AmrInputStream(is);
try {
bytesConverted = amrInputStream.read(amrAudioBuffer, offset,
AMR_BUFFER_SIZE);
if (bytesConverted == -1) {
stopAudioTimer();
player.play();
}
else {
if (bytesConverted == bytesRead) {
player.writeAmrFile(amrAudioBuffer);
}
else {
Log.e("Audio Player", "bytes converted its different from bytes
read");
}

}

} catch (IOException e) {
Log.e("Audio Streaming", "Exception while converting from PCM16 to
AMR: " + e.getMessage());
}

}
}

private OnClickListener mRecord = new OnClickListener() {

public void onClick(View arg0) {
isRecording ^= true;

if (isRecording) {
startAudioTimer();
m_tv_text.setText("Recording...");

} else {
stopAudioTimer();
try {
player.play();
m_tv_text.setText("playing...");
} catch (FileNotFoundException e) {
Log.e("Audio Player", "Exception: " + e.getMessage());
} catch (IOException e) {
Log.e("Audio Player", "Exception: " + e.getMessage());
}
}

}

};
}

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mot"
android:versionCode="1"
android:versionName="1.0">
<uses-permission android:name="android.permission.RECORD_AUDIO"/
>
<application android:icon="@drawable/icon" android:label="@string/
app_name">
<activity android:name=".AudioStreaming"
android:label="@string/app_name">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category
android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<uses-sdk android:minSdkVersion="3" />
</manifest>


Thanks a lot


Breno

Dave Sparks

Feb 2, 2009, 5:14:16 PM
to android-platform
Have you checked to see if the native AudioRecord stream is being
opened?

Is this in master? What platform?

Frankly, I'm not surprised to see problems; this code has only been tested in our internal Cupcake tree.

Breno

Feb 3, 2009, 12:08:28 PM
to android-platform, Dave Sparks
Hey Dave, thanks for your reply. I found the cause by looking in the source code. The audio buffer is limited to a minimum of 4096 bytes in my case, and there isn't even a LOGV message reporting it. This was tough to track down:

AudioRecord.cpp, line 131

// TODO: Get input frame count from hardware.
int minFrameCount = 1024*2;

if (frameCount == 0) {
frameCount = minFrameCount;
} else if (frameCount < minFrameCount) {
return BAD_VALUE;
}

and android_media_AudioRecord.cpp, line 154

int frameSize = nbChannels * bytesPerSample;
size_t frameCount = buffSizeInBytes / frameSize;

where, at line 146,

int bytesPerSample = audioFormat==javaAudioRecordFields.PCM16 ? 2 :
1;

in my case, PCM_16 = 2

and

AudioRecord.java, line 266

switch (channelConfig) {
case AudioFormat.CHANNEL_CONFIGURATION_DEFAULT:
case AudioFormat.CHANNEL_CONFIGURATION_MONO:
mChannelCount = 1;
mChannelConfiguration =
AudioFormat.CHANNEL_CONFIGURATION_MONO;
break;
case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
mChannelCount = 2;
mChannelConfiguration =
AudioFormat.CHANNEL_CONFIGURATION_STEREO;
break;
default:
mChannelCount = 0;
mChannelConfiguration =
AudioFormat.CHANNEL_CONFIGURATION_INVALID;
throw (new IllegalArgumentException("Unsupported channel configuration."));
}

in my case, mono, 1.
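
As a quick check of the arithmetic implied by the snippets above (the variable names below are just for illustration):

// Minimum buffer implied by the quoted sources, for mono PCM-16.
int minFrameCount  = 1024 * 2;                    // AudioRecord.cpp
int bytesPerSample = 2;                           // ENCODING_PCM_16BIT
int nbChannels     = 1;                           // CHANNEL_CONFIGURATION_MONO
int frameSize      = nbChannels * bytesPerSample; // 2 bytes per frame
int minBufferBytes = minFrameCount * frameSize;   // 2048 * 2 = 4096 bytes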
So, putting it all together, the buffer size must be at least 4096 bytes. If you could update the code to log a message, or update the javadoc to document this, we would appreciate it. Another thing: the software doesn't accept 11025 Hz with MONO and PCM_16. A valid configuration is 8000 Hz, MONO and PCM_16; that works. Now I'm trying to figure out how to put all of this together. Take a look at my code: I expect it to enter the listener once X frames have been recorded, but it never enters the listener:

public class AudioService extends Service {

public static final int AUDIO_BUFFER_SIZE = 100000;
public static final int AUDIO_SAMPLE_FREQUENCY_8000HZ = 8000;

public AudioRecord audioRecord;
public byte[] audioBuffer = new byte[AUDIO_BUFFER_SIZE];

@Override
public void onCreate() {
super.onCreate();
Log.i(AudioStreaming.logTag, "AudioService created");
try {
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
AUDIO_SAMPLE_FREQUENCY_8000HZ,
AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT, AUDIO_BUFFER_SIZE);
} catch (IllegalArgumentException e) {
Log.e(AudioStreaming.logTag,
"Exception while creating audioRecord: " + e.getMessage());
}

audioRecord.setPeriodicNotificationListener(mNotification);
audioRecord.setPositionNotificationPeriod(50);
}

public OnPeriodicNotificationListener mNotification = new
OnPeriodicNotificationListener() {
public void onPeriodicNotification(AudioRecord arg0) {
// bytesRead = arg0.read(audioBuffer, 0,
// AudioStreaming.AUDIO_BUFFER_SIZE);
Log.e(AudioStreaming.logTag,
"Enter in OnPeriodicNotificationListener");
// Log.i(AudioStreaming.logTag,
// "Bytes read from read(byte[],int,int): " + bytesRead);
}
};

@Override
public void onStart(Intent intent, int startId) {
super.onStart(intent, startId);
Log.i(AudioStreaming.logTag, "AudioService started");
audioRecord.startRecording();
}

@Override
public IBinder onBind(Intent arg0) {
return null;
}

@Override
public void onDestroy() {
super.onDestroy();
audioRecord.stop();
audioRecord.release();
}
}
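
(For comparison, the listener hookup in the AudioRecord API that later shipped in the SDK uses setRecordPositionUpdateListener / OnRecordPositionUpdateListener, with setPositionNotificationPeriod taking a period in frames. A minimal sketch against that released API, reusing the fields from the service above:)

// Sketch assuming the released listener API rather than the
// setPeriodicNotificationListener call in this tree.
audioRecord.setPositionNotificationPeriod(50); // notify every 50 recorded frames
audioRecord.setRecordPositionUpdateListener(new AudioRecord.OnRecordPositionUpdateListener() {
    public void onPeriodicNotification(AudioRecord recorder) {
        int read = recorder.read(audioBuffer, 0, audioBuffer.length);
        Log.i(AudioStreaming.logTag, "periodic notification, read " + read + " bytes");
    }
    public void onMarkerReached(AudioRecord recorder) {
        // no marker set in this sketch
    }
});
audioRecord.startRecording();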

What am I doing wrong? Thanks a lot, as always, for your support,

Breno

Breno

Feb 3, 2009, 12:13:00 PM
to android-platform
I forgot to say: this is the master branch. I didn't download the cupcake branch, and I'm using Ubuntu 8.10.

Breno

Dave Sparks

Feb 4, 2009, 12:20:08 AM
to android-platform
There is an upcoming change that will let you query the minimum buffer
size.
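
(That query later shipped as AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat), which returns the minimum buffer size in bytes, or a negative error code when the combination isn't supported. A minimal sketch:)

// Query the minimum buffer size before constructing the AudioRecord.
int minSize = AudioRecord.getMinBufferSize(8000,
        AudioFormat.CHANNEL_CONFIGURATION_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
if (minSize > 0) {
    // minSize is a safe lower bound for the bufferSizeInBytes argument
    AudioRecord rec = new AudioRecord(MediaRecorder.AudioSource.MIC, 8000,
            AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT, minSize);
    // ... record, then rec.release() when done
} else {
    Log.e("Audio Stream", "8000 Hz mono PCM-16 not supported, error " + minSize);
}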

Breno

Feb 4, 2009, 7:24:15 AM
to android-platform
Dave,

Are the changes being committed to the master branch or the Cupcake branch?

thanks

breno

Dave Sparks

Feb 4, 2009, 11:43:58 AM
to android-platform
The changes are coming in the Cupcake branch, but will be merged to
master. Please be advised that we are experiencing some problems with
the external pushes that are causing delays. We hope to resume pushes
soon.