I am trying to figure out how to play a JSpeex-encoded audio file on
Android but am stuck.
Speex, or JSpeex (the Java implementation), is royalty-free and well
suited to voice applications. For speech it gives compression comparable to MP3.
Apparently it will be implemented in Android:
http://code.google.com/p/android/issues/detail?id=354&can=4&colspec=ID%20Type%20Version%20Security%20Status%20Owner%20Summary
The project page is here:
http://sourceforge.net/project/showfiles.php?group_id=84548
I am trying to figure out how to fit it into the Android calls:
MediaPlayer mp = new android.media.MediaPlayer();
mp.setDataSource("/data/data/com.jo.blockpad/files/jo-russkelly_files/
2-0.spx");
mp.prepare();
mp.start();
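In case setDataSource() never takes .spx directly, I have been wondering
whether I could decode with JSpeex in plain Java first and then hand the PCM
to whatever playback API Android exposes. Below is a rough, untested sketch of
just the decode step - it assumes the raw Speex frames have already been
pulled out of the Ogg container (I am skipping the Ogg page parsing here), and
that org.xiph.speex.SpeexDecoder from the JSpeex jar behaves the way I
remember it:

import org.xiph.speex.SpeexDecoder;

// Rough, untested sketch: turn raw Speex frames into 16-bit PCM with JSpeex.
// A real .spx file is an Ogg stream, so the frames would first have to be
// extracted from the Ogg pages (not shown here).
public class SpeexToPcm {

    // mode 0 = narrowband (8 kHz), mono, perceptual enhancement on
    public static SpeexDecoder newNarrowbandDecoder() {
        SpeexDecoder decoder = new SpeexDecoder();
        decoder.init(0, 8000, 1, true);
        return decoder;
    }

    // decode one Speex frame and return the resulting PCM bytes
    public static byte[] decodeFrame(SpeexDecoder decoder, byte[] frame)
            throws Exception {
        decoder.processData(frame, 0, frame.length);
        byte[] pcm = new byte[decoder.getProcessedDataByteSize()];
        decoder.getProcessedData(pcm, 0);
        return pcm;
    }
}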
Any help appreciated.
thanks,
Anil
-------------------------------------------------
Calling it from a Java Sound example
--------------------------------------------------
import java.io.IOException;
import java.net.URL;
import java.util.Observable;
import java.util.Observer;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineEvent;
import javax.sound.sampled.LineListener;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.Mixer;
import javax.sound.sampled.SourceDataLine;
import javax.swing.JOptionPane;
/**
 * Audio playing code has been adapted from Matthias Pfisterer's
 * AudioPlayer.java.
 *
 * Note: reading .spx files through AudioSystem needs the JSpeex Java Sound
 * SPI (jspeex.jar) on the classpath.
 *
 * Anil
 */
public class SpeechPlayer {
// Observable subclass that exposes setChanged() to the line listener below.
static class MyObservable extends Observable {
public synchronized void setChanged() {
super.setChanged();
}
}
private MyObservable observable = new MyObservable();
private static int DEFAULT_EXTERNAL_BUFFER_SIZE = 128000;
private static boolean DEBUG = false;
SourceDataLine line = null;
private Object snippet = null;
public MyObservable getObservable() {
return observable;
}
private static void out(String strMessage) {
System.out.println(strMessage);
}
public void playClip(String urlStr, Object snippetRef) throws Exception {
// remember the snippet reference so it can be handed back to observers when
// playback stops - important, otherwise skim() will fail to move to the
// next node
this.snippet = snippetRef;
/*
 * Flag for forcing a conversion. If set to true, a conversion of the
 * AudioInputStream (AudioSystem.getAudioInputStream(..., AudioInputStream))
 * is done even if the format of the original AudioInputStream would be
 * supported by a SourceDataLine directly. In the original AudioPlayer this
 * flag was set by the command line options "-E" and "-S".
 */
boolean bForceConversion = false;
/*
 * Endianness to use in the conversion. If a conversion of the
 * AudioInputStream is done, this value is used as the endianness of the
 * target AudioFormat. In the original AudioPlayer the default could be
 * altered by the command line option "-B".
 */
boolean bBigEndian = false;
/*
 * Sample size to use in the conversion. If a conversion of the
 * AudioInputStream is done, this value is used as the sample size of the
 * target AudioFormat. In the original AudioPlayer the default could be
 * altered by the command line option "-S".
 */
int nSampleSizeInBits = 16;
String strMixerName = null;
int nExternalBufferSize = DEFAULT_EXTERNAL_BUFFER_SIZE;
int nInternalBufferSize = AudioSystem.NOT_SPECIFIED;
AudioInputStream audioInputStream = null;
URL url = new URL(urlStr);
audioInputStream = AudioSystem.getAudioInputStream(url);
if (DEBUG)
out("AudioPlayer.main(): primary AIS: " + audioInputStream);
/*
 * From the AudioInputStream, i.e. from the sound file, we fetch
 * information about the format of the audio data. This information
 * includes the sampling frequency, the number of channels and the size
 * of the samples. It is needed to ask Java Sound for a suitable output
 * line for this audio stream.
 */
AudioFormat audioFormat = audioInputStream.getFormat();
if (DEBUG)
out("AudioPlayer.main(): primary format: " + audioFormat);
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
audioFormat,
nInternalBufferSize);
boolean bIsSupportedDirectly = AudioSystem.isLineSupported(info);
if (!bIsSupportedDirectly || bForceConversion) {
AudioFormat sourceFormat = audioFormat;
AudioFormat targetFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
sourceFormat.getSampleRate(), nSampleSizeInBits,
sourceFormat.getChannels(),
sourceFormat.getChannels() * (nSampleSizeInBits / 8),
sourceFormat.getSampleRate(), bBigEndian);
if (DEBUG) {
out("AudioPlayer.main(): source format: " + sourceFormat);
out("AudioPlayer.main(): target format: " + targetFormat);
}
audioInputStream = AudioSystem.getAudioInputStream(targetFormat,
audioInputStream);
audioFormat = audioInputStream.getFormat();
if (DEBUG)
out("AudioPlayer.main(): converted AIS: " + audioInputStream);
if (DEBUG)
out("AudioPlayer.main(): converted format: " + audioFormat);
}
line = getSourceDataLine(strMixerName, audioFormat,
nInternalBufferSize);
if (line == null) {
out("AudioPlayer: cannot get SourceDataLine for format " +
audioFormat);
JOptionPane.showMessageDialog(null,
"AudioPlayer: cannot get SourceDataLine for format", "Cannot
play",
JOptionPane.ERROR_MESSAGE);
return;
}
if (DEBUG)
out("AudioPlayer.main(): line: " + line);
if (DEBUG)
out("AudioPlayer.main(): line format: " + line.getFormat());
// Anil
line.addLineListener(new LineListener() {
public void update(LineEvent event) {
System.out.println("update(). SpeechPlayer LineListener. " +
event.getLine() + " "
+ event.getType());
// Notify only when the clip runs out or is forcibly stopped, i.e. on
// the STOP event. Otherwise the application will hang if the blocking
// array is full.
// Anil 01/24/2006
if (event.getType() == LineEvent.Type.STOP) {
observable.setChanged();
System.out.println("SpeechPlayer. About to notify "
+ observable.countObservers()
+ " observers regarding line event STOP.");
observable.notifyObservers(snippet);
}
}
});
/*
 * Still not enough. The line now can receive data, but will not pass
 * them on to the audio output device (which means to your sound card).
 * This has to be activated.
 */
line.start();
/*
 * Ok, finally the line is prepared. Now comes the real job: we have to
 * write data to the line. We do this in a loop. First, we read data
 * from the AudioInputStream to a buffer. Then, we write from this
 * buffer to the Line. This is done until the end of the file is
 * reached, which is detected by a return value of -1 from the read
 * method of the AudioInputStream.
 */
int nBytesRead = 0;
byte[] abData = new byte[nExternalBufferSize];
if (DEBUG)
out("AudioPlayer.main(): starting main loop");
while (nBytesRead != -1) {
try {
nBytesRead = audioInputStream.read(abData, 0, abData.length);
} catch (IOException e) {
e.printStackTrace();
// stop playback and release the line before bailing out
line.stop();
line.close();
JOptionPane.showMessageDialog(null, e, "ERROR",
JOptionPane.ERROR_MESSAGE);
return;
}
if (DEBUG)
out("AudioPlayer.main(): read from AudioInputStream (bytes): "
+ nBytesRead);
if (nBytesRead >= 0) {
int nBytesWritten = line.write(abData, 0, nBytesRead);
if (DEBUG)
out("AudioPlayer.main(): written to SourceDataLine (bytes): "
+ nBytesWritten);
}
}
if (DEBUG)
out("AudioPlayer.main(): finished main loop");
/*
 * Wait until all data has been played. If we did not wait, we would
 * interrupt the playback by prematurely closing the line and exiting
 * the VM.
 *
 * Thanks to Margie Fitch for bringing me on the right path to this
 * solution.
 */
if (DEBUG)
out("AudioPlayer.main(): before drain");
line.drain();
/*
* All data are played. We can close the shop.
*/
if (DEBUG)
out("AudioPlayer.main(): before close");
line.close();
}
// TODO: maybe this can be used by others (AudioLoop?). In that case,
// move it to AudioCommon.
private static SourceDataLine getSourceDataLine(String strMixerName,
AudioFormat audioFormat, int nBufferSize) {
/*
 * Asking for a line is a rather tricky thing. We have to construct an
 * Info object that specifies the desired properties for the line.
 * First, we have to say which kind of line we want. The possibilities
 * are: SourceDataLine (for playback), Clip (for repeated playback) and
 * TargetDataLine (for recording). Here, we want to do normal playback,
 * so we ask for a SourceDataLine. Then, we have to pass an AudioFormat
 * object, so that the Line knows which format the data passed to it
 * will have. Furthermore, we can give Java Sound a hint about how big
 * the internal buffer for the line should be; here that hint is the
 * nBufferSize parameter, and AudioSystem.NOT_SPECIFIED means we don't
 * care about the exact size, in which case Java Sound uses some
 * default value.
 */
SourceDataLine line = null;
DataLine.Info info = new DataLine.Info(SourceDataLine.class,
audioFormat,
nBufferSize);
try {
if (strMixerName != null) {
// look the mixer up by name (inlined replacement for the
// AudioCommon.getMixerInfo() helper from the jsresources examples)
Mixer.Info mixerInfo = null;
for (Mixer.Info mi : AudioSystem.getMixerInfo()) {
if (mi.getName().equals(strMixerName)) {
mixerInfo = mi;
break;
}
}
if (mixerInfo == null) {
String err = "AudioPlayer: mixer not found: " + strMixerName;
out(err);
JOptionPane.showMessageDialog(null, err, "ERROR",
JOptionPane.ERROR_MESSAGE);
return null;
}
Mixer mixer = AudioSystem.getMixer(mixerInfo);
line = (SourceDataLine) mixer.getLine(info);
} else {
line = (SourceDataLine) AudioSystem.getLine(info);
}
/*
 * The line is there, but it is not yet ready to receive audio data.
 * We have to open the line.
 */
line.open(audioFormat, nBufferSize);
} catch (LineUnavailableException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
return line;
}
public static void main(String[] args) throws Exception {
SpeechPlayer sp = new SpeechPlayer();
sp.getObservable().addObserver(new Observer() {
public void update(Observable o, Object arg) {
System.out.println("notification reached " + arg);
}
});
sp.playClip("file:/C:\\Tmp\\lost_ogg_sync_Anil.spx",0,0,false,null,
false);
//sp.playClip("
http://juwo.com/nodepad/anilsvoice.spx", 0, 0, false,
null);
}
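For the Android side, javax.sound.sampled is not available, so I assume the
SourceDataLine write loop above would have to go through
android.media.AudioTrack instead. Something along these lines is what I have
in mind (an untested sketch; it assumes the decoder delivers 16-bit mono PCM
at a known sample rate):

import java.io.InputStream;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

// Untested sketch: the Android counterpart of the SourceDataLine write loop
// above. pcmStream is expected to deliver raw 16-bit mono PCM, e.g. the
// output of a JSpeex decode.
public class AndroidPcmWriter {

    public void play(InputStream pcmStream, int sampleRate) throws Exception {
        int minBuf = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC,
                sampleRate, AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBuf, AudioTrack.MODE_STREAM);
        track.play();

        // read PCM and push it to the track, just like the
        // audioInputStream.read() / line.write() loop above
        byte[] buffer = new byte[4096];
        int read;
        while ((read = pcmStream.read(buffer)) != -1) {
            track.write(buffer, 0, read); // blocks, like SourceDataLine.write()
        }

        // in MODE_STREAM, stop() lets the already-written data finish playing,
        // roughly what line.drain()/line.close() does in the Java Sound version
        track.stop();
        track.release();
    }
}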