// Plays the planar float audio samples of a JavaCV Frame through an Android AudioTrack.
// NOTE(review): assumes grabber delivers float (FLT/FLTP) samples — confirm against the
// grabber's sampleFormat before relying on this.
Frame frame = grabber.grabFrame(); // frame.image / frame.samples carry the decoded data
int frequency = 44100;
int minBufSize = AudioTrack.getMinBufferSize(frequency,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, frequency,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
        minBufSize, AudioTrack.MODE_STREAM);
mAudioTrack.play();
try {
    for (Buffer b : samples) {
        FloatBuffer fb = (FloatBuffer) b;
        fb.rewind();
        // BUG FIX: the original reinterpreted the raw float bits as 16-bit shorts
        // (float bytes -> short pairs), which yields noise. Float PCM is in [-1, 1]
        // and must be SCALED to the short range, with clamping to avoid wrap-around.
        short[] shorts = new short[fb.remaining()];
        for (int i = 0; i < shorts.length; i++) {
            float s = fb.get();
            if (s > 1f) s = 1f;
            if (s < -1f) s = -1f;
            shorts[i] = (short) (s * Short.MAX_VALUE);
        }
        mAudioTrack.write(shorts, 0, shorts.length);
    }
} catch (Exception e) {
    // Best-effort playback loop from the original post: log and continue.
    e.printStackTrace();
}
package com.shihabsoft.castanythingtopc;
import android.media.AudioFormat;import android.media.AudioManager;import android.media.AudioTrack;
/**
 * Thin wrapper around a streaming-mode {@link AudioTrack} that accepts float
 * samples in [-1, 1] and writes them as 16-bit PCM.
 */
public class AndroidAudioDevice {
    AudioTrack track;
    short[] buffer = new short[1024]; // scratch buffer, grown on demand in fillBuffer

    /**
     * Creates and starts a streaming AudioTrack.
     *
     * @param sampleRate output sample rate in Hz
     * @param channels   1 for mono, otherwise stereo
     */
    public AndroidAudioDevice(int sampleRate, int channels) {
        // FIX: CHANNEL_CONFIGURATION_MONO/STEREO are deprecated; the modern
        // CHANNEL_OUT_MONO/CHANNEL_OUT_STEREO constants are the direct replacements.
        int channelConfig = channels == 1
                ? AudioFormat.CHANNEL_OUT_MONO
                : AudioFormat.CHANNEL_OUT_STEREO;
        int minSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT);
        track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT, minSize, AudioTrack.MODE_STREAM);
        track.play();
    }

    /**
     * Converts the given float samples to 16-bit PCM and writes them to the track.
     * Blocks until the track has accepted {@code samples.length} shorts.
     */
    public void writeSamples(float[] samples) {
        fillBuffer(samples);
        track.write(buffer, 0, samples.length);
    }

    /** Scales float samples in [-1, 1] into {@link #buffer}, growing it if needed. */
    private void fillBuffer(float[] samples) {
        if (buffer.length < samples.length) {
            buffer = new short[samples.length];
        }
        for (int i = 0; i < samples.length; i++) {
            // FIX: clamp before scaling — the original cast out-of-range floats
            // directly, which wraps around and produces loud clicks.
            float s = samples[i];
            if (s > 1f) s = 1f;
            if (s < -1f) s = -1f;
            buffer[i] = (short) (s * Short.MAX_VALUE);
        }
    }
}
// Feeds the samples of a JavaCV Frame (from grabFrame()) into AndroidAudioDevice.
// FIX: the original was syntactically broken — "sampleRate Hz" is not valid Java,
// and the "{" opening each if-body had been swallowed into the trailing comments
// ("//For using with mono track{"), so the snippet did not compile.
AndroidAudioDevice aaD = new AndroidAudioDevice(sampleRate, numberOfChannels);
final java.nio.Buffer[] samples = vFrame.samples; // planar samples from grabFrame()
float[] smpls = new float[0]; // FIX: definitely assigned even for unexpected channel counts
int channelCount = aaD.track.getChannelCount();
if (channelCount == 1) {
    // Mono: the single plane is already in playback order.
    FloatBuffer fb = (FloatBuffer) samples[0];
    fb.rewind();
    smpls = new float[fb.capacity()];
    fb.get(smpls);
} else if (channelCount == 2) {
    // Stereo: interleave the two planar channels (L R L R ...) for AudioTrack.
    FloatBuffer b1 = (FloatBuffer) samples[0];
    FloatBuffer b2 = (FloatBuffer) samples[1];
    smpls = new float[b1.capacity() + b2.capacity()];
    for (int i = 0; i < b1.capacity(); i++) {
        smpls[2 * i] = b1.get(i);
        smpls[2 * i + 1] = b2.get(i);
    }
}
aaD.writeSamples(smpls);
--
--- You received this message because you are subscribed to the Google Groups "javacv" group.
To unsubscribe from this group and stop receiving emails from it, send an email to javacv+unsubscribe@googlegroups.com.