Problem using ffmpeg to play audio on Android
I am using ffmpeg on Android to play audio, but it doesn't work well.
This is my Java code:
public class AudioPlayerActivity extends Activity
{
    private AudioTrack track;
    private FileOutputStream os;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        createEngine();
        //MediaPlayer mp = new MediaPlayer();
        //mp.start();

        int bufSize = AudioTrack.getMinBufferSize(44100,
                AudioFormat.CHANNEL_CONFIGURATION_STEREO,
                AudioFormat.ENCODING_PCM_16BIT);
        track = new AudioTrack(AudioManager.STREAM_MUSIC,
                44100,
                AudioFormat.CHANNEL_CONFIGURATION_STEREO,
                AudioFormat.ENCODING_PCM_16BIT,
                bufSize,
                AudioTrack.MODE_STREAM);
        byte[] bytes = new byte[bufSize];

        try {
            os = new FileOutputStream("/mnt/sdcard/test.out", false);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }

        //loadFile("/mnt/sdcard/test.mp3", bytes);
        String file = "/mnt/sdcard/Wildlife.WMV";
        //String file = "/mnt/sdcard/test.mp3";
        loadFile(file, bytes);

        try {
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    void playSound(byte[] buf, int size) {
        //android.util.Log.v("ROHAUPT", "RAH Playing");
        if (track.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
            Log.d("test", "aaaaaaaaaaaaaaaaaaaaa");
            track.play();
            Log.d("test", "aaaaaaaaaaaaaaaaaaaaa");
        }
        track.write(buf, 0, size);
        try {
            os.write(buf, 0, size);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private native void createEngine();
    private native int loadFile(String file, byte[] array);

    /** Load the JNI .so on initialization. */
    static {
        System.loadLibrary("audioplayer");
    }
}
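I don't think it is the cause, but AudioFormat.CHANNEL_CONFIGURATION_STEREO is deprecated; the equivalent setup with the current AudioFormat.CHANNEL_OUT_STEREO constant would look like this sketch:

// Same AudioTrack setup with the non-deprecated channel constant.
int bufSize = AudioTrack.getMinBufferSize(44100,
        AudioFormat.CHANNEL_OUT_STEREO,
        AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC,
        44100,
        AudioFormat.CHANNEL_OUT_STEREO,
        AudioFormat.ENCODING_PCM_16BIT,
        bufSize,
        AudioTrack.MODE_STREAM);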
And this is my C (JNI) code:
jint Java_team_tj_audio_AudioPlayerActivity_loadFile(JNIEnv* env, jobject obj,
                                                     jstring file, jbyteArray array)
{
    AVFormatContext *gFormatCtx = NULL;
    AVCodecContext *gAudioCodecCtx = NULL;
    AVCodec *gAudioCodec = NULL;
    int gAudioStreamIdx = -1;
    char *gAudioBuffer = NULL;
    int i, outsize = 0;
    AVPacket packet;
    PacketQueue audioq;
    const char *str;

    str = (*env)->GetStringUTFChars(env, file, NULL);
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V");

    if (gFormatCtx != NULL)
        return -1;
    if (av_open_input_file(&gFormatCtx, str, NULL, 0, NULL) != 0)
        return -2;
    if (av_find_stream_info(gFormatCtx) < 0)
        return -3;

    for (i = 0; i < gFormatCtx->nb_streams; i++) {
        if (gFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            gAudioStreamIdx = i;
            break;
        }
    }
    if (gAudioStreamIdx == -1)
        return -4;

    gAudioCodecCtx = gFormatCtx->streams[gAudioStreamIdx]->codec;
    gAudioCodec = avcodec_find_decoder(gAudioCodecCtx->codec_id);
    if (gAudioCodec == NULL)
        return -5;
    if (avcodec_open(gAudioCodecCtx, gAudioCodec) < 0)
        return -6;

    gAudioBuffer = (char *) av_malloc(AVCODEC_MAX_AUDIO_FRAME_SIZE * 2);

    while (av_read_frame(gFormatCtx, &packet) >= 0) {
        if (packet.stream_index == gAudioStreamIdx) {
            packet_queue_put(&audioq, &packet);
            int data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 2;
            int size = packet.size;
            while (size > 0) {
                int len = avcodec_decode_audio3(gFormatCtx->streams[packet.stream_index]->codec,
                                                (int16_t *) gAudioBuffer, &data_size, &packet);
                __android_log_print(ANDROID_LOG_INFO, "JNI", "bbbbbbbbbbbbbbbbb %d", len);
                size = packet.size - len;
                __android_log_print(ANDROID_LOG_INFO, "JNI", "bbbbbbbbbbbbbbbbb %d", packet.size);
                __android_log_print(ANDROID_LOG_INFO, "JNI", "bbbbbbbbbbbbbbbbb %d", data_size);
                if (data_size > 0) {
                    jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
                    memcpy(bytes, gAudioBuffer, data_size);
                    (*env)->ReleaseByteArrayElements(env, array, bytes, 0);
                    (*env)->CallVoidMethod(env, obj, play, array, data_size);
                }
            }
        }
        else
            av_free_packet(&packet);
    }
    return 0;
}
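For reference, the consumption loop I have seen in the ffmpeg example code resets data_size on every pass and advances packet.data by the number of bytes avcodec_decode_audio3 reports consumed, whereas mine only recomputes size. A minimal sketch of that pattern, assuming the same gAudioCodecCtx and gAudioBuffer as above (the playSound callback is elided):

/* Canonical avcodec_decode_audio3 loop: consume the packet incrementally. */
uint8_t *orig_data = packet.data;   /* keep so av_free_packet() sees the original pointer */
int      orig_size = packet.size;
while (packet.size > 0) {
    int data_size = AVCODEC_MAX_AUDIO_FRAME_SIZE * 2;   /* must be reset every iteration */
    int len = avcodec_decode_audio3(gAudioCodecCtx,
                                    (int16_t *) gAudioBuffer, &data_size, &packet);
    if (len < 0)             /* decode error: skip the rest of this packet */
        break;
    packet.data += len;      /* advance past the bytes the decoder consumed */
    packet.size -= len;
    if (data_size > 0) {
        /* hand data_size bytes of PCM from gAudioBuffer to playSound() here */
    }
}
packet.data = orig_data;     /* restore before freeing */
packet.size = orig_size;
av_free_packet(&packet);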
What did I do wrong?