The previous post did not actually finish walking through the initialization process, so this one picks up where it left off. At the end of the Java-layer initPlayer function there is still a call to native_setup, which lands in the C-layer IjkMediaPlayer_native_setup. Let's see what it does:
```c
static void
IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError",
                   "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);

    jni_set_media_player(env, thiz, mp);
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
```
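jni_set_media_player is not listed above. In the usual JNI pattern it stores the freshly created native pointer into a long field on the Java object, so that later native calls made from Java can find the same IjkMediaPlayer again. A rough sketch of that pattern follows; the field name mNativeMediaPlayer and the exact ref-count handling are assumptions here, not the verified ijkplayer code (IjkMediaPlayer and the ijkmp_* ref helpers are the ones discussed in this post):

```c
#include <jni.h>
#include <stdint.h>
/* IjkMediaPlayer, ijkmp_inc_ref, ijkmp_dec_ref_p come from the ijkplayer
 * headers discussed in this post. */

static jfieldID g_field_mNativeMediaPlayer;  /* cached ID of a hypothetical `long mNativeMediaPlayer` field */

static void jni_set_media_player_sketch(JNIEnv *env, jobject thiz, IjkMediaPlayer *mp)
{
    /* read whatever pointer the Java object held before */
    IjkMediaPlayer *old = (IjkMediaPlayer *) (intptr_t)
        (*env)->GetLongField(env, thiz, g_field_mNativeMediaPlayer);

    if (mp)
        ijkmp_inc_ref(mp);   /* the Java object now owns a reference */
    (*env)->SetLongField(env, thiz, g_field_mNativeMediaPlayer, (jlong) (intptr_t) mp);

    if (old)
        ijkmp_dec_ref_p(&old);  /* release the reference held through the previous pointer */
}
```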
First it creates the IjkMediaPlayer, passing in message_loop. Continuing into ijkmp_android_create:
```c
IjkMediaPlayer *ijkmp_android_create(int (*msg_loop)(void *))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;

    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;

    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;

    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);

    return mp;

fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
```
Next, on to ijkmp_create:
```c
IjkMediaPlayer *ijkmp_create(int (*msg_loop)(void *))
{
    IjkMediaPlayer *mp = (IjkMediaPlayer *) mallocz(sizeof(IjkMediaPlayer));
    if (!mp)
        goto fail;

    mp->ffplayer = ffp_create();
    if (!mp->ffplayer)
        goto fail;

    mp->msg_loop = msg_loop;

    ijkmp_inc_ref(mp);
    pthread_mutex_init(&mp->mutex, NULL);

    return mp;

fail:
    ijkmp_destroy_p(&mp);
    return NULL;
}
```
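mallocz is not shown here; judging by the name it is a zero-initializing allocation helper (FFmpeg's av_mallocz behaves the same way). A minimal sketch of what it presumably does:

```c
#include <stdlib.h>
#include <string.h>

/* Presumed behaviour of mallocz: allocate and zero-fill, so every field of
 * the new IjkMediaPlayer starts out as 0/NULL. Not the verbatim ijkplayer helper. */
static void *mallocz(size_t size)
{
    void *ptr = malloc(size);
    if (!ptr)
        return NULL;
    memset(ptr, 0, size);
    return ptr;
}
```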
It starts by allocating the IjkMediaPlayer struct and then fills in its members, such as ffplayer and msg_loop. The struct looks like this:
```c
struct IjkMediaPlayer {
    volatile int ref_count;
    pthread_mutex_t mutex;
    FFPlayer *ffplayer;

    int (*msg_loop)(void *);
    SDL_Thread *msg_thread;
    SDL_Thread _msg_thread;

    int mp_state;
    char *data_source;
    void *weak_thiz;

    int restart;
    int restart_from_beginning;
    int seek_req;
    long seek_msec;
};
```
It holds the FFmpeg-side player (FFPlayer), the SDL message thread, and msg_loop; the rest is playback state and seek bookkeeping.
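The ref_count field drives manual reference counting: the ijkmp_inc_ref / ijkmp_dec_ref_p calls that keep showing up in the code above bump and drop it, and the player is only destroyed when the count reaches zero. A hedged sketch of that mechanism (the real implementation may lock the mutex or use different atomics):

```c
/* Hedged sketch of the reference-count helpers; the names match the calls
 * seen above, but the bodies are assumptions, not the verified ijkplayer source.
 * IjkMediaPlayer is the struct just shown; ijkmp_destroy is assumed to free it. */
void ijkmp_inc_ref(IjkMediaPlayer *mp)
{
    __sync_fetch_and_add(&mp->ref_count, 1);
}

void ijkmp_dec_ref(IjkMediaPlayer *mp)
{
    if (!mp)
        return;
    if (__sync_sub_and_fetch(&mp->ref_count, 1) == 0)
        ijkmp_destroy(mp);                  /* last owner frees everything */
}

void ijkmp_dec_ref_p(IjkMediaPlayer **pmp)
{
    if (!pmp || !*pmp)
        return;
    ijkmp_dec_ref(*pmp);
    *pmp = NULL;                            /* the caller's pointer is cleared */
}
```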
Back in ijkmp_create: the function pointer is stored in mp->msg_loop, and then ijkmp_inc_ref bumps mp's reference count by one. Before going any further, let's see what this loop actually is, returning to the original IjkMediaPlayer_native_setup:
```c
static int message_loop(void *arg)
{
    MPTRACE("%s\n", __func__);

    JNIEnv *env = NULL;
    (*g_jvm)->AttachCurrentThread(g_jvm, &env, NULL);

    IjkMediaPlayer *mp = (IjkMediaPlayer *) arg;
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: native_message_loop: null mp", LABEL_RETURN);

    message_loop_n(env, mp);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    (*g_jvm)->DetachCurrentThread(g_jvm);

    MPTRACE("message_loop exit");
    return 0;
}
```
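One detail worth calling out: message_loop runs on a plain native thread, so it must attach itself to the JVM before it can use a JNIEnv, and detach before it exits; that is exactly the AttachCurrentThread / DetachCurrentThread pair above. A generic hedged sketch of this standard pattern, independent of ijkplayer (g_jvm is assumed to have been cached in JNI_OnLoad):

```c
#include <jni.h>

static JavaVM *g_jvm;   /* assumed to be cached in JNI_OnLoad */

/* Generic sketch: a native worker thread attaching to the JVM so it can
 * call back into Java, then detaching before it exits. */
static int worker_thread(void *arg)
{
    (void) arg;
    JNIEnv *env = NULL;
    if ((*g_jvm)->AttachCurrentThread(g_jvm, &env, NULL) != JNI_OK)
        return -1;                        /* cannot talk to Java without an env */

    /* ... use env to call Java methods, e.g. post events ... */

    (*g_jvm)->DetachCurrentThread(g_jvm); /* must detach, or the thread leaks */
    return 0;
}
```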
AttachCurrentThread is there to obtain a JNIEnv for this thread; the key part is message_loop_n:
```c
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
    jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
    JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);

    while (1) {
        AVMessage msg;

        int retval = ijkmp_get_msg(mp, &msg, 1);
        if (retval < 0)
            break;

        // block-get should never return 0
        assert(retval > 0);

        switch (msg.what) {
        case FFP_MSG_FLUSH:
            MPTRACE("FFP_MSG_FLUSH:\n");
            post_event(env, weak_thiz, MEDIA_NOP, 0, 0);
            break;
        case FFP_MSG_ERROR:
            MPTRACE("FFP_MSG_ERROR: %d\n", msg.arg1);
            post_event(env, weak_thiz, MEDIA_ERROR, MEDIA_ERROR_IJK_PLAYER, msg.arg1);
            break;
        case FFP_MSG_PREPARED:
            MPTRACE("FFP_MSG_PREPARED:\n");
            post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
            break;
        case FFP_MSG_COMPLETED:
            MPTRACE("FFP_MSG_COMPLETED:\n");
            post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
            break;
        case FFP_MSG_VIDEO_SIZE_CHANGED:
            MPTRACE("FFP_MSG_VIDEO_SIZE_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_SET_VIDEO_SIZE, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_SAR_CHANGED:
            MPTRACE("FFP_MSG_SAR_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_SET_VIDEO_SAR, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_VIDEO_RENDERING_START:
            MPTRACE("FFP_MSG_VIDEO_RENDERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0);
            break;
        case FFP_MSG_AUDIO_RENDERING_START:
            MPTRACE("FFP_MSG_AUDIO_RENDERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_RENDERING_START, 0);
            break;
        case FFP_MSG_VIDEO_ROTATION_CHANGED:
            MPTRACE("FFP_MSG_VIDEO_ROTATION_CHANGED: %d\n", msg.arg1);
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_ROTATION_CHANGED, msg.arg1);
            break;
        case FFP_MSG_BUFFERING_START:
            MPTRACE("FFP_MSG_BUFFERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0);
            break;
        case FFP_MSG_BUFFERING_END:
            MPTRACE("FFP_MSG_BUFFERING_END:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0);
            break;
        case FFP_MSG_BUFFERING_UPDATE:
            // MPTRACE("FFP_MSG_BUFFERING_UPDATE: %d, %d", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_BUFFERING_UPDATE, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_BUFFERING_BYTES_UPDATE:
            break;
        case FFP_MSG_BUFFERING_TIME_UPDATE:
            break;
        case FFP_MSG_SEEK_COMPLETE:
            MPTRACE("FFP_MSG_SEEK_COMPLETE:\n");
            post_event(env, weak_thiz, MEDIA_SEEK_COMPLETE, 0, 0);
            break;
        case FFP_MSG_PLAYBACK_STATE_CHANGED:
            break;
        case FFP_MSG_TIMED_TEXT:
            if (msg.obj) {
                jstring text = (*env)->NewStringUTF(env, (char *) msg.obj);
                post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, text);
                J4A_DeleteLocalRef__p(env, &text);
            } else {
                post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, NULL);
            }
            break;
        default:
            ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
            break;
        }

        msg_free_res(&msg);
    }

LABEL_RETURN:
    ;
}
```
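The loop blocks inside ijkmp_get_msg(mp, &msg, 1) (the 1 means "wait until a message arrives"). Underneath this is an ordinary mutex/condition-variable message queue. A hedged sketch of what such a blocking get looks like; the struct layout and function name are simplified for illustration, not the exact ijkplayer queue:

```c
#include <pthread.h>
#include <stdlib.h>

/* Simplified model of the message and its queue; field names are illustrative. */
typedef struct AVMessage {
    int what;
    int arg1;
    int arg2;
    void *obj;
    struct AVMessage *next;
} AVMessage;

typedef struct MessageQueue {
    AVMessage      *first;
    AVMessage      *last;
    int             abort_request;
    pthread_mutex_t mutex;
    pthread_cond_t  cond;
} MessageQueue;

/* Returns <0 on abort, >0 when a message was dequeued; block = 1 waits. */
static int msg_queue_get(MessageQueue *q, AVMessage *msg, int block)
{
    int ret;
    pthread_mutex_lock(&q->mutex);
    for (;;) {
        if (q->abort_request) { ret = -1; break; }

        AVMessage *head = q->first;
        if (head) {
            q->first = head->next;
            if (!q->first)
                q->last = NULL;
            *msg = *head;
            free(head);
            ret = 1;
            break;
        }
        if (!block) { ret = 0; break; }
        pthread_cond_wait(&q->cond, &q->mutex);  /* sleep until a producer signals */
    }
    pthread_mutex_unlock(&q->mutex);
    return ret;
}
```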
This is clearly an event-dispatch loop. The key is post_event, which contains a single line: J4AC_IjkMediaPlayer__postEventFromNative(env, weak_this, what, arg1, arg2, NULL);. Following it down, we end up in J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__postEventFromNative, whose body is (*env)->CallStaticVoidMethod(env, class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id, class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_postEventFromNative, weakThiz, what, arg1, arg2, obj); — a call back into the Java layer. Tracing further, inside J4A_loadClass__J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer we find:
```c
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name     = "postEventFromNative";
sign     = "(Ljava/lang/Object;IIILjava/lang/Object;)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_postEventFromNative =
    J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_postEventFromNative == NULL)
    goto fail;
```
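Putting the two pieces together, the generated J4A code is just the stock JNI recipe: cache the class and the static method ID once, then call CallStaticVoidMethod with the cached ID every time an event is posted. A condensed hedged sketch of that recipe; the class and method names are taken from the snippets above, the rest (function names, trimmed error handling) is illustrative:

```c
#include <jni.h>

static jclass    g_clazz;                      /* tv.danmaku.ijk.media.player.IjkMediaPlayer */
static jmethodID g_method_postEventFromNative;

static int cache_post_event(JNIEnv *env)
{
    jclass clazz = (*env)->FindClass(env, "tv/danmaku/ijk/media/player/IjkMediaPlayer");
    if (!clazz)
        return -1;
    g_clazz = (*env)->NewGlobalRef(env, clazz);   /* keep the class usable across calls/threads */
    (*env)->DeleteLocalRef(env, clazz);

    /* signature: (Object weakThiz, int what, int arg1, int arg2, Object obj) -> void */
    g_method_postEventFromNative = (*env)->GetStaticMethodID(
        env, g_clazz, "postEventFromNative", "(Ljava/lang/Object;IIILjava/lang/Object;)V");
    return g_method_postEventFromNative ? 0 : -1;
}

static void post_event_sketch(JNIEnv *env, jobject weak_thiz, int what, int arg1, int arg2)
{
    (*env)->CallStaticVoidMethod(env, g_clazz, g_method_postEventFromNative,
                                 weak_thiz, what, arg1, arg2, NULL);
}
```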
No surprise here: it ends up in the Java-layer postEventFromNative.
```java
@CalledByNative
private static void postEventFromNative(Object weakThiz, int what,
        int arg1, int arg2, Object obj) {
    if (weakThiz == null)
        return;

    @SuppressWarnings("rawtypes")
    IjkMediaPlayer mp = (IjkMediaPlayer) ((WeakReference) weakThiz).get();
    if (mp == null) {
        return;
    }

    if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) {
        // this acquires the wakelock if needed, and sets the client side
        // state
        mp.start();
    }
    if (mp.mEventHandler != null) {
        Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj);
        mp.mEventHandler.sendMessage(m);
    }
}
```
Here the IjkMediaPlayer is recovered from the weak reference and mp.mEventHandler.sendMessage(m) is called. So where does this IjkMediaPlayer come from? It is the one passed into IjkMediaPlayer_native_setup, i.e. into the Java-layer native_setup, which was invoked from the original initPlayer. And what is mp.mEventHandler inside the Java-layer postEventFromNative? It is the EventHandler created in initPlayer, bound to the looper there. Now the whole chain comes together: the Java layer creates the IjkMediaPlayer and sets up its event handler; when the C layer hits certain events (opening a live stream, for example), it calls back into the Java layer and posts a Message to that looper, so the Java layer receives it and can react. The whole process is asynchronous and never blocks. Everything FFmpeg-related, meanwhile, is created and filled in entirely on the C side.
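To close the loop on the C side: the producer end of this pipeline is the ffplayer code, which pushes an AVMessage into the queue whenever something notable happens (prepared, buffering, completed, and so on), and message_loop_n drains them and forwards them to Java. A hedged sketch of what posting such a message presumably looks like, reusing the simplified MessageQueue/AVMessage types from the earlier sketch; the real ijkplayer helpers (named along the lines of ffp_notify_msg1/2/3) will differ in detail:

```c
/* Hedged sketch of the producer side, built on the simplified queue above.
 * Not the verbatim ijkplayer notify helper. */
static void notify_msg2(MessageQueue *q, int what, int arg1)
{
    AVMessage *msg = calloc(1, sizeof(AVMessage));
    if (!msg)
        return;
    msg->what = what;
    msg->arg1 = arg1;

    pthread_mutex_lock(&q->mutex);
    if (q->last)
        q->last->next = msg;
    else
        q->first = msg;
    q->last = msg;
    pthread_cond_signal(&q->cond);   /* wake up message_loop_n's blocking get */
    pthread_mutex_unlock(&q->mutex);
}

/* For example, once the stream has been opened and prepared, the player code
 * would post FFP_MSG_PREPARED, which message_loop_n turns into MEDIA_PREPARED
 * and hands to the Java EventHandler. */
```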