Abstract: the previous post did not quite finish the initialization process, so this one continues. The Java layer's initPlayer function ends with a call to native_setup, which lands in the C layer's IjkMediaPlayer_native_setup. The IjkMediaPlayer struct holds the FFPlayer and the msg_loop, plus state and seek-related fields. The whole flow is asynchronous and never blocks; everything ffmpeg-related is created and filled in on the C side.
The previous post did not quite finish the initialization process, so this one continues. At the end of the Java layer's initPlayer function there is still a call to native_setup, which goes to the C layer's IjkMediaPlayer_native_setup. Let's see what it does:
static void IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);

    jni_set_media_player(env, thiz, mp);
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
The first step creates the player, an IjkMediaPlayer, passing in message_loop. Continue into ijkmp_android_create:
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;

    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;

    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;

    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);

    return mp;

fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
Alright, keep going down into ijkmp_create:
IjkMediaPlayer *ijkmp_create(int (*msg_loop)(void*))
{
    IjkMediaPlayer *mp = (IjkMediaPlayer *) mallocz(sizeof(IjkMediaPlayer));
    if (!mp)
        goto fail;

    mp->ffplayer = ffp_create();
    if (!mp->ffplayer)
        goto fail;

    mp->msg_loop = msg_loop;

    ijkmp_inc_ref(mp);
    pthread_mutex_init(&mp->mutex, NULL);

    return mp;

fail:
    ijkmp_destroy_p(&mp);
    return NULL;
}
Right at the start it allocates space for the IjkMediaPlayer struct (mallocz presumably being a malloc-plus-zero-fill helper), then fills in its members, such as ffplayer and msg_loop. The struct looks like this:
struct IjkMediaPlayer {
    volatile int    ref_count;
    pthread_mutex_t mutex;
    FFPlayer       *ffplayer;

    int (*msg_loop)(void*);
    SDL_Thread *msg_thread;
    SDL_Thread _msg_thread;

    int   mp_state;
    char *data_source;
    void *weak_thiz;

    int  restart;
    int  restart_from_beginning;
    int  seek_req;
    long seek_msec;
};
So: the ffmpeg-side FFPlayer, the SDL message-thread handles plus the msg_loop callback, and the rest is player state and seek bookkeeping.
Back in ijkmp_create: the msg_loop function pointer we passed in is stored in mp->msg_loop, and ijkmp_inc_ref bumps mp's reference count by one (a hedged sketch of this ref-counting pair follows below). After that we can return to the original IjkMediaPlayer_native_setup and see what this loop actually is.
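The ref-counting bodies are not shown in this post. ijkmp_inc_ref and ijkmp_dec_ref_p do exist in ijkplayer, but the code below is only a minimal sketch of how such a pair is typically written (atomic increment, atomic decrement, tear down on zero); the sketch_* names and the cleanup shown are assumptions, not the real implementation:

#include <assert.h>
#include <stdlib.h>

static void sketch_inc_ref(IjkMediaPlayer *mp)
{
    assert(mp);
    __sync_fetch_and_add(&mp->ref_count, 1);        /* atomic ++ref_count */
}

static void sketch_dec_ref_p(IjkMediaPlayer **pmp)
{
    if (!pmp || !*pmp)
        return;

    IjkMediaPlayer *mp = *pmp;
    if (__sync_sub_and_fetch(&mp->ref_count, 1) == 0) {
        /* last owner gone: the real code would release ffplayer, vout,
           pipeline and the mutex before freeing the struct itself */
        free(mp);
    }
    *pmp = NULL;                                    /* caller's pointer is always cleared */
}

This also explains why IjkMediaPlayer_native_setup ends with ijkmp_dec_ref_p(&mp): presumably the setup function drops its own reference once the player has been handed over to the Java object. Now, back to the beginning, here is the message_loop that was passed into ijkmp_android_create: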
static int message_loop(void *arg)
{
    MPTRACE("%s\n", __func__);

    JNIEnv *env = NULL;
    (*g_jvm)->AttachCurrentThread(g_jvm, &env, NULL);

    IjkMediaPlayer *mp = (IjkMediaPlayer*) arg;
    JNI_CHECK_GOTO(mp, env, NULL, "mpjni: native_message_loop: null mp", LABEL_RETURN);

    message_loop_n(env, mp);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
    (*g_jvm)->DetachCurrentThread(g_jvm);

    MPTRACE("message_loop exit");
    return 0;
}
AttachCurrentThread is there to obtain a JNIEnv for this thread; the key call is message_loop_n:
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
    jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
    JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);

    while (1) {
        AVMessage msg;

        int retval = ijkmp_get_msg(mp, &msg, 1);
        if (retval < 0)
            break;

        // block-get should never return 0
        assert(retval > 0);

        switch (msg.what) {
        case FFP_MSG_FLUSH:
            MPTRACE("FFP_MSG_FLUSH:\n");
            post_event(env, weak_thiz, MEDIA_NOP, 0, 0);
            break;
        case FFP_MSG_ERROR:
            MPTRACE("FFP_MSG_ERROR: %d\n", msg.arg1);
            post_event(env, weak_thiz, MEDIA_ERROR, MEDIA_ERROR_IJK_PLAYER, msg.arg1);
            break;
        case FFP_MSG_PREPARED:
            MPTRACE("FFP_MSG_PREPARED:\n");
            post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
            break;
        case FFP_MSG_COMPLETED:
            MPTRACE("FFP_MSG_COMPLETED:\n");
            post_event(env, weak_thiz, MEDIA_PLAYBACK_COMPLETE, 0, 0);
            break;
        case FFP_MSG_VIDEO_SIZE_CHANGED:
            MPTRACE("FFP_MSG_VIDEO_SIZE_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_SET_VIDEO_SIZE, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_SAR_CHANGED:
            MPTRACE("FFP_MSG_SAR_CHANGED: %d, %d\n", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_SET_VIDEO_SAR, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_VIDEO_RENDERING_START:
            MPTRACE("FFP_MSG_VIDEO_RENDERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0);
            break;
        case FFP_MSG_AUDIO_RENDERING_START:
            MPTRACE("FFP_MSG_AUDIO_RENDERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_AUDIO_RENDERING_START, 0);
            break;
        case FFP_MSG_VIDEO_ROTATION_CHANGED:
            MPTRACE("FFP_MSG_VIDEO_ROTATION_CHANGED: %d\n", msg.arg1);
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_ROTATION_CHANGED, msg.arg1);
            break;
        case FFP_MSG_BUFFERING_START:
            MPTRACE("FFP_MSG_BUFFERING_START:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0);
            break;
        case FFP_MSG_BUFFERING_END:
            MPTRACE("FFP_MSG_BUFFERING_END:\n");
            post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0);
            break;
        case FFP_MSG_BUFFERING_UPDATE:
            // MPTRACE("FFP_MSG_BUFFERING_UPDATE: %d, %d", msg.arg1, msg.arg2);
            post_event(env, weak_thiz, MEDIA_BUFFERING_UPDATE, msg.arg1, msg.arg2);
            break;
        case FFP_MSG_BUFFERING_BYTES_UPDATE:
            break;
        case FFP_MSG_BUFFERING_TIME_UPDATE:
            break;
        case FFP_MSG_SEEK_COMPLETE:
            MPTRACE("FFP_MSG_SEEK_COMPLETE:\n");
            post_event(env, weak_thiz, MEDIA_SEEK_COMPLETE, 0, 0);
            break;
        case FFP_MSG_PLAYBACK_STATE_CHANGED:
            break;
        case FFP_MSG_TIMED_TEXT:
            if (msg.obj) {
                jstring text = (*env)->NewStringUTF(env, (char *)msg.obj);
                post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, text);
                J4A_DeleteLocalRef__p(env, &text);
            }
            else {
                post_event2(env, weak_thiz, MEDIA_TIMED_TEXT, 0, 0, NULL);
            }
            break;
        default:
            ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
            break;
        }
        msg_free_res(&msg);
    }

LABEL_RETURN:
    ;
}
This is plainly an event-dispatch loop. The interesting part is post_event, whose body is a single line: J4AC_IjkMediaPlayer__postEventFromNative(env, weak_this, what, arg1, arg2, NULL);. Following it down leads to J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer__postEventFromNative, which boils down to (*env)->CallStaticVoidMethod(env, class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id, class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_postEventFromNative, weakThiz, what, arg1, arg2, obj); — a call back up into the Java layer. Dig a little further and you find where the method id comes from, inside J4A_loadClass__J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer:
class_id = class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.id;
name     = "postEventFromNative";
sign     = "(Ljava/lang/Object;IIILjava/lang/Object;)V";
class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_postEventFromNative = J4A_GetStaticMethodID__catchAll(env, class_id, name, sign);
if (class_J4AC_tv_danmaku_ijk_media_player_IjkMediaPlayer.method_postEventFromNative == NULL)
    goto fail;
No surprises there: the call lands in the Java-layer static method postEventFromNative, whose parameter list matches the JNI signature above, (Ljava/lang/Object;IIILjava/lang/Object;)V. A condensed sketch of this native-to-Java dispatch pattern is shown next, followed by the actual Java method.
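For orientation, here is roughly what the J4A-generated plumbing amounts to. The class path tv/danmaku/ijk/media/player/IjkMediaPlayer, the method name and its signature come from the snippets above; the wrapper and variable names here (load_post_event, g_clazz, sketch_post_event) are illustrative assumptions, not the real J4A code:

#include <jni.h>

/* cached once, e.g. during library initialization */
static jclass    g_clazz;       /* global ref to tv.danmaku.ijk.media.player.IjkMediaPlayer */
static jmethodID g_postEvent;   /* id of postEventFromNative */

static int load_post_event(JNIEnv *env)
{
    jclass clazz = (*env)->FindClass(env, "tv/danmaku/ijk/media/player/IjkMediaPlayer");
    if (!clazz)
        return -1;

    g_clazz = (*env)->NewGlobalRef(env, clazz);   /* keep the class usable across threads */
    (*env)->DeleteLocalRef(env, clazz);

    g_postEvent = (*env)->GetStaticMethodID(env, g_clazz, "postEventFromNative",
                                            "(Ljava/lang/Object;IIILjava/lang/Object;)V");
    return g_postEvent ? 0 : -1;
}

static void sketch_post_event(JNIEnv *env, jobject weak_thiz,
                              int what, int arg1, int arg2, jobject obj)
{
    /* the WeakReference handed over in native_setup travels back up as the first argument */
    (*env)->CallStaticVoidMethod(env, g_clazz, g_postEvent,
                                 weak_thiz, what, arg1, arg2, obj);
}

And here is the Java side that receives the call: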
@CalledByNative
private static void postEventFromNative(Object weakThiz, int what,
        int arg1, int arg2, Object obj) {
    if (weakThiz == null)
        return;

    @SuppressWarnings("rawtypes")
    IjkMediaPlayer mp = (IjkMediaPlayer) ((WeakReference) weakThiz).get();
    if (mp == null) {
        return;
    }

    if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) {
        // this acquires the wakelock if needed, and sets the client side
        // state
        mp.start();
    }
    if (mp.mEventHandler != null) {
        Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj);
        mp.mEventHandler.sendMessage(m);
    }
}
Here the IjkMediaPlayer is pulled back out of the WeakReference and mp.mEventHandler.sendMessage(m) is called. Where does that IjkMediaPlayer come from? From IjkMediaPlayer_native_setup: it is the weak reference the Java layer passed into native_setup, which was itself called from the original initPlayer. And what is mp.mEventHandler inside postEventFromNative? It is the event handler created in initPlayer, bound to the looper chosen there. Now everything connects: the Java layer builds the IjkMediaPlayer and installs its event handler; when the C layer hits certain events (opening a live stream, finishing prepare, buffering, and so on) it calls back into the Java layer and posts a Message onto that looper, and the Java side picks it up and reacts. The whole flow is asynchronous and never blocks. Everything ffmpeg-related, meanwhile, is created and filled in entirely on the C side.
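To make the "asynchronous and non-blocking" point concrete, here is a self-contained, simplified imitation of the pattern: one thread posts messages into a queue (standing in for the ffplay internals calling something like ffp_notify_msg), another thread blocks waiting for them (standing in for message_loop_n sleeping in ijkmp_get_msg). Every name in this demo is made up; only the pattern mirrors ijkplayer:

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct DemoMsg {
    int what;
    int arg1;
    struct DemoMsg *next;
} DemoMsg;

static pthread_mutex_t q_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  q_cond  = PTHREAD_COND_INITIALIZER;
static DemoMsg *q_head = NULL, *q_tail = NULL;

/* producer side: plays the role of the C player reporting an FFP_MSG_xxx event */
static void demo_post(int what, int arg1)
{
    DemoMsg *m = calloc(1, sizeof(*m));
    m->what = what;
    m->arg1 = arg1;

    pthread_mutex_lock(&q_mutex);
    if (q_tail) q_tail->next = m; else q_head = m;   /* append, FIFO order */
    q_tail = m;
    pthread_cond_signal(&q_cond);
    pthread_mutex_unlock(&q_mutex);
}

/* consumer side: plays the role of ijkmp_get_msg() blocking inside message_loop_n */
static DemoMsg *demo_get_blocking(void)
{
    pthread_mutex_lock(&q_mutex);
    while (!q_head)
        pthread_cond_wait(&q_cond, &q_mutex);        /* sleep, no busy waiting */
    DemoMsg *m = q_head;
    q_head = m->next;
    if (!q_head) q_tail = NULL;
    pthread_mutex_unlock(&q_mutex);
    return m;
}

static void *consumer_thread(void *arg)
{
    (void)arg;
    for (;;) {
        DemoMsg *m = demo_get_blocking();
        printf("got msg what=%d arg1=%d\n", m->what, m->arg1);  /* ~ post_event() */
        int quit = (m->what == 0);                   /* what==0 is this demo's quit signal */
        free(m);
        if (quit)
            break;
    }
    return NULL;
}

int main(void)
{
    pthread_t t;
    pthread_create(&t, NULL, consumer_thread, NULL);

    demo_post(1, 0);      /* pretend "prepared"         */
    demo_post(2, 54);     /* pretend "buffering update" */
    demo_post(0, 0);      /* quit                       */

    pthread_join(t, NULL);
    return 0;
}

Build with cc demo.c -pthread. The consumer never busy-waits; it sleeps in pthread_cond_wait much the way message_loop_n sleeps in ijkmp_get_msg until the player has something to report.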