Skip to content

Commit 62e4333

Browse files
committedApr 18, 2017
android/MediaCodec: add a "decode mode" in which MediaCodec decodes on a single thread
·
k0.8.8, k0.8.0
1 parent f32fee4 commit 62e4333

File tree

3 files changed

+488
-1
lines changed

3 files changed

+488
-1
lines changed
 

‎ijkmedia/ijkplayer/android/pipeline/ffpipenode_android_mediacodec_vdec.c

Lines changed: 485 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,9 @@
4747

4848
#define MAX_FAKE_FRAMES (2)
4949

50+
#define ACODEC_RETRY -1
51+
#define ACODEC_EXIT -2
52+
5053
typedef struct AMC_Buf_Out {
5154
int port;
5255
int acodec_serial;
@@ -380,6 +383,249 @@ static int amc_fill_frame(
380383
return -1;
381384
}
382385

386+
static int feed_input_buffer2(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *enqueue_count)
387+
{
388+
IJKFF_Pipenode_Opaque *opaque = node->opaque;
389+
FFPlayer *ffp = opaque->ffp;
390+
IJKFF_Pipeline *pipeline = opaque->pipeline;
391+
VideoState *is = ffp->is;
392+
Decoder *d = &is->viddec;
393+
PacketQueue *q = d->queue;
394+
sdl_amedia_status_t amc_ret = 0;
395+
int ret = 0;
396+
ssize_t input_buffer_index = 0;
397+
ssize_t copy_size = 0;
398+
int64_t time_stamp = 0;
399+
uint32_t queue_flags = 0;
400+
401+
if (enqueue_count)
402+
*enqueue_count = 0;
403+
404+
if (d->queue->abort_request) {
405+
ret = ACODEC_EXIT;
406+
goto fail;
407+
}
408+
409+
if (!d->packet_pending || d->queue->serial != d->pkt_serial) {
410+
#if AMC_USE_AVBITSTREAM_FILTER
411+
#else
412+
H264ConvertState convert_state = {0, 0};
413+
#endif
414+
AVPacket pkt;
415+
do {
416+
if (d->queue->nb_packets == 0)
417+
SDL_CondSignal(d->empty_queue_cond);
418+
if (ffp_packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) < 0) {
419+
ret = -1;
420+
goto fail;
421+
}
422+
if (ffp_is_flush_packet(&pkt) || opaque->acodec_flush_request) {
423+
// request flush before lock, or never get mutex
424+
opaque->acodec_flush_request = true;
425+
SDL_LockMutex(opaque->acodec_mutex);
426+
if (SDL_AMediaCodec_isStarted(opaque->acodec)) {
427+
if (opaque->input_packet_count > 0) {
428+
// flush empty queue cause error on OMX.SEC.AVC.Decoder (Nexus S)
429+
SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
430+
SDL_AMediaCodec_flush(opaque->acodec);
431+
opaque->input_packet_count = 0;
432+
}
433+
// If codec is configured in synchronous mode, codec will resume automatically
434+
// SDL_AMediaCodec_start(opaque->acodec);
435+
}
436+
opaque->acodec_flush_request = false;
437+
SDL_CondSignal(opaque->acodec_cond);
438+
SDL_UnlockMutex(opaque->acodec_mutex);
439+
d->finished = 0;
440+
d->next_pts = d->start_pts;
441+
d->next_pts_tb = d->start_pts_tb;
442+
}
443+
} while (ffp_is_flush_packet(&pkt) || d->queue->serial != d->pkt_serial);
444+
av_packet_split_side_data(&pkt);
445+
av_packet_unref(&d->pkt);
446+
d->pkt_temp = d->pkt = pkt;
447+
d->packet_pending = 1;
448+
449+
if (opaque->ffp->mediacodec_handle_resolution_change &&
450+
opaque->codecpar->codec_id == AV_CODEC_ID_H264) {
451+
uint8_t *size_data = NULL;
452+
int size_data_size = 0;
453+
AVPacket *avpkt = &d->pkt_temp;
454+
size_data = av_packet_get_side_data(avpkt, AV_PKT_DATA_NEW_EXTRADATA, &size_data_size);
455+
// minimum avcC(sps,pps) = 7
456+
if (size_data && size_data_size >= 7) {
457+
int got_picture = 0;
458+
AVFrame *frame = av_frame_alloc();
459+
AVDictionary *codec_opts = NULL;
460+
const AVCodec *codec = opaque->decoder->avctx->codec;
461+
AVCodecContext *new_avctx = avcodec_alloc_context3(codec);
462+
int change_ret = 0;
463+
464+
if (!new_avctx)
465+
return AVERROR(ENOMEM);
466+
467+
avcodec_parameters_to_context(new_avctx, opaque->codecpar);
468+
av_freep(&new_avctx->extradata);
469+
new_avctx->extradata = av_mallocz(size_data_size + AV_INPUT_BUFFER_PADDING_SIZE);
470+
if (!new_avctx->extradata) {
471+
avcodec_free_context(&new_avctx);
472+
return AVERROR(ENOMEM);
473+
}
474+
memcpy(new_avctx->extradata, size_data, size_data_size);
475+
new_avctx->extradata_size = size_data_size;
476+
477+
av_dict_set(&codec_opts, "threads", "1", 0);
478+
change_ret = avcodec_open2(new_avctx, codec, &codec_opts);
479+
av_dict_free(&codec_opts);
480+
if (change_ret < 0) {
481+
avcodec_free_context(&new_avctx);
482+
return change_ret;
483+
}
484+
485+
change_ret = avcodec_decode_video2(new_avctx, frame, &got_picture, avpkt);
486+
if (change_ret < 0) {
487+
avcodec_free_context(&new_avctx);
488+
return change_ret;
489+
} else {
490+
if (opaque->codecpar->width != new_avctx->width &&
491+
opaque->codecpar->height != new_avctx->height) {
492+
ALOGW("AV_PKT_DATA_NEW_EXTRADATA: %d x %d\n", new_avctx->width, new_avctx->height);
493+
avcodec_parameters_from_context(opaque->codecpar, new_avctx);
494+
opaque->aformat_need_recreate = true;
495+
ffpipeline_set_surface_need_reconfigure_l(pipeline, true);
496+
}
497+
}
498+
499+
av_frame_unref(frame);
500+
avcodec_free_context(&new_avctx);
501+
}
502+
}
503+
504+
if (opaque->codecpar->codec_id == AV_CODEC_ID_H264 || opaque->codecpar->codec_id == AV_CODEC_ID_HEVC) {
505+
convert_h264_to_annexb(d->pkt_temp.data, d->pkt_temp.size, opaque->nal_size, &convert_state);
506+
int64_t time_stamp = d->pkt_temp.pts;
507+
if (!time_stamp && d->pkt_temp.dts)
508+
time_stamp = d->pkt_temp.dts;
509+
if (time_stamp > 0) {
510+
time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
511+
} else {
512+
time_stamp = 0;
513+
}
514+
}
515+
}
516+
517+
if (d->pkt_temp.data) {
518+
// reconfigure surface if surface changed
519+
// NULL surface cause no display
520+
if (ffpipeline_is_surface_need_reconfigure_l(pipeline)) {
521+
jobject new_surface = NULL;
522+
523+
// request reconfigure before lock, or never get mutex
524+
ffpipeline_lock_surface(pipeline);
525+
ffpipeline_set_surface_need_reconfigure_l(pipeline, false);
526+
new_surface = ffpipeline_get_surface_as_global_ref_l(env, pipeline);
527+
ffpipeline_unlock_surface(pipeline);
528+
529+
if (!opaque->aformat_need_recreate &&
530+
(opaque->jsurface == new_surface ||
531+
(opaque->jsurface && new_surface && (*env)->IsSameObject(env, new_surface, opaque->jsurface)))) {
532+
ALOGI("%s: same surface, reuse previous surface\n", __func__);
533+
J4A_DeleteGlobalRef__p(env, &new_surface);
534+
} else {
535+
if (opaque->aformat_need_recreate) {
536+
ALOGI("%s: recreate aformat\n", __func__);
537+
ret = recreate_format_l(env, node);
538+
if (ret) {
539+
ALOGE("amc: recreate_format_l failed\n");
540+
goto fail;
541+
}
542+
opaque->aformat_need_recreate = false;
543+
}
544+
545+
ret = reconfigure_codec_l(env, node, new_surface);
546+
547+
J4A_DeleteGlobalRef__p(env, &new_surface);
548+
549+
if (ret != 0) {
550+
ALOGE("%s: reconfigure_codec failed\n", __func__);
551+
ret = 0;
552+
goto fail;
553+
}
554+
555+
if (q->abort_request || opaque->acodec_flush_request) {
556+
ret = 0;
557+
goto fail;
558+
}
559+
}
560+
}
561+
562+
queue_flags = 0;
563+
input_buffer_index = SDL_AMediaCodec_dequeueInputBuffer(opaque->acodec, timeUs);
564+
if (input_buffer_index < 0) {
565+
if (SDL_AMediaCodec_isInputBuffersValid(opaque->acodec)) {
566+
// timeout
567+
ret = 0;
568+
goto fail;
569+
} else {
570+
// enqueue fake frame
571+
queue_flags |= AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME;
572+
copy_size = d->pkt_temp.size;
573+
}
574+
} else {
575+
SDL_AMediaCodecFake_flushFakeFrames(opaque->acodec);
576+
577+
copy_size = SDL_AMediaCodec_writeInputData(opaque->acodec, input_buffer_index, d->pkt_temp.data, d->pkt_temp.size);
578+
if (!copy_size) {
579+
ALOGE("%s: SDL_AMediaCodec_getInputBuffer failed\n", __func__);
580+
ret = -1;
581+
goto fail;
582+
}
583+
}
584+
585+
time_stamp = d->pkt_temp.pts;
586+
if (time_stamp == AV_NOPTS_VALUE && d->pkt_temp.dts != AV_NOPTS_VALUE)
587+
time_stamp = d->pkt_temp.dts;
588+
if (time_stamp >= 0) {
589+
time_stamp = av_rescale_q(time_stamp, is->video_st->time_base, AV_TIME_BASE_Q);
590+
} else {
591+
time_stamp = 0;
592+
}
593+
// ALOGE("queueInputBuffer, %lld\n", time_stamp);
594+
amc_ret = SDL_AMediaCodec_queueInputBuffer(opaque->acodec, input_buffer_index, 0, copy_size, time_stamp, queue_flags);
595+
if (amc_ret != SDL_AMEDIA_OK) {
596+
ALOGE("%s: SDL_AMediaCodec_getInputBuffer failed\n", __func__);
597+
ret = -1;
598+
goto fail;
599+
}
600+
// ALOGE("%s: queue %d/%d", __func__, (int)copy_size, (int)input_buffer_size);
601+
opaque->input_packet_count++;
602+
if (enqueue_count)
603+
++*enqueue_count;
604+
}
605+
606+
if (copy_size < 0) {
607+
d->packet_pending = 0;
608+
} else {
609+
d->pkt_temp.dts =
610+
d->pkt_temp.pts = AV_NOPTS_VALUE;
611+
if (d->pkt_temp.data) {
612+
d->pkt_temp.data += copy_size;
613+
d->pkt_temp.size -= copy_size;
614+
if (d->pkt_temp.size <= 0)
615+
d->packet_pending = 0;
616+
} else {
617+
// FIXME: detect if decode finished
618+
// if (!got_frame) {
619+
d->packet_pending = 0;
620+
d->finished = d->pkt_serial;
621+
// }
622+
}
623+
}
624+
625+
fail:
626+
return ret;
627+
}
628+
383629
static int feed_input_buffer(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *enqueue_count)
384630
{
385631
IJKFF_Pipenode_Opaque *opaque = node->opaque;
@@ -934,6 +1180,150 @@ static int drain_output_buffer_l(JNIEnv *env, IJKFF_Pipenode *node, int64_t time
9341180
return ret;
9351181
}
9361182

1183+
static int drain_output_buffer2_l(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count, AVFrame *frame, int *got_frame)
1184+
{
1185+
IJKFF_Pipenode_Opaque *opaque = node->opaque;
1186+
FFPlayer *ffp = opaque->ffp;
1187+
SDL_AMediaCodecBufferInfo bufferInfo;
1188+
ssize_t output_buffer_index = 0;
1189+
1190+
if (dequeue_count)
1191+
*dequeue_count = 0;
1192+
1193+
if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
1194+
ALOGE("%s:create: SetupThreadEnv failed\n", __func__);
1195+
return ACODEC_RETRY;
1196+
}
1197+
1198+
output_buffer_index = SDL_AMediaCodecFake_dequeueOutputBuffer(opaque->acodec, &bufferInfo, timeUs);
1199+
if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED) {
1200+
ALOGI("AMEDIACODEC__INFO_OUTPUT_BUFFERS_CHANGED\n");
1201+
return ACODEC_RETRY;
1202+
} else if (output_buffer_index == AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED) {
1203+
ALOGI("AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n");
1204+
SDL_AMediaFormat_deleteP(&opaque->output_aformat);
1205+
opaque->output_aformat = SDL_AMediaCodec_getOutputFormat(opaque->acodec);
1206+
if (opaque->output_aformat) {
1207+
int width = 0;
1208+
int height = 0;
1209+
int color_format = 0;
1210+
int stride = 0;
1211+
int slice_height = 0;
1212+
int crop_left = 0;
1213+
int crop_top = 0;
1214+
int crop_right = 0;
1215+
int crop_bottom = 0;
1216+
1217+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "width", &width);
1218+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "height", &height);
1219+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "color-format", &color_format);
1220+
1221+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "stride", &stride);
1222+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "slice-height", &slice_height);
1223+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-left", &crop_left);
1224+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-top", &crop_top);
1225+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-right", &crop_right);
1226+
SDL_AMediaFormat_getInt32(opaque->output_aformat, "crop-bottom", &crop_bottom);
1227+
1228+
// TI decoder could crash after reconfigure
1229+
// ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, width, height);
1230+
// opaque->frame_width = width;
1231+
// opaque->frame_height = height;
1232+
ALOGI(
1233+
"AMEDIACODEC__INFO_OUTPUT_FORMAT_CHANGED\n"
1234+
" width-height: (%d x %d)\n"
1235+
" color-format: (%s: 0x%x)\n"
1236+
" stride: (%d)\n"
1237+
" slice-height: (%d)\n"
1238+
" crop: (%d, %d, %d, %d)\n"
1239+
,
1240+
width, height,
1241+
SDL_AMediaCodec_getColorFormatName(color_format), color_format,
1242+
stride,
1243+
slice_height,
1244+
crop_left, crop_top, crop_right, crop_bottom);
1245+
}
1246+
return ACODEC_RETRY;
1247+
// continue;
1248+
} else if (output_buffer_index == AMEDIACODEC__INFO_TRY_AGAIN_LATER) {
1249+
ALOGI("AMEDIACODEC__INFO_TRY_AGAIN_LATER\n");
1250+
return 0;
1251+
// continue;
1252+
} else if (output_buffer_index < 0) {
1253+
ALOGI("AMEDIACODEC__INFO_TRY_AGAIN_LATER 2\n");
1254+
return 0;
1255+
} else if (output_buffer_index >= 0) {
1256+
ffp->stat.vdps = SDL_SpeedSamplerAdd(&opaque->sampler, FFP_SHOW_VDPS_MEDIACODEC, "vdps[MediaCodec]");
1257+
1258+
if (dequeue_count)
1259+
++*dequeue_count;
1260+
1261+
if (opaque->n_buf_out) {
1262+
AMC_Buf_Out *buf_out;
1263+
if (opaque->off_buf_out < opaque->n_buf_out) {
1264+
// ALOGD("filling buffer... %d", opaque->off_buf_out);
1265+
buf_out = &opaque->amc_buf_out[opaque->off_buf_out++];
1266+
buf_out->acodec_serial = SDL_AMediaCodec_getSerial(opaque->acodec);
1267+
buf_out->port = output_buffer_index;
1268+
buf_out->info = bufferInfo;
1269+
buf_out->pts = pts_from_buffer_info(node, &bufferInfo);
1270+
sort_amc_buf_out(opaque->amc_buf_out, opaque->off_buf_out);
1271+
} else {
1272+
double pts;
1273+
1274+
pts = pts_from_buffer_info(node, &bufferInfo);
1275+
if (opaque->last_queued_pts != AV_NOPTS_VALUE &&
1276+
pts < opaque->last_queued_pts) {
1277+
// FIXME: drop unordered picture to avoid dither
1278+
// ALOGE("early picture, drop!");
1279+
// SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);
1280+
// goto done;
1281+
}
1282+
/* already sorted */
1283+
buf_out = &opaque->amc_buf_out[opaque->off_buf_out - 1];
1284+
/* new picture is the most aged, send now */
1285+
if (pts < buf_out->pts) {
1286+
amc_fill_frame(node, frame, got_frame, output_buffer_index, SDL_AMediaCodec_getSerial(opaque->acodec), &bufferInfo);
1287+
opaque->last_queued_pts = pts;
1288+
// ALOGD("pts = %f", pts);
1289+
ALOGI("AMEDIACODEC__INFO_TRY_AGAIN_LATER 3\n");
1290+
} else {
1291+
int i;
1292+
1293+
/* find one to send */
1294+
for (i = opaque->off_buf_out - 1; i >= 0; i--) {
1295+
buf_out = &opaque->amc_buf_out[i];
1296+
if (pts > buf_out->pts) {
1297+
amc_fill_frame(node, frame, got_frame, buf_out->port, buf_out->acodec_serial, &buf_out->info);
1298+
opaque->last_queued_pts = buf_out->pts;
1299+
// ALOGD("pts = %f", buf_out->pts);
1300+
/* replace for sort later */
1301+
buf_out->acodec_serial = SDL_AMediaCodec_getSerial(opaque->acodec);
1302+
buf_out->port = output_buffer_index;
1303+
buf_out->info = bufferInfo;
1304+
buf_out->pts = pts_from_buffer_info(node, &bufferInfo);
1305+
sort_amc_buf_out(opaque->amc_buf_out, opaque->n_buf_out);
1306+
break;
1307+
}
1308+
}
1309+
/* need to discard current buffer */
1310+
if (i < 0) {
1311+
// ALOGE("buffer too small, drop picture!");
1312+
if (!(bufferInfo.flags & AMEDIACODEC__BUFFER_FLAG_FAKE_FRAME)) {
1313+
SDL_AMediaCodec_releaseOutputBuffer(opaque->acodec, output_buffer_index, false);
1314+
return 0;
1315+
}
1316+
}
1317+
}
1318+
}
1319+
} else {
1320+
amc_fill_frame(node, frame, got_frame, output_buffer_index, SDL_AMediaCodec_getSerial(opaque->acodec), &bufferInfo);
1321+
}
1322+
}
1323+
1324+
return 0;
1325+
}
1326+
9371327
static int drain_output_buffer(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count, AVFrame *frame, int *got_frame)
9381328
{
9391329
IJKFF_Pipenode_Opaque *opaque = node->opaque;
@@ -987,6 +1377,96 @@ static void func_destroy(IJKFF_Pipenode *node)
9871377
}
9881378
}
9891379

1380+
static int drain_output_buffer2(JNIEnv *env, IJKFF_Pipenode *node, int64_t timeUs, int *dequeue_count, AVFrame *frame, AVRational frame_rate)
1381+
{
1382+
IJKFF_Pipenode_Opaque *opaque = node->opaque;
1383+
FFPlayer *ffp = opaque->ffp;
1384+
VideoState *is = ffp->is;
1385+
AVRational tb = is->video_st->time_base;
1386+
int got_frame = 0;
1387+
int ret = -1;
1388+
double duration;
1389+
double pts;
1390+
while (ret) {
1391+
got_frame = 0;
1392+
ret = drain_output_buffer2_l(env, node, timeUs, dequeue_count, frame, &got_frame);
1393+
1394+
if (opaque->decoder->queue->abort_request) {
1395+
if (got_frame && frame->opaque)
1396+
SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
1397+
1398+
return ACODEC_EXIT;
1399+
}
1400+
1401+
if (ret != 0) {
1402+
if (got_frame && frame->opaque)
1403+
SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
1404+
}
1405+
}
1406+
1407+
if (got_frame) {
1408+
duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
1409+
pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
1410+
ret = ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
1411+
if (ret) {
1412+
if (frame->opaque)
1413+
SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
1414+
}
1415+
av_frame_unref(frame);
1416+
}
1417+
1418+
return ret;
1419+
}
1420+
1421+
static int func_run_sync_loop(IJKFF_Pipenode *node) {
1422+
JNIEnv *env = NULL;
1423+
IJKFF_Pipenode_Opaque *opaque = node->opaque;
1424+
FFPlayer *ffp = opaque->ffp;
1425+
VideoState *is = ffp->is;
1426+
Decoder *d = &is->viddec;
1427+
PacketQueue *q = d->queue;
1428+
int ret = 0;
1429+
int dequeue_count = 0;
1430+
int enqueue_count = 0;
1431+
AVFrame *frame = NULL;
1432+
AVRational frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
1433+
if (!opaque->acodec) {
1434+
return ffp_video_thread(ffp);
1435+
}
1436+
1437+
if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
1438+
ALOGE("%s: SetupThreadEnv failed\n", __func__);
1439+
return -1;
1440+
}
1441+
1442+
frame = av_frame_alloc();
1443+
if (!frame)
1444+
goto fail;
1445+
1446+
while (!q->abort_request) {
1447+
ret = drain_output_buffer2(env, node, AMC_OUTPUT_TIMEOUT_US, &dequeue_count, frame, frame_rate);
1448+
ret = feed_input_buffer2(env, node, AMC_INPUT_TIMEOUT_US, &enqueue_count);
1449+
}
1450+
1451+
fail:
1452+
av_frame_free(&frame);
1453+
opaque->abort = true;
1454+
if (opaque->n_buf_out) {
1455+
free(opaque->amc_buf_out);
1456+
opaque->n_buf_out = 0;
1457+
opaque->amc_buf_out = NULL;
1458+
opaque->off_buf_out = 0;
1459+
opaque->last_queued_pts = AV_NOPTS_VALUE;
1460+
}
1461+
if (opaque->acodec) {
1462+
SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
1463+
}
1464+
SDL_AMediaCodec_stop(opaque->acodec);
1465+
SDL_AMediaCodec_decreaseReferenceP(&opaque->acodec);
1466+
ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
1467+
return ret;
1468+
}
1469+
9901470
static int func_run_sync(IJKFF_Pipenode *node)
9911471
{
9921472
JNIEnv *env = NULL;
@@ -1112,7 +1592,11 @@ IJKFF_Pipenode *ffpipenode_create_video_decoder_from_android_mediacodec(FFPlayer
11121592
jobject jsurface = NULL;
11131593

11141594
node->func_destroy = func_destroy;
1115-
node->func_run_sync = func_run_sync;
1595+
if (ffp->mediacodec_sync) {
1596+
node->func_run_sync = func_run_sync_loop;
1597+
} else {
1598+
node->func_run_sync = func_run_sync;
1599+
}
11161600
node->func_flush = func_flush;
11171601
opaque->pipeline = pipeline;
11181602
opaque->ffp = ffp;

‎ijkmedia/ijkplayer/ff_ffplay_def.h

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -679,6 +679,7 @@ typedef struct FFPlayer {
679679
IjkIOManagerContext *ijkio_manager_ctx;
680680

681681
int enable_accurate_seek;
682+
int mediacodec_sync;
682683
} FFPlayer;
683684

684685
#define fftime_to_milliseconds(ts) (av_rescale(ts, 1000, AV_TIME_BASE))

‎ijkmedia/ijkplayer/ff_ffplay_options.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -183,6 +183,8 @@ static const AVOption ffp_context_options[] = {
183183
OPTION_OFFSET(opensles), OPTION_INT(0, 0, 1) },
184184
{ "soundtouch", "SoundTouch: enable",
185185
OPTION_OFFSET(soundtouch_enable), OPTION_INT(0, 0, 1) },
186+
{ "mediacodec-sync", "mediacodec: use msg_queue for synchronise",
187+
OPTION_OFFSET(mediacodec_sync), OPTION_INT(0, 0, 1) },
186188

187189
{ NULL }
188190
};

0 commit comments

Comments
 (0)
Please sign in to comment.