From: Debian Multimedia Maintainers
Date: Fri, 25 May 2018 13:56:56 +0000 (-0400)
Subject: ffmpeg_2.9
X-Git-Tag: archive/raspbian/1.0.1+dfsg1-4+rpi1~1^2^2^2^2^2~1
X-Git-Url: https://dgit.raspbian.org/?a=commitdiff_plain;h=a033fce22a3d7012e9fd688456af041ab2786a28;p=gpac.git

ffmpeg_2.9

Gbp-Pq: Name ffmpeg_2.9.patch
---

diff --git a/applications/dashcast/audio_decoder.c b/applications/dashcast/audio_decoder.c
index b5fc1bf..775d88c 100644
--- a/applications/dashcast/audio_decoder.c
+++ b/applications/dashcast/audio_decoder.c
@@ -205,11 +205,7 @@ int dc_audio_decoder_read(AudioInputFile *audio_input_file, AudioInputData *audi
 		packet.data = NULL;
 		packet.size = 0;
-#ifndef FF_API_AVFRAME_LAVC
-		avcodec_get_frame_defaults(audio_input_data->aframe);
-#else
 		av_frame_unref(audio_input_data->aframe);
-#endif
 		avcodec_decode_audio4(codec_ctx, audio_input_data->aframe, &got_frame, &packet);
@@ -240,11 +236,7 @@ int dc_audio_decoder_read(AudioInputFile *audio_input_file, AudioInputData *audi
 		if (packet.stream_index == audio_input_file->astream_idx) {
 			/* Set audio frame to default */
-#ifndef FF_API_AVFRAME_LAVC
-			avcodec_get_frame_defaults(audio_input_data->aframe);
-#else
 			av_frame_unref(audio_input_data->aframe);
-#endif
 			/* Decode audio frame */
 			if (avcodec_decode_audio4(codec_ctx, audio_input_data->aframe, &got_frame, &packet) < 0) {
diff --git a/applications/dashcast/audio_encoder.c b/applications/dashcast/audio_encoder.c
index c0d6a79..badf365 100644
--- a/applications/dashcast/audio_encoder.c
+++ b/applications/dashcast/audio_encoder.c
@@ -85,11 +85,7 @@ int dc_audio_encoder_open(AudioOutputFile *audio_output_file, AudioDataConf *aud
 	audio_output_file->frame_bytes = audio_output_file->codec_ctx->frame_size * av_get_bytes_per_sample(DC_AUDIO_SAMPLE_FORMAT) * DC_AUDIO_NUM_CHANNELS;
-#ifndef FF_API_AVFRAME_LAVC
-	avcodec_get_frame_defaults(audio_output_file->aframe);
-#else
 	av_frame_unref(audio_output_file->aframe);
-#endif
 	audio_output_file->aframe->nb_samples = audio_output_file->codec_ctx->frame_size;
diff --git a/applications/dashcast/video_decoder.c b/applications/dashcast/video_decoder.c
index de708f9..eafc691 100644
--- a/applications/dashcast/video_decoder.c
+++ b/applications/dashcast/video_decoder.c
@@ -253,11 +253,7 @@ int dc_video_decoder_read(VideoInputFile *video_input_file, VideoInputData *vide
 			video_data_node->source_number = source_number;
 			/* Flush decoder */
 			memset(&packet, 0, sizeof(AVPacket));
-#ifndef FF_API_AVFRAME_LAVC
-			avcodec_get_frame_defaults(video_data_node->vframe);
-#else
 			av_frame_unref(video_data_node->vframe);
-#endif
 			avcodec_decode_video2(codec_ctx, video_data_node->vframe, &got_frame, &packet);
 			if (got_frame) {
@@ -292,11 +288,7 @@ int dc_video_decoder_read(VideoInputFile *video_input_file, VideoInputData *vide
 			video_data_node->source_number = source_number;
 			/* Set video frame to default */
-#ifndef FF_API_AVFRAME_LAVC
-			avcodec_get_frame_defaults(video_data_node->vframe);
-#else
 			av_frame_unref(video_data_node->vframe);
-#endif
 			/* Decode video frame */
 			if (avcodec_decode_video2(codec_ctx, video_data_node->vframe, &got_frame, &packet) < 0) {
diff --git a/applications/dashcast/video_encoder.c b/applications/dashcast/video_encoder.c
index 1b07cf1..08c70a6 100644
--- a/applications/dashcast/video_encoder.c
+++ b/applications/dashcast/video_encoder.c
@@ -86,7 +86,7 @@ int dc_video_encoder_open(VideoOutputFile *video_output_file, VideoDataConf *vid
 		video_output_file->codec_ctx->time_base.den = video_data_conf->time_base.den;
 		video_output_file->codec_ctx->time_base.num = video_data_conf->time_base.num * video_data_conf->time_base.den / video_data_conf->framerate;
 	}
-	video_output_file->codec_ctx->pix_fmt = PIX_FMT_YUV420P;
+	video_output_file->codec_ctx->pix_fmt = AV_PIX_FMT_YUV420P;
 	video_output_file->codec_ctx->gop_size = video_data_conf->framerate;
 //	video_output_file->codec_ctx->codec_id = video_codec->id;
@@ -96,7 +96,7 @@ int dc_video_encoder_open(VideoOutputFile *video_output_file, VideoDataConf *vid
 //	video_output_file->codec_ctx->height = video_data_conf->height;
 //	video_output_file->codec_ctx->time_base = (AVRational) {1 ,
 //	video_output_file->video_data_conf->framerate};
-//	video_output_file->codec_ctx->codec->pix_fmt = PIX_FMT_YUV420P;
+//	video_output_file->codec_ctx->codec->pix_fmt = AV_PIX_FMT_YUV420P;
 	video_output_file->codec_ctx->gop_size = video_data_conf->framerate;
 //
 //	av_opt_set(video_output_file->codec_ctx->priv_data, "preset", "ultrafast", 0);
diff --git a/applications/dashcast/video_muxer.c b/applications/dashcast/video_muxer.c
index 7f0ae3c..84ace7e 100644
--- a/applications/dashcast/video_muxer.c
+++ b/applications/dashcast/video_muxer.c
@@ -636,7 +636,7 @@ int dc_ffmpeg_video_muxer_open(VideoOutputFile *video_output_file, char *filenam
 	video_stream->codec->time_base = video_codec_ctx->time_base;
-	video_stream->codec->pix_fmt = PIX_FMT_YUV420P;
+	video_stream->codec->pix_fmt = AV_PIX_FMT_YUV420P;
 	video_stream->codec->gop_size = video_codec_ctx->time_base.den; //video_output_file->video_data_conf->framerate;
 	av_opt_set(video_stream->codec->priv_data, "preset", "ultrafast", 0);
diff --git a/applications/dashcast/video_scaler.c b/applications/dashcast/video_scaler.c
index 0135d9d..5b5ec2c 100644
--- a/applications/dashcast/video_scaler.c
+++ b/applications/dashcast/video_scaler.c
@@ -125,7 +125,7 @@ int dc_video_scaler_data_init(VideoInputData *video_input_data, VideoScaledData
 	dc_consumer_init(&video_scaled_data->consumer, video_cb_size, name);
 	video_scaled_data->num_producers = max_source;
-	video_scaled_data->out_pix_fmt = PIX_FMT_YUV420P;
+	video_scaled_data->out_pix_fmt = AV_PIX_FMT_YUV420P;
 	GF_SAFE_ALLOC_N(video_scaled_data->vsprop, max_source, VideoScaledProp);
 	memset(video_scaled_data->vsprop, 0, max_source * sizeof(VideoScaledProp));
@@ -198,7 +198,7 @@ int dc_video_scaler_scale(VideoInputData *video_input_data, VideoScaledData *vid
 	video_scaled_data_node->cropped_frame->width = video_input_data->vprop[index].width - video_input_data->vprop[index].crop_x;
 	video_scaled_data_node->cropped_frame->height = video_input_data->vprop[index].height - video_input_data->vprop[index].crop_y;
 #endif
-	if (av_picture_crop((AVPicture*)video_scaled_data_node->cropped_frame, (AVPicture*)video_data_node->vframe, PIX_FMT_YUV420P, video_input_data->vprop[index].crop_y, video_input_data->vprop[index].crop_x) < 0) {
+	if (av_picture_crop((AVPicture*)video_scaled_data_node->cropped_frame, (AVPicture*)video_data_node->vframe, AV_PIX_FMT_YUV420P, video_input_data->vprop[index].crop_y, video_input_data->vprop[index].crop_x) < 0) {
 		GF_LOG(GF_LOG_ERROR, GF_LOG_DASH, ("Video scaler: error while cropping picture.\n"));
 		return -1;
 	}
diff --git a/modules/ffmpeg_in/ffmpeg_decode.c b/modules/ffmpeg_in/ffmpeg_decode.c
index 53bbd6d..f117bda 100644
--- a/modules/ffmpeg_in/ffmpeg_decode.c
+++ b/modules/ffmpeg_in/ffmpeg_decode.c
@@ -327,11 +327,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd)
 			}
 		}
 	}
-#if !defined(FF_API_AVFRAME_LAVC)
-	*frame = avcodec_alloc_frame();
-#else
 	*frame = av_frame_alloc();
-#endif
 	}
 #ifdef HAS_HEVC
@@ -398,11 +394,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd)
 #if defined(USE_AVCTX3)
-#if !defined(FF_API_AVFRAME_LAVC)
-	ffd->audio_frame = avcodec_alloc_frame();
-#else
 	ffd->audio_frame = av_frame_alloc();
-#endif
 #endif
@@ -415,7 +407,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd)
 	case AV_CODEC_ID_GIF:
 #endif
 	case AV_CODEC_ID_RAWVIDEO:
-		if ((*ctx)->pix_fmt==PIX_FMT_YUV420P) {
+		if ((*ctx)->pix_fmt==AV_PIX_FMT_YUV420P) {
 			ffd->pix_fmt = GF_PIXEL_YV12;
 		} else {
 			ffd->pix_fmt = GF_PIXEL_RGB_24;
@@ -423,11 +415,7 @@ static GF_Err FFDEC_AttachStream(GF_BaseDecoder *plug, GF_ESD *esd)
 		break;
 	case AV_CODEC_ID_DVD_SUBTITLE:
-#if !defined(FF_API_AVFRAME_LAVC)
-		*frame = avcodec_alloc_frame();
-#else
 		*frame = av_frame_alloc();
-#endif
 #ifdef USE_AVCODEC2
 		{
@@ -856,7 +844,7 @@ redecode:
 		*outBufferLength = ffd->out_size;
 		//	assert(inBufferLength==ffd->out_size);
-		if (ffd->raw_pix_fmt==PIX_FMT_BGR24) {
+		if (ffd->raw_pix_fmt==AV_PIX_FMT_BGR24) {
 			s32 i, j;
 			for (j=0; j<ctx->height; j++) {
 				u8 *src = (u8 *) inBuffer + j*3*ctx->width;
@@ -995,7 +983,7 @@ redecode:
 	stride = frame->linesize[0];
 #ifndef NO_10bit
-	if ((ctx->pix_fmt == PIX_FMT_YUV420P10LE) && ffd->output_as_8bit && (frame->linesize[0] >= 2*w) ) {
+	if ((ctx->pix_fmt == AV_PIX_FMT_YUV420P10LE) && ffd->output_as_8bit && (frame->linesize[0] >= 2*w) ) {
 		ffd->conv_to_8bit = 1;
 		stride=w;
 	}
@@ -1004,7 +992,7 @@ redecode:
 	/*recompute outsize in case on-the-fly change*/
 	if ((w != ctx->width) || (h != ctx->height)
 		|| (ffd->direct_output && (stride != ffd->stride))
-		|| ((ffd->out_pix_fmt==GF_PIXEL_YV12) && (ctx->pix_fmt != PIX_FMT_YUV420P) && !ffd->output_as_8bit )
+		|| ((ffd->out_pix_fmt==GF_PIXEL_YV12) && (ctx->pix_fmt != AV_PIX_FMT_YUV420P) && !ffd->output_as_8bit )
 		//need to realloc the conversion buffer
 		|| (ffd->conv_to_8bit && !ffd->conv_buffer && ffd->direct_output)
 	) {
@@ -1015,7 +1003,7 @@ redecode:
 		}
 #ifndef NO_10bit
 		//this YUV format is handled natively in GPAC
-		else if ((ctx->pix_fmt == PIX_FMT_YUV420P10LE) && !ffd->output_as_8bit) {
+		else if ((ctx->pix_fmt == AV_PIX_FMT_YUV420P10LE) && !ffd->output_as_8bit) {
 			ffd->stride = ffd->direct_output ? frame->linesize[0] : ctx->width*2;
 			outsize = ffd->stride * ctx->height * 3 / 2;
 			ffd->out_pix_fmt = GF_PIXEL_YV12_10;
@@ -1161,18 +1149,18 @@ redecode:
 	if (ffd->out_pix_fmt==GF_PIXEL_RGB_24) {
 		pict.data[0] = (uint8_t *)outBuffer;
 		pict.linesize[0] = 3*ctx->width;
-		pix_out = PIX_FMT_RGB24;
+		pix_out = AV_PIX_FMT_RGB24;
 	} else {
 		pict.data[0] = (uint8_t *)outBuffer;
 		pict.data[1] = (uint8_t *)outBuffer + ffd->stride * ctx->height;
 		pict.data[2] = (uint8_t *)outBuffer + 5 * ffd->stride * ctx->height / 4;
 		pict.linesize[0] = ffd->stride;
 		pict.linesize[1] = pict.linesize[2] = ffd->stride/2;
-		pix_out = PIX_FMT_YUV420P;
+		pix_out = AV_PIX_FMT_YUV420P;
 #ifndef NO_10bit
 		//this YUV format is handled natively in GPAC
-		if (ctx->pix_fmt==PIX_FMT_YUV420P10LE) {
-			pix_out = PIX_FMT_YUV420P10LE;
+		if (ctx->pix_fmt==AV_PIX_FMT_YUV420P10LE) {
+			pix_out = AV_PIX_FMT_YUV420P10LE;
 		}
 #endif
 		if (!mmlevel && frame->interlaced_frame) {
diff --git a/modules/redirect_av/ffmpeg_ts_muxer.c b/modules/redirect_av/ffmpeg_ts_muxer.c
index 6cc6e4b..93b6f5a 100644
--- a/modules/redirect_av/ffmpeg_ts_muxer.c
+++ b/modules/redirect_av/ffmpeg_ts_muxer.c
@@ -16,7 +16,7 @@
 #define STREAM_FRAME_RATE 25 /* 25 images/s */
 #define STREAM_NB_FRAMES ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
-#define STREAM_PIX_FMT PIX_FMT_YUV420P /* default pix_fmt */
+#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
 #define PACKETS_BUFFER_LEN 1024
diff --git a/modules/redirect_av/redirect_av.c b/modules/redirect_av/redirect_av.c
index 452ac22..d73976c 100644
--- a/modules/redirect_av/redirect_av.c
+++ b/modules/redirect_av/redirect_av.c
@@ -253,7 +253,7 @@ static u32 video_encoding_thread_run(void *param)
 			assert( currentFrameTimeProcessed != avr->frameTime);
 			currentFrameTimeProcessed = avr->frameTime;
 			{
-				avpicture_fill ( ( AVPicture * ) avr->RGBpicture, avr->frame, PIX_FMT_RGB24, avr->srcWidth, avr->srcHeight );
+				avpicture_fill ( ( AVPicture * ) avr->RGBpicture, avr->frame, AV_PIX_FMT_RGB24, avr->srcWidth, avr->srcHeight );
 				assert( avr->swsContext );
 				sws_scale ( avr->swsContext,
 #ifdef USE_AVCODEC2
@@ -318,7 +318,7 @@ exit:
 #define VIDEO_RATE 400000
 static Bool start_if_needed(GF_AVRedirect *avr) {
-	enum PixelFormat pxlFormatForCodec = PIX_FMT_YUV420P;
+	enum AVPixelFormat pxlFormatForCodec = AV_PIX_FMT_YUV420P;
 	if (avr->is_open) return 0;
 	gf_mx_p(avr->frameMutex);
@@ -367,13 +367,13 @@ static Bool start_if_needed(GF_AVRedirect *avr) {
 	}
 	if (avr->videoCodec->id == CODEC_ID_MJPEG) {
-		pxlFormatForCodec = PIX_FMT_YUVJ420P;
+		pxlFormatForCodec = AV_PIX_FMT_YUVJ420P;
 	}
-	avr->RGBpicture = avcodec_alloc_frame();
+	avr->RGBpicture = av_frame_alloc();
 	assert ( avr->RGBpicture );
 	avr->RGBpicture->data[0] = NULL;
-	avr->YUVpicture = avcodec_alloc_frame();
+	avr->YUVpicture = av_frame_alloc();
 	assert ( avr->YUVpicture );
 	{
 		u32 sz = sizeof ( uint8_t ) * avpicture_get_size ( pxlFormatForCodec, avr->srcWidth, avr->srcHeight );
@@ -524,7 +524,7 @@ static void avr_on_video_reconfig ( void *udta, u32 width, u32 height, u8 bpp )
 		avr->frame = gf_malloc ( sizeof ( char ) *avr->size );
 		avr->srcWidth = width;
 		avr->srcHeight = height;
-		avr->swsContext = sws_getCachedContext ( avr->swsContext, avr->srcWidth, avr->srcHeight, PIX_FMT_RGB24, avr->srcWidth, avr->srcHeight, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL );
+		avr->swsContext = sws_getCachedContext ( avr->swsContext, avr->srcWidth, avr->srcHeight, AV_PIX_FMT_RGB24, avr->srcWidth, avr->srcHeight, AV_PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL );
 		gf_mx_v(avr->frameMutex);
 	}
 }
@@ -787,7 +787,7 @@ void avr_delete ( GF_BaseInterface *ifce )
 	avr->videoCodec = NULL;
 	if ( avr->YUVpicture )
 	{
-		av_free ( avr->YUVpicture );
+		av_frame_free ( &avr->YUVpicture );
 	}
 	if ( avr->yuv_data )
 		av_free ( avr->yuv_data );
@@ -795,7 +795,7 @@ void avr_delete ( GF_BaseInterface *ifce )
 	avr->YUVpicture = NULL;
 	if ( avr->RGBpicture )
 	{
-		av_free ( avr->RGBpicture );
+		av_frame_free ( &avr->RGBpicture );
 	}
 	avr->RGBpicture = NULL;
 	if ( avr->swsContext )
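
Note (illustration, not part of the patch): every hunk above applies the same FFmpeg 2.9 migration — PIX_FMT_*/enum PixelFormat become AV_PIX_FMT_*/enum AVPixelFormat, and the deprecated avcodec_alloc_frame()/avcodec_get_frame_defaults()/av_free(frame) calls become av_frame_alloc()/av_frame_unref()/av_frame_free(). A minimal standalone sketch of that pattern against FFmpeg >= 2.9 libavutil headers follows; the function name frame_migration_example is hypothetical and not part of GPAC.

#include <libavutil/frame.h>
#include <libavutil/pixfmt.h>

/* Allocate, reset and free an AVFrame using the post-2.9 API adopted above. */
static int frame_migration_example(void)
{
	enum AVPixelFormat pix_fmt = AV_PIX_FMT_YUV420P;  /* was PIX_FMT_YUV420P */
	AVFrame *frame = av_frame_alloc();                /* was avcodec_alloc_frame() */
	if (!frame)
		return -1;
	frame->format = pix_fmt;
	av_frame_unref(frame);                            /* was avcodec_get_frame_defaults(frame) */
	av_frame_free(&frame);                            /* was av_free(frame); takes AVFrame** and NULLs it */
	return 0;
}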