Video from Set of Images have RGB problem
HI Guys,
I used ffmpeg for creating video from sequence of Images. Following is My coding.
-(void)imageToMov:(NSString*)videoName imageNumber:(int)imageNumber{
[[NSFileManager defaultManager]createDirectoryAtPath:[Utilities documentsPath:[NSString stringWithFormat:@"CoachedFiles/"]] attributes:nil];
[[NSFileManager defaultManager]createDirectoryAtPath:[Utilities documentsPath:[NSString stringWithFormat:@"CoachedFiles/%@/",videoName]] attributes:nil];
//创建文件
[[NSFileManager defaultManager]createFileAtPath:[Utilities documentsPath:[NSString stringWithFormat:@"CoachedFiles/%@/%@.mov",videoName,videoName]] contents:nil attributes:nil];
//[[NSFileManager defaultManager]createFileAtPath:[Utilities documentsPath:[NSString stringWithFormat:@"temp/temp.mov"]] contents:nil attributes:nil];
const char *outfilename = [[Utilities documentsPath:[NSString stringWithFormat:@"CoachedFiles/%@/%@.mov",videoName,videoName]]UTF8String];
UIImage * tempImage = [UIImage imageWithContentsOfFile:[Utilities documentsPath:[NSString stringWithFormat:@"temp/temp0000.jpeg"]]];
AVFormatContext *pFormatCtxEnc;
AVCodecContext *pCodecCtxEnc;
AVCodec *pCodecEnc;
AVFrame *pFrameEnc;
AVOutputFormat *pOutputFormat;
AVStream *video_st;
int i;
int outbuf_size;
uint8_t *outbuf;
int out_size;
// Register all formats and codecs
av_register_all();
// auto detect the output format from the name. default is mpeg.
pOutputFormat = av_guess_format(NULL, outfilename, NULL);
if (pOutputFormat == NULL)
return;
// allocate the output media context
pFormatCtxEnc = avformat_alloc_context();
if (pFormatCtxEnc == NULL)
return;
pFormatCtxEnc->oformat = pOutputFormat;
sprintf(pFormatCtxEnc->filename, "%s", outfilename);
video_st = av_new_stream(pFormatCtxEnc, 0); // 0 for video
pCodecCtxEnc = video_st->codec;
pCodecCtxEnc->codec_id = pOutputFormat->video_codec;
pCodecCtxEnc->codec_type = CODEC_TYPE_VIDEO;
// put sample parameters
pCodecCtxEnc->bit_rate = 500000;
// resolution must be a multiple of two
pCodecCtxEnc->width = tempImage.size.width;
pCodecCtxEnc->height = tempImage.size.height;
// frames per second
pCodecCtxEnc->time_base.den = 1;
pCodecCtxEnc->time_base.num = 1;
pCodecCtxEnc->pix_fmt = PIX_FMT_YUV420P;
pCodecCtxEnc->gop_size = 12; /* emit one intra frame every ten frames */
if (pCodecCtxEnc->codec_id == CODEC_ID_MPEG1VIDEO){
/* needed to avoid using macroblocks in which some coeffs overflow
this doesnt happen with normal video, it just happens here as the
motion of the chroma plane doesnt match the luma plane */
pCodecCtxEnc->mb_decision=2;
}
// some formats want stream headers to be seperate
if(!strcmp(pFormatCtxEnc->oformat->name, "mp4") || !strcmp(pFormatCtxEnc->oformat->name, "mov") || !strcmp(pFormatCtxEnc->oformat->name, "3gp"))
pCodecCtxEnc->flags |= CODEC_FLAG_GLOBAL_HEADER;
// set the output parameters (must be done even if no parameters).
if (av_set_parameters(pFormatCtxEnc, NULL) < 0) {
return;
}
// find the video encoder
pCodecEnc = avcodec_find_encoder(pCodecCtxEnc->codec_id);
if (pCodecEnc == NULL)
return;
// open it
if (avcodec_open(pCodecCtxEnc, pCodecEnc) < 0) {
return;
}
if (!(pFormatCtxEnc->oformat->flags & AVFMT_RAWPICTURE)) {
/* allocate output buffer */
/* XXX: API change will be done */
outbuf_size = 500000;
outbuf = av_malloc(outbuf_size);
}
pFrameEnc= avcodec_alloc_frame();
// open the output file, if needed
if (!(pOutputFormat->flags & AVFMT_NOFILE)) {
if (url_fopen(&pFormatCtxEnc->pb, outfilename, URL_WRONLY) < 0) {
//fprintf(stderr, "Could not open '%s'\n", filename);
return;
}
}
// write the stream header, if any
av_write_header(pFormatCtxEnc);
// Read frames and save frames to disk
int size = pCodecCtxEnc->width * pCodecCtxEnc->height;
uint8_t * picture_buf;
picture_buf = malloc((size * 3) / 2);
pFrameEnc->data[0] = picture_buf;
pFrameEnc->data[1] = pFrameEnc->data[0] + size;
pFrameEnc->data[2] = pFrameEnc->data[1] + size / 4;
pFrameEnc->linesize[0] = pCodecCtxEnc->width;
pFrameEnc->linesize[1] = pCodecCtxEnc->width / 2;
pFrameEnc->linesize[2] = pCodecCtxEnc->width / 2;
for (i=0;i<imageNumber;i++){
NSString *imgName = [NSString stringWithFormat:@"temp/temp%04d.jpeg",i];
NSLog(@"%@",imgName);
UIImage * image = [UIImage imageWithContentsOfFile:[Utilities documentsPath:imgName]];
[imgName release];
//创建avpicture
AVPicture pict;
//格式保持bgra,后面是输入图片的长宽
avpicture_alloc(&pict, PIX_FMT_BGRA, image.size.width, image.size.height);
//读取图片数据
CGImageRef cgimage = [image CGImage];
CGDataProviderRef dataProvider = CGImageGetDataProvider(cgimage);
CFDataRef data = CGDataProviderCopyData(dataProvider);
const uint8_t * imagedata = CFDataGetBytePtr(data);
//向avpicture填充数据
avpicture_fill(&pict, imagedata, PIX_FMT_BGRA, image.size.width, image.size.height);
//定义转换格式,从bgra转到yuv420
static int sws_flags = SWS_FAST_BILINEAR;
struct SwsContext * img_convert_ctx = sws_getContext(image.size.width,
image.size.height,
PIX_FMT_BGRA,
image.size.width,
image.size.height,
PIX_FMT_YUV420P,
sws_flags, NULL, NULL, NULL);
//转换图象数据格式
sws_scale (img_convert_ctx, pict.data, pict.linesize,
0, image.size.height,
pFrameEnc->data, pFrameEnc->linesize);
if (pFormatCtxEnc->oformat->flags & AVFMT_RAWPICTURE) {
/* raw video case. The API will change slightly in the near
futur for that */
AVPacket pkt;
av_init_packet(&pkt);
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
pkt.data= (uint8_t *)pFrameEnc;
pkt.size= sizeof(AVPicture);
av_write_frame(pFormatCtxEnc, &pkt);
} else {
// encode the image
out_size = avcodec_encode_video(pCodecCtxEnc, outbuf, outbuf_size, pFrameEnc);
// if zero size, it means the image was buffered
if (out_size != 0) {
AVPacket pkt;
av_init_packet(&pkt);
pkt.pts= pCodecCtxEnc->coded_frame->pts;
if(pCodecCtxEnc->coded_frame->key_frame)
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
开发者_如何学编程 pkt.data= outbuf;
pkt.size= out_size;
// write the compressed frame in the media file
av_write_frame(pFormatCtxEnc, &pkt);
}
}
}
// get the delayed frames
for(; out_size; i++) {
out_size = avcodec_encode_video(pCodecCtxEnc, outbuf, outbuf_size, NULL);
if (out_size != 0) {
AVPacket pkt;
av_init_packet(&pkt);
pkt.pts= pCodecCtxEnc->coded_frame->pts;
if(pCodecCtxEnc->coded_frame->key_frame)
pkt.flags |= PKT_FLAG_KEY;
pkt.stream_index= video_st->index;
pkt.data= outbuf;
pkt.size= out_size;
// write the compressed frame in the media file
av_write_frame(pFormatCtxEnc, &pkt);
}
}
// Close the codec
//avcodec_close(pCodecCtxDec);
avcodec_close(pCodecCtxEnc);
// Free the YUV frame
//av_free(pFrameDec);
av_free(pFrameEnc);
av_free(outbuf);
// write the trailer, if any
av_write_trailer(pFormatCtxEnc);
// free the streams
for(i = 0; i < pFormatCtxEnc->nb_streams; i++) {
av_freep(&pFormatCtxEnc->streams[i]->codec);
av_freep(&pFormatCtxEnc->streams[i]);
}
if (!(pOutputFormat->flags & AVFMT_NOFILE)) {
/* close the output file */
//comment out this code to fix the record video issue. Kevin 2010-07-11
//url_fclose(&pFormatCtxEnc->pb);
}
/* free the stream */
av_free(pFormatCtxEnc);
if([[NSFileManager defaultManager]fileExistsAtPath:[Utilities documentsPath:[NSString stringWithFormat:@"temp/"]] isDirectory:NULL]){
[[NSFileManager defaultManager] removeItemAtPath:[Utilities documentsPath:[NSString stringWithFormat:@"temp/"]] error:nil];
}
//[self MergeVideoFileWithVideoName:videoName];
[self SaveFileDetails:videoName];
[alertView dismissWithClickedButtonIndex:0 animated:YES];
}
Now the Problem is Video is created successfully and the RGB color is greenish. Please notify my mistake on this coding.
I am not an expert on colors but I think your problem might be in the color space you use on your img_convert_ctx
. You are using PIX_FMT_BGRA
. That's different colorspace then RGB it's BGR, it has reversed colors. Try using PIX_FMT_RGBA
instead.
精彩评论