I'm trying to write some code that creates an MPEG-4 video from a set of .bmp
files.  The code works if I create an MPEG-1 video, but when I try to encode
MPEG-4 I get an unhandled exception on frame 12 in the call to
avcodec_encode_video.  My first thought was that I was overwriting the output
buffer, but as far as I can tell there should still be plenty of room left
when the problem occurs.
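
For reference, the buffer math I'm relying on (taken from the code below, so
please correct me if I'm misreading it) is:

    outbuf_size        = 1000 * 888              =   888,000
    allocated outbuf   = (888,000 * 3) / 2 + 100 = 1,332,100 bytes
    full YUV420P frame = (1000 * 888 * 3) / 2    = 1,332,000 bytes

so even a complete YUV420P frame should just fit in the allocation.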

I've looked through the last several months of this mailing list's archive and
googled around, but I haven't found an answer on what I might be doing wrong.
Could anyone point me in the right direction?  I'm a beginner with the FFmpeg
API, so I'd also welcome any suggestions on how I could simplify my code, such
as whether there is a preferred way to load an image file through the FFmpeg
API rather than using Qt, or how to avoid the RGBtoYUV420P function I'm using
(a rough, untested sketch of what I had in mind is below).  Thanks for any help
you can provide!
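
On that last point, this is roughly the swscale-based conversion I was
considering as a replacement for RGBtoYUV420P.  It is untested, and I'm only
guessing that PIX_FMT_BGRA is the right source format for the data returned by
QImage::bits() on Windows:

// PIX_FMT_BGRA is a guess at how QImage lays out 32-bit pixels on Windows.
SwsContext* swsCtx = sws_getContext(srcWidth, srcHeight, PIX_FMT_BGRA,
                                    srcWidth, srcHeight, PIX_FMT_YUV420P,
                                    SWS_BICUBIC, NULL, NULL, NULL);

// QImage keeps each scanline 32-bit aligned, so use bytesPerLine() as the stride.
uint8_t* srcData[4]     = { curFrameImage.bits(), NULL, NULL, NULL };
int      srcLinesize[4] = { curFrameImage.bytesPerLine(), 0, 0, 0 };

sws_scale(swsCtx, srcData, srcLinesize, 0, srcHeight,
          srcFrame->data, srcFrame->linesize);
sws_freeContext(swsCtx);

Presumably I would create the context once before the encode loop and free it
afterwards, rather than per frame as written here.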

My build configuration:
FFmpeg source from about two weeks ago, built with whatever the latest version
of MinGW was at the time.  I'm building the code below in VS2005 against shared
libraries that were built using the instructions from here:
http://www.ffmpeg.org/general.html#SEC24.  The .bmp images I'm using to make
the video can be downloaded from http://www.filedropper.com/images_82.



#include <QtCore/QCoreApplication>
#include <QImage>
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
};

// Allocate an AVFrame plus a raw picture buffer of the appropriate size and
// point the frame's data/linesize members into that buffer.
AVFrame* initializeFrame(PixelFormat pixfmt, int width, int height)
{
    AVFrame* pFrame = avcodec_alloc_frame();
    int size = avpicture_get_size(pixfmt, width, height);
    uint8_t* pPicBuf = (uint8_t*)malloc(size);
    avpicture_fill((AVPicture*)pFrame, pPicBuf, pixfmt, width, height);
    return pFrame;
}

// Integer approximation of the full-range BT.601 RGB -> YCbCr conversion
// (coefficients scaled by 100, with the 128 chroma offset folded in as 12800).
#define rgbtoyuv(r, g, b, y, u, v) \
    y=(uint8_t)(((int)30*r   +(int)59*g +(int)11*b)/100); \
    u=(uint8_t)(((int)-17*r  -(int)33*g +(int)50*b+12800)/100); \
    v=(uint8_t)(((int)50*r   -(int)42*g -(int)8*b+12800)/100);
void RGBtoYUV420P ( const uint8_t *pRGB, uint8_t *pYUV, uint iRGBIncrement,
                    bool bSwapRGB, int iWidth, int iHeight, bool bFlip )
{
    const unsigned iPlaneSize = iWidth * iHeight;
    const unsigned iHalfWidth = iWidth >> 1;

    // Get pointers to the data
    uint8_t *yplane  = pYUV;
    uint8_t *uplane  = pYUV + iPlaneSize;
    uint8_t *vplane  = pYUV + iPlaneSize + (iPlaneSize >> 2);
    const uint8_t *pRGBIndex = pRGB;
    int iRGBIdx[3];
    iRGBIdx[0] = 0;
    iRGBIdx[1] = 1;
    iRGBIdx[2] = 2;
    if ( bSwapRGB )  {
        iRGBIdx[0] = 2;
        iRGBIdx[2] = 0;
    }

    for (int y = 0; y < (int) iHeight; y++) {
        uint8_t *yline  = yplane + (y * iWidth);
        uint8_t *uline  = uplane + ((y >> 1) * iHalfWidth);
        uint8_t *vline  = vplane + ((y >> 1) * iHalfWidth);

        if ( bFlip ) // Flip vertically (read source rows bottom-up)
            pRGBIndex = pRGB + ( iWidth * ( iHeight -1 -y ) * iRGBIncrement );

        for ( int x=0; x<iWidth; x+=2 ) {
            rgbtoyuv ( pRGBIndex[iRGBIdx[0]], pRGBIndex[iRGBIdx[1]],
                       pRGBIndex[iRGBIdx[2]], *yline, *uline, *vline );
            pRGBIndex += iRGBIncrement;
            yline++;
            rgbtoyuv ( pRGBIndex[iRGBIdx[0]], pRGBIndex[iRGBIdx[1]],
                       pRGBIndex[iRGBIdx[2]], *yline, *uline, *vline );
            pRGBIndex += iRGBIncrement;
            yline++;
            uline++;
            vline++;
        }
    }
}

void encodeVideo(const char* filename)
{
    // Find the video encoder
    //AVCodec* codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO);
    AVCodec* codec = avcodec_find_encoder(CODEC_ID_MPEG4);
    if (!codec)
    {
        fprintf(stderr, "Codec not found\n");
        exit(1);
    }

    // Initialize codec.
    AVCodecContext* codecContext = avcodec_alloc_context();

    // Put sample parameters
    codecContext->bit_rate = 400000;

    // Resolution must be a multiple of two
    codecContext->width = 1000;
    codecContext->height = 888;

    // Set the number of frames per second
    codecContext->time_base.num = 1;
    codecContext->time_base.den = 25;
    codecContext->gop_size = 10; // Emit one intra frame every ten frames
    codecContext->max_b_frames=1;
    codecContext->pix_fmt = PIX_FMT_YUV420P;

    // Open the codec.
    if (avcodec_open(codecContext, codec) < 0)
    {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    // Open the output file
    FILE* outputFile = fopen(filename, "wb");
    if (!outputFile)
    {
        fprintf(stderr, "could not open %s\n", filename);
        exit(1);
    }

    // Create buffer.
    int outbuf_size = codecContext->width * codecContext->height;
    uint8_t* outbuf = new uint8_t [ ( ( outbuf_size * 3 ) / 2 ) + 100 ];
    int actualSize = (( ( outbuf_size * 3 ) / 2 ) + 100) * sizeof(uint8_t);

    int srcWidth = codecContext->width;
    int srcHeight = codecContext->height;

    // Setup frame.
    AVFrame* srcFrame = initializeFrame(PIX_FMT_RGB8, srcWidth, srcHeight);
    srcFrame->data[0] = outbuf;
    srcFrame->data[1] = srcFrame->data[0] + outbuf_size;
    srcFrame->data[2] = srcFrame->data[1] + outbuf_size / 4;
    srcFrame->linesize[0] = codecContext->width;
    srcFrame->linesize[1] = codecContext->width / 2;
    srcFrame->linesize[2] = codecContext->width / 2;
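    // Note: all three plane pointers above point into outbuf, which is also
    // the buffer handed to avcodec_encode_video below as its output buffer.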


    QImage curFrameImage;

    // Encode 1 second of video.
    int out_size = 0;
    for(int i=0; i<25; i++)
    {
        fflush(stdout);

        curFrameImage.load("C:\\images\\img" + QString::number(i+1) + ".bmp");
        RGBtoYUV420P(curFrameImage.bits(), outbuf, curFrameImage.depth()/8,
                     true, srcWidth, srcHeight, false);
        out_size = avcodec_encode_video(codecContext, outbuf, outbuf_size,
                                        srcFrame);
        fwrite(outbuf, 1, out_size, outputFile);
    }

    // Get the delayed frames
    while(out_size)
    {
        fflush(stdout);
        out_size = avcodec_encode_video(codecContext, outbuf, outbuf_size,
                                        NULL);
        fwrite(outbuf, 1, out_size, outputFile);
    }

    // Add sequence end code to have a real mpeg file.
    outbuf[0] = 0x00;
    outbuf[1] = 0x00;
    outbuf[2] = 0x01;
    outbuf[3] = 0xb7;
    fwrite(outbuf, 1, 4, outputFile);

    // Clean up
    fclose(outputFile);
    delete [] outbuf; // allocated with new[], so delete[] rather than free()
    avcodec_close(codecContext);
    av_free(codecContext);
    av_free(srcFrame);
}

int main(int argc, char *argv[])
{
    QCoreApplication a(argc, argv);

    // Must be called before using avcodec lib.
    avcodec_init();

    // Register all the codecs.
    avcodec_register_all();

    encodeVideo("output.mpg");

    return 0;
}



