    /*
     * Libavformat API example: Output a media file in any supported
     * libavformat format. The default codecs are used.
     * 
     * Copyright (c) 2003 Fabrice Bellard
     * 
     * Permission is hereby granted, free of charge, to any person obtaining a copy
     * of this software and associated documentation files (the "Software"), to deal
     * in the Software without restriction, including without limitation the rights
     * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
     * copies of the Software, and to permit persons to whom the Software is
     * furnished to do so, subject to the following conditions:
     * 
     * The above copyright notice and this permission notice shall be included in
     * all copies or substantial portions of the Software.
     * 
     * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
     * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
     * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
     * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
     * THE SOFTWARE.  
     */
    #include <stdlib.h>
    #include <stdio.h>
    #include <math.h>
    
    #include "avformat.h"
    
    /* 5 seconds stream duration */
    #define STREAM_DURATION 5.0
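
    /* A build sketch, not part of the original example: the include path and
       linker flags are assumptions that depend on how libavformat/libavcodec
       were installed on a given system:

           gcc output_example.c -o output_example -lavformat -lavcodec -lm
     */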
    
    
    /**************************************************************/
    /* audio output */
    
    
    float t, tincr, tincr2;   /* sine generator state used by the audio path */
    int16_t *samples;
    uint8_t *audio_outbuf;
    int audio_outbuf_size;
    int audio_input_frame_size;
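    /* Note: audio_input_frame_size is the number of samples per channel that
       the encoder consumes on each write_audio_frame() call. */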
    
    /* 
     * add an audio output stream
     */
    AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
    {
        AVCodecContext *c;
        AVStream *st;
    
        st = av_new_stream(oc, 1);
        if (!st) {
            fprintf(stderr, "Could not alloc stream\n");
            exit(1);
        }
    
        c = &st->codec;
    
        c->codec_type = CODEC_TYPE_AUDIO;
    
        /* put sample parameters */
        c->bit_rate = 64000;
        c->sample_rate = 44100;
        c->channels = 2;
    
        return st;
    }
    
    void open_audio(AVFormatContext *oc, AVStream *st)
    {
        AVCodecContext *c;
        AVCodec *codec;
    
        c = &st->codec;
    
        /* find the audio encoder */
        codec = avcodec_find_encoder(c->codec_id);
        if (!codec) {
            fprintf(stderr, "codec not found\n");
            exit(1);
        }
    
    
        /* open it */
        if (avcodec_open(c, codec) < 0) {
            fprintf(stderr, "could not open codec\n");
            exit(1);
        }
    
        /* init signal generator */
        t = 0;
    
        tincr = 2 * M_PI * 110.0 / c->sample_rate;
        /* increment frequency by 110 Hz per second */
        tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;
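        /* i.e. the tone starts at 110 Hz, and because tincr grows by tincr2 on
           every sample, the pitch rises by roughly another 110 Hz each second */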
    
    
        audio_outbuf_size = 10000;
        audio_outbuf = malloc(audio_outbuf_size);
    
        /* ugly hack for PCM codecs (will be removed ASAP with new PCM
           support) to compute the input frame size in samples */
        if (c->frame_size <= 1) {
            audio_input_frame_size = audio_outbuf_size / c->channels;
            switch(st->codec.codec_id) {
            case CODEC_ID_PCM_S16LE:
            case CODEC_ID_PCM_S16BE:
            case CODEC_ID_PCM_U16LE:
            case CODEC_ID_PCM_U16BE:
                audio_input_frame_size >>= 1;
                break;
            default:
                break;
            }
        } else {
            audio_input_frame_size = c->frame_size;
        }
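        /* two bytes per int16_t sample, times the number of channels */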
        samples = malloc(audio_input_frame_size * 2 * c->channels);
    }
    
    
    /* prepare a 16 bit dummy audio frame of 'frame_size' samples and
       'nb_channels' channels */
    void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
    {
        int j, i, v;
        int16_t *q;
    
        q = samples;
        for(j=0;j<frame_size;j++) {
            v = (int)(sin(t) * 10000);
            for(i = 0; i < nb_channels; i++)
                *q++ = v;
            t += tincr;
            tincr += tincr2;
        }
    }
    
    
    void write_audio_frame(AVFormatContext *oc, AVStream *st)
    {
        AVCodecContext *c;
        int out_size;

        c = &st->codec;

        get_audio_frame(samples, audio_input_frame_size, c->channels);
    
    
        out_size = avcodec_encode_audio(c, audio_outbuf, audio_outbuf_size, samples);
    
        /* write the compressed frame in the media file */
        if (av_write_frame(oc, st->index, audio_outbuf, out_size) != 0) {
            fprintf(stderr, "Error while writing audio frame\n");
            exit(1);
        }
    }
    
    
    void close_audio(AVFormatContext *oc, AVStream *st)
    {
        avcodec_close(&st->codec);
        
        av_free(samples);
        av_free(audio_outbuf);
    }
    
    
    /**************************************************************/
    /* video output */
    
    
    AVFrame *picture, *tmp_picture;
    uint8_t *video_outbuf;
    int frame_count, video_outbuf_size;
    
    /* add a video output stream */
    AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
    {
        AVCodecContext *c;
        AVStream *st;
    
        st = av_new_stream(oc, 0);
        if (!st) {
            fprintf(stderr, "Could not alloc stream\n");
            exit(1);
        }
        
        c = &st->codec;
    
        c->codec_type = CODEC_TYPE_VIDEO;
    
        /* put sample parameters */
        c->bit_rate = 400000;
        /* resolution must be a multiple of two */
        c->width = 352;  
        c->height = 288;
        /* frames per second */
        c->frame_rate = 25;  
        c->frame_rate_base= 1;
        c->gop_size = 12; /* emit one intra frame every twelve frames */
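        /* frame_rate / frame_rate_base expresses the rate as a rational:
           25/1 = 25 frames per second */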
    
    
        return st;
    }
    
    AVFrame *alloc_picture(int pix_fmt, int width, int height)
    {
        AVFrame *picture;
        uint8_t *picture_buf;
        int size;
        
        picture = avcodec_alloc_frame();
        if (!picture)
            return NULL;
        size = avpicture_get_size(pix_fmt, width, height);
        picture_buf = malloc(size);
        if (!picture_buf) {
            av_free(picture);
            return NULL;
        }
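        /* point the frame's data[] planes and linesize[] strides into picture_buf */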
        avpicture_fill((AVPicture *)picture, picture_buf, 
                       pix_fmt, width, height);
        return picture;
    }
        
    void open_video(AVFormatContext *oc, AVStream *st)
    {
        AVCodec *codec;
        AVCodecContext *c;
    
        c = &st->codec;
    
        /* find the video encoder */
        codec = avcodec_find_encoder(c->codec_id);
        if (!codec) {
            fprintf(stderr, "codec not found\n");
            exit(1);
        }
    
        /* open the codec */
    
        if (avcodec_open(c, codec) < 0) {
            fprintf(stderr, "could not open codec\n");
            exit(1);
        }
    
    
        video_outbuf = NULL;
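        /* formats flagged AVFMT_RAWPICTURE are handed the AVPicture directly,
           so an encode buffer is only needed for the other formats */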
        if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
            /* allocate output buffer */
            /* XXX: API change will be done */
            video_outbuf_size = 200000;
            video_outbuf = malloc(video_outbuf_size);
        }
    
        /* allocate the encoded raw picture */
        picture = alloc_picture(c->pix_fmt, c->width, c->height);
        if (!picture) {
            fprintf(stderr, "Could not allocate picture\n");
            exit(1);
        }
    
        /* if the output format is not YUV420P, then a temporary YUV420P
           picture is needed too. It is then converted to the required
           output format */
        tmp_picture = NULL;
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
            if (!tmp_picture) {
                fprintf(stderr, "Could not allocate temporary picture\n");
                exit(1);
            }
        }
    }
    
    /* prepare a dummy image */
    void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
    {
        int x, y, i;

        i = frame_index;

        /* Y */
        for(y=0;y<height;y++) {
            for(x=0;x<width;x++) {
                pict->data[0][y * pict->linesize[0] + x] = x + y + i * 3;
            }
        }

        /* Cb and Cr */
        for(y=0;y<height/2;y++) {
            for(x=0;x<width/2;x++) {
                pict->data[1][y * pict->linesize[1] + x] = 128 + y + i * 2;
                pict->data[2][y * pict->linesize[2] + x] = 64 + x + i * 5;
            }
        }
    }

    void write_video_frame(AVFormatContext *oc, AVStream *st)
    {
        int out_size, ret;
        AVCodecContext *c;
    
        c = &st->codec;
        
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
               to the codec pixel format if needed */
            fill_yuv_image(tmp_picture, frame_count, c->width, c->height);
            img_convert((AVPicture *)picture, c->pix_fmt, 
                        (AVPicture *)tmp_picture, PIX_FMT_YUV420P,
                        c->width, c->height);
        } else {
            fill_yuv_image(picture, frame_count, c->width, c->height);
        }
    
        
        if (oc->oformat->flags & AVFMT_RAWPICTURE) {
            /* raw video case. The API will change slightly in the near
               future for that */
            ret = av_write_frame(oc, st->index, 
                           (uint8_t *)picture, sizeof(AVPicture));
        } else {
            /* encode the image */
            out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
            
            /* write the compressed frame in the media file */
            ret = av_write_frame(oc, st->index, video_outbuf, out_size);
        }
        if (ret != 0) {
    
            fprintf(stderr, "Error while writing video frame\n");
            exit(1);
        }
    
        frame_count++;
    }
    
    void close_video(AVFormatContext *oc, AVStream *st)
    {
        avcodec_close(&st->codec);
        av_free(picture->data[0]);
        av_free(picture);
        if (tmp_picture) {
            av_free(tmp_picture->data[0]);
            av_free(tmp_picture);
        }
        av_free(video_outbuf);
    
    }
    
    /**************************************************************/
    /* media file output */
    
    int main(int argc, char **argv)
    {
        const char *filename;
        AVOutputFormat *fmt;
        AVFormatContext *oc;
        AVStream *audio_st, *video_st;
        double audio_pts, video_pts;
        int i;
    
        /* initialize libavcodec, and register all codecs and formats */
        av_register_all();
        
        if (argc != 2) {
            printf("usage: %s output_file\n"
    
                   "API example program to output a media file with libavformat.\n"
                   "The output format is automatically guessed according to the file extension.\n"
                   "Raw images can also be output by using '%%d' in the filename\n"
    
                   "\n", argv[0]);
            exit(1);
        }
        
        filename = argv[1];
    
        /* auto detect the output format from the name. default is
           mpeg. */
        fmt = guess_format(NULL, filename, NULL);
        if (!fmt) {
            printf("Could not deduce output format from file extension: using MPEG.\n");
            fmt = guess_format("mpeg", NULL, NULL);
        }
        if (!fmt) {
            fprintf(stderr, "Could not find suitable output format\n");
            exit(1);
        }
        
        /* allocate the output media context */
        oc = av_mallocz(sizeof(AVFormatContext));
        if (!oc) {
            fprintf(stderr, "Memory error\n");
            exit(1);
        }
        oc->oformat = fmt;
    
        snprintf(oc->filename, sizeof(oc->filename), "%s", filename);
    
    
        /* add the audio and video streams using the default format codecs
           and initialize the codecs */
        video_st = NULL;
        audio_st = NULL;
        if (fmt->video_codec != CODEC_ID_NONE) {
            video_st = add_video_stream(oc, fmt->video_codec);
        }
        if (fmt->audio_codec != CODEC_ID_NONE) {
            audio_st = add_audio_stream(oc, fmt->audio_codec);
        }
    
    
        /* set the output parameters (must be done even if no
           parameters). */
        if (av_set_parameters(oc, NULL) < 0) {
            fprintf(stderr, "Invalid output format parameters\n");
            exit(1);
        }
    
    
        dump_format(oc, 0, filename, 1);
    
    
        /* now that all the parameters are set, we can open the audio and
           video codecs and allocate the necessary encode buffers */
        if (video_st)
            open_video(oc, video_st);
        if (audio_st)
            open_audio(oc, audio_st);
    
    
        /* open the output file, if needed */
        if (!(fmt->flags & AVFMT_NOFILE)) {
            if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
                fprintf(stderr, "Could not open '%s'\n", filename);
                exit(1);
            }
        }
        
        /* write the stream header, if any */
        av_write_header(oc);
        
        for(;;) {
            /* compute current audio and video time */
            if (audio_st)
                audio_pts = (double)audio_st->pts.val * oc->pts_num / oc->pts_den;
            else
                audio_pts = 0.0;
            
            if (video_st)
                video_pts = (double)video_st->pts.val * oc->pts_num / oc->pts_den;
            else
                video_pts = 0.0;
    
            if ((!audio_st || audio_pts >= STREAM_DURATION) && 
                (!video_st || video_pts >= STREAM_DURATION))
                break;
            
            /* write interleaved audio and video frames */
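            /* pick the stream whose clock is behind, so audio and video
               packets stay close together in the output file */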
    
            if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
    
                write_audio_frame(oc, audio_st);
            } else {
                write_video_frame(oc, video_st);
            }
        }
    
        /* close each codec */
    
        if (video_st)
            close_video(oc, video_st);
        if (audio_st)
            close_audio(oc, audio_st);
    
    
        /* write the trailer, if any */
        av_write_trailer(oc);
        
    
        /* free the streams */
        for(i = 0; i < oc->nb_streams; i++) {
            av_freep(&oc->streams[i]);
        }
    
    
        if (!(fmt->flags & AVFMT_NOFILE)) {
            /* close the output file */
            url_fclose(&oc->pb);
        }
    
    /* free the format context */
        av_free(oc);
    
        return 0;
    }