首页 > 解决方案 > 如何使用 ffmpeg 将视频帧重新编码到另一个编解码器?

问题描述

我正在尝试学习 ffmpeg,所以我开始了一个小项目,我将 MP4 视频流发送到我的 C# 应用程序,我想将视频重新编码为 webM 并将其发送到 icecast 服务器。

我的 icecast 服务器正在接收视频,但我无法重现它(每次按播放时视频时间都会更新,但视频不播放,我只看到一个黑框)

任何人都可以帮助我吗?我不知道我的代码有什么问题。

我的代码执行流程是 openInput->openOutput->streamingTest

   private void openInput()
    {
        _pInputFormatContext = ffmpeg.avformat_alloc_context();

        var pFormatContext = _pInputFormatContext;
        ffmpeg.avformat_open_input(&pFormatContext, configuration.Source, null, null).ThrowExceptionIfError();

        ffmpeg.avformat_find_stream_info(_pInputFormatContext, null).ThrowExceptionIfError();

        // find the first video stream
        for (var i = 0; i < _pInputFormatContext->nb_streams; i++)
            if (_pInputFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                pInputStream = _pInputFormatContext->streams[i];
                break;
            }

        if (pInputStream == null) throw new InvalidOperationException("Could not found video stream.");

        _inputStreamIndex = pInputStream->index;
        _pInputCodecContext = pInputStream->codec;

        var codecId = _pInputCodecContext->codec_id;
        var pCodec = ffmpeg.avcodec_find_decoder(codecId);
        if (pCodec == null) throw new InvalidOperationException("Unsupported codec.");

        ffmpeg.avcodec_open2(_pInputCodecContext, pCodec, null).ThrowExceptionIfError();

        configuration.CodecName = ffmpeg.avcodec_get_name(codecId);
        configuration.FrameSize = new Size(_pInputCodecContext->width, _pInputCodecContext->height);
        configuration.PixelFormat = _pInputCodecContext->pix_fmt;

        _pPacket = ffmpeg.av_packet_alloc();
        _pFrame = ffmpeg.av_frame_alloc();
    }


    private bool openOutput()
    {

        int ret;
        _pOutputFormatContext = ffmpeg.avformat_alloc_context();
        fixed (AVFormatContext** ppOutputFormatContext = &_pOutputFormatContext)
        {
            ret = ffmpeg.avformat_alloc_output_context2(ppOutputFormatContext, null, "webm", configuration.Destination);
            if (ret < 0)
            {
                return false;
            }
        }

        AVOutputFormat* out_format = ffmpeg.av_guess_format(null, configuration.Destination, null);

        // Configure output video stream

        _pOutputStream = ffmpeg.avformat_new_stream(_pOutputFormatContext, null);

        AVStream* pInputVideoStream = null;

        for (var i = 0; i < _pInputFormatContext->nb_streams; i++)
        {
            if (_pInputFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                pInputVideoStream = _pInputFormatContext->streams[i];
            }
        }

        ffmpeg.avcodec_parameters_copy(_pOutputStream->codecpar, pInputVideoStream->codecpar);

        _pOutputStream->codecpar->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
        _pOutputStream->codecpar->codec_id = AVCodecID.AV_CODEC_ID_VP8;

        AVDictionary* opt_dict;

        ffmpeg.av_dict_set(&opt_dict, "content_type", "video/webm", 0);
        ffmpeg.av_dict_set(&opt_dict, "user_agent", "GCS", 0);

        fixed (AVFormatContext** ppOutputFormatContext = &_pOutputFormatContext)
        {
            ret = ffmpeg.avio_open2(&_pOutputFormatContext->pb, configuration.Destination, ffmpeg.AVIO_FLAG_WRITE, null, &opt_dict);
            if (ret < 0)
            {
                return false;
            }
        }

        ret = ffmpeg.avformat_write_header(_pOutputFormatContext, null);

        if (ret < 0)
        {
            return false;
        }

        ffmpeg.av_dump_format(_pOutputFormatContext, 0, configuration.Destination, 1);

        return true;
    }


    private unsafe void streamingTest(object gggg)
    {
        isStreamUp = true;

        AVPacket frame = new AVPacket();
        AVPacket* pFrame = &frame;
        ffmpeg.av_init_packet(pFrame);
        updateState(VideoStreamStates.Streaming);

        try
        {
            long start_time = ffmpeg.av_gettime();
            DateTime lastFrame = DateTime.MinValue;
            while (isStreamUp)
            {
                if (cancelationToken.IsCancellationRequested)
                {
                    throw new TaskCanceledException();
                }

                try
                {
                    int error;
                    isReadingFrame = true;
                    do
                    {
                        error = ffmpeg.av_read_frame(_pInputFormatContext, pFrame);
                        if (error == ffmpeg.AVERROR_EOF)
                        {
                            frame = *pFrame;
                            continue;
                        }

                        error.ThrowExceptionIfError();
                    } while (frame.stream_index != _inputStreamIndex);

                    isWritingFrame = true;
                    //frame.stream_index = _outputStreamIndex;
                    _pOutputCodecContext = ffmpeg.avcodec_alloc_context3(_pOutputFormatContext->video_codec);

                    int ret = 0;
                    while (ret >= 0)
                    {
                        ret = ffmpeg.avcodec_receive_packet(_pOutputCodecContext, pFrame);
                    }

                    //ffmpeg.avcodec_send_frame(_pOutputCodecContext, pFrame);
                    //ffmpeg.avcodec_send_packet(_pOutputCodecContext, pFrame);

                    ret = ffmpeg.av_write_frame(_pOutputFormatContext, pFrame);

                    isWritingFrame = false;

                    if (frame.stream_index == _inputStreamIndex)
                    {
                        if (ret < 0)
                        {
                            Console.WriteLine("Missed frame");
                            missedFrames++;
                        }
                        else
                        {
                            Console.WriteLine("Sent frame");
                            sentFrames++;
                        }

                        AVRational time_base = _pInputFormatContext->streams[_inputStreamIndex]->time_base;
                        AVRational time_base_q = new AVRational();
                        time_base_q.num = 1;
                        time_base_q.den = ffmpeg.AV_TIME_BASE;

                        long pts_time = ffmpeg.av_rescale_q(frame.dts, time_base, time_base_q);
                        //long pts_time = ffmpeg.av_rescale_q(frame.dts, time_base_q, time_base);
                        long now_time = ffmpeg.av_gettime() - start_time;
                        if (pts_time > now_time)
                            ffmpeg.av_usleep((uint)(pts_time - now_time));
                    }
                    else
                        Console.WriteLine("????");
                }
                catch (Exception ex)
                {
                    Console.WriteLine("Erro ao enviar: " + ex.Message);
                }
                finally
                {
                    ffmpeg.av_packet_unref(pFrame);
                }
            }
        }
        catch (TaskCanceledException)
        {
            updateState(VideoStreamStates.Stopped);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message.ToString());
        }
    }

标签: c#videoffmpegencode

解决方案


推荐阅读