
Add a pre-decode data callback, for scenarios that need to obtain the video data directly

huihui 4 months ago
parent
commit
4453b3fddc

+ 1 - 0
.gitignore

@@ -14,3 +14,4 @@ bin/win64/Debug/VideoPlayer.ilk
 bin/win64/Debug/VideoPlayer.pdb
 bin/win64/Release/VideoPlayer.exe
 .vscode/settings.json
+bin/linux/VideoPlayer

+ 5 - 2
CMakeLists.txt

@@ -19,7 +19,7 @@ set(CMAKE_AUTOUIC ON)
 #CMAKE_RUNTIME_OUTPUT_DIRECTORY: directory for the executable output;
 #set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/archive)
 #set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/library)
-set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin/win64)
+# set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin/win64)
 #set(LIBRARY_OUTPUT_PATH ${CMAKE_SOURCE_DIR}/path)
 
 if(UNIX)
@@ -29,6 +29,8 @@ if(UNIX)
     set(CMAKE_PREFIX_PATH "/opt/Qt/Qt5.13.2")
     set(QT_QMAKE_EXECUTABLE "/opt/Qt/Qt5.13.2/bin/qmake")
 
+    set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin/linux)
+
 elseif(WIN32)
 
     message("current platform: Windows ")
@@ -40,11 +42,12 @@ elseif(WIN32)
     
     if(CMAKE_CL_64)    #CMake built-in variable: if true, the compiler is 64-bit and can therefore build 64-bit programs
         set(QT_DIR_NAME msvc2017_64)
-
+        set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin/win64)
         # set(CMAKE_PREFIX_PATH ${QT_DIR_ROOT}/msvc2017_64)
         # list(APPEND CMAKE_PREFIX_PATH ${QT_DIR_ROOT}/msvc2017_64)
     else()
         set(QT_DIR_NAME msvc2017)
+        set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin/win32)
     endif()
 
     foreach(item ${QT_DIR_ROOT})

+ 1 - 0
module/VideoPlayer/CMakeLists.txt

@@ -20,6 +20,7 @@ target_sources(${PROJECT_NAME} PRIVATE ${ALL_SOURCE})
 set(VIDEOPLAYER_INCLUDE 
     ${CMAKE_CURRENT_SOURCE_DIR} 
     ${CMAKE_CURRENT_SOURCE_DIR}/src
+    ${CMAKE_CURRENT_SOURCE_DIR}/src/frame
     ${LIB_INCLUDE}
     CACHE INTERNAL "Test Str")
 

+ 40 - 28
module/VideoPlayer/src/VideoPlayer/VideoPlayer.cpp

@@ -166,6 +166,12 @@ void VideoPlayer::seek(int64_t pos)
     }
 }
 
+void VideoPlayer::setAbility(bool video_decode, bool encoded_video_callback)
+{
+    m_enable_video_decode = video_decode;
+    m_enable_encoded_video_callback = encoded_video_callback;
+}
+
 void VideoPlayer::setMute(bool isMute)
 {
     mIsMute = isMute;
@@ -178,8 +184,14 @@ void VideoPlayer::setVolume(float value)
     m_pcm_player->setVolume(value);
 }
 
-double VideoPlayer::getCurrentTime()
+uint64_t VideoPlayer::getCurrentTime()
+{
+    return getAudioClock();
+}
+
+uint64_t VideoPlayer::getAudioClock()
 {
+    // std::lock_guard<std::mutex> lck(m_mutex_audio_clk);
     return audio_clock;
 }
 
@@ -334,11 +346,11 @@ void VideoPlayer::run()
 
     videoStream = -1;
     audioStream = -1;
-
+printf("%s:%d pFormatCtx->nb_streams=%d \n", __FILE__, __LINE__, pFormatCtx->nb_streams);
     ///Loop over the streams contained in the file,
     for (int i = 0; i < pFormatCtx->nb_streams; i++)
     {
-        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
+        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO && videoStream < 0)
         {
             videoStream = i;
         }
@@ -355,33 +367,35 @@ void VideoPlayer::run()
     {
         AVStream *video_stream = pFormatCtx->streams[videoStream];
 
-        ///Find the video decoder
-        pCodec = (AVCodec*)avcodec_find_decoder(video_stream->codecpar->codec_id);
-        pCodecCtx = avcodec_alloc_context3(pCodec);
-        pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+        if (m_enable_video_decode)
+        {
+            ///Find the video decoder
+            pCodec = (AVCodec*)avcodec_find_decoder(video_stream->codecpar->codec_id);
+            pCodecCtx = avcodec_alloc_context3(pCodec);
+            pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
 
-        avcodec_parameters_to_context(pCodecCtx, video_stream->codecpar);
+            avcodec_parameters_to_context(pCodecCtx, video_stream->codecpar);
 
-        if (pCodec == nullptr)
-        {
-            fprintf(stderr, "PCodec not found.\n");
-            doOpenVideoFileFailed();
-            goto end;
-        }
+            if (pCodec == nullptr)
+            {
+                fprintf(stderr, "PCodec not found.\n");
+                doOpenVideoFileFailed();
+                goto end;
+            }
 
-        ///Open the video decoder
-        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
-        {
-            fprintf(stderr, "Could not open video codec.\n");
-            doOpenVideoFileFailed();
-            goto end;
+            ///Open the video decoder
+            if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
+            {
+                fprintf(stderr, "Could not open video codec.\n");
+                doOpenVideoFileFailed();
+                goto end;
+            }
         }
 
         mVideoStream = video_stream;
 
         ///Start the video decode thread
         m_thread_video->start();
-
     }
 
     if (audioStream >= 0)
@@ -607,7 +621,7 @@ std::cout<<" video:"<<pFormatCtx->streams[videoStream]->duration<<" "<<pFormatCt
                 mPauseStartTime = av_gettime();
             }
             seek_req = 0;
-            seek_time = seek_pos / 1000000.0;
+            seek_time = seek_pos / 1000;
             seek_flag_audio = 1;
             seek_flag_video = 1;
 
@@ -654,7 +668,7 @@ std::cout<<" video:"<<pFormatCtx->streams[videoStream]->duration<<" "<<pFormatCt
             continue;
         }
 // qDebug("%s mIsQuit=%d mIsPause=%d packet.stream_index=%d \n", __FUNCTION__, mIsQuit, mIsPause, packet.stream_index);
-// fprintf(stderr, "%s mIsQuit=%d mIsPause=%d packet.stream_index=%d \n", __FUNCTION__, mIsQuit, mIsPause, packet.stream_index);
+// fprintf(stderr, "%s mIsQuit=%d mIsPause=%d packet.stream_index=%d videoStream=%d audioStream=%d \n", __FUNCTION__, mIsQuit, mIsPause, packet.stream_index, videoStream, audioStream);
         if (packet.stream_index == videoStream)
         {
             inputVideoQuene(packet);
@@ -863,12 +877,10 @@ void VideoPlayer::doDisplayVideo(const uint8_t *yuv420Buffer, const int &width,
 //    fprintf(stderr, "%s width=%d height=%d \n", __FUNCTION__, width, height);
     if (m_event_handle != nullptr)
     {
-        VideoFramePtr videoFrame = std::make_shared<VideoFrame>();
-
-        VideoFrame * ptr = videoFrame.get();
+        VideoRawFramePtr videoFrame = std::make_shared<VideoRawFrame>();
 
-        ptr->initBuffer(width, height);
-        ptr->setYUVbuf(yuv420Buffer);
+        videoFrame->initBuffer(width, height, VideoRawFrame::FRAME_TYPE_YUV420P);
+        videoFrame->setFramebuf(yuv420Buffer);
 
         m_event_handle->onDisplayVideo(videoFrame);
     }
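
Side note on the stream-selection change above: the new `videoStream < 0` guard keeps the first video stream and ignores any later ones. For comparison, a minimal sketch of the same selection via libavformat's av_find_best_stream (pickStreams and fmt are illustrative names, not part of this commit):

    extern "C" {
    #include <libavformat/avformat.h>
    }

    // Sketch: let FFmpeg rank the streams instead of taking the first match.
    // av_find_best_stream returns a stream index, or a negative AVERROR code.
    static void pickStreams(AVFormatContext *fmt, int &videoStream, int &audioStream)
    {
        videoStream = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
        audioStream = av_find_best_stream(fmt, AVMEDIA_TYPE_AUDIO, -1, -1, nullptr, 0);
    }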

+ 24 - 6
module/VideoPlayer/src/VideoPlayer/VideoPlayer.h

@@ -82,7 +82,10 @@ public:
         virtual void onPlayerStateChanged(const VideoPlayer::State &state, const bool &hasVideo, const bool &hasAudio) = 0;
 
         ///Render the video. Do not do time-consuming work in this callback, or playback smoothness will suffer.
-        virtual void onDisplayVideo(VideoFramePtr videoFrame) = 0;
+        virtual void onDisplayVideo(VideoRawFramePtr videoFrame) = 0;
+
+        virtual void onVideoBuffer(VideoEncodedFramePtr videoFrame){}
+        // virtual void onAudioBuffer(VideoRawFramePtr videoFrame) = 0;
     };
 
 public:
@@ -108,12 +111,23 @@ public:
 
     void seek(int64_t pos); //in microseconds
 
+    /**
+     * Configure the player's abilities.
+     *
+     * @param video_decode whether video decoding is enabled
+     * @param encoded_video_callback whether the callback with encoded video is enabled
+     *
+     * Configures the object's video-processing capabilities: whether packets are
+     * decoded for display, and whether encoded frames are delivered via callback.
+     */
+    void setAbility(bool video_decode, bool encoded_video_callback);
+
     void setMute(bool isMute);
     void setVolume(float value);
     float getVolume(){return mVolume;}
 
     int64_t getTotalTime(); //in microseconds
-    double getCurrentTime(); //in seconds
+    uint64_t getCurrentTime(); //in milliseconds
 
     ///Used to tell an open timeout from a read timeout
     bool mIsOpenStream; //whether the stream is currently being opened (used in the interrupt callback to distinguish opening from reading)
@@ -143,7 +157,7 @@ private:
     int64_t         seek_pos; //seek target position, in microseconds
     int             seek_flag_audio;//seek flag, used in the audio thread
     int             seek_flag_video;//seek flag, used in the video thread
-    double          seek_time; //seek target time (seconds); same value as seek_pos
+    int64_t        seek_time; //seek target time in milliseconds (seek_pos / 1000)
 
     ///Playback control
     bool mIsNeedPause; //set when seeking while paused
@@ -158,10 +172,12 @@ private:
     ///A/V sync
     uint64_t mVideoStartTime; //time when video playback started
     uint64_t mPauseStartTime; //time when the pause started
-    double audio_clock; ///audio clock (fractional seconds)
-    double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
+    int64_t audio_clock; ///audio clock (milliseconds)
+    int64_t video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
     AVStream *mVideoStream; //video stream
     AVStream *mAudioStream; //audio stream
+    // std::mutex m_mutex_audio_clk;
+    uint64_t getAudioClock();
 
     ///Video
     AVFormatContext *pFormatCtx;
@@ -214,6 +230,8 @@ private:
     std::list<AVPacket> m_video_pkt_list;
     bool inputVideoQuene(const AVPacket &pkt);
     void clearVideoQuene();
+    bool m_enable_video_decode = true;
+    bool m_enable_encoded_video_callback = false; //whether to deliver the data prior to decoding via callback
 
     ///Audio frame queue
     Thread *m_thread_audio = nullptr;
@@ -222,7 +240,7 @@ private:
     std::list<AVPacket> m_audio_pkt_list;
     bool inputAudioQuene(const AVPacket &pkt);
     void clearAudioQuene();
-    
+    // bool m_enable_audio_decode = false;
 
 //    ///In this player, SDL is used only for audio playback and nothing else
 //    ///SDL audio playback
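
A minimal sketch of a consumer of the new ability flags, assuming the EventHandle interface above; this commit does not show how a handler is registered with the player, so that wiring is hypothetical:

    #include <cstdio>
    #include "VideoPlayer/VideoPlayer.h"

    // Sketch: skip decoding entirely and dump the encoded stream to disk.
    class EncodedDumper : public VideoPlayer::EventHandle
    {
    public:
        void onPlayerStateChanged(const VideoPlayer::State &, const bool &, const bool &) override {}
        void onDisplayVideo(VideoRawFramePtr) override {} //never fires while decoding is off

        void onVideoBuffer(VideoEncodedFramePtr frame) override
        {
            //getBuffer() holds one Annex-B NALU, start code included
            std::fwrite(frame->getBuffer(), 1, frame->getSize(), m_fp);
        }

        FILE *m_fp = nullptr;
    };

    // usage: player->setAbility(false, true); //decoding off, encoded callback on
    //        then hand an EncodedDumper to the player as its event handle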

+ 19 - 12
module/VideoPlayer/src/VideoPlayer/VideoPlayer_AudioThread.cpp

@@ -8,6 +8,7 @@
 
 #include "PcmPlayer/PcmVolumeControl.h"
 
+#include <iostream>
 #include <stdio.h>
 
 void VideoPlayer::decodeAudioThread()
@@ -15,7 +16,7 @@ void VideoPlayer::decodeAudioThread()
     fprintf(stderr, "%s start \n", __FUNCTION__);
     mIsAudioThreadFinished = false;
     
-    float pts_s = 0.0f; //timestamp (seconds)
+    int64_t pts_ms = 0; //timestamp (milliseconds)
 
     while (1)
     {
@@ -55,7 +56,7 @@ void VideoPlayer::decodeAudioThread()
         /* if update, update the audio clock w/pts */
         if (pkt->pts != AV_NOPTS_VALUE)
         {
-            pts_s = av_q2d(mAudioStream->time_base) * pkt->pts;
+            pts_ms = av_q2d(mAudioStream->time_base) * pkt->pts * 1000;
         }
 
         //Receiving this packet means a seek just happened; the decoder's buffered data must be flushed now
@@ -69,7 +70,7 @@ void VideoPlayer::decodeAudioThread()
         if (seek_flag_audio)
         {
            //After a seek, skip the frames between the keyframe and the seek target time
-           if (pts_s < seek_time)
+           if (pts_ms < seek_time)
            {
                continue;
            }
@@ -109,11 +110,11 @@ void VideoPlayer::decodeAudioThread()
                     break;
                 }
 
-                /* if update, update the audio clock w/pts */
-                if (packet.pts != AV_NOPTS_VALUE)
-                {
-                    audio_clock = 1000 * av_q2d(mAudioStream->time_base) * packet.pts;
-                }
+                // /* if update, update the audio clock w/pts */
+                // if (packet.pts != AV_NOPTS_VALUE)
+                // {
+                //     audio_clock = (uint64_t)(av_q2d(mAudioStream->time_base) * packet.pts * 1000);
+                // }
 
                 int out_sample_rate = m_out_sample_rate;
 
@@ -124,7 +125,7 @@ void VideoPlayer::decodeAudioThread()
                 /// hence the number of sample points must be recalculated (using the function below),
                 /// converting the sample count at in_sample_rate into the corresponding count at out_sample_rate
                 int nb_samples = av_rescale_rnd(swr_get_delay(swrCtx, out_sample_rate) + aFrame->nb_samples, out_sample_rate, m_in_sample_rate, AV_ROUND_UP);
-    //qDebug()<<swr_get_delay(swrCtx, out_sample_rate) + aFrame->nb_samples<<aFrame->nb_samples<<nb_samples;
+// std::cout<<swr_get_delay(swrCtx, out_sample_rate) + aFrame->nb_samples<<aFrame->nb_samples<<nb_samples<<std::endl;
     //            int nb_samples = av_rescale_rnd(aFrame->nb_samples, out_sample_rate, m_in_sample_rate, AV_ROUND_INF);
                 if (aFrame_ReSample != nullptr)
                 {
@@ -185,11 +186,17 @@ void VideoPlayer::decodeAudioThread()
 
                     PCMFramePtr pcmFramePtr = std::make_shared<PCMFrame>();
                     pcmFramePtr->setFrameBuffer(audio_buf, resampled_data_size);
-                    pcmFramePtr->setFrameInfo(m_out_sample_rate, audio_tgt_channels, pts_s*1000);
+                    pcmFramePtr->setFrameInfo(m_out_sample_rate, audio_tgt_channels, pts_ms);
 
                     int audio_frame_size = m_pcm_player->inputPCMFrame(pcmFramePtr);
-//qDebug()<<resampled_data_size<<audio_frame_size;
-                    audio_clock = m_pcm_player->getCurrentPts() / 1000.0;
+
+                    if (audio_frame_size > 0) //greater than 0 means the audio playback device is open
+                    {
+                        // std::lock_guard<std::mutex> lck(m_mutex_audio_clk);
+                        audio_clock = m_pcm_player->getCurrentPts();
+                    }
+                    
+// std::cout<<resampled_data_size<<" "<<audio_frame_size<<" "<<audio_clock<<" "<<pts_ms<<std::endl;
                 }
 
             }
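
The new pts_ms above goes through a double via av_q2d(). For reference, a sketch of the same stream-pts-to-milliseconds conversion in pure integer math with libavutil's av_rescale_q (ptsToMs is an illustrative helper, not part of this commit):

    extern "C" {
    #include <libavutil/rational.h>
    #include <libavutil/mathematics.h>
    }
    #include <cstdint>

    // Sketch: stream pts -> milliseconds without the round trip through double.
    // av_rescale_q(a, bq, cq) computes a * bq / cq with 64-bit intermediates.
    static int64_t ptsToMs(int64_t pts, AVRational stream_time_base)
    {
        return av_rescale_q(pts, stream_time_base, AVRational{1, 1000});
    }
    // e.g. with a {1, 90000} time base, a pts of 180000 maps to 2000 ms.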

+ 106 - 60
module/VideoPlayer/src/VideoPlayer/VideoPlayer_VideoThread.cpp

@@ -6,6 +6,8 @@
 
 #include "VideoPlayer/VideoPlayer.h"
 
+#include <iostream>
+
 //static const struct TextureFormatEntry {
 //    enum AVPixelFormat format;
 //    int texture_fmt;
@@ -222,8 +224,8 @@ void VideoPlayer::decodeVideoThread()
     int videoWidth  = 0;
     int videoHeight =  0;
 
-    double video_pts = 0; //current video pts
-    double audio_pts = 0; //audio pts
+    int64_t video_pts = 0; //current video pts
+    int64_t audio_pts = 0; //audio pts
 
     ///Video decoding
     AVFrame *pFrame = nullptr;
@@ -248,6 +250,56 @@ void VideoPlayer::decodeVideoThread()
 
     bool is_key_frame_getted = false;
 
+    auto avSyncFunc = [&]
+    {
+        ///A/V sync: the idea is to check whether it is time to display this frame; if not, sleep 5 ms and then check again
+        while(1)
+        {
+            if (mIsQuit)
+            {
+                break;
+            }
+
+            if (mAudioStream != NULL && !mIsAudioThreadFinished && !m_pcm_player->deviceOpenFailed())
+            {
+                if (mIsReadFinished && m_audio_pkt_list.size() <= 0 && m_pcm_player->getPcmFrameSize() <= 0)
+                {//reading finished and the audio fully played; only video data remains, so just display it without syncing
+                    break;
+                }
+
+                ///With audio present, sync the video to the audio:
+                ///compare against the audio pts and delay if the video is ahead
+                // std::lock_guard<std::mutex> lck(m_mutex_audio_clk);
+                audio_pts = audio_clock;
+            }
+            else
+            {
+                ///With no audio, or if the audio device failed to open, sync directly to the external clock
+                audio_pts = (av_gettime() - mVideoStartTime) / 1000; //milliseconds
+                // std::lock_guard<std::mutex> lck(m_mutex_audio_clk);
+                audio_clock = audio_pts;
+            }
+
+// printf("%s %lld %lld \n", __FUNCTION__, video_pts, audio_pts);
+// std::cout<<video_pts<<" "<<audio_pts <<" "<<video_clock<<std::endl;
+            //On seek we set video_clock to 0,
+            //so video_pts needs to be refreshed here;
+            //otherwise a backward seek would get stuck in this loop
+            video_pts = video_clock;
+
+            if (video_pts <= audio_pts) break;
+
+            int delayTime = (video_pts - audio_pts);
+
+            delayTime = delayTime > 5 ? 5:delayTime; //sleep at most 5 ms
+
+            if (!mIsNeedPause)
+            {
+                mSleep(delayTime);
+            }
+        }
+    };
+
     while(1)
     {
         if (mIsQuit)
@@ -277,43 +329,79 @@ void VideoPlayer::decodeVideoThread()
             continue;
         }
 
-        AVPacket packet = m_video_pkt_list.front();
+        AVPacket pkt = m_video_pkt_list.front();
         m_video_pkt_list.pop_front();
         lck.unlock();
 
-        AVPacket *pkt = &packet;
+        AVPacket *packet = &pkt;
+
+        if (!m_enable_video_decode)
+        {
+            if (packet->dts != AV_NOPTS_VALUE)
+            {
+                video_pts = packet->dts;
+            }
+            else
+            {
+                video_pts = 0;
+            }
+
+            video_pts *= (av_q2d(mVideoStream->time_base) * 1000); //milliseconds
+            video_clock = video_pts;
+// printf("%s %lld %lld %lld %lld\n", __FUNCTION__, video_pts, video_clock, audio_pts, packet->dts);
+// printf("%s %d m_audio_pkt_list.size()=%d m_video_pkt_list.size()=%d \n", __FILE__, __LINE__, m_audio_pkt_list.size(), m_video_pkt_list.size());
+            avSyncFunc(); //A/V sync
+        }
+
+        if (m_event_handle && m_enable_encoded_video_callback)
+        {
+            //handle the video data: hand the pre-decode data straight to the callback
+            int key_frame = (packet->flags & AV_PKT_FLAG_KEY);
+
+            VideoEncodedFramePtr videoFrame = std::make_shared<VideoEncodedFrame>();
+            videoFrame->setNalu(packet->data, packet->size, true, T_NALU_H265, video_clock);
+            videoFrame->setIsKeyFrame(key_frame);
+
+            m_event_handle->onVideoBuffer(videoFrame);
+        }
+
+        if (!m_enable_video_decode)
+        {
+            av_packet_unref(packet);
+            continue;
+        }
 
         //Receiving this packet means a seek just happened; the decoder's buffered data must be flushed now
-        if(strcmp((char*)pkt->data, FLUSH_DATA) == 0)
+        if(strcmp((char*)packet->data, FLUSH_DATA) == 0)
         {
             avcodec_flush_buffers(pCodecCtx);
-            av_packet_unref(pkt);
+            av_packet_unref(packet);
             continue;
         }
 
-        if (avcodec_send_packet(pCodecCtx, pkt) != 0)
+        if (avcodec_send_packet(pCodecCtx, packet) != 0)
         {
            printf("input AVPacket to decoder failed!\n");
-           av_packet_unref(pkt);
+           av_packet_unref(packet);
            continue;
         }
 
         while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
         {
-            if (pkt->dts == AV_NOPTS_VALUE && pFrame->opaque&& *(uint64_t*) pFrame->opaque != AV_NOPTS_VALUE)
+            if (packet->dts == AV_NOPTS_VALUE && pFrame->opaque&& *(uint64_t*) pFrame->opaque != AV_NOPTS_VALUE)
             {
                 video_pts = *(uint64_t *) pFrame->opaque;
             }
-            else if (pkt->dts != AV_NOPTS_VALUE)
+            else if (packet->dts != AV_NOPTS_VALUE)
             {
-                video_pts = pkt->dts;
+                video_pts = packet->dts;
             }
             else
             {
                 video_pts = 0;
             }
 
-            video_pts *= av_q2d(mVideoStream->time_base);
+            video_pts *= (av_q2d(mVideoStream->time_base) * 1000); //milliseconds
             video_clock = video_pts;
     //OUTPUT("%s %f \n", __FUNCTION__, video_pts);
             if (seek_flag_video)
@@ -321,7 +409,7 @@ void VideoPlayer::decodeVideoThread()
                 //After a seek, skip the frames between the keyframe and the seek target time
                if (video_pts < seek_time)
                {
-                   av_packet_unref(pkt);
+                   av_packet_unref(packet);
                    continue;
                }
                else
@@ -330,49 +418,7 @@ void VideoPlayer::decodeVideoThread()
                }
             }
 
-            ///A/V sync: the idea is to check whether it is time to display this frame; if not, sleep 5 ms and then check again
-            while(1)
-            {
-                if (mIsQuit)
-                {
-                    break;
-                }
-
-                if (mAudioStream != NULL && !mIsAudioThreadFinished && m_pcm_player->deviceOpened())
-                {
-                    if (mIsReadFinished && m_audio_pkt_list.size() <= 0 && m_pcm_player->getPcmFrameSize() <= 0)
-                    {//reading finished and the audio fully played; only video data remains, so just display it without syncing
-                        break;
-                    }
-
-                    ///With audio present, sync the video to the audio:
-                    ///compare against the audio pts and delay if the video is ahead
-                    audio_pts = audio_clock;
-                }
-                else
-                {
-                    ///With no audio, or if the audio device failed to open, sync directly to the external clock
-                    audio_pts = (av_gettime() - mVideoStartTime) / 1000000.0;
-                    audio_clock = audio_pts;
-                }
-
-// fprintf(stderr, "%s %f %f \n", __FUNCTION__, video_pts, audio_pts);
-                //On seek we set video_clock to 0,
-                //so video_pts needs to be refreshed here;
-                //otherwise a backward seek would get stuck in this loop
-                video_pts = video_clock;
-
-                if (video_pts <= audio_pts) break;
-
-                int delayTime = (video_pts - audio_pts) * 1000;
-
-                delayTime = delayTime > 5 ? 5:delayTime; //sleep at most 5 ms
-
-                if (!mIsNeedPause)
-                {
-                    mSleep(delayTime);
-                }
-            }
+            avSyncFunc();
 
 #if CONFIG_AVFILTER
         if (   last_w != pFrame->width
@@ -485,13 +531,13 @@ void VideoPlayer::decodeVideoThread()
                     pFrame->linesize, 0, videoHeight, pFrameYUV->data,
                     pFrameYUV->linesize);
 
-// printf("(packet->flags & AV_PKT_FLAG_KEY) = %d\n", pkt->flags & AV_PKT_FLAG_KEY);
-            if (!is_key_frame_getted && (pkt->flags & AV_PKT_FLAG_KEY)) // is keyframe
+// printf("(packet->flags & AV_PKT_FLAG_KEY) = %d\n", packet->flags & AV_PKT_FLAG_KEY);
+            if (!is_key_frame_getted && (packet->flags & AV_PKT_FLAG_KEY)) // is keyframe
             {
                 is_key_frame_getted = true;
             }
 
-            if (is_key_frame_getted)
+            if (is_key_frame_getted) //only display after the first keyframe arrives; otherwise the start of an rtsp stream shows corrupted frames
             {
                 doDisplayVideo(yuv420pBuffer, videoWidth, videoHeight);
             }
@@ -508,7 +554,7 @@ void VideoPlayer::decodeVideoThread()
             }
 
         }
-        av_packet_unref(pkt);
+        av_packet_unref(packet);
     }
 
 #if CONFIG_AVFILTER
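
Worth noting: the encoded-frame callback above passes T_NALU_H265 to setNalu unconditionally. A small sketch of deriving the type from the stream's codec id instead, assuming only H.264/H.265 inputs (naluTypeFor is illustrative):

    extern "C" {
    #include <libavformat/avformat.h>
    }
    #include "nalu/nalu.h"

    // Sketch: pick the NALU type from the codec id rather than hard-coding it.
    static T_NALU_TYPE naluTypeFor(const AVStream *st)
    {
        return (st->codecpar->codec_id == AV_CODEC_ID_HEVC) ? T_NALU_H265
                                                            : T_NALU_H264;
    }
    // usage: videoFrame->setNalu(packet->data, packet->size, true,
    //                            naluTypeFor(mVideoStream), video_clock);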

+ 137 - 0
module/VideoPlayer/src/frame/VideoFrame/VideoEncodedFrame.cpp

@@ -0,0 +1,137 @@
+#include "VideoEncodedFrame.h"
+
+VideoEncodedFrame::VideoEncodedFrame()
+{
+    mNalu = nullptr;
+    mPts = 0;
+    mIsKeyFrame = false;
+}
+
+VideoEncodedFrame::~VideoEncodedFrame()
+{
+    NALUParsing::FreeNALU(mNalu); //free the NALU memory
+    mNalu = nullptr;
+}
+
+void VideoEncodedFrame::setNalu(T_NALU *nalu, const int64_t &time)
+{
+    mNalu = nalu;
+    mPts  = time;
+}
+
+void VideoEncodedFrame::setNalu(uint8_t *buffer, const int &len, const bool &isAllocBuffer, const T_NALU_TYPE &type, const uint64_t &time)
+{
+    T_NALU *nalu = NALUParsing::AllocNALU(len, type, isAllocBuffer);
+
+    nalu->type = type;
+
+    if (type == T_NALU_H264)
+    {
+        T_H264_NALU_HEADER *nalu_header = (T_H264_NALU_HEADER *)(buffer);
+
+        nalu->nalu.h264Nalu.startcodeprefix_len = 4;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+//        nalu->nalu.h264Nalu.len = len;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
+        nalu->nalu.h264Nalu.forbidden_bit = 0;            //! should be always FALSE
+        nalu->nalu.h264Nalu.nal_reference_idc = nalu_header->NRI;        //! NALU_PRIORITY_xxxx
+        nalu->nalu.h264Nalu.nal_unit_type = nalu_header->TYPE;            //! NALU_TYPE_xxxx
+        nalu->nalu.h264Nalu.lost_packets = false;  //! true, if packet loss is detected
+
+        if (isAllocBuffer)
+        {
+            memcpy(nalu->nalu.h264Nalu.buf, buffer, len);  //! contains the first byte followed by the EBSP
+        }
+        else
+        {
+            nalu->nalu.h264Nalu.buf = buffer;
+        }
+        nalu->nalu.h264Nalu.max_size = len;
+        nalu->nalu.h264Nalu.len = len;
+
+//        {
+//            char *bufTmp = (char*)(Buf + StartCode);
+//            char s[10];
+//            itoa(bufTmp[0], s, 2);
+//            fprintf(stderr, "%s %08s %x %d\n", __FUNCTION__, s, bufTmp[0] , nalu_header->TYPE);
+//        }
+    }
+    else
+    {
+        T_H265_NALU_HEADER *nalu_header = (T_H265_NALU_HEADER *)buffer;
+
+        nalu->nalu.h265Nalu.startCodeLen = 4;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+        nalu->nalu.h265Nalu.h265NaluHeader = *nalu_header;
+
+        if (isAllocBuffer)
+        {
+            memcpy(nalu->nalu.h265Nalu.buf, buffer, len);  //! contains the first byte followed by the EBSP
+        }
+        else
+        {
+            nalu->nalu.h265Nalu.buf = buffer;
+        }
+
+//        nalu->nalu.h265Nalu.max_size = len;
+        nalu->nalu.h265Nalu.len = len;
+
+//        {
+//            char *bufTmp = (char*)(buffer);
+//            fprintf(stderr, "%s %02x%02x%02x%02x%02x%02x %d %d\n", __FUNCTION__, bufTmp[0], bufTmp[1], bufTmp[2], bufTmp[3], bufTmp[4], bufTmp[5], nalu->nalu.h265Nalu.h265NaluHeader.nal_unit_type, nalu_header->nal_unit_type);
+//        }
+    }
+
+    if (mNalu != nullptr)
+    {
+        NALUParsing::FreeNALU(mNalu); //free the NALU memory
+    }
+
+    mNalu = nalu;
+    mPts  = time;
+}
+
+uint8_t *VideoEncodedFrame::getBuffer()
+{
+    uint8_t *buffer = nullptr;
+
+    do{
+        if (!mNalu)
+        {
+            break;
+        }
+
+        if (mNalu->type == T_NALU_H264)
+        {
+            buffer = mNalu->nalu.h264Nalu.buf;
+        }
+        else if (mNalu->type == T_NALU_H265)
+        {
+            buffer = mNalu->nalu.h265Nalu.buf;
+        }
+
+    } while (0);
+
+    return buffer;
+}
+
+unsigned int VideoEncodedFrame::getSize()
+{
+    unsigned int buffer_size = 0;
+
+    do{
+        if (!mNalu)
+        {
+            break;
+        }
+
+        if (mNalu->type == T_NALU_H264)
+        {
+            buffer_size = mNalu->nalu.h264Nalu.len;
+        }
+        else if (mNalu->type == T_NALU_H265)
+        {
+            buffer_size = mNalu->nalu.h265Nalu.len;
+        }
+
+    } while (0);
+
+    return buffer_size;
+}
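
The T_H264_NALU_HEADER cast in setNalu above depends on compiler-specific bitfield layout. A portable sketch of the same one-byte header split using shifts (struct and function names are illustrative; field meanings follow the H.264 spec):

    #include <cstdint>

    struct H264NalHeader { uint8_t forbidden; uint8_t nri; uint8_t type; };

    // Sketch: decode the byte that directly follows the start code.
    static H264NalHeader parseH264NalHeader(uint8_t b)
    {
        H264NalHeader h;
        h.forbidden = (b >> 7) & 0x01; //forbidden_zero_bit, must be 0
        h.nri       = (b >> 5) & 0x03; //nal_ref_idc, 2 bits
        h.type      =  b       & 0x1F; //nal_unit_type, 5 bits (5 = IDR, 7 = SPS, 8 = PPS)
        return h;
    }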

+ 49 - 0
module/VideoPlayer/src/frame/VideoFrame/VideoEncodedFrame.h

@@ -0,0 +1,49 @@
+#ifndef VIDEOENCODEDFRAME_H
+#define VIDEOENCODEDFRAME_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <memory>
+
+#include "nalu/nalu.h"
+
+#define VideoEncodedFramePtr std::shared_ptr<VideoEncodedFrame>
+
+class VideoEncodedFrame
+{
+public:
+    VideoEncodedFrame();
+    ~VideoEncodedFrame();
+
+    void setNalu(T_NALU *nalu, const int64_t &time = 0);
+    void setNalu(uint8_t *buffer, const int &len, const bool & isAllocBuffer, const T_NALU_TYPE &type, const uint64_t &time = 0);
+
+    void setIsKeyFrame(const bool &isKeyFrame){mIsKeyFrame = isKeyFrame;}
+
+    T_NALU *getNalu(){return mNalu;}
+    bool getIsKeyFrame(){return mIsKeyFrame;}
+    uint64_t getPts(){return mPts;}
+
+    void setTimeStamp(uint64_t t){m_timestamp_ms = t;}
+    uint64_t getTimeStamp(){return m_timestamp_ms;}
+
+    uint8_t *getBuffer();
+    unsigned int getSize();
+
+    void setId(uint64_t id){m_id = id;}
+    uint64_t getId(){return m_id;}
+
+private:
+    T_NALU *mNalu;
+
+    bool mIsKeyFrame;
+
+    uint64_t mPts;
+    uint64_t m_timestamp_ms = 0; //local absolute time (UTC timestamp, milliseconds)
+
+    uint64_t m_id = 0;
+};
+
+#endif // VIDEOENCODEDFRAME_H

+ 18 - 35
module/VideoPlayer/src/frame/VideoFrame/VideoFrame.cpp

@@ -2,54 +2,37 @@
 
 VideoFrame::VideoFrame()
 {
-    mYuv420Buffer = nullptr;
+
 }
 
 VideoFrame::~VideoFrame()
 {
-    if (mYuv420Buffer != nullptr)
-    {
-        free(mYuv420Buffer);
-        mYuv420Buffer = nullptr;
-    }
+
 }
 
-void VideoFrame::initBuffer(const int &width, const int &height)
+void VideoFrame::setFrame(std::shared_ptr<VideoEncodedFrame> frame)
 {
-    if (mYuv420Buffer != nullptr)
+    if (frame->getNalu()->type == T_NALU_H264)
     {
-        free(mYuv420Buffer);
-        mYuv420Buffer = nullptr;
+        m_type = VIDEOFRAME_TYPE_H264;
+    }
+    else if (frame->getNalu()->type == T_NALU_H265)
+    {
+        m_type = VIDEOFRAME_TYPE_H265;
     }
 
-    mWidth  = width;
-    mHegiht = height;
-
-    mYuv420Buffer = (uint8_t*)malloc(width * height * 3 / 2);
-
-}
-
-void VideoFrame::setYUVbuf(const uint8_t *buf)
-{
-    int Ysize = mWidth * mHegiht;
-    memcpy(mYuv420Buffer, buf, Ysize * 3 / 2);
-}
+    m_pts = frame->getPts();
 
-void VideoFrame::setYbuf(const uint8_t *buf)
-{
-    int Ysize = mWidth * mHegiht;
-    memcpy(mYuv420Buffer, buf, Ysize);
+    m_encoded_frame = frame;
 }
 
-void VideoFrame::setUbuf(const uint8_t *buf)
+void VideoFrame::setFrame(std::shared_ptr<VideoRawFrame> frame)
 {
-    int Ysize = mWidth * mHegiht;
-    memcpy(mYuv420Buffer + Ysize, buf, Ysize / 4);
-}
+    m_type = (FrameType)frame->type();
 
-void VideoFrame::setVbuf(const uint8_t *buf)
-{
-    int Ysize = mWidth * mHegiht;
-    memcpy(mYuv420Buffer + Ysize + Ysize / 4, buf, Ysize / 4);
-}
+    m_width  = frame->width();
+    m_height = frame->height();
+    m_pts = frame->pts();
 
+    m_raw_frame = frame;
+}

+ 27 - 11
module/VideoPlayer/src/frame/VideoFrame/VideoFrame.h

@@ -6,30 +6,46 @@
 #include <string.h>
 #include <memory>
 
+#include "VideoRawFrame.h"
+#include "VideoEncodedFrame.h"
+
 #define VideoFramePtr std::shared_ptr<VideoFrame>
 
 class VideoFrame
 {
 public:
+    enum FrameType
+    {
+        VIDEOFRAME_TYPE_NONE = -1,
+        VIDEOFRAME_TYPE_YUV420P,   ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+        VIDEOFRAME_TYPE_RGB8,
+        VIDEOFRAME_TYPE_RGB24,     ///< packed RGB 8:8:8, 24bpp, RGBRGB...
+        VIDEOFRAME_TYPE_H264, 
+        VIDEOFRAME_TYPE_H265, 
+    };
     VideoFrame();
     ~VideoFrame();
 
-    void initBuffer(const int &width, const int &height);
+    void setFrame(std::shared_ptr<VideoEncodedFrame> frame);
+    void setFrame(std::shared_ptr<VideoRawFrame> frame);
 
-    void setYUVbuf(const uint8_t *buf);
-    void setYbuf(const uint8_t *buf);
-    void setUbuf(const uint8_t *buf);
-    void setVbuf(const uint8_t *buf);
+    uint8_t *buffer();
+    int size();
 
-    uint8_t * buffer(){return mYuv420Buffer;}
-    int width(){return mWidth;}
-    int height(){return mHegiht;}
+    int width(){return m_width;}
+    int height(){return m_height;}
 
 protected:
-    uint8_t *mYuv420Buffer;
+    FrameType m_type = VIDEOFRAME_TYPE_NONE;
+
+    int m_width = -100;
+    int m_height = -100;
+
+    int64_t m_pts = 0;
+
+    std::shared_ptr<VideoEncodedFrame> m_encoded_frame = nullptr;
+    std::shared_ptr<VideoRawFrame> m_raw_frame = nullptr;
 
-    int mWidth;
-    int mHegiht;
 };
 
 #endif // VIDEOFRAME_H

+ 97 - 0
module/VideoPlayer/src/frame/VideoFrame/VideoRawFrame.cpp

@@ -0,0 +1,97 @@
+#include "VideoRawFrame.h"
+#include <stdio.h>
+
+#ifdef ENABLE_FFMPEG
+extern "C"
+{
+    #include <libavformat/avformat.h>
+    #include <libavutil/imgutils.h>
+}
+#endif
+
+
+
+VideoRawFrame::VideoRawFrame()
+{
+    mFrameBuffer = nullptr;
+    mFrameBufferSize = 0;
+    mPts = 0;
+}
+
+VideoRawFrame::~VideoRawFrame()
+{
+    if (mFrameBuffer != nullptr)
+    {
+        free(mFrameBuffer);
+        mFrameBuffer = nullptr;
+        mFrameBufferSize = 0;
+    }
+}
+
+void VideoRawFrame::initBuffer(const int &width, const int &height, const FrameType &type, int64_t time)
+{
+    if (mFrameBuffer != nullptr)
+    {
+        free(mFrameBuffer);
+        mFrameBuffer = nullptr;
+    }
+
+    mWidth  = width;
+    mHeight = height;
+
+    mPts = time;
+
+    mType = type;
+
+    int size = 0;
+    if (type == FRAME_TYPE_YUV420P)
+    {
+#ifdef ENABLE_FFMPEG
+        size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1);  //1-byte alignment gives a size closest to the actual data size
+#else
+        size = width * height * 3 / 2;
+#endif
+    }
+    else if (type == FRAME_TYPE_RGB8)
+    {
+        size = width * height;
+    }
+    else if (type == FRAME_TYPE_RGB24)
+    {
+        size = width * height * 3;
+    }
+
+    mFrameBuffer = (uint8_t*)malloc(size);
+    mFrameBufferSize = size;
+}
+
+void VideoRawFrame::setFramebuf(const uint8_t *buf)
+{
+    if (mFrameBuffer && buf)
+    {
+        memcpy(mFrameBuffer, buf, mFrameBufferSize);
+    }
+    else
+    {
+        printf("%s line=%d setFramebuf error!\n", __FUNCTION__, __LINE__);
+    }
+}
+
+void VideoRawFrame::setYbuf(const uint8_t *buf)
+{
+    int Ysize = mWidth * mHeight;
+    memcpy(mFrameBuffer, buf, Ysize);
+}
+
+void VideoRawFrame::setUbuf(const uint8_t *buf)
+{
+    int Ysize = mWidth * mHeight;
+    memcpy(mFrameBuffer + Ysize, buf, Ysize / 4);
+}
+
+void VideoRawFrame::setVbuf(const uint8_t *buf)
+{
+    int Ysize = mWidth * mHeight;
+    memcpy(mFrameBuffer + Ysize + Ysize / 4, buf, Ysize / 4);
+}
+
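
The non-FFmpeg fallback above sizes a YUV420P buffer as width * height * 3 / 2: one byte per pixel of luma plus two quarter-size chroma planes. A quick worked check for 1920x1080:

    #include <cstdio>

    int main()
    {
        const int w = 1920, h = 1080;
        const int ySize  = w * h;             //2,073,600 bytes of Y
        const int uvSize = (w / 2) * (h / 2); //518,400 bytes per chroma plane
        std::printf("%d\n", ySize + 2 * uvSize); //3110400 == w * h * 3 / 2
        return 0;
    }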

+ 51 - 0
module/VideoPlayer/src/frame/VideoFrame/VideoRawFrame.h

@@ -0,0 +1,51 @@
+#pragma once
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <memory>
+
+#define VideoRawFramePtr std::shared_ptr<VideoRawFrame>
+
+class VideoRawFrame
+{
+public:
+    enum FrameType
+    {
+        FRAME_TYPE_NONE = -1,
+        FRAME_TYPE_YUV420P,   ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+        FRAME_TYPE_RGB8,
+        FRAME_TYPE_RGB24,     ///< packed RGB 8:8:8, 24bpp, RGBRGB...
+    };
+
+    VideoRawFrame();
+    ~VideoRawFrame();
+
+    void initBuffer(const int &width, const int &height, const FrameType &type, int64_t time = 0);
+
+    void setFramebuf(const uint8_t *buf);
+    void setYbuf(const uint8_t *buf);
+    void setUbuf(const uint8_t *buf);
+    void setVbuf(const uint8_t *buf);
+
+    uint8_t *buffer(){return mFrameBuffer;}
+    int width(){return mWidth;}
+    int height(){return mHeight;}
+    int size(){return mFrameBufferSize;}
+
+    void setPts(const int64_t &pts){mPts=pts;}
+    int64_t pts(){return mPts;}
+
+    FrameType type(){return mType;}
+
+protected:
+    FrameType mType;
+
+    uint8_t *mFrameBuffer = nullptr;
+    int mFrameBufferSize = 0;
+
+    int mWidth;
+    int mHeight;
+
+    int64_t mPts;
+};

+ 42 - 0
module/VideoPlayer/src/frame/nalu/h264.h

@@ -0,0 +1,42 @@
+#ifndef H264_H
+#define H264_H
+
+#include <stdlib.h>
+
+#define NALU_TYPE_SLICE 1
+#define NALU_TYPE_DPA 2
+#define NALU_TYPE_DPB 3
+#define NALU_TYPE_DPC 4
+#define NALU_TYPE_IDR 5
+#define NALU_TYPE_SEI 6
+#define NALU_TYPE_SPS 7
+#define NALU_TYPE_PPS 8
+#define NALU_TYPE_AUD 9
+#define NALU_TYPE_EOSEQ 10
+#define NALU_TYPE_EOSTREAM 11
+#define NALU_TYPE_FILL 12
+
+
+typedef struct
+{
+  int startcodeprefix_len;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+  unsigned len;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
+  unsigned max_size;            //! Nal Unit Buffer size
+  int forbidden_bit;            //! should be always FALSE
+  int nal_reference_idc;        //! NALU_PRIORITY_xxxx
+  int nal_unit_type;            //! NALU_TYPE_xxxx
+  unsigned char *buf;                    //! contains the first byte followed by the EBSP
+  unsigned short lost_packets;  //! true, if packet loss is detected
+} T_H264_NALU;
+
+#pragma pack (1)
+typedef struct {
+    //byte 0
+    unsigned char TYPE:5;
+    unsigned char NRI:2;
+    unsigned char F:1;
+
+} T_H264_NALU_HEADER; /* 1 byte */
+#pragma pack ()
+
+#endif // H264_H

+ 70 - 0
module/VideoPlayer/src/frame/nalu/h265.h

@@ -0,0 +1,70 @@
+#ifndef H265_H
+#define H265_H
+
+#include <stdlib.h>
+
+typedef enum e_hevc_nalu_type
+{
+    HEVC_NAL_TRAIL_N    = 0,
+    HEVC_NAL_TRAIL_R    = 1,
+    HEVC_NAL_TSA_N      = 2,
+    HEVC_NAL_TSA_R      = 3,
+    HEVC_NAL_STSA_N     = 4,
+    HEVC_NAL_STSA_R     = 5,
+    HEVC_NAL_RADL_N     = 6,
+    HEVC_NAL_RADL_R     = 7,
+    HEVC_NAL_RASL_N     = 8,
+    HEVC_NAL_RASL_R     = 9,
+    HEVC_NAL_VCL_N10    = 10,
+    HEVC_NAL_VCL_R11    = 11,
+    HEVC_NAL_VCL_N12    = 12,
+    HEVC_NAL_VCL_R13    = 13,
+    HEVC_NAL_VCL_N14    = 14,
+    HEVC_NAL_VCL_R15    = 15,
+    HEVC_NAL_BLA_W_LP   = 16,
+    HEVC_NAL_BLA_W_RADL = 17,
+    HEVC_NAL_BLA_N_LP   = 18,
+    HEVC_NAL_IDR_W_RADL = 19,
+    HEVC_NAL_IDR_N_LP   = 20,
+    HEVC_NAL_CRA_NUT    = 21,
+    HEVC_NAL_IRAP_VCL22 = 22,
+    HEVC_NAL_IRAP_VCL23 = 23,
+    HEVC_NAL_RSV_VCL24  = 24,
+    HEVC_NAL_RSV_VCL25  = 25,
+    HEVC_NAL_RSV_VCL26  = 26,
+    HEVC_NAL_RSV_VCL27  = 27,
+    HEVC_NAL_RSV_VCL28  = 28,
+    HEVC_NAL_RSV_VCL29  = 29,
+    HEVC_NAL_RSV_VCL30  = 30,
+    HEVC_NAL_RSV_VCL31  = 31,
+    HEVC_NAL_VPS        = 32,
+    HEVC_NAL_SPS        = 33,
+    HEVC_NAL_PPS        = 34,
+    HEVC_NAL_AUD        = 35,
+    HEVC_NAL_EOS_NUT    = 36,
+    HEVC_NAL_EOB_NUT    = 37,
+    HEVC_NAL_FD_NUT     = 38,
+    HEVC_NAL_SEI_PREFIX = 39,
+    HEVC_NAL_SEI_SUFFIX = 40
+} E_HEVC_NALU_TYPE;
+
+#pragma pack (1)
+typedef struct t_h265_nalu_header
+{
+    unsigned char forbidden_zero_bit:1;
+    unsigned char nal_unit_type:6;
+    unsigned char nuh_layer_id:6;
+    unsigned char nuh_temporal_id_plus1:3;
+} T_H265_NALU_HEADER;
+
+typedef struct t_h265_nalu
+{
+    int startCodeLen;
+    T_H265_NALU_HEADER h265NaluHeader;
+    unsigned int len; //nalu size, including the start code
+    unsigned char *buf;
+} T_H265_NALU;
+
+#pragma pack ()
+
+#endif // H265_H

+ 295 - 0
module/VideoPlayer/src/frame/nalu/nalu.cpp

@@ -0,0 +1,295 @@
+#include "nalu.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+NALUParsing::NALUParsing()
+{
+    m_buffer_max_size = 300 * 1024;
+    ///allocate a chunk of memory for temporarily holding the h264 data
+    m_buffer = (uint8_t*)malloc(m_buffer_max_size);
+    m_buffer_size = 0;
+}
+
+NALUParsing::~NALUParsing()
+{
+    free(m_buffer);
+}
+
+int NALUParsing::inputData(T_NALU_TYPE type, uint8_t *buf, int len, bool clear_old_buffer)
+{
+    if (clear_old_buffer)
+    {
+        m_buffer_size = 0;
+    }
+
+    if ((m_buffer_size + len) > m_buffer_max_size)
+    {
+        m_buffer_max_size = m_buffer_size + len;
+        m_buffer = (uint8_t*)realloc(m_buffer, m_buffer_max_size); //realloc may move the block, so keep the returned pointer
+    }
+
+    memcpy(m_buffer + m_buffer_size, buf, len);
+    m_buffer_size += len;
+
+    m_video_type = type;
+
+    return m_buffer_size;
+}
+
+T_NALU *NALUParsing::getNextFrame()
+{
+    /*Based on how an h264 stream is laid out, search byte by byte until the next frame header is met; the data before it is one complete frame of h264 video*/
+
+///    The start code comes in two forms: the 3-byte 0x000001 and the 4-byte 0x00000001.
+///    The 3-byte 0x000001 is used in only one situation: when a complete frame is coded as multiple slices,
+///    the nalus containing those slices use the 3-byte start code. Everything else uses the 4-byte form.
+///    So to find a whole frame, searching for the 4-byte start code alone would suffice.
+
+    ///First, find the first start code
+
+    int pos = 0; //offset of the data currently being examined
+    int StartCode = 0;
+
+    while(1)
+    {
+        unsigned char* Buf = m_buffer + pos;
+        int length = m_buffer_size - pos; //length of the data not yet examined
+        if (length <= 4)
+        {
+            return NULL;
+        }
+
+        ///look for a start code (0x000001 or 0x00000001)
+        if(Buf[0]==0 && Buf[1]==0 && Buf[2] ==1)
+        {
+            StartCode = 3;
+            break;
+        }
+        else if(Buf[0]==0 && Buf[1]==0 && Buf[2] ==0 && Buf[3] ==1)
+         //Check whether buf is 0x00000001
+        {
+            StartCode = 4;
+            break;
+        }
+        else
+        {
+            //otherwise, advance by one byte
+            pos++;
+        }
+    }
+
+
+    ///Then find the next start code after the first one
+
+    int pos_2 = pos + StartCode; //offset of the data currently being examined
+    int StartCode_2 = 0;
+
+    while(1)
+    {
+        unsigned char* Buf = m_buffer + pos_2;
+        int length = m_buffer_size - pos_2; //length of the data not yet examined
+        if (length <= 4)
+        {
+            return NULL;
+        }
+
+        ///look for a start code (0x000001 or 0x00000001)
+        if(Buf[0]==0 && Buf[1]==0 && Buf[2] ==1)
+        {
+            StartCode_2 = 3;
+            break;
+        }
+        else if(Buf[0]==0 && Buf[1]==0 && Buf[2] ==0 && Buf[3] ==1)
+         //Check whether buf is 0x00000001
+        {
+            StartCode_2 = 4;
+            break;
+        }
+        else
+        {
+            //otherwise, advance by one byte
+            pos_2++;
+        }
+    }
+
+    /// The data between pos and pos_2 is now exactly one frame.
+    /// Extract it.
+
+    ///The data passed to ffmpeg for decoding must include the start code, so the nalu keeps it here
+    unsigned char* Buf = m_buffer + pos; //start of this frame's data (including the start code)
+    int naluSize = pos_2 - pos; //nalu size, including the start code
+
+    T_NALU * nalu = AllocNALU(naluSize, m_video_type); //allocate the nal resources
+
+    if (m_video_type == T_NALU_H264)
+    {
+        T_H264_NALU_HEADER *nalu_header = (T_H264_NALU_HEADER *)(Buf + StartCode);
+
+        nalu->nalu.h264Nalu.startcodeprefix_len = StartCode;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+        nalu->nalu.h264Nalu.len = naluSize;                 //nalu size, including the start code
+        nalu->nalu.h264Nalu.forbidden_bit = 0;            //! should be always FALSE
+        nalu->nalu.h264Nalu.nal_reference_idc = nalu_header->NRI;        //! NALU_PRIORITY_xxxx
+        nalu->nalu.h264Nalu.nal_unit_type = nalu_header->TYPE;            //! NALU_TYPE_xxxx
+        nalu->nalu.h264Nalu.lost_packets = false;  //! true, if packet loss is detected
+        memcpy(nalu->nalu.h264Nalu.buf, Buf, naluSize);  //! contains the first byte followed by the EBSP
+
+//        {
+//            char *bufTmp = (char*)(Buf + StartCode);
+//            char s[10];
+//            itoa(bufTmp[0], s, 2);
+//            fprintf(stderr, "%s %08s %x nalu_header->TYPE=%d naluSize=%d\n", __FUNCTION__, s, bufTmp[0] , nalu_header->TYPE, naluSize);
+//        }
+    }
+    else
+    {
+        T_H265_NALU_HEADER *nalu_header = (T_H265_NALU_HEADER *)(Buf + StartCode);
+
+        nalu->nalu.h265Nalu.startCodeLen = StartCode;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+        nalu->nalu.h265Nalu.len = naluSize;                 //nalu size, including the start code
+        nalu->nalu.h265Nalu.h265NaluHeader = *nalu_header;
+        memcpy(nalu->nalu.h265Nalu.buf, Buf, naluSize);  //! contains the first byte followed by the EBSP
+
+//        {
+//            char *bufTmp = (char*)(Buf);
+//            fprintf(stderr, "%s %02x%02x%02x%02x%02x%02x %d %d\n", __FUNCTION__, bufTmp[0], bufTmp[1], bufTmp[2], bufTmp[3], bufTmp[4], bufTmp[5], nalu->nalu.h265Nalu.h265NaluHeader.nal_unit_type, nalu_header->nal_unit_type);
+//        }
+    }
+
+    /// Remove this frame from the buffer
+    /// by moving the following data up over it
+    int leftSize = m_buffer_size - pos_2;
+    memmove(m_buffer, m_buffer + pos_2, leftSize);
+    m_buffer_size = leftSize;
+
+    return nalu;
+}
+
+int NALUParsing::getIDRHeaderBuffer(uint8_t *buf, unsigned int &buf_len)
+{
+    int ret = 0;
+
+    buf_len = 0;
+
+//    FILE *fp1 = fopen("1.h265", "wb");
+//    fwrite(m_buffer, 1, m_buffer_size, fp1);
+//    fclose(fp1);
+
+    while (1)
+    {
+        T_NALU *nalu = getNextFrame();
+
+        if (nalu == nullptr)
+        {
+            break;
+        }
+
+        int nal_unit_type = 0;
+        uint8_t *nalu_buf = nullptr;
+        int nalu_size = 0;
+
+        if (nalu->type == T_NALU_H264)
+        {
+            nal_unit_type = nalu->nalu.h264Nalu.nal_unit_type;
+            nalu_buf = nalu->nalu.h264Nalu.buf;
+            nalu_size = nalu->nalu.h264Nalu.len;
+
+            //7:sps 8:pps
+            if (nal_unit_type == NALU_TYPE_SPS || nal_unit_type == NALU_TYPE_PPS)
+            {
+                memcpy(buf + buf_len, nalu_buf, nalu_size);
+                buf_len += nalu_size;
+            }
+        }
+        else if (nalu->type == T_NALU_H265)
+        {
+            nal_unit_type = nalu->nalu.h265Nalu.h265NaluHeader.nal_unit_type;
+            nalu_buf = nalu->nalu.h265Nalu.buf;
+            nalu_size = nalu->nalu.h265Nalu.len;
+
+            //32:vps 33:sps 34:pps
+            if (nal_unit_type == HEVC_NAL_VPS || nal_unit_type == HEVC_NAL_SPS || nal_unit_type == HEVC_NAL_PPS)
+            {
+                memcpy(buf + buf_len, nalu_buf, nalu_size);
+                buf_len += nalu_size;
+            }
+        }
+
+        FreeNALU(nalu);
+
+//        printf("%s %d nal_unit_type=%d %d %d\n", __FILE__, __LINE__, nal_unit_type, nalu_size, m_buffer_size);
+    }
+
+//    FILE *fp2 = fopen("1.vps", "wb");
+//    fwrite(buf, 1, buf_len, fp2);
+//    fclose(fp2);
+
+    return 0;
+}
+
+T_NALU *NALUParsing::AllocNALU(const int &buffer_size, const T_NALU_TYPE &type, const bool &is_alloc_buffer)
+{
+    T_NALU *n = nullptr;
+
+    n = (T_NALU*)malloc (sizeof(T_NALU));
+
+    n->type = type;
+
+    if (type == T_NALU_H264)
+    {
+        if (is_alloc_buffer)
+        {
+            n->nalu.h264Nalu.max_size = buffer_size;	//Assign buffer size
+            n->nalu.h264Nalu.buf = (unsigned char*)malloc (buffer_size);
+            n->nalu.h264Nalu.len = buffer_size;
+        }
+        else
+        {
+            n->nalu.h264Nalu.max_size = 0;	//Assign buffer size
+            n->nalu.h264Nalu.buf      = nullptr;
+            n->nalu.h264Nalu.len      = 0;
+        }
+    }
+    else
+    {
+        if (is_alloc_buffer)
+        {
+            n->nalu.h265Nalu.buf = (unsigned char*)malloc (buffer_size);
+            n->nalu.h265Nalu.len  = buffer_size;
+        }
+        else
+        {
+            n->nalu.h265Nalu.buf = nullptr;
+            n->nalu.h265Nalu.len = 0;
+        }
+    }
+
+    return n;
+}
+
+void NALUParsing::FreeNALU(T_NALU *n)
+{
+    if (n == nullptr) return;
+
+    if (n->type == T_NALU_H264)
+    {
+        if (n->nalu.h264Nalu.buf != nullptr)
+        {
+            free(n->nalu.h264Nalu.buf);
+        }
+    }
+    else
+    {
+        if (n->nalu.h265Nalu.buf != nullptr)
+        {
+            free(n->nalu.h265Nalu.buf);
+        }
+    }
+
+    free(n);
+}
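
getNextFrame() above runs the same byte-wise scan twice, once per start code. A condensed sketch of that scan as a single helper (illustrative, not part of this commit):

    #include <cstdint>

    // Sketch: find the next Annex-B start code at or after `from`.
    // Returns its offset and sets scLen to 3 or 4; returns -1 when fewer
    // than five bytes remain (mirroring the `length <= 4` bail-out above).
    static int findStartCode(const uint8_t *buf, int size, int from, int &scLen)
    {
        for (int i = from; size - i > 4; ++i)
        {
            if (buf[i] == 0 && buf[i + 1] == 0)
            {
                if (buf[i + 2] == 1)                    { scLen = 3; return i; }
                if (buf[i + 2] == 0 && buf[i + 3] == 1) { scLen = 4; return i; }
            }
        }
        return -1;
    }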

+ 59 - 0
module/VideoPlayer/src/frame/nalu/nalu.h

@@ -0,0 +1,59 @@
+#ifndef NALU_H
+#define NALU_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#include "h264.h"
+#include "h265.h"
+
+enum T_NALU_TYPE
+{
+    T_NALU_H264 = 0,
+    T_NALU_H265,
+};
+
+typedef struct
+{
+    T_NALU_TYPE type;
+
+    union
+    {
+        T_H264_NALU h264Nalu;
+        T_H265_NALU h265Nalu;
+    }nalu;
+
+} T_NALU;
+
+///Parses nalus out of a continuous h264/h265 byte stream
+class NALUParsing
+{
+public:
+    NALUParsing();
+    ~NALUParsing();
+
+    int inputData(T_NALU_TYPE type, uint8_t *buf, int len, bool clear_old_buffer); //feed in h264 data
+
+    ///Find one complete frame in the H264 data
+    T_NALU* getNextFrame();
+
+    int getIDRHeaderBuffer(uint8_t *buf, unsigned int &buf_len); //get the IDR header data (vps+sps+pps)
+
+private:
+    uint8_t *m_buffer = nullptr;
+    int m_buffer_size = 0;
+    int m_buffer_max_size = 0;
+
+    T_NALU_TYPE m_video_type; //stream type: distinguishes 264 from 265
+
+public:
+    ///Allocate memory for a T_NALU struct
+    static T_NALU *AllocNALU(const int &buffer_size, const T_NALU_TYPE &type, const bool &is_alloc_buffer = true);
+
+    ///Free it
+    static void FreeNALU(T_NALU *n);
+
+};
+
+
+#endif // NALU_H

+ 8 - 2
src/MainWindow.cpp

@@ -314,7 +314,7 @@ void MainWindow::slotTimerTimeOut()
 {
     if (QObject::sender() == mTimer)
     {
-        qint64 Sec = mPlayer->getCurrentTime();
+        qint64 Sec = mPlayer->getCurrentTime() / 1000;
 
         ui->horizontalSlider->setValue(Sec);
 
@@ -677,11 +677,17 @@ qDebug()<<__FUNCTION__<<state<<mIsNeedPlayNext;
 }
 
 ///Display video data. Do not do time-consuming work here, or playback smoothness will suffer.
-void MainWindow::onDisplayVideo(std::shared_ptr<VideoFrame> videoFrame)
+void MainWindow::onDisplayVideo(VideoRawFramePtr videoFrame)
 {
     ui->widget_videoPlayer->inputOneFrame(videoFrame);
 }
 
+//callback with the data before decoding
+void MainWindow::onVideoBuffer(VideoEncodedFramePtr videoFrame)
+{
+
+}
+
 void MainWindow::closeEvent(QCloseEvent *event)
 {
     mPlayer->stop(true);

+ 2 - 1
src/MainWindow.h

@@ -107,8 +107,9 @@ protected:
     void onPlayerStateChanged(const VideoPlayer::State &state, const bool &hasVideo, const bool &hasAudio);
 
     ///Display video data. Do not do time-consuming work here, or playback smoothness will suffer.
-    void onDisplayVideo(VideoFramePtr videoFrame);
+    void onDisplayVideo(VideoRawFramePtr videoFrame);
 
+    void onVideoBuffer(VideoEncodedFramePtr videoFrame); //callback with the data before decoding
 };
 
 #endif // MAINWINDOW_H

+ 4 - 4
src/Widget/ShowVideoWidget.cpp

@@ -227,12 +227,12 @@ void ShowVideoWidget::mouseMoveEvent(QMouseEvent *event)
     }
 }
 
-void ShowVideoWidget::inputOneFrame(VideoFramePtr videoFrame)
+void ShowVideoWidget::inputOneFrame(VideoRawFramePtr videoFrame)
 {
     QMetaObject::invokeMethod(this, [=]()
     {
-        int width = videoFrame.get()->width();
-        int height = videoFrame.get()->height();
+        int width = videoFrame->width();
+        int height = videoFrame->height();
 
         if (m_nVideoW <= 0 || m_nVideoH <= 0 || m_nVideoW != width || m_nVideoH != height)
         {
@@ -645,7 +645,7 @@ void ShowVideoWidget::resizeGL(int window_W, int window_H)
         m_program->release();
     }
 
-    VideoFrame * videoFrame = mVideoFrame.get();
+    VideoRawFrame * videoFrame = mVideoFrame.get();
 
     if (videoFrame != nullptr)
     {

+ 2 - 2
src/Widget/ShowVideoWidget.h

@@ -51,7 +51,7 @@ public:
 
     qint64 getLastGetFrameTime(){return mLastGetFrameTime;}
 
-    void inputOneFrame(VideoFramePtr videoFrame);
+    void inputOneFrame(VideoRawFramePtr videoFrame);
 
 signals:
     void sig_CloseBtnClick();
@@ -97,7 +97,7 @@ private:
     int m_nVideoW; //video width
     int m_nVideoH; //video height
 
-    VideoFramePtr mVideoFrame;
+    VideoRawFramePtr mVideoFrame;
     QList<FaceInfoNode> mFaceInfoList;
 
     bool mIsOpenGLInited; //whether the openGL init function has already run