
V3.0.0 (adds extended-mode support)

huihui 4 years ago
parent
commit
2eb320ce19
100 changed files with 10174 additions and 318 deletions
  1. README.md (+8 -0)
  2. VideoRecorder.pro (+91 -49)
  3. bin/win32/SDL2.dll (binary)
  4. bin/win32/VideoRecorder.exe (binary)
  5. bin/win32/VideoRecorder.pdb (binary)
  6. bin/win32/avcodec-58.dll (+0 -0)
  7. bin/win32/avdevice-58.dll (+0 -0)
  8. bin/win32/avfilter-7.dll (+0 -0)
  9. bin/win32/avformat-58.dll (+0 -0)
  10. bin/win32/avutil-56.dll (+0 -0)
  11. bin/win32/ffmpeg.exe (+0 -0)
  12. bin/win32/ffplay.exe (+0 -0)
  13. bin/win32/ffprobe.exe (+0 -0)
  14. bin/win32/libcrypto-1_1.dll (binary)
  15. bin/win32/libssl-1_1.dll (binary)
  16. bin/win32/libyuv.dll (binary)
  17. bin/win32/postproc-55.dll (+0 -0)
  18. bin/win32/swresample-3.dll (+0 -0)
  19. bin/win32/swscale-5.dll (+0 -0)
  20. bin/win64/ScreenSender (binary)
  21. bin/win64/ScreenSender.exe (binary)
  22. bin/win64/VideoEncode (binary)
  23. bin/win64/avcodec-58.dll (+0 -0)
  24. bin/win64/avdevice-58.dll (+0 -0)
  25. bin/win64/avfilter-7.dll (+0 -0)
  26. bin/win64/avformat-58.dll (+0 -0)
  27. bin/win64/avutil-56.dll (+0 -0)
  28. bin/win64/ffmpeg.exe (+0 -0)
  29. bin/win64/ffplay.exe (+0 -0)
  30. bin/win64/ffprobe.exe (+0 -0)
  31. bin/win64/postproc-55.dll (+0 -0)
  32. bin/win64/swresample-3.dll (+0 -0)
  33. bin/win64/swscale-5.dll (+0 -0)
  34. bin/插件/atl100.dll (binary)
  35. bin/插件/audio_sniffer-x64.dll (binary)
  36. bin/插件/audio_sniffer.dll (binary)
  37. bin/插件/msvcp100.dll (binary)
  38. bin/插件/msvcr100.dll (binary)
  39. bin/插件/reg.bat (+18 -0)
  40. bin/插件/screen-capture-recorder-x64.dll (binary)
  41. bin/插件/screen-capture-recorder.dll (binary)
  42. bin/插件/unreg.bat (+18 -0)
  43. bin/插件/vcomp100.dll (binary)
  44. bin/插件/vcredist_x64.exe (binary)
  45. bin/插件/vcredist_x86.exe (binary)
  46. bin32/VideoRecorder.exe (binary)
  47. module/DragAbleWidget/DragAbleDialog.cpp (+439 -0)
  48. module/DragAbleWidget/DragAbleDialog.h (+66 -0)
  49. module/DragAbleWidget/DragAbleDialog.ui (+113 -0)
  50. module/DragAbleWidget/DragAbleWidget.cpp (+439 -0)
  51. module/DragAbleWidget/DragAbleWidget.h (+67 -0)
  52. module/DragAbleWidget/DragAbleWidget.pri (+13 -0)
  53. module/DragAbleWidget/DragAbleWidget.ui (+113 -0)
  54. module/ScreenRecorder/ScreenRecorder.pri (+31 -0)
  55. module/ScreenRecorder/src/EventHandle/VideoRecorderEventHandle.cpp (+12 -0)
  56. module/ScreenRecorder/src/EventHandle/VideoRecorderEventHandle.h (+29 -0)
  57. module/ScreenRecorder/src/Media/Audio/AudioEncoder.cpp (+189 -0)
  58. module/ScreenRecorder/src/Media/Audio/AudioEncoder.h (+45 -0)
  59. module/ScreenRecorder/src/Media/Audio/GetAudioThread.cpp (+586 -0)
  60. module/ScreenRecorder/src/Media/Audio/GetAudioThread.h (+101 -0)
  61. module/ScreenRecorder/src/Media/Image/ImageReader.cpp (+649 -0)
  62. module/ScreenRecorder/src/Media/Image/ImageReader.h (+22 -0)
  63. module/ScreenRecorder/src/Media/Image/yuv420p.cpp (+265 -0)
  64. module/ScreenRecorder/src/Media/Image/yuv420p.h (+102 -0)
  65. module/ScreenRecorder/src/Media/MediaManager.cpp (+1540 -0)
  66. module/ScreenRecorder/src/Media/MediaManager.h (+232 -0)
  67. module/ScreenRecorder/src/Media/Video/CaptureWindowThread.cpp (+933 -0)
  68. module/ScreenRecorder/src/Media/Video/CaptureWindowThread.h (+95 -0)
  69. module/ScreenRecorder/src/Media/Video/GetVideoThread.cpp (+524 -0)
  70. module/ScreenRecorder/src/Media/Video/GetVideoThread.h (+90 -0)
  71. module/ScreenRecorder/src/Media/Video/VideoEncoder.cpp (+529 -0)
  72. module/ScreenRecorder/src/Media/Video/VideoEncoder.h (+73 -0)
  73. module/ScreenRecorder/src/Media/Video/VideoFileInfoTypes.h (+24 -0)
  74. module/ScreenRecorder/src/Media/Video/VideoFileWriter.cpp (+375 -269)
  75. module/ScreenRecorder/src/Media/Video/VideoFileWriter.h (+159 -0)
  76. module/common/common.pri (+33 -0)
  77. module/common/src/Audio/AudioFrame/AACFrame.cpp (+48 -0)
  78. module/common/src/Audio/AudioFrame/AACFrame.h (+87 -0)
  79. module/common/src/Audio/AudioFrame/PCMFrame.cpp (+41 -0)
  80. module/common/src/Audio/AudioFrame/PCMFrame.h (+50 -0)
  81. module/common/src/Audio/Mix/PcmMix.cpp (+70 -0)
  82. module/common/src/Audio/Mix/PcmMix.h (+29 -0)
  83. module/common/src/LogWriter/LogWriter.cpp (+243 -0)
  84. module/common/src/LogWriter/LogWriter.h (+58 -0)
  85. module/common/src/MoudleConfig.cpp (+271 -0)
  86. module/common/src/MoudleConfig.h (+56 -0)
  87. module/common/src/Mutex/Cond.cpp (+102 -0)
  88. module/common/src/Mutex/Cond.h (+55 -0)
  89. module/common/src/Mutex/Mutex.cpp (+44 -0)
  90. module/common/src/Mutex/Mutex.h (+35 -0)
  91. module/common/src/NALU/h264.h (+34 -0)
  92. module/common/src/NALU/h265.h (+76 -0)
  93. module/common/src/NALU/nalu.cpp (+210 -0)
  94. module/common/src/NALU/nalu.h (+63 -0)
  95. module/common/src/Video/VideoFrame/VideoEncodedFrame.cpp (+79 -0)
  96. module/common/src/Video/VideoFrame/VideoEncodedFrame.h (+42 -0)
  97. module/common/src/Video/VideoFrame/VideoRawFrame.cpp (+86 -0)
  98. module/common/src/Video/VideoFrame/VideoRawFrame.h (+58 -0)
  99. module/lib/common/RtAudio/RtAudio.pri (+56 -0)
  100. module/lib/common/RtAudio/include/asio.cpp (+258 -0)

+ 8 - 0
README.md

@@ -2,6 +2,8 @@
 # A screen recorder implemented with Qt + ffmpeg
 Blog: http://blog.yundiantech.com/?log=blog&scat=196
 
+![Image text](https://raw.githubusercontent.com/yundiantech/VideoRecorder/master/pic/screenshot.png)
+
 
 Version notes:  
 For setting up the Qt development environment, see:  
@@ -61,3 +63,9 @@ Qt5.6.2(vs2013) + ffmpeg4.1
 2. Changing the output file path to an RTMP address enables live streaming
 
 
+
+[V3.0.0] 2021-05-11  
+Qt5.13.2(vs2017) + ffmpeg4.1  
+1. Enhanced functionality: adds extended mode  
+2. The screen-recording plugin can now be registered directly from the command line  
+
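
Note: swapping the output path for an RTMP address (e.g. a hypothetical rtmp://example.com/live/stream in place of a local .mp4 file) presumably also requires the output container format to be FLV, which is what RTMP carries.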

+ 91 - 49
VideoRecorder.pro

@@ -1,69 +1,111 @@
 #-------------------------------------------------
 #
-# Project created by QtCreator 2015-04-01T17:15:51
+# Project created by QtCreator 2020-02-06T20:41:27
 #
 #-------------------------------------------------
 
-QT       += core gui network
+QT       += core gui network websockets multimedia
 
 greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
 
+TEMPLATE = app
+
+TARGET = VideoRecorder
+
 UI_DIR  = obj/Gui
 MOC_DIR = obj/Moc
 OBJECTS_DIR = obj/Obj
 
-
-# Put the output files directly into the bin directory under the source tree, with all the DLLs in that directory, so the DLLs are found at run time
+# Put the output files directly into the bin directory under the source tree, with all the DLLs in that directory, so the DLLs are found at run time
 #DESTDIR=$$PWD/bin/
-contains(QT_ARCH, i386) {
-    message("32-bit")
-    DESTDIR = $${PWD}/bin32
-} else {
-    message("64-bit")
-    DESTDIR = $${PWD}/bin64
-}
-QMAKE_CXXFLAGS += -std=c++11
-
-
-TARGET = VideoRecorder
-TEMPLATE = app
-
-SOURCES += src/main.cpp\
-        src/mainwindow.cpp \
-    src/video/savevideofile.cpp \
-    src/video/screenrecorder.cpp \
-    src/widget/selectrect.cpp \
-    src/widget/pushpoint.cpp \
-    src/video/getvideothread.cpp \
-    src/AppConfig.cpp
-
-HEADERS  += src/mainwindow.h \
-    src/video/savevideofile.h \
-    src/video/screenrecorder.h \
-    src/widget/selectrect.h \
-    src/widget/pushpoint.h \
-    src/video/getvideothread.h \
-    src/AppConfig.h
-
-FORMS    += src/mainwindow.ui
-
-
 win32{
-
     contains(QT_ARCH, i386) {
         message("32-bit")
-        INCLUDEPATH += $$PWD/lib/win32/ffmpeg/include \
-                       $$PWD/src
-
-        LIBS += -L$$PWD/lib/win32/ffmpeg/lib -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc -lswresample -lswscale
-
+        DESTDIR = $${PWD}/bin/win32
     } else {
         message("64-bit")
-        INCLUDEPATH += $$PWD/lib/win64/ffmpeg/include \
-                       $$PWD/src
-
-        LIBS += -L$$PWD/lib/win64/ffmpeg/lib -lavcodec -lavdevice -lavfilter -lavformat -lavutil -lpostproc -lswresample -lswscale
-
+        DESTDIR = $${PWD}/bin/win64
     }
-
 }
+
+### lib ### Begin
+    include($$PWD/module/lib/lib.pri)
+### lib ### End
+
+### common ### Begin
+    include($$PWD/module/common/common.pri)
+### common ### End
+
+# Include the draggable-window code
+include(module/DragAbleWidget/DragAbleWidget.pri)
+
+# Include the screen-recording module
+include(module/ScreenRecorder/ScreenRecorder.pri)
+
+SOURCES +=  \
+    src/AppConfig.cpp \
+    src/CaptureTask/CapturePictureWidget.cpp \
+    src/CaptureTask/CaptureTaskManager.cpp \
+    src/CaptureTask/CaptureWindowWidget.cpp \
+    src/CaptureTask/SelectAreaWidget/SelectAreaWidget.cpp \
+    src/CaptureTask/SelectAreaWidget/ShowAreaWdiget.cpp \
+    src/CaptureTask/SelectProgram/SelectRunningProgramDialog.cpp \
+    src/CaptureTask/SelectProgram/ShowProgramPictureWidget.cpp \
+    src/MainWindow.cpp \
+    src/Widget/CustomWidget/MyCustomerWidget.cpp \
+    src/Widget/CustomWidget/flowlayout.cpp \
+    src/main.cpp\
+    src/Base64/Base64.cpp \
+    src/DeviceTest/AudioInfo.cpp \
+    src/DeviceTest/DeviceSettingDialog.cpp \
+    src/Widget/mymessagebox_withTitle.cpp \
+    src/Base/FunctionTransfer.cpp \
+    src/Widget/ShowRedRectWidget.cpp \
+    src/Camera/ShowCameraWidget.cpp \
+    src/Widget/Video/ShowVideoWidget.cpp
+
+HEADERS  += \
+    src/AppConfig.h \
+    src/Base64/Base64.h \
+    src/CaptureTask/CapturePictureWidget.h \
+    src/CaptureTask/CaptureTaskManager.h \
+    src/CaptureTask/CaptureWindowWidget.h \
+    src/CaptureTask/SelectAreaWidget/SelectAreaWidget.h \
+    src/CaptureTask/SelectAreaWidget/ShowAreaWdiget.h \
+    src/CaptureTask/SelectProgram/SelectRunningProgramDialog.h \
+    src/CaptureTask/SelectProgram/ShowProgramPictureWidget.h \
+    src/DeviceTest/AudioInfo.h \
+    src/DeviceTest/DeviceSettingDialog.h \
+    src/MainWindow.h \
+    src/Widget/CustomWidget/MyCustomerWidget.h \
+    src/Widget/CustomWidget/flowlayout.h \
+    src/Widget/mymessagebox_withTitle.h \
+    src/Base/FunctionTransfer.h \
+    src/Widget/ShowRedRectWidget.h \
+    src/Camera/ShowCameraWidget.h \
+    src/Widget/Video/ShowVideoWidget.h
+
+INCLUDEPATH += $$PWD/src \
+               $$PWD/src/widget/common
+
+FORMS    += \
+    src/CaptureTask/CapturePictureWidget.ui \
+    src/CaptureTask/CaptureTaskManager.ui \
+    src/CaptureTask/CaptureWindowWidget.ui \
+    src/CaptureTask/SelectAreaWidget/SelectAreaWidget.ui \
+    src/CaptureTask/SelectAreaWidget/ShowAreaWdiget.ui \
+    src/CaptureTask/SelectProgram/SelectRunningProgramDialog.ui \
+    src/CaptureTask/SelectProgram/ShowProgramPictureWidget.ui \
+    src/MainWindow.ui \
+    src/DeviceTest/DeviceSettingDialog.ui \
+    src/Widget/CustomWidget/MyCustomerWidget.ui \
+    src/Widget/mymessagebox_withTitle.ui \
+    src/Widget/ShowRedRectWidget.ui \
+    src/Camera/ShowCameraWidget.ui \
+    src/widget/video/ShowVideoWidget.ui
+
+RESOURCES += \
+    res/resources.qrc
+
+win32:RC_FILE=$$PWD/res/main.rc
+macx:ICON = $$PWD/res/logo.icns

BIN
bin/win32/SDL2.dll


BIN
bin/win32/VideoRecorder.exe


BIN
bin/win32/VideoRecorder.pdb


+ 0 - 0
bin32/avcodec-58.dll → bin/win32/avcodec-58.dll


+ 0 - 0
bin32/avdevice-58.dll → bin/win32/avdevice-58.dll


+ 0 - 0
bin32/avfilter-7.dll → bin/win32/avfilter-7.dll


+ 0 - 0
bin32/avformat-58.dll → bin/win32/avformat-58.dll


+ 0 - 0
bin32/avutil-56.dll → bin/win32/avutil-56.dll


+ 0 - 0
bin32/ffmpeg.exe → bin/win32/ffmpeg.exe


+ 0 - 0
bin32/ffplay.exe → bin/win32/ffplay.exe


+ 0 - 0
bin32/ffprobe.exe → bin/win32/ffprobe.exe


BIN
bin/win32/libcrypto-1_1.dll


BIN
bin/win32/libssl-1_1.dll


BIN
bin/win32/libyuv.dll


+ 0 - 0
bin32/postproc-55.dll → bin/win32/postproc-55.dll


+ 0 - 0
bin32/swresample-3.dll → bin/win32/swresample-3.dll


+ 0 - 0
bin32/swscale-5.dll → bin/win32/swscale-5.dll


BIN
bin/win64/ScreenSender


BIN
bin/win64/ScreenSender.exe


BIN
bin/win64/VideoEncode


+ 0 - 0
bin64/avcodec-58.dll → bin/win64/avcodec-58.dll


+ 0 - 0
bin64/avdevice-58.dll → bin/win64/avdevice-58.dll


+ 0 - 0
bin64/avfilter-7.dll → bin/win64/avfilter-7.dll


+ 0 - 0
bin64/avformat-58.dll → bin/win64/avformat-58.dll


+ 0 - 0
bin64/avutil-56.dll → bin/win64/avutil-56.dll


+ 0 - 0
bin64/ffmpeg.exe → bin/win64/ffmpeg.exe


+ 0 - 0
bin64/ffplay.exe → bin/win64/ffplay.exe


+ 0 - 0
bin64/ffprobe.exe → bin/win64/ffprobe.exe


+ 0 - 0
bin64/postproc-55.dll → bin/win64/postproc-55.dll


+ 0 - 0
bin64/swresample-3.dll → bin/win64/swresample-3.dll


+ 0 - 0
bin64/swscale-5.dll → bin/win64/swscale-5.dll


BIN
bin/插件/atl100.dll


BIN
bin/插件/audio_sniffer-x64.dll


BIN
bin/插件/audio_sniffer.dll


BIN
bin/插件/msvcp100.dll


BIN
bin/插件/msvcr100.dll


+ 18 - 0
bin/插件/reg.bat

@@ -0,0 +1,18 @@
+@echo off
+rem	Check that the user	properly called	this script.
+
+rem	Set the APPDIR environment variable to this script's directory
+call :compute_pwd
+set	APPDIR=%~dp0%
+echo Working directory set successfully
+
+echo "Registering the screen-capture-recorder plugin"
+
+regsvr32 %APPDIR%screen-capture-recorder.dll
+regsvr32 %APPDIR%audio_sniffer.dll
+
+echo "Registration complete"
+
+:compute_pwd
+@FOR /F	"tokens=*" %%i in ('cd') DO	@set PWD=%%~fsi
+@goto :EOF
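
Note: regsvr32 writes the DirectShow filter registrations into the system registry, so reg.bat and the matching unreg.bat below presumably need to run from an elevated (administrator) prompt; the -x64 DLL variants would likewise be registered with the 64-bit regsvr32.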

BIN
bin/插件/screen-capture-recorder-x64.dll


BIN
bin/插件/screen-capture-recorder.dll


+ 18 - 0
bin/插件/unreg.bat

@@ -0,0 +1,18 @@
+@echo off
+rem	Check that the user	properly called	this script.
+
+rem	Set the APPDIR environment variable to this script's directory
+call :compute_pwd
+set	APPDIR=%~dp0%
+echo Working directory set successfully
+
+echo "Unregistering the screen-capture-recorder plugin"
+
+regsvr32 /u %APPDIR%screen-capture-recorder.dll
+regsvr32 /u %APPDIR%audio_sniffer.dll
+
+echo "Unregistration complete"
+
+:compute_pwd
+@FOR /F	"tokens=*" %%i in ('cd') DO	@set PWD=%%~fsi
+@goto :EOF

BIN
bin/插件/vcomp100.dll


BIN
bin/插件/vcredist_x64.exe


BIN
bin/插件/vcredist_x86.exe


BIN
bin32/VideoRecorder.exe


+ 439 - 0
module/DragAbleWidget/DragAbleDialog.cpp

@@ -0,0 +1,439 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "DragAbleDialog.h"
+#include "ui_DragAbleDialog.h"
+
+#include <QDesktopWidget>
+#include <QMouseEvent>
+#include <QTimer>
+#include <QDebug>
+
+#define MARGINS 1 // window border width
+// mouse-driven window resizing
+#define PADDING 6
+#define PADDING 6
+enum Direction { UP=0, DOWN, LEFT, RIGHT, LEFTTOP, LEFTBOTTOM, RIGHTBOTTOM, RIGHTTOP, NONE };
+
+DragAbleDialog::DragAbleDialog(QWidget *parent) :
+    QDialog(parent),
+    ui(new Ui::DragAbleDialog)
+{
+    ui->setupUi(this);
+
+    /// The timer checks the cursor position periodically, so a cursor that moves into the window too fast to be detected does not leave the pointer stuck in a resize shape
+    mTimer = new QTimer;
+    mTimer->setInterval(1000);
+    connect(mTimer, &QTimer::timeout, this, &DragAbleDialog::slotTimerTimeOut);
+    mTimer->start();
+
+/// window-resize handling
+    isMax = false;
+
+//    int w = this->width();
+//    int h = this->height();
+//    QRect screenRect = QApplication::desktop()->screenGeometry(); // get the screen size
+//    int x = (screenRect.width() - w) / 2;
+//    int y = (screenRect.height() - h) / 2;
+
+    mLocation = this->geometry();
+
+    isLeftPressDown = false;
+    this->dir = NONE;
+    this->setMouseTracking(true); // track the mouse
+    ui->widget_frame->setMouseTracking(true);
+    ui->widget_container->setMouseTracking(true);
+
+    this->setFocusPolicy(Qt::ClickFocus);
+
+    ui->widget_frame->setContentsMargins(MARGINS,MARGINS,MARGINS,MARGINS);
+    showBorderRadius(true);
+
+}
+
+DragAbleDialog::~DragAbleDialog()
+{
+
+}
+
+QWidget *DragAbleDialog::getContainWidget()
+{
+    return ui->widget_container;
+}
+
+void DragAbleDialog::setTitle(QString str)
+{
+//    ui->label_titleName->setText(str);
+    this->setWindowTitle(str);
+}
+
+//////////// window-resize handling
+
+void DragAbleDialog::mouseReleaseEvent(QMouseEvent *event)
+{
+    if(event->button() == Qt::LeftButton)
+    {
+//        qDebug()<<__FUNCTION__;
+        isLeftPressDown = false;
+        if(dir != NONE)
+        {
+            this->releaseMouse();
+            this->setCursor(QCursor(Qt::ArrowCursor));
+        }
+    }
+}
+
+void DragAbleDialog::mousePressEvent(QMouseEvent *event)
+{
+//    qDebug()<<__FUNCTION__;
+    if (event->type() == QEvent::MouseButtonDblClick)
+    {
+        if (event->button() == Qt::LeftButton)
+        {
+//            if(QApplication::keyboardModifiers() == (Qt::ControlModifier|Qt::ShiftModifier|Qt::AltModifier))
+//            {
+//                doChangeFullScreen(); // Ctrl + left button
+//                doChangeMaxSize();
+//            }
+        }
+    }
+
+    switch(event->button())
+    {
+    case Qt::LeftButton:
+        if (isMax || this->isFullScreen()) break;
+        isLeftPressDown = true;
+        checkCursorDirect(event->globalPos());
+
+        if(dir != NONE)
+        {
+            this->grabMouse(); // take the mouse grab while resizing
+            mIsResizeMode = true;
+        }
+        else
+        {
+            dragPosition  = event->globalPos() - this->frameGeometry().topLeft();
+            mIsResizeMode = false;
+        }
+        break;
+//    case Qt::RightButton:
+//        if (!this->isFullScreen())
+//            mAction_FullScreen->setText(tr("show fullscreen"));
+//        else
+//            mAction_FullScreen->setText(tr("quit fullscreen"));
+//        mPopMenu->exec(QCursor::pos());
+//        break;
+    default:
+        QWidget::mousePressEvent(event);
+    }
+
+}
+
+void DragAbleDialog::mouseMoveEvent(QMouseEvent *event)
+{
+//    qDebug()<<__FUNCTION__<<isLeftPressDown;
+
+    QPoint gloPoint = event->globalPos();
+    QRect rect = this->rect();
+    QPoint tl = mapToGlobal(rect.topLeft());
+    QPoint rb = mapToGlobal(rect.bottomRight());
+
+    if (isMax || this->isFullScreen()) return;
+    if (!isLeftPressDown)
+    {
+        checkCursorDirect(gloPoint);
+        return;
+    }
+
+//    if(!isLeftPressDown)
+//    {
+//        checkCursorDirect(gloPoint);
+//    }
+//    else
+    {
+
+//        if(dir != NONE)
+        if (mIsResizeMode)
+        {
+            QRect rMove(tl, rb);
+
+            switch(dir) {
+            case LEFT:
+                if(rb.x() - gloPoint.x() <= this->minimumWidth())
+                    rMove.setX(tl.x());
+                else
+                    rMove.setX(gloPoint.x());
+                break;
+            case RIGHT:
+                rMove.setWidth(gloPoint.x() - tl.x());
+                break;
+            case UP:
+                if(rb.y() - gloPoint.y() <= this->minimumHeight())
+                    rMove.setY(tl.y());
+                else
+                    rMove.setY(gloPoint.y());
+                break;
+            case DOWN:
+                rMove.setHeight(gloPoint.y() - tl.y());
+                break;
+            case LEFTTOP:
+                if(rb.x() - gloPoint.x() <= this->minimumWidth())
+                    rMove.setX(tl.x());
+                else
+                    rMove.setX(gloPoint.x());
+                if(rb.y() - gloPoint.y() <= this->minimumHeight())
+                    rMove.setY(tl.y());
+                else
+                    rMove.setY(gloPoint.y());
+                break;
+            case RIGHTTOP:
+                rMove.setWidth(gloPoint.x() - tl.x());
+                rMove.setY(gloPoint.y());
+                break;
+            case LEFTBOTTOM:
+                rMove.setX(gloPoint.x());
+                rMove.setHeight(gloPoint.y() - tl.y());
+                break;
+            case RIGHTBOTTOM:
+                rMove.setWidth(gloPoint.x() - tl.x());
+                rMove.setHeight(gloPoint.y() - tl.y());
+                break;
+            default:
+                break;
+            }
+            this->setGeometry(rMove);
+//            emit sig_WindowMoved(rMove);
+        } else {
+            checkCursorDirect(event->globalPos());
+
+            if (dir == NONE && !isMax)
+            {
+                QPoint point = event->globalPos() - dragPosition;
+
+                QRect mLimitRect = QApplication::desktop()->availableGeometry();
+
+                if (point.x() < mLimitRect.x())
+                    point.setX(mLimitRect.x());
+
+                if (point.x() > (mLimitRect.x()+mLimitRect.width()-this->width()))
+                    point.setX(mLimitRect.x()+mLimitRect.width()-this->width());
+
+
+                if (point.y() < mLimitRect.y())
+                    point.setY(mLimitRect.y());
+
+                if (point.y() > (mLimitRect.y()+mLimitRect.height()-this->height()))
+                    point.setY(mLimitRect.y()+mLimitRect.height()-this->height());
+
+                move(point);
+            }
+
+            event->accept();
+        }
+    }
+//    QWidget::mouseMoveEvent(event);
+    event->accept();
+}
+
+void DragAbleDialog::checkCursorDirect(const QPoint &cursorGlobalPoint)
+{
+    // Get the window's rectangle in screen coordinates: tl is the top-left point, rb the bottom-right point
+    QRect rect = this->rect();
+    QPoint tl = mapToGlobal(rect.topLeft());
+    QPoint rb = mapToGlobal(rect.bottomRight());
+
+    int x = cursorGlobalPoint.x();
+    int y = cursorGlobalPoint.y();
+
+    if(tl.x() + PADDING >= x && tl.x() <= x && tl.y() + PADDING >= y && tl.y() <= y) {
+        // top-left corner
+        dir = LEFTTOP;
+        this->setCursor(QCursor(Qt::SizeFDiagCursor));  // set the cursor shape
+    } else if(x >= rb.x() - PADDING && x <= rb.x() && y >= rb.y() - PADDING && y <= rb.y()) {
+        // bottom-right corner
+        dir = RIGHTBOTTOM;
+        this->setCursor(QCursor(Qt::SizeFDiagCursor));
+    } else if(x <= tl.x() + PADDING && x >= tl.x() && y >= rb.y() - PADDING && y <= rb.y()) {
+        // bottom-left corner
+        dir = LEFTBOTTOM;
+        this->setCursor(QCursor(Qt::SizeBDiagCursor));
+    } else if(x <= rb.x() && x >= rb.x() - PADDING && y >= tl.y() && y <= tl.y() + PADDING) {
+        // top-right corner
+        dir = RIGHTTOP;
+        this->setCursor(QCursor(Qt::SizeBDiagCursor));
+    } else if(x <= tl.x() + PADDING && x >= tl.x()) {
+        // left edge
+        dir = LEFT;
+        this->setCursor(QCursor(Qt::SizeHorCursor));
+    } else if( x <= rb.x() && x >= rb.x() - PADDING) {
+        // right edge
+        dir = RIGHT;
+        this->setCursor(QCursor(Qt::SizeHorCursor));
+    } else if(y >= tl.y() && y <= tl.y() + PADDING){
+        // top edge
+        dir = UP;
+        this->setCursor(QCursor(Qt::SizeVerCursor));
+    } else if(y <= rb.y() && y >= rb.y() - PADDING) {
+        // bottom edge
+        dir = DOWN;
+        this->setCursor(QCursor(Qt::SizeVerCursor));
+    } else {
+        // default
+        dir = NONE;
+        this->setCursor(QCursor(Qt::ArrowCursor));
+    }
+}
+
+void DragAbleDialog::doShowMaxSize()
+{
+    this->show();
+    this->showMaximized();
+    this->raise();
+    ui->widget_frame->setContentsMargins(0,0,0,0); // hide the border
+
+    showBorderRadius(false);
+}
+
+void DragAbleDialog::doShowFullScreen()
+{
+    this->show();
+    this->showFullScreen();
+    this->raise();
+    ui->widget_frame->setContentsMargins(0,0,0,0); // hide the border
+
+    showBorderRadius(false);
+
+}
+
+void DragAbleDialog::doShowNormal()
+{
+    qDebug()<<__FUNCTION__;
+
+    this->show();
+    this->showNormal();
+    this->raise();
+
+    if (!isMax)
+    {
+        ui->widget_frame->setContentsMargins(MARGINS,MARGINS,MARGINS,MARGINS);
+        showBorderRadius(true);
+    } else {
+        ui->widget_frame->setContentsMargins(0,0,0,0);
+        showBorderRadius(false);
+    }
+}
+
+
+void DragAbleDialog::showBorderRadius(bool isShow)
+{
+    QString str;
+
+    if (isShow)
+    {
+        str = QString("QWidget#widget_frame\
+                        {\
+                            border:3px solid  rgb(46, 165, 255);\
+                            background-color: rgba(255, 255, 255, 0);\
+                            border-radius:5px;\
+                        }\
+                        QWidget#widget_back\
+                        {\
+                        border-radius:3px;\
+                        }\
+                        QWidget#widget_title\
+                        {\
+                            border-top-right-radius:5px;\
+                            border-top-left-radius:5px;\
+                        }\
+                        QWidget#widget_container\
+                        {\
+                            border-bottom-right-radius:5px;\
+                            border-bottom-left-radius:5px;\
+                        }\
+                        QStackedWidget\
+                        {\
+                            border-bottom-right-radius:5px;\
+                            border-bottom-left-radius:5px;\
+                        }\
+                        QWidget#page_courseList\
+                        {\
+                            border-bottom-right-radius:5px;\
+                            border-bottom-left-radius:5px;\
+                        }");
+    }
+    else
+    {
+        str = QString("QWidget#widget_frame\
+                        {\
+                            border:3px solid  rgb(46, 165, 255);\
+                            background-color: rgba(255, 255, 255, 0);\
+                            border-radius:0px;\
+                        }\
+                        QWidget#widget_back\
+                        {\
+                        border-radius:0px;\
+                        }\
+                        QWidget#widget_title\
+                        {\
+                            border-top-right-radius:0px;\
+                            border-top-left-radius:0px;\
+                        }\
+                        QWidget#widget_container\
+                        {\
+                            border-bottom-right-radius:0px;\
+                            border-bottom-left-radius:0px;\
+                        }\
+                        QStackedWidget\
+                        {\
+                            border-bottom-right-radius:0px;\
+                            border-bottom-left-radius:0px;\
+                        }\
+                        QWidget#page_courseList\
+                        {\
+                            border-bottom-right-radius:0px;\
+                            border-bottom-left-radius:0px;\
+                        }");
+    }
+
+    ui->widget_frame->setStyleSheet(str);
+
+}
+
+void DragAbleDialog::doChangeFullScreen()
+{
+    if (this->isFullScreen())
+    {
+//        this->doShowNormal();
+//        mAction_FullScreen->setText(tr("show fullscreen"));
+    }
+    else
+    {
+        this->doShowFullScreen();
+//        mAction_FullScreen->setText(tr("quit fullscreen"));
+    }
+}
+
+void DragAbleDialog::doChangeMaxSize()
+{
+    if (this->isMaximized())
+    {
+//        this->doShowNormal();
+//        mAction_FullScreen->setText(tr("show fullscreen"));
+    }
+    else
+    {
+        this->doShowMaxSize();
+//        mAction_FullScreen->setText(tr("quit fullscreen"));
+    }
+}
+
+void DragAbleDialog::slotTimerTimeOut()
+{
+    if (QObject::sender() == mTimer)
+    {
+        if (!isLeftPressDown)
+            checkCursorDirect(QCursor::pos());
+    }
+}
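
A minimal, hypothetical usage sketch (not part of this commit; the frameless window flag is assumed to be set in the .ui file): client code places its content inside getContainWidget() and lets the frame handle dragging and edge resizing.

    #include <QApplication>
    #include <QVBoxLayout>
    #include <QLabel>
    #include "DragAbleDialog.h"

    int main(int argc, char *argv[])
    {
        QApplication app(argc, argv);

        DragAbleDialog dialog;
        dialog.setTitle("demo");

        // Content goes into the inner container widget;
        // the surrounding frame provides drag/resize handling.
        QVBoxLayout *layout = new QVBoxLayout(dialog.getContainWidget());
        layout->addWidget(new QLabel("hello"));

        dialog.show();
        return app.exec();
    }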

+ 66 - 0
module/DragAbleWidget/DragAbleDialog.h

@@ -0,0 +1,66 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef DRAGABLEDIALOG_H
+#define DRAGABLEDIALOG_H
+
+#include <QWidget>
+#include <QTimer>
+#include <QDialog>
+
+namespace Ui {
+class DragAbleDialog;
+}
+
+class DragAbleDialog : public QDialog
+{
+    Q_OBJECT
+
+public:
+    explicit DragAbleDialog(QWidget *parent = 0);
+    ~DragAbleDialog();
+
+    QWidget *getContainWidget();
+
+    void setTitle(QString str);
+
+    void doShowMaxSize();
+    void doShowFullScreen();
+    void doShowNormal();
+
+private:
+    Ui::DragAbleDialog *ui;
+
+    QTimer *mTimer;
+
+    /// Window-resize related members below
+    ////////
+protected:
+    void mouseReleaseEvent(QMouseEvent *event);
+    void mouseMoveEvent(QMouseEvent *event);
+    void mousePressEvent(QMouseEvent *event);
+
+private:
+    bool isMax; // whether the window is maximized
+    QRect mLocation;
+
+    bool mIsResizeMode;
+    bool isLeftPressDown;  // whether the left button is pressed
+    QPoint dragPosition;   // point remembered while dragging the window
+    int dir;        // direction of the resize in progress
+
+    void checkCursorDirect(const QPoint &cursorGlobalPoint);
+
+    void showBorderRadius(bool isShow);
+    void doChangeFullScreen();
+    void doChangeMaxSize();
+
+private slots:
+    void slotTimerTimeOut();
+
+};
+
+#endif // DRAGABLEDIALOG_H

+ 113 - 0
module/DragAbleWidget/DragAbleDialog.ui

@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>DragAbleDialog</class>
+ <widget class="QDialog" name="DragAbleDialog">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>1024</width>
+    <height>680</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Form</string>
+  </property>
+  <layout class="QVBoxLayout" name="verticalLayout">
+   <property name="spacing">
+    <number>0</number>
+   </property>
+   <property name="leftMargin">
+    <number>0</number>
+   </property>
+   <property name="topMargin">
+    <number>0</number>
+   </property>
+   <property name="rightMargin">
+    <number>0</number>
+   </property>
+   <property name="bottomMargin">
+    <number>0</number>
+   </property>
+   <item>
+    <widget class="QWidget" name="widget_frame" native="true">
+     <property name="styleSheet">
+      <string notr="true">QWidget#widget_frame
+{
+	border:3px solid  rgb(46, 165, 255);
+	background-color: rgba(255, 255, 255, 0);
+	border-radius:0px;
+}
+
+QWidget#widget_back
+{
+border-radius:0px;
+}
+
+QWidget#widget_title
+{
+	border-top-right-radius:1px;
+	border-top-left-radius:1px;
+}
+
+QWidget#widget_container
+{
+	border-bottom-right-radius:1px;
+	border-bottom-left-radius:1px;
+}
+
+QStackedWidget
+{
+	border-bottom-right-radius:1px;
+	border-bottom-left-radius:1px;
+}
+
+QWidget#page_courseList
+{
+	border-bottom-right-radius:1px;
+	border-bottom-left-radius:1px;
+}
+
+	</string>
+     </property>
+     <layout class="QVBoxLayout" name="verticalLayout_2">
+      <property name="spacing">
+       <number>0</number>
+      </property>
+      <property name="leftMargin">
+       <number>0</number>
+      </property>
+      <property name="topMargin">
+       <number>0</number>
+      </property>
+      <property name="rightMargin">
+       <number>0</number>
+      </property>
+      <property name="bottomMargin">
+       <number>0</number>
+      </property>
+      <item>
+       <widget class="QWidget" name="widget_container" native="true">
+        <property name="sizePolicy">
+         <sizepolicy hsizetype="Preferred" vsizetype="Expanding">
+          <horstretch>0</horstretch>
+          <verstretch>0</verstretch>
+         </sizepolicy>
+        </property>
+        <property name="styleSheet">
+         <string notr="true">QWidget#widget_container
+{
+	background-color: rgb(22, 22, 22);
+}
+</string>
+        </property>
+       </widget>
+      </item>
+     </layout>
+    </widget>
+   </item>
+  </layout>
+ </widget>
+ <resources/>
+ <connections/>
+</ui>

+ 439 - 0
module/DragAbleWidget/DragAbleWidget.cpp

@@ -0,0 +1,439 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "DragAbleWidget.h"
+#include "ui_DragAbleWidget.h"
+
+#include <QDesktopWidget>
+#include <QMouseEvent>
+#include <QTimer>
+#include <QDebug>
+
+#define MARGINS 1 // window border width
+// mouse-driven window resizing
+#define PADDING 6
+enum Direction { UP=0, DOWN, LEFT, RIGHT, LEFTTOP, LEFTBOTTOM, RIGHTBOTTOM, RIGHTTOP, NONE };
+
+DragAbleWidget::DragAbleWidget(QWidget *parent) :
+    QWidget(parent),
+    ui(new Ui::DragAbleWidget)
+{
+    ui->setupUi(this);
+
+//    /// The timer checks the cursor position periodically, so a cursor that moves into the window too fast to be detected does not leave the pointer stuck in a resize shape
+//    mTimer = new QTimer;
+//    mTimer->setInterval(1000);
+//    connect(mTimer, &QTimer::timeout, this, &DragAbleWidget::slotTimerTimeOut);
+//    mTimer->start();
+
+/// window-resize handling
+    isMax = false;
+
+//    int w = this->width();
+//    int h = this->height();
+//    QRect screenRect = QApplication::desktop()->screenGeometry(); // get the screen size
+//    int x = (screenRect.width() - w) / 2;
+//    int y = (screenRect.height() - h) / 2;
+
+    mLocation = this->geometry();
+
+    isLeftPressDown = false;
+    this->dir = NONE;
+    this->setMouseTracking(true); // track the mouse
+    ui->widget_frame->setMouseTracking(true);
+    ui->widget_container->setMouseTracking(true);
+
+    this->setFocusPolicy(Qt::ClickFocus);
+
+    ui->widget_frame->setContentsMargins(MARGINS,MARGINS,MARGINS,MARGINS);
+    showBorderRadius(true);
+
+}
+
+DragAbleWidget::~DragAbleWidget()
+{
+
+}
+
+QWidget *DragAbleWidget::getContainWidget()
+{
+    return ui->widget_container;
+}
+
+void DragAbleWidget::setTitle(QString str)
+{
+//    ui->label_titleName->setText(str);
+    this->setWindowTitle(str);
+}
+
+//////////// window-resize handling
+
+void DragAbleWidget::mouseReleaseEvent(QMouseEvent *event)
+{
+    if(event->button() == Qt::LeftButton)
+    {
+//        qDebug()<<__FUNCTION__;
+        isLeftPressDown = false;
+        if(dir != NONE)
+        {
+            this->releaseMouse();
+            this->setCursor(QCursor(Qt::ArrowCursor));
+        }
+    }
+}
+
+void DragAbleWidget::mousePressEvent(QMouseEvent *event)
+{
+//    qDebug()<<__FUNCTION__;
+    if (event->type() == QEvent::MouseButtonDblClick)
+    {
+        if (event->button() == Qt::LeftButton)
+        {
+//            if(QApplication::keyboardModifiers() == (Qt::ControlModifier|Qt::ShiftModifier|Qt::AltModifier))
+//            {
+//                doChangeFullScreen(); // Ctrl + left button
+//                doChangeMaxSize();
+//            }
+        }
+    }
+
+    switch(event->button())
+    {
+    case Qt::LeftButton:
+        if (isMax || this->isFullScreen()) break;
+        isLeftPressDown = true;
+        checkCursorDirect(event->globalPos());
+
+        if(dir != NONE)
+        {
+            this->grabMouse(); // take the mouse grab while resizing
+            mIsResizeMode = true;
+        }
+        else
+        {
+            dragPosition  = event->globalPos() - this->frameGeometry().topLeft();
+            mIsResizeMode = false;
+        }
+        break;
+//    case Qt::RightButton:
+//        if (!this->isFullScreen())
+//            mAction_FullScreen->setText(tr("show fullscreen"));
+//        else
+//            mAction_FullScreen->setText(tr("quit fullscreen"));
+//        mPopMenu->exec(QCursor::pos());
+//        break;
+    default:
+        QWidget::mousePressEvent(event);
+    }
+
+}
+
+void DragAbleWidget::mouseMoveEvent(QMouseEvent *event)
+{
+//    qDebug()<<__FUNCTION__<<isLeftPressDown;
+
+    QPoint gloPoint = event->globalPos();
+    QRect rect = this->rect();
+    QPoint tl = mapToGlobal(rect.topLeft());
+    QPoint rb = mapToGlobal(rect.bottomRight());
+
+    if (isMax || this->isFullScreen()) return;
+    if (!isLeftPressDown)
+    {
+        checkCursorDirect(gloPoint);
+        return;
+    }
+
+//    if(!isLeftPressDown)
+//    {
+//        checkCursorDirect(gloPoint);
+//    }
+//    else
+    {
+
+//        if(dir != NONE)
+        if (mIsResizeMode)
+        {
+            QRect rMove(tl, rb);
+
+            switch(dir) {
+            case LEFT:
+                if(rb.x() - gloPoint.x() <= this->minimumWidth())
+                    rMove.setX(tl.x());
+                else
+                    rMove.setX(gloPoint.x());
+                break;
+            case RIGHT:
+                rMove.setWidth(gloPoint.x() - tl.x());
+                break;
+            case UP:
+                if(rb.y() - gloPoint.y() <= this->minimumHeight())
+                    rMove.setY(tl.y());
+                else
+                    rMove.setY(gloPoint.y());
+                break;
+            case DOWN:
+                rMove.setHeight(gloPoint.y() - tl.y());
+                break;
+            case LEFTTOP:
+                if(rb.x() - gloPoint.x() <= this->minimumWidth())
+                    rMove.setX(tl.x());
+                else
+                    rMove.setX(gloPoint.x());
+                if(rb.y() - gloPoint.y() <= this->minimumHeight())
+                    rMove.setY(tl.y());
+                else
+                    rMove.setY(gloPoint.y());
+                break;
+            case RIGHTTOP:
+                rMove.setWidth(gloPoint.x() - tl.x());
+                rMove.setY(gloPoint.y());
+                break;
+            case LEFTBOTTOM:
+                rMove.setX(gloPoint.x());
+                rMove.setHeight(gloPoint.y() - tl.y());
+                break;
+            case RIGHTBOTTOM:
+                rMove.setWidth(gloPoint.x() - tl.x());
+                rMove.setHeight(gloPoint.y() - tl.y());
+                break;
+            default:
+                break;
+            }
+            this->setGeometry(rMove);
+//            emit sig_WindowMoved(rMove);
+        } else {
+            checkCursorDirect(event->globalPos());
+
+            if (dir == NONE && !isMax)
+            {
+                QPoint point = event->globalPos() - dragPosition;
+
+                QRect mLimitRect = QApplication::desktop()->availableGeometry();
+
+                if (point.x() < mLimitRect.x())
+                    point.setX(mLimitRect.x());
+
+                if (point.x() > (mLimitRect.x()+mLimitRect.width()-this->width()))
+                    point.setX(mLimitRect.x()+mLimitRect.width()-this->width());
+
+
+                if (point.y() < mLimitRect.y())
+                    point.setY(mLimitRect.y());
+
+                if (point.y() > (mLimitRect.y()+mLimitRect.height()-this->height()))
+                    point.setY(mLimitRect.y()+mLimitRect.height()-this->height());
+
+                move(point);
+            }
+
+            event->accept();
+        }
+    }
+//    QWidget::mouseMoveEvent(event);
+    event->accept();
+}
+
+void DragAbleWidget::checkCursorDirect(const QPoint &cursorGlobalPoint)
+{
+    // Get the window's rectangle in screen coordinates: tl is the top-left point, rb the bottom-right point
+    QRect rect = this->rect();
+    QPoint tl = mapToGlobal(rect.topLeft());
+    QPoint rb = mapToGlobal(rect.bottomRight());
+
+    int x = cursorGlobalPoint.x();
+    int y = cursorGlobalPoint.y();
+
+    if(tl.x() + PADDING >= x && tl.x() <= x && tl.y() + PADDING >= y && tl.y() <= y) {
+        // top-left corner
+        dir = LEFTTOP;
+        this->setCursor(QCursor(Qt::SizeFDiagCursor));  // set the cursor shape
+    } else if(x >= rb.x() - PADDING && x <= rb.x() && y >= rb.y() - PADDING && y <= rb.y()) {
+        // bottom-right corner
+        dir = RIGHTBOTTOM;
+        this->setCursor(QCursor(Qt::SizeFDiagCursor));
+    } else if(x <= tl.x() + PADDING && x >= tl.x() && y >= rb.y() - PADDING && y <= rb.y()) {
+        // bottom-left corner
+        dir = LEFTBOTTOM;
+        this->setCursor(QCursor(Qt::SizeBDiagCursor));
+    } else if(x <= rb.x() && x >= rb.x() - PADDING && y >= tl.y() && y <= tl.y() + PADDING) {
+        // top-right corner
+        dir = RIGHTTOP;
+        this->setCursor(QCursor(Qt::SizeBDiagCursor));
+    } else if(x <= tl.x() + PADDING && x >= tl.x()) {
+        // left edge
+        dir = LEFT;
+        this->setCursor(QCursor(Qt::SizeHorCursor));
+    } else if( x <= rb.x() && x >= rb.x() - PADDING) {
+        // right edge
+        dir = RIGHT;
+        this->setCursor(QCursor(Qt::SizeHorCursor));
+    } else if(y >= tl.y() && y <= tl.y() + PADDING){
+        // top edge
+        dir = UP;
+        this->setCursor(QCursor(Qt::SizeVerCursor));
+    } else if(y <= rb.y() && y >= rb.y() - PADDING) {
+        // bottom edge
+        dir = DOWN;
+        this->setCursor(QCursor(Qt::SizeVerCursor));
+    } else {
+        // default
+        dir = NONE;
+        this->setCursor(QCursor(Qt::ArrowCursor));
+    }
+}
+
+void DragAbleWidget::doShowMaxSize()
+{
+    this->show();
+    this->showMaximized();
+    this->raise();
+    ui->widget_frame->setContentsMargins(0,0,0,0); // hide the border
+
+    showBorderRadius(false);
+}
+
+void DragAbleWidget::doShowFullScreen()
+{
+    this->show();
+    this->showFullScreen();
+    this->raise();
+    ui->widget_frame->setContentsMargins(0,0,0,0); // hide the border
+
+    showBorderRadius(false);
+
+}
+
+void DragAbleWidget::doShowNormal()
+{
+    qDebug()<<__FUNCTION__;
+
+    this->show();
+    this->showNormal();
+    this->raise();
+
+    if (!isMax)
+    {
+        ui->widget_frame->setContentsMargins(MARGINS,MARGINS,MARGINS,MARGINS);
+        showBorderRadius(true);
+    } else {
+        ui->widget_frame->setContentsMargins(0,0,0,0);
+        showBorderRadius(false);
+    }
+}
+
+
+void DragAbleWidget::showBorderRadius(bool isShow)
+{
+    QString str;
+
+    if (isShow)
+    {
+        str = QString("QWidget#widget_frame\
+                        {\
+                            border:3px solid  rgb(46, 165, 255);\
+                            background-color: rgba(255, 255, 255, 0);\
+                            border-radius:5px;\
+                        }\
+                        QWidget#widget_back\
+                        {\
+                        border-radius:3px;\
+                        }\
+                        QWidget#widget_title\
+                        {\
+                            border-top-right-radius:5px;\
+                            border-top-left-radius:5px;\
+                        }\
+                        QWidget#widget_container\
+                        {\
+                            border-bottom-right-radius:5px;\
+                            border-bottom-left-radius:5px;\
+                        }\
+                        QStackedWidget\
+                        {\
+                            border-bottom-right-radius:5px;\
+                            border-bottom-left-radius:5px;\
+                        }\
+                        QWidget#page_courseList\
+                        {\
+                            border-bottom-right-radius:5px;\
+                            border-bottom-left-radius:5px;\
+                        }");
+    }
+    else
+    {
+        str = QString("QWidget#widget_frame\
+                        {\
+                            border:3px solid  rgb(46, 165, 255);\
+                            background-color: rgba(255, 255, 255, 0);\
+                            border-radius:0px;\
+                        }\
+                        QWidget#widget_back\
+                        {\
+                        border-radius:0px;\
+                        }\
+                        QWidget#widget_title\
+                        {\
+                            border-top-right-radius:0px;\
+                            border-top-left-radius:0px;\
+                        }\
+                        QWidget#widget_container\
+                        {\
+                            border-bottom-right-radius:0px;\
+                            border-bottom-left-radius:0px;\
+                        }\
+                        QStackedWidget\
+                        {\
+                            border-bottom-right-radius:0px;\
+                            border-bottom-left-radius:0px;\
+                        }\
+                        QWidget#page_courseList\
+                        {\
+                            border-bottom-right-radius:0px;\
+                            border-bottom-left-radius:0px;\
+                        }");
+    }
+
+    ui->widget_frame->setStyleSheet(str);
+
+}
+
+void DragAbleWidget::doChangeFullScreen()
+{
+    if (this->isFullScreen())
+    {
+//        this->doShowNormal();
+//        mAction_FullScreen->setText(tr("show fullscreen"));
+    }
+    else
+    {
+        this->doShowFullScreen();
+//        mAction_FullScreen->setText(tr("quit fullscreen"));
+    }
+}
+
+void DragAbleWidget::doChangeMaxSize()
+{
+    if (this->isMaximized())
+    {
+//        this->doShowNormal();
+//        mAction_FullScreen->setText(tr("show fullscreen"));
+    }
+    else
+    {
+        this->doShowMaxSize();
+//        mAction_FullScreen->setText(tr("quit fullscreen"));
+    }
+}
+
+void DragAbleWidget::slotTimerTimeOut()
+{
+    if (QObject::sender() == mTimer)
+    {
+        if (!isLeftPressDown)
+            checkCursorDirect(QCursor::pos());
+    }
+}

+ 67 - 0
module/DragAbleWidget/DragAbleWidget.h

@@ -0,0 +1,67 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef DRAGABLEWIDGET_H
+#define DRAGABLEWIDGET_H
+
+#include <QWidget>
+#include <QTimer>
+#include <QWidget>
+
+namespace Ui {
+class DragAbleWidget;
+}
+
+class DragAbleWidget : public QWidget
+{
+    Q_OBJECT
+
+public:
+    explicit DragAbleWidget(QWidget *parent = 0);
+    ~DragAbleWidget();
+
+    QWidget *getContainWidget();
+
+    void setTitle(QString str);
+
+    void doShowMaxSize();
+    void doShowFullScreen();
+    void doShowNormal();
+
+private:
+    Ui::DragAbleWidget *ui;
+
+    QTimer *mTimer;
+
+    /// Window-resize related members below
+    ////////
+protected:
+//    bool eventFilter(QObject *obj, QEvent *event);
+    void mouseReleaseEvent(QMouseEvent *event);
+    void mouseMoveEvent(QMouseEvent *event);
+    void mousePressEvent(QMouseEvent *event);
+
+private:
+    bool isMax; // whether the window is maximized
+    QRect mLocation;
+
+    bool mIsResizeMode;
+    bool isLeftPressDown;  // whether the left button is pressed
+    QPoint dragPosition;   // point remembered while dragging the window
+    int dir;        // direction of the resize in progress
+
+    void checkCursorDirect(const QPoint &cursorGlobalPoint);
+
+    void showBorderRadius(bool isShow);
+    void doChangeFullScreen();
+    void doChangeMaxSize();
+
+private slots:
+    void slotTimerTimeOut();
+
+};
+
+#endif // DRAGABLEWIDGET_H

+ 13 - 0
module/DragAbleWidget/DragAbleWidget.pri

@@ -0,0 +1,13 @@
+SOURCES += \
+    $$PWD/DragAbleDialog.cpp \
+    $$PWD/DragAbleWidget.cpp
+
+HEADERS  += \
+    $$PWD/DragAbleDialog.h \
+    $$PWD/DragAbleWidget.h
+
+FORMS    += \
+    $$PWD/DragAbleDialog.ui \
+    $$PWD/DragAbleWidget.ui
+
+INCLUDEPATH += $$PWD

+ 113 - 0
module/DragAbleWidget/DragAbleWidget.ui

@@ -0,0 +1,113 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ui version="4.0">
+ <class>DragAbleWidget</class>
+ <widget class="QWidget" name="DragAbleWidget">
+  <property name="geometry">
+   <rect>
+    <x>0</x>
+    <y>0</y>
+    <width>1024</width>
+    <height>680</height>
+   </rect>
+  </property>
+  <property name="windowTitle">
+   <string>Form</string>
+  </property>
+  <layout class="QVBoxLayout" name="verticalLayout">
+   <property name="spacing">
+    <number>0</number>
+   </property>
+   <property name="leftMargin">
+    <number>0</number>
+   </property>
+   <property name="topMargin">
+    <number>0</number>
+   </property>
+   <property name="rightMargin">
+    <number>0</number>
+   </property>
+   <property name="bottomMargin">
+    <number>0</number>
+   </property>
+   <item>
+    <widget class="QWidget" name="widget_frame" native="true">
+     <property name="styleSheet">
+      <string notr="true">QWidget#widget_frame
+{
+	border:3px solid  rgb(46, 165, 255);
+	background-color: rgba(255, 255, 255, 0);
+	border-radius:0px;
+}
+
+QWidget#widget_back
+{
+border-radius:0px;
+}
+
+QWidget#widget_title
+{
+	border-top-right-radius:1px;
+	border-top-left-radius:1px;
+}
+
+QWidget#widget_container
+{
+	border-bottom-right-radius:1px;
+	border-bottom-left-radius:1px;
+}
+
+QStackedWidget
+{
+	border-bottom-right-radius:1px;
+	border-bottom-left-radius:1px;
+}
+
+QWidget#page_courseList
+{
+	border-bottom-right-radius:1px;
+	border-bottom-left-radius:1px;
+}
+
+	</string>
+     </property>
+     <layout class="QVBoxLayout" name="verticalLayout_2">
+      <property name="spacing">
+       <number>0</number>
+      </property>
+      <property name="leftMargin">
+       <number>0</number>
+      </property>
+      <property name="topMargin">
+       <number>0</number>
+      </property>
+      <property name="rightMargin">
+       <number>0</number>
+      </property>
+      <property name="bottomMargin">
+       <number>0</number>
+      </property>
+      <item>
+       <widget class="QWidget" name="widget_container" native="true">
+        <property name="sizePolicy">
+         <sizepolicy hsizetype="Preferred" vsizetype="Expanding">
+          <horstretch>0</horstretch>
+          <verstretch>0</verstretch>
+         </sizepolicy>
+        </property>
+        <property name="styleSheet">
+         <string notr="true">QWidget#widget_container
+{
+	background-color: rgb(22, 22, 22);
+}
+</string>
+        </property>
+       </widget>
+      </item>
+     </layout>
+    </widget>
+   </item>
+  </layout>
+ </widget>
+ <resources/>
+ <connections/>
+</ui>

+ 31 - 0
module/ScreenRecorder/ScreenRecorder.pri

@@ -0,0 +1,31 @@
+INCLUDEPATH += $$PWD/src
+INCLUDEPATH += $$PWD/src/Media/Audio
+
+SOURCES += \
+        $$PWD/src/EventHandle/VideoRecorderEventHandle.cpp \
+        $$PWD/src/Media/Image/ImageReader.cpp \
+        $$PWD/src/Media/Video/CaptureWindowThread.cpp \
+        $$PWD/src/Media/Audio/AudioEncoder.cpp \
+        $$PWD/src/Media/Audio/GetAudioThread.cpp \
+        $$PWD/src/Media/MediaManager.cpp \
+        $$PWD/src/Media/Video/GetVideoThread.cpp \
+        $$PWD/src/Media/Video/VideoEncoder.cpp \
+        $$PWD/src/Media/Video/VideoFileWriter.cpp \
+        $$PWD/src/Media/Image/yuv420p.cpp
+
+HEADERS += \
+        $$PWD/src/EventHandle/VideoRecorderEventHandle.h \
+        $$PWD/src/Media/Image/ImageReader.h \
+        $$PWD/src/Media/Video/CaptureWindowThread.h \
+        $$PWD/src/Media/MediaManager.h \
+        $$PWD/src/Media/Video/VideoFileInfoTypes.h \
+        $$PWD/src/Media/Video/VideoFileWriter.h \
+        $$PWD/src/Media/Image/yuv420p.h \
+        $$PWD/src/Media/Audio/AudioEncoder.h \
+        $$PWD/src/Media/Audio/GetAudioThread.h \
+        $$PWD/src/Media/Video/GetVideoThread.h \
+        $$PWD/src/Media/Video/VideoEncoder.h
+
+#### lib ### Begin
+#    include($$PWD/../lib/lib.pri)
+#### lib ### End

+ 12 - 0
module/ScreenRecorder/src/EventHandle/VideoRecorderEventHandle.cpp

@@ -0,0 +1,12 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "VideoRecorderEventHandle.h"
+
+VideoRecorderEventHandle::~VideoRecorderEventHandle()
+{
+
+}

+ 29 - 0
module/ScreenRecorder/src/EventHandle/VideoRecorderEventHandle.h

@@ -0,0 +1,29 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+#ifndef VideoRecorderEventHandle_H
+#define VideoRecorderEventHandle_H
+
+class VideoRecorderEventHandle
+{
+public:
+    virtual ~VideoRecorderEventHandle();
+
+//    /**
+//     * @brief Reports state changes
+//     * @param state
+//     */
+//    virtual void OnStateChanged(const CallBackState &state) = 0;
+
+    /**
+     * @brief Reports the audio volume
+     * @param volumeL left-channel volume, 0~100
+     * @param volumeR right-channel volume, 0~100
+     */
+    virtual void OnAudioVolumeUpdated(const int &volumeL, const int &volumeR) = 0;
+
+};
+
+#endif // VideoRecorderEventHandle_H
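
A hedged illustration (not part of the commit; the class name is hypothetical): a consumer implements this interface and overrides the pure-virtual volume callback.

    #include <cstdio>
    #include "VideoRecorderEventHandle.h"

    // Hypothetical listener: receives per-channel volume levels (0~100).
    class VolumeMeter : public VideoRecorderEventHandle
    {
    public:
        void OnAudioVolumeUpdated(const int &volumeL, const int &volumeR) override
        {
            std::printf("volume L=%d R=%d\n", volumeL, volumeR);
        }
    };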

+ 189 - 0
module/ScreenRecorder/src/Media/Audio/AudioEncoder.cpp

@@ -0,0 +1,189 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "AudioEncoder.h"
+
+AudioEncoder::AudioEncoder()
+{
+    aCodec = nullptr;
+    aCodecCtx = nullptr;
+
+    aFrame = nullptr;
+
+    mFrameBuffer = nullptr;
+    mFrameBufferSize = 0;
+
+}
+
+bool AudioEncoder::openEncoder()
+{
+    if (aCodec == nullptr)
+    {
+        /// open the audio encoder
+        //find the encoder
+        aCodec = avcodec_find_encoder(AV_CODEC_ID_AAC);
+
+        if (aCodec == nullptr)
+        {
+            fprintf(stderr, "audio Codec not found.\n");
+            return false;
+        }
+        else
+        {
+            aCodecCtx = avcodec_alloc_context3(aCodec);
+        }
+
+        aCodecCtx->codec_type  = AVMEDIA_TYPE_AUDIO;
+        aCodecCtx->sample_fmt  = AV_SAMPLE_FMT_FLTP;
+        aCodecCtx->sample_rate = 44100;
+        aCodecCtx->channels    = 2;
+        aCodecCtx->channel_layout = av_get_default_channel_layout(aCodecCtx->channels);
+
+    //    aCodecCtx->channels       = av_get_channel_layout_nb_channels(aCodecCtx->channel_layout);
+    //    aCodecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
+
+    //    aCodecCtx->profile=FF_PROFILE_AAC_LOW; // (see the AAC format overview)
+    //    aCodecCtx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
+
+    //    aCodecCtx->bit_rate = 64000;
+
+        aCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
+        aCodecCtx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
+
+        if(avcodec_open2(aCodecCtx, aCodec, nullptr)<0)
+        {
+            printf("Could not open audio codec.\n");
+            return false;
+        }
+
+        mONEFrameSize = av_samples_get_buffer_size(NULL, aCodecCtx->channels, aCodecCtx->frame_size, aCodecCtx->sample_fmt, 1);
+
+        aFrame           = av_frame_alloc();
+        mFrameBuffer     = (uint8_t *)av_malloc(mONEFrameSize);
+        mFrameBufferSize = mONEFrameSize;
+
+        /// This is required: it sets the number of samples in this frame
+        int oneChannelBufferSize = mONEFrameSize / aCodecCtx->channels; // bytes in one channel's data
+        int nb_samplesize = oneChannelBufferSize / av_get_bytes_per_sample(aCodecCtx->sample_fmt); // number of samples per channel
+        aFrame->nb_samples = nb_samplesize;
+
+        /// Attach the buffer to the frame; either of the two calls below works
+    //    avcodec_fill_audio_frame(ost->frame, aCodecCtx->channels, aCodecCtx->sample_fmt,(const uint8_t*)ost->frameBuffer, mONEFrameSize, 0);
+        av_samples_fill_arrays(aFrame->data, aFrame->linesize, mFrameBuffer, aCodecCtx->channels, aFrame->nb_samples, aCodecCtx->sample_fmt, 0);
+    }
+
+    return true;
+}
+
+void AudioEncoder::closeEncoder()
+{
+    avcodec_close(aCodecCtx);
+    av_free(aCodecCtx);
+    av_free(aFrame);
+
+    aCodec = nullptr;
+    aCodecCtx = nullptr;
+
+    aFrame = nullptr;
+}
+
+/**
+*  Add ADTS header at the beginning of each and every AAC packet.
+*  This is needed as MediaCodec encoder generates a packet of raw
+*  AAC data.
+*
+*  Note the packetLen must count in the ADTS header itself !!! .
+*  Note: packetLen must be the raw AAC packet length + 7 (the 7-byte ADTS header).
+**/
+void addADTStoPacket(uint8_t* packet, int packetLen)
+{
+   int profile = 2;  // AAC LC, MediaCodecInfo.CodecProfileLevel.AACObjectLC
+   int freqIdx = 4;  // 44.1 kHz: index of 44100 in avpriv_mpeg4audio_sample_rates below (from the FFmpeg sources)
+   int chanCfg = 2;  // stereo; see the channel_configuration table below
+
+   /*int avpriv_mpeg4audio_sample_rates[] = {
+       96000, 88200, 64000, 48000, 44100, 32000,
+               24000, 22050, 16000, 12000, 11025, 8000, 7350
+   };
+   channel_configuration: the number of channels, chanCfg
+   0: Defined in AOT Specifc Config
+   1: 1 channel: front-center
+   2: 2 channels: front-left, front-right
+   3: 3 channels: front-center, front-left, front-right
+   4: 4 channels: front-center, front-left, front-right, back-center
+   5: 5 channels: front-center, front-left, front-right, back-left, back-right
+   6: 6 channels: front-center, front-left, front-right, back-left, back-right, LFE-channel
+   7: 8 channels: front-center, front-left, front-right, side-left, side-right, back-left, back-right, LFE-channel
+   8-15: Reserved
+   */
+
+   // fill in ADTS data
+   packet[0] = (uint8_t)0xFF;
+   packet[1] = (uint8_t)0xF9;
+   packet[2] = (uint8_t)(((profile-1)<<6) + (freqIdx<<2) +(chanCfg>>2));
+   packet[3] = (uint8_t)(((chanCfg&3)<<6) + (packetLen>>11));
+   packet[4] = (uint8_t)((packetLen&0x7FF) >> 3);
+   packet[5] = (uint8_t)(((packetLen&7)<<5) + 0x1F);
+   packet[6] = (uint8_t)0xFC;
+}
+
+AACFramePtr AudioEncoder::encode(uint8_t *inputbuf, int bufferSize)
+{
+    AACFramePtr framePtr = nullptr;
+
+    AVPacket pkt;
+    av_init_packet(&pkt);
+
+    AVPacket *packet = &pkt;
+
+    memcpy(mFrameBuffer, inputbuf, bufferSize);
+
+    int ret = avcodec_send_frame(aCodecCtx, aFrame);
+    if (ret != 0)
+    {
+        char buff[128]={0};
+        av_strerror(ret, buff, 128);
+
+        fprintf(stderr, "Error sending a frame for encoding! (%s)\n", buff);
+        return nullptr;
+    }
+
+    /* read all the available output packets (in general there may be
+     * any number of them) */
+    while (ret >= 0)
+    {
+        ret = avcodec_receive_packet(aCodecCtx, packet);
+
+        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF || ret < 0)
+        {
+            char errstr[AV_ERROR_MAX_STRING_SIZE] = {0};
+            av_make_error_string(errstr, AV_ERROR_MAX_STRING_SIZE, ret);
+            fprintf(stderr, "!!!!!!!!!! Error encoding audio frame: %s ret=%d", errstr, ret);
+
+            return nullptr;
+        }
+
+        uint8_t * aacBuf = (uint8_t *)malloc(packet->size+7);
+        addADTStoPacket(aacBuf, 7+packet->size);
+        memcpy(aacBuf+7, packet->data, packet->size);
+
+#if 0 /// write the raw stream to an .aac file
+        static FILE *aacFp = fopen("out22.aac", "wb");
+        fwrite(aacBuf, 1, packet->size+7, aacFp);
+#endif
+
+        framePtr = std::make_shared<AACFrame>();
+        AACFrame *frame = framePtr.get();
+        frame->setFrameBuffer(aacBuf, 7+packet->size);
+
+        free(aacBuf);
+
+        av_packet_unref(packet);
+        break;
+    }
+
+    return framePtr;
+}
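
A hedged usage sketch (not part of the commit; the driver function is hypothetical): the encoder expects exactly getONEFrameSize() bytes of 44.1 kHz stereo planar-float PCM per call and returns an ADTS-prefixed AAC packet, or nullptr when no packet is ready.

    #include "AudioEncoder.h"

    // Hypothetical driver: encode one frame of PCM captured elsewhere.
    void encodeOneFrame(AudioEncoder &encoder, uint8_t *pcm)
    {
        // pcm must hold exactly encoder.getONEFrameSize() bytes of
        // AV_SAMPLE_FMT_FLTP samples, matching the codec context above.
        AACFramePtr aac = encoder.encode(pcm, encoder.getONEFrameSize());
        if (aac) {
            // The frame already carries the 7-byte ADTS header
            // followed by the raw AAC payload.
        }
    }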

+ 45 - 0
module/ScreenRecorder/src/Media/Audio/AudioEncoder.h

@@ -0,0 +1,45 @@
+/**
+ * 叶海辉
+ * QQ group: 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef AUDIOENCODER_H
+#define AUDIOENCODER_H
+
+#include "AudioFrame/PCMFrame.h"
+#include "AudioFrame/AACFrame.h"
+
+extern "C"
+{
+    #include <libavcodec/avcodec.h>
+    #include <libavdevice/avdevice.h>
+    #include <libavformat/avformat.h>
+    #include <libswresample/swresample.h>
+    #include <libavutil/imgutils.h>
+}
+
+class AudioEncoder
+{
+public:
+    AudioEncoder();
+
+    bool openEncoder();
+    void closeEncoder();
+
+    int getONEFrameSize(){return mONEFrameSize;}
+
+    AACFramePtr encode(uint8_t *inputbuf, int bufferSize);
+
+private:
+    AVCodec         *aCodec;
+    AVCodecContext  *aCodecCtx;
+
+    AVFrame *aFrame;
+    uint8_t *mFrameBuffer; /// holds PCM data; exactly one frame's worth is taken from it and handed to the encoder
+    int mFrameBufferSize;
+
+    int mONEFrameSize;
+};
+
+#endif // AUDIOENCODER_H
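+
+/* A minimal usage sketch of this class (the PCM source and buffer handling
+   are hypothetical; the input must match the encoder's expected frame size):
+
+   AudioEncoder encoder;
+   if (encoder.openEncoder())
+   {
+       int frameSize = encoder.getONEFrameSize();   //bytes per encoder frame
+       std::vector<uint8_t> pcm(frameSize);         //filled by some capture source
+       AACFramePtr aac = encoder.encode(pcm.data(), frameSize);
+       if (aac != nullptr)
+       {
+           //aac now holds one ADTS-framed AAC packet
+       }
+       encoder.closeEncoder();
+   }
+*/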

+ 586 - 0
module/ScreenRecorder/src/Media/Audio/GetAudioThread.cpp

@@ -0,0 +1,586 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "MoudleConfig.h"
+#include "GetAudioThread.h"
+
+#include <thread>
+#include <QDebug>
+
+#if defined __linux
+#include <xcb/xcb.h>
+#endif
+//#include <QDebug>
+//Show Dshow Device
+static void show_dshow_device()
+{
+    AVFormatContext *pFormatCtx = avformat_alloc_context();
+    AVDictionary* options = nullptr;
+    av_dict_set(&options,"list_devices","true",0);
+    AVInputFormat *iformat = av_find_input_format("dshow");
+    printf("========Device Info=============\n");
+    avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);
+    printf("================================\n");
+}
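+
+/* Note: calling show_dshow_device() only has the side effect of making ffmpeg
+   print the dshow device list to its log (stderr); the avformat_open_input
+   call on the dummy "video=dummy" input is expected to fail. */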
+
+GetAudioThread::GetAudioThread()
+{
+    pFormatCtx = nullptr;
+    aCodecCtx = nullptr;
+    aFrame = nullptr;
+
+    mIsThreadRunning = false;
+    mIsStop = true;
+    m_pause = false;
+
+    aFrame = nullptr;
+    aFrame_ReSample = nullptr;
+
+    mCallBackFunc = nullptr;
+    mCallBackFuncParam = nullptr;
+
+    audio_buf_size_L = 0;
+    audio_buf_size_R = 0;
+
+    mONEFrameSize = 0;
+
+    mCond = new Cond();
+//    mIsNeedReOpenWhenReadFailed = true;
+}
+
+GetAudioThread::~GetAudioThread()
+{
+
+}
+
+bool GetAudioThread::openDevice(const std::string &deviceName)
+{
+    mCond->Lock();
+
+    mDeviceName = deviceName;
+    bool isSucceed = init(mDeviceName.c_str());
+
+    if (!isSucceed)
+    {
+        deInit();
+    }
+
+    mCond->Unlock();
+
+    return isSucceed;
+}
+
+bool GetAudioThread::init(const char * const deviceName)
+{
+//    show_dshow_device();
+
+    if (pFormatCtx != nullptr)
+    {
+        return true;
+    }
+
+    AVCodec			*pCodec = nullptr;
+
+    pFormatCtx = avformat_alloc_context();
+
+    if (strcmp(deviceName, "virtual-audio-capturer") == 0)
+    {
+        pFormatCtx->flags |= AVFMT_FLAG_NONBLOCK;
+    }
+
+#if defined(WIN32)
+
+    AVInputFormat *ifmt = av_find_input_format("dshow"); //use dshow
+
+    char deviceNameStr[512] = {0};
+    sprintf(deviceNameStr, "audio=%s", deviceName);
+
+//    if(avformat_open_input(&pFormatCtx, "audio=virtual-audio-capturer", ifmt, nullptr)!=0)
+    if(avformat_open_input(&pFormatCtx, deviceNameStr, ifmt, nullptr)!=0)
+    {
+        fprintf(stderr, "Couldn't open input stream audio %s.(无法打开输入流)\n", deviceNameStr);
+        return false;
+    }
+
+#elif defined __linux
+    //Linux
+    AVInputFormat *ifmt = av_find_input_format("alsa");
+    if(avformat_open_input(&pFormatCtx, deviceName, ifmt, NULL)!=0)
+    {
+        printf("Couldn't open input stream. default\n");
+        return false;
+    }
+
+#else
+    show_avfoundation_device();
+    //Mac
+    AVInputFormat *ifmt=av_find_input_format("avfoundation");
+    //Avfoundation
+    //[video]:[audio]
+    if(avformat_open_input(&pFormatCtx,"0",ifmt,nullptr)!=0)
+    {
+        fprintf(stderr, "Couldn't open input stream.\n");
+        return false;
+    }
+#endif
+
+    audioStream = -1;
+    aCodecCtx   = nullptr;
+
+    for(unsigned int i=0; i < pFormatCtx->nb_streams; i++)
+    {
+        if(pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO)
+        {
+            audioStream = static_cast<int>(i);
+            break;
+        }
+    }
+
+    if(audioStream == -1)
+    {
+        printf("Didn't find a audio stream.(没有找到音频流)\n");
+        return false;
+    }
+
+    //find the decoder
+    aCodecCtx = avcodec_alloc_context3(nullptr);
+    avcodec_parameters_to_context(aCodecCtx, pFormatCtx->streams[audioStream]->codecpar);
+
+    pCodec = avcodec_find_decoder(aCodecCtx->codec_id);
+
+    if(pCodec == nullptr)
+    {
+        printf("audio Codec not found.\n");
+        return false;
+    }
+
+    if(avcodec_open2(aCodecCtx, pCodec, nullptr)<0)
+    {
+        printf("Could not open audio codec.\n");
+        return false;
+    }
+
+    ///audio decoding objects
+    aFrame = av_frame_alloc();
+
+    initResample();
+
+    return true;
+}
+
+void GetAudioThread::deInit()
+{
+qDebug()<<__FUNCTION__<<"000";
+    if (swrCtx != nullptr)
+    {
+        swr_free(&swrCtx);
+
+        swrCtx = nullptr;
+    }
+
+    if (aFrame)
+    {
+        av_free(aFrame);
+        aFrame = nullptr;
+    }
+
+    if (aFrame_ReSample)
+    {
+        av_free(aFrame_ReSample);
+        aFrame_ReSample = nullptr;
+    }
+qDebug()<<__FUNCTION__<<"333";
+    if (aCodecCtx)
+        avcodec_close(aCodecCtx);
+qDebug()<<__FUNCTION__<<"444";
+    if (pFormatCtx)
+    {
+//qDebug()<<__FUNCTION__<<pFormatCtx->iformat<<pFormatCtx->filename<<pFormatCtx->url;
+
+//        if (strcmp(pFormatCtx->filename, "virtual-audio-capturer") == 0)
+//        {
+//qDebug()<<__FUNCTION__<<"000";
+//        }
+//        else
+        {
+qDebug()<<__FUNCTION__<<"555";
+            avformat_close_input(&pFormatCtx);
+            avformat_free_context(pFormatCtx);
+        }
+
+        pFormatCtx = nullptr;
+    }
+qDebug()<<__FUNCTION__<<"999";
+}
+
+void GetAudioThread::startRecord(int outOneFrameSize, std::function<void (PCMFramePtr pcmFrame, void *param)> func, void *param)
+{
+    mIsStop = false;
+
+    mONEFrameSize = outOneFrameSize;
+
+    mCallBackFunc = func;
+    mCallBackFuncParam = param;
+
+    //start a new thread
+    std::thread([&](GetAudioThread *pointer)
+    {
+        pointer->run();
+
+    }, this).detach();
+
+}
+
+void GetAudioThread::pauseRecord()
+{
+    m_pause = true;
+}
+
+void GetAudioThread::restoreRecord()
+{
+    m_getFirst = false;
+    m_pause = false;
+}
+
+void GetAudioThread::stopRecord(bool isBlock)
+{
+qDebug()<<__FUNCTION__<<"111"<<isBlock<<mIsThreadRunning;
+    mIsStop = true;
+
+    if (isBlock)
+    {
+        while(mIsThreadRunning)
+        {
+           MoudleConfig::mSleep(10);
+        }
+    }
+qDebug()<<__FUNCTION__<<"222"<<isBlock<<mIsThreadRunning;
+}
+
+bool GetAudioThread::initResample()
+{
+    //resampling options -----------------------------------------------------start
+    aFrame_ReSample = nullptr;
+
+    //normalize the sample format and sample rate of the frames
+    swrCtx = nullptr;
+
+    //input channel layout
+    int64_t in_ch_layout;
+
+    /// ffmpeg's AAC encoder requires FLTP input,
+    /// so the audio is resampled here to 44100 Hz stereo AV_SAMPLE_FMT_FLTP
+    //resampling options ----------------
+    //input sample format
+    in_sample_fmt = aCodecCtx->sample_fmt;
+    //output sample format (32-bit float planar PCM)
+    out_sample_fmt = AV_SAMPLE_FMT_FLTP;
+    //input sample rate
+    in_sample_rate = aCodecCtx->sample_rate;
+    //input channel layout
+    in_ch_layout = aCodecCtx->channel_layout;
+
+    //output sample rate
+    out_sample_rate = 44100;
+
+    //output channel layout
+    out_ch_layout = AV_CH_LAYOUT_STEREO;
+    audio_tgt_channels = av_get_channel_layout_nb_channels(out_ch_layout);
+
+//        //output channel layout
+//        out_ch_layout = av_get_default_channel_layout(audio_tgt_channels); ///AV_CH_LAYOUT_STEREO
+//        out_ch_layout &= ~AV_CH_LAYOUT_STEREO;
+
+    if (in_ch_layout <= 0)
+    {
+        if (aCodecCtx->channels == 2)
+        {
+            in_ch_layout = AV_CH_LAYOUT_STEREO;
+        }
+        else
+        {
+            in_ch_layout = AV_CH_LAYOUT_MONO;
+        }
+    }
+
+    swrCtx = swr_alloc_set_opts(nullptr, out_ch_layout, out_sample_fmt, out_sample_rate,
+                                         in_ch_layout, in_sample_fmt, in_sample_rate, 0, nullptr);
+
+    /** Open the resampler with the specified parameters. */
+    int ret = swr_init(swrCtx);
+    if (ret < 0)
+    {
+        char buff[128]={0};
+        av_strerror(ret, buff, 128);
+
+        fprintf(stderr, "Could not open resample context %s\n", buff);
+        swr_free(&swrCtx);
+        swrCtx = nullptr;
+
+        return false;
+    }
+
+    return true;
+}
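+
+/* Why FLTP: the native ffmpeg AAC encoder only advertises AV_SAMPLE_FMT_FLTP.
+   A minimal sketch (not called anywhere in this module) that checks this at
+   runtime instead of hard-coding it:
+
+   const AVCodec *enc = avcodec_find_encoder(AV_CODEC_ID_AAC);
+   bool fltpOk = false;
+   for (const enum AVSampleFormat *p = enc->sample_fmts;
+        p != nullptr && *p != AV_SAMPLE_FMT_NONE; ++p)
+   {
+       if (*p == AV_SAMPLE_FMT_FLTP) fltpOk = true; //encoder accepts planar float
+   }
+*/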
+
+void GetAudioThread::run()
+{
+    mIsThreadRunning = true;
+
+    int64_t firstTime = MoudleConfig::getTimeStamp_MilliSecond();
+    m_getFirst = false;
+    int64_t timeIndex = 0;
+
+    mAudioPts = 0.0;
+
+while(!mIsStop)
+{
+    if (!openDevice(mDeviceName))
+    {
+        MoudleConfig::mSleep(1000);
+        continue;
+    }
+
+    mLastReadFailedTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+    while(!mIsStop)
+    {
+        AVPacket packet;
+
+        int ret = av_read_frame(pFormatCtx, &packet);
+
+        if (ret<0)
+        {
+//            char buffer[1024] = {0};
+//            av_strerror(ret, buffer, 1024);
+//            qDebug("av_read_frame = %d %s\n", ret, buffer);
+
+            if (pFormatCtx->flags & AVFMT_FLAG_NONBLOCK)
+            {
+                ///if no data has been read for 3 seconds, consider the read genuinely failed
+                if ((MoudleConfig::getTimeStamp_MilliSecond() - mLastReadFailedTime) > 3000)
+                {
+                    qDebug("audio read failed! %s\n", mDeviceName.c_str());
+                    fprintf(stderr, "audio read failed! %s\n", mDeviceName.c_str());
+                    break;
+                }
+
+                MoudleConfig::mSleep(1);
+                continue;
+            }
+            else
+            {
+                fprintf(stderr, "audio read failed! %s\n", mDeviceName.c_str());
+
+                break;
+            }
+        }
+
+        mLastReadFailedTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+        if (m_pause)
+        {
+            av_packet_unref(&packet);
+            MoudleConfig::mSleep(10);
+            continue;
+        }
+//qDebug()<<packet.stream_index<<audioStream<<packet.size;
+        if(packet.stream_index == audioStream)
+        {
+            int64_t time = 0;
+//            if (m_saveVideoFileThread)
+            {
+                if (m_getFirst)
+                {
+                    int64_t secondTime = MoudleConfig::getTimeStamp_MilliSecond();
+                    time = secondTime - firstTime + timeIndex;
+                }
+                else
+                {
+                    firstTime = MoudleConfig::getTimeStamp_MilliSecond();
+                    timeIndex = 0;
+                    m_getFirst = true;
+                }
+            }
+//fprintf(stderr, "read audio frame %s size = %d \n", mDeviceName.c_str(), packet.size);
+            int sendRet = avcodec_send_packet(aCodecCtx, &packet);
+            if (sendRet != 0)
+            {
+               char buffer[1024] = {0};
+               av_strerror(sendRet, buffer, 1024);
+               fprintf(stderr, "input AVPacket to decoder failed! ret = %d %s\n", sendRet, buffer);
+            }
+            else
+            {
+            //    while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
+                while(1)
+                {
+                    int ret = avcodec_receive_frame(aCodecCtx, aFrame);
+                    if (ret != 0)
+                    {
+            //            char buffer[1024] = {0};
+            //            av_strerror(ret, buffer, 1024);
+            //            fprintf(stderr, "avcodec_receive_frame = %d %s\n", ret, buffer);
+                        break;
+                    }
+
+                    int dst_nb_samples = av_rescale_rnd(aFrame->nb_samples, out_sample_rate, in_sample_rate, AV_ROUND_UP);
+
+
+                    ///the sample count and format are only known after decoding a frame, so initialization is done here
+                    if (aFrame_ReSample == nullptr || aFrame_ReSample->nb_samples != dst_nb_samples)
+                    {
+                        if (aFrame_ReSample)
+                        {
+                            av_free(aFrame_ReSample);
+                            aFrame_ReSample = nullptr;
+                        }
+
+                        aFrame_ReSample = av_frame_alloc();
+
+//                        int nb_samples = av_rescale_rnd(swr_get_delay(swrCtx, out_sample_rate) + aFrame->nb_samples, out_sample_rate, in_sample_rate, AV_ROUND_UP);
+//                        av_samples_fill_arrays(aFrame_ReSample->data, aFrame_ReSample->linesize, audio_buf_resample, audio_tgt_channels, aFrame_ReSample->nb_samples, out_sample_fmt, 0);
+
+                        aFrame_ReSample->format = out_sample_fmt;
+                        aFrame_ReSample->channel_layout = out_ch_layout;
+                        aFrame_ReSample->sample_rate = out_sample_rate;
+                        aFrame_ReSample->nb_samples = dst_nb_samples;
+//qDebug()<<__FUNCTION__<<"samples 111:"<<aFrame->nb_samples<<aFrame_ReSample->nb_samples<<dst_nb_samples;
+
+                        ret = av_frame_get_buffer(aFrame_ReSample, 0);
+                        if (ret < 0)
+                        {
+                            fprintf(stderr, "Error allocating an audio buffer\n");
+//                            exit(1);
+                        }
+                    }
+
+                    ///perform the resampling
+                    int len2 = swr_convert(swrCtx, aFrame_ReSample->data, aFrame_ReSample->nb_samples, (const uint8_t**)aFrame->data, aFrame->nb_samples);
+
+    ///the two methods below compute the same size
+    #if 0
+                    int resampled_data_size = len2 * audio_tgt_channels * av_get_bytes_per_sample(out_sample_fmt);
+    #else
+                    int resampled_data_size = av_samples_get_buffer_size(NULL, audio_tgt_channels, aFrame_ReSample->nb_samples, out_sample_fmt, 1);
+    #endif
+
+//qDebug()<<__FUNCTION__<<"samples:"<<len2<<aFrame_ReSample->nb_samples<<aFrame->nb_samples<<dst_nb_samples<<resampled_data_size;
+
+                    int OneChannelDataSize = resampled_data_size / audio_tgt_channels;
+
+//fprintf(stderr, "OneChannelDataSize=%d %d %d\n", OneChannelDataSize, mAudioEncoder->getONEFrameSize(), aFrame->nb_samples);
+/// Planar PCM is laid out as LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR,
+/// so the left and right channel data must be written to separate files to play back correctly.
+/// To test, play each channel file on its own (open as mono, 44100 Hz, 32-bit).
+//static FILE *fp1 = fopen("out-L.pcm", "wb");
+//fwrite(aFrame_ReSample->data[0], 1, OneChannelDataSize, fp1);
+//if (audio_tgt_channels >= 2)
+//{
+//    static FILE *fp2 = fopen("out-R.pcm", "wb");
+//    fwrite(aFrame_ReSample->data[1], 1, OneChannelDataSize, fp2);
+//}
+                    dealWithAudioFrame(OneChannelDataSize);
+                }
+            }
+        }
+        else
+        {
+            fprintf(stderr, "other %d \n", packet.stream_index);
+        }
+
+        av_packet_unref(&packet);
+
+    }
+
+    fprintf(stderr, "audio record stopping... \n");
+    qDebug("audio record stopping...  %s\n", mDeviceName.c_str());
+
+    m_pause = false;
+
+    deInit();
+
+    fprintf(stderr, "audio record finished! \n");
+    qDebug("audio record finished! %s\n", mDeviceName.c_str());
+}
+
+    mIsThreadRunning = false;
+
+}
+
+void GetAudioThread::dealWithAudioFrame(const int &OneChannelDataSize)
+{
+    ///The encoder takes 1024 samples per frame, but a single capture does not deliver exactly 1024,
+    ///so the data is queued and 1024 samples are pulled from the queue for each encoder call.
+    ///PS: planar PCM is laid out as LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR; the code below relies on this.
+
+    memcpy(audio_buf_L + audio_buf_size_L, aFrame_ReSample->data[0], OneChannelDataSize);
+    audio_buf_size_L += OneChannelDataSize;
+
+    if (audio_tgt_channels >= 2)
+    {
+        //data[1] is the right-channel plane of the resampled planar (FLTP) frame
+        memcpy(audio_buf_R + audio_buf_size_R, aFrame_ReSample->data[1], OneChannelDataSize);
+        audio_buf_size_R += OneChannelDataSize;
+    }
+
+    int index = 0;
+
+    int leftSize  = audio_buf_size_L;
+
+    int ONEChannelAudioSize = mONEFrameSize / audio_tgt_channels;
+
+    ///The capture delivers far more data than the encoder needs per call,
+    ///so the captured data is split up and fed to the encoder in several passes.
+    ///Since planar PCM is laid out as LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR, the two planes
+    ///are simply concatenated before being handed over (see the sketch after this function).
+    while(1)
+    {
+        if (leftSize >= ONEChannelAudioSize)
+        {
+            uint8_t * buffer = (uint8_t *)malloc(ONEChannelAudioSize * audio_tgt_channels);
+            memcpy(buffer, audio_buf_L+index, ONEChannelAudioSize);
+
+            if (audio_tgt_channels >= 2)
+            {
+                memcpy(buffer+ONEChannelAudioSize, audio_buf_R+index, ONEChannelAudioSize);
+            }
+
+            ///at 44100 samples per second and 1024 samples per frame, one frame lasts 1000 / (44100 / 1024) = 23.2199580 ms
+            mAudioPts += 23.2199580;
+
+            int64_t time = mAudioPts;
+
+            PCMFramePtr framePtr = std::make_shared<PCMFrame>();
+            framePtr->setFrameBuffer(buffer, ONEChannelAudioSize * audio_tgt_channels, time);
+
+            free(buffer);
+
+            if (mCallBackFunc != nullptr)
+            {
+                mCallBackFunc(framePtr, mCallBackFuncParam);
+            }
+
+            index    += ONEChannelAudioSize;
+            leftSize -= ONEChannelAudioSize;
+        }
+        else
+        {
+            if (leftSize > 0)
+            {
+                memcpy(audio_buf_L, audio_buf_L+index, leftSize);
+                memcpy(audio_buf_R, audio_buf_R+index, leftSize);
+            }
+            audio_buf_size_L = leftSize;
+            audio_buf_size_R = leftSize;
+            break;
+        }
+    }
+}
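+
+/* Minimal sketch of the plane packing above for one encoder frame
+   (assumes 1024 samples per frame of 32-bit float, i.e. 4096 bytes per channel):
+
+   int oneChannel = 1024 * sizeof(float);                       //4096 bytes
+   std::vector<uint8_t> buffer(oneChannel * 2);
+   memcpy(buffer.data(),              audio_buf_L, oneChannel); //left plane first
+   memcpy(buffer.data() + oneChannel, audio_buf_R, oneChannel); //then right plane
+   //each such frame advances the pts by 1024 / 44100 s, i.e. about 23.2199580 ms
+*/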

+ 101 - 0
module/ScreenRecorder/src/Media/Audio/GetAudioThread.h

@@ -0,0 +1,101 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef GetAudioThread_H
+#define GetAudioThread_H
+
+#include <functional>
+
+extern "C"
+{
+    #include "libavcodec/avcodec.h"
+    #include "libavformat/avformat.h"
+    #include "libswscale/swscale.h"
+    #include "libswresample/swresample.h"
+    #include "libavdevice/avdevice.h"
+    #include "libavutil/imgutils.h"
+    #include <libavutil/time.h>
+}
+
+#include "AudioFrame/PCMFrame.h"
+#include "Mutex/Cond.h"
+
+#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio
+
+/**
+ * @brief The GetAudioThread class  captures audio from an input device
+ */
+
+class GetAudioThread
+{
+
+public:
+    explicit GetAudioThread();
+    ~GetAudioThread();
+
+//    void setIsNeedReOpenWhenReadFailed(const bool &value){mIsNeedReOpenWhenReadFailed = value;}
+    bool openDevice(const std::string &deviceName);
+
+    void startRecord(int outOneFrameSize, std::function<void (PCMFramePtr pcmFrame, void *param)> func, void *param);
+    void pauseRecord();
+    void restoreRecord();
+    void stopRecord(bool isBlock = true);
+
+protected:
+    void run();
+
+private:
+    Cond *mCond;
+    std::string mDeviceName;
+
+//    bool mIsNeedReOpenWhenReadFailed; //whether to reopen the device after a read failure
+    int64_t mLastReadFailedTime; //time of the last read failure; in non-blocking mode a read only counts as truly failed once no data has arrived for a certain period
+    int mONEFrameSize; //size of one output frame
+    float mAudioPts;
+
+    std::function<void (PCMFramePtr pcmFrame, void *param)> mCallBackFunc; //callback function
+    void* mCallBackFuncParam;
+
+    AVFormatContext	*pFormatCtx;
+    int             audioStream;
+    AVCodecContext	*aCodecCtx;
+
+    AVFrame	*aFrame;
+
+    bool mIsThreadRunning;
+    bool mIsStop;
+    bool m_pause;
+    bool m_getFirst; //whether the time base has been captured
+
+    ///the following members are used for audio resampling
+    /// ffmpeg's AAC encoder requires FLTP input,
+    /// so the audio is resampled to 44100 Hz stereo AV_SAMPLE_FMT_FLTP
+    AVFrame *aFrame_ReSample;
+    SwrContext *swrCtx;
+
+    enum AVSampleFormat in_sample_fmt; //input sample format
+    enum AVSampleFormat out_sample_fmt;//output sample format (AV_SAMPLE_FMT_FLTP)
+    int in_sample_rate;//input sample rate
+    int out_sample_rate;//output sample rate
+    int audio_tgt_channels; ///av_get_channel_layout_nb_channels(out_ch_layout);
+    int out_ch_layout;
+
+    ///storage for the captured audio data
+    /// planar PCM is laid out as LLLLLLLLLLLLLLLLLLLLLRRRRRRRRRRRRRRRRRRRRR, so the left and right channels are kept in separate buffers
+    DECLARE_ALIGNED(16, uint8_t, audio_buf_L) [AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
+    unsigned int audio_buf_size_L;
+    DECLARE_ALIGNED(16, uint8_t, audio_buf_R) [AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
+    unsigned int audio_buf_size_R;
+
+    bool initResample();
+    void dealWithAudioFrame(const int &OneChannelDataSize);
+
+    bool init(const char * const deviceName);
+    void deInit();
+
+};
+
+#endif // GetAudioThread_H
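+
+/* A minimal usage sketch of this class (the device name and frame size are
+   illustrative; on Windows the name must match a dshow audio device):
+
+   GetAudioThread audio;
+   if (audio.openDevice("virtual-audio-capturer"))
+   {
+       audio.startRecord(4096, [](PCMFramePtr frame, void *param)
+       {
+           //each callback delivers one encoder-sized PCM frame
+       }, nullptr);
+   }
+   //...
+   audio.stopRecord(true); //block until the capture thread has exited
+*/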

+ 649 - 0
module/ScreenRecorder/src/Media/Image/ImageReader.cpp

@@ -0,0 +1,649 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "ImageReader.h"
+
+extern "C"
+{
+#include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+#include <libavdevice/avdevice.h>
+#include <libavutil/imgutils.h>
+}
+
+ImageReader::ImageReader()
+{
+
+}
+
+// Read an image file; BMP, JPEG and PNG are supported
+// Input parameters:
+//        pFileName ---- image file name
+//        nBufSize ---- size of the buffer receiving the image data; when it is 0 or the buffer pointer is null, only the image dimensions are returned
+// Output parameters:
+//        pRgbBuffer ---- receives the image data; the caller must allocate a large enough buffer and pass its size via nBufSize
+//        pnWidth ---- receives the image width
+//        pnHeight ---- receives the image height
+// Return value:
+//        0 ---- success
+//        -1 ---- parameter error
+//        -2 ---- failed to open the file
+//        other ---- failed to decode the image
+// Note: when the image resolution (and hence the required buffer size) is unknown, first call with a null buffer pointer to probe
+// the dimensions, then allocate a suitable buffer and call again to get the image data (see the usage sketch after this function)
+int ImageReader::ReadRgb24Buffer(const char* pFileName, uint8_t* pRgbBuffer, int nBufSize, int* pnWidth, int* pnHeight, int nDepth)
+{
+    int ret = -100;
+
+//fprintf(stderr,"ReadImageFile %s \n", pFileName);
+
+    ///ffmpeg variables used to open the input
+     AVFormatContext *pFormatCtx;
+     AVCodecContext *pCodecCtx;
+     AVCodec *pCodec;
+
+     int videoStream;
+
+     ///video decoding objects
+     AVFrame *pFrame, *pFrameRGB_Scaled;
+     uint8_t *out_buffer_rgb_scaled; //the scaled rgb data
+     struct SwsContext *img_convert_ctx_RGBScaled;  //used to scale the RGB data
+     int numBytes_rgb_scaled; //size of the scaled rgb data
+
+     const char *file_path = pFileName;
+
+     ///ffmpeg variables used to open the image file
+     pFormatCtx = nullptr;
+     pCodecCtx  = nullptr;
+     pCodec     = nullptr;
+
+     ///video decoding objects
+     pFrame    = nullptr;
+     pFrameRGB_Scaled = nullptr;
+     out_buffer_rgb_scaled = nullptr; //the scaled rgb data
+     img_convert_ctx_RGBScaled = nullptr;  //used to scale the RGB data
+
+     AVPacket packet;
+
+     videoStream = -1;
+
+     //Allocate an AVFormatContext.
+     pFormatCtx = avformat_alloc_context();
+
+     AVDictionary* opts = nullptr;
+     av_dict_set(&opts, "rtsp_transport", "tcp", 0); //设置tcp or udp,默认一般优先tcp再尝试udp
+     av_dict_set(&opts, "stimeout", "3000000", 0);//设置超时3秒
+
+     AVStream *video_st = nullptr;
+
+     ///resolution of the output rgb data
+     int OutPutWidth = 0;
+     int OutPutHeight= 0;
+
+     if (pFormatCtx == nullptr)
+     {
+         fprintf(stderr, "apFormatCtx == nullptr \n");
+         goto end;
+     }
+
+     ///open the input file
+     if ((ret = avformat_open_input(&pFormatCtx, file_path, nullptr, &opts)) != 0)
+     {
+         char buff[128]={0};
+         av_strerror(ret, buff, 128);
+         fprintf(stderr, "avformat_open_input error %s\n", buff);
+         goto end;
+     }
+
+     ///read the stream information
+     if ((ret = avformat_find_stream_info(pFormatCtx, nullptr)) < 0)
+     {
+         char buff[128]={0};
+         av_strerror(ret, buff, 128);
+         fprintf(stderr, "avformat_find_stream_info error %s\n", buff);
+
+         goto end;
+     }
+     else
+     {
+         //find videoindex
+         videoStream = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
+         if (videoStream < 0)
+         {///a negative videoStream means no video stream was found
+             fprintf(stderr, "av_find_best_stream error videoStream=%d\n", videoStream);
+             ret = -1;
+             goto end;
+         }
+     }
+
+     ///open the video decoder
+     pCodecCtx = avcodec_alloc_context3(nullptr);
+     if (pCodecCtx == nullptr)
+     {
+         fprintf(stderr,"could not allocate AVCodecContext.\n");
+         ret = -2;
+         goto end;
+     }
+
+     avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);
+
+     pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+
+     if (pCodec == nullptr)
+     {
+         fprintf(stderr,"Codec not found.\n");
+         ret = -3;
+         goto end;
+     }
+     else
+     {
+//         pCodecCtx->thread_count = 8;
+
+         ///open the decoder
+         if ((ret = avcodec_open2(pCodecCtx, pCodec, NULL)) < 0)
+         {
+             char buff[128]={0};
+             av_strerror(ret, buff, 128);
+             fprintf(stderr, "avcodec_open2 error %s\n", buff);
+             avcodec_close(pCodecCtx);
+             goto end;
+         }
+     }
+
+     if (pCodecCtx->pix_fmt == AV_PIX_FMT_NONE)
+     {
+         fprintf(stderr,"open file failed! pCodecCtx->pix_fmt == AV_PIX_FMT_NONE \n");
+         ret = -4;
+         goto end;
+     }
+
+     video_st = pFormatCtx->streams[videoStream];
+
+     ///resolution of the output rgb data
+     OutPutWidth  = pCodecCtx->width;
+     OutPutHeight = pCodecCtx->height;
+
+//     if (OutPutWidth > 1920)
+//     {
+//         int newWidth = 1920;
+//         int newHeight;
+
+//         newHeight = OutPutHeight * newWidth / OutPutWidth;
+
+//         fprintf(stderr, "%s PixWidth is Scaled w=%d h=%d newW=%d newH=%d \n", __FUNCTION__, OutPutWidth, OutPutHeight, newWidth, newHeight);
+
+//         OutPutWidth  = newWidth;
+//         OutPutHeight = newHeight;
+
+//     }
+
+//     if (OutPutHeight > 1080)
+//     {
+//         int newWidth;
+//         int newHeight = 1080;
+
+//         newWidth = OutPutWidth * newHeight / OutPutHeight;
+
+//         fprintf(stderr, "%s PixHeight is Scaled w=%d h=%d newW=%d newH=%d \n", __FUNCTION__, OutPutWidth, OutPutHeight, newWidth, newHeight);
+
+//         OutPutWidth  = newWidth;
+//         OutPutHeight = newHeight;
+//     }
+
+     if (OutPutWidth % 2 != 0) OutPutWidth++;
+     if (OutPutHeight % 2 != 0) OutPutHeight++;
+
+     ///the image dimensions are returned even when the caller passes a null buffer
+     *pnWidth  = OutPutWidth;
+     *pnHeight = OutPutHeight;
+
+     if (pRgbBuffer == nullptr)
+     {
+         ret = 0;
+         goto end;
+     }
+
+     ///allocate storage for the decoded frames
+     {
+         AVPixelFormat tartFormat = AV_PIX_FMT_RGB24;
+
+         pFrame = av_frame_alloc();  //holds the decoded frame
+         pFrameRGB_Scaled = av_frame_alloc(); //holds the converted rgb24 data
+
+         ///convert the decoded YUV data to RGB24 and scale it
+         img_convert_ctx_RGBScaled = sws_getContext(pCodecCtx->width, pCodecCtx->height,
+                 pCodecCtx->pix_fmt, OutPutWidth, OutPutHeight,
+                 tartFormat, SWS_BICUBIC, nullptr, nullptr, nullptr);
+         numBytes_rgb_scaled = avpicture_get_size(tartFormat, OutPutWidth, OutPutHeight);
+         out_buffer_rgb_scaled = (uint8_t *) av_malloc(numBytes_rgb_scaled * sizeof(uint8_t));
+         avpicture_fill((AVPicture *) pFrameRGB_Scaled, out_buffer_rgb_scaled, tartFormat,
+                 OutPutWidth, OutPutHeight);
+
+     }
+
+//     ///dump the stream information
+//     av_dump_format(pFormatCtx, 0, file_path, 0);
+
+     fprintf(stderr,"open file succeed!\n");
+
+     ret = -200;
+
+     do{
+         if (av_read_frame(pFormatCtx, &packet) < 0)
+         {
+             fprintf(stderr,"read image stream failed!\n");
+             break; //treat this as the end of the stream
+         }
+
+         if (packet.stream_index == videoStream)
+         {
+             if (avcodec_send_packet(pCodecCtx, &packet) != 0)
+             {
+                av_packet_unref(&packet);
+                continue;
+             }
+
+             while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
+             {
+                 ///check whether the decoded frame is a key frame
+                 bool isKeyFrame = false;
+
+                 if(pFrame->key_frame)
+                 {
+                     isKeyFrame = true;
+                 }
+
+ //                ///flip the image vertically, otherwise the rgb24 output is upside down
+ //                pFrame->data[0] += pFrame->linesize[0] * (pCodecCtx->height - 1);
+ //                pFrame->linesize[0] *= -1;
+ //                pFrame->data[1] += pFrame->linesize[1] * (pCodecCtx->height / 2 - 1);
+ //                pFrame->linesize[1] *= -1;
+ //                pFrame->data[2] += pFrame->linesize[2] * (pCodecCtx->height / 2 - 1);
+ //                pFrame->linesize[2] *= -1;
+
+                 ///convert the decoded image to rgb24
+                 sws_scale(img_convert_ctx_RGBScaled,
+                         (uint8_t const * const *) pFrame->data,
+                         pFrame->linesize, 0, pCodecCtx->height, pFrameRGB_Scaled->data,
+                         pFrameRGB_Scaled->linesize);
+
+                 ///hand off the rgb data
+
+                 memcpy(pRgbBuffer, out_buffer_rgb_scaled, numBytes_rgb_scaled);
+
+                 *pnWidth  = OutPutWidth;
+                 *pnHeight = OutPutHeight;
+
+                 ret = 0;
+
+//                 FILE * fp = fopen("out.rgb24", "wb");
+//                 fwrite(out_buffer_rgb_scaled, 1, numBytes_rgb_scaled, fp);
+//                 fclose(fp);
+
+//                 fprintf(stderr,"open file succeed! %d\n", numBytes_rgb_scaled);
+
+             }
+         }
+
+         av_packet_unref(&packet);
+
+     }while(0);
+
+//     avcodec_close(pCodecCtx);
+//     avformat_close_input(&pFormatCtx);
+
+ end:
+
+     {
+         if (pFrame != nullptr)
+             av_frame_free(&pFrame);
+
+         if (pFrameRGB_Scaled != nullptr)
+             av_frame_free(&pFrameRGB_Scaled);
+
+         if (img_convert_ctx_RGBScaled != nullptr)
+             sws_freeContext(img_convert_ctx_RGBScaled);
+
+         if (out_buffer_rgb_scaled != nullptr)
+             av_free(out_buffer_rgb_scaled);
+
+         pFrame    = nullptr;
+         pFrameRGB_Scaled = nullptr;
+         out_buffer_rgb_scaled = nullptr; //the scaled rgb data
+         img_convert_ctx_RGBScaled = nullptr;  //used to scale the RGB data
+
+     }
+
+        if (pCodecCtx != nullptr)
+        {
+         avcodec_close(pCodecCtx);
+         av_free(pCodecCtx);
+        }
+
+        if (pFormatCtx != nullptr)
+        {
+          avformat_close_input(&pFormatCtx);
+          avformat_free_context(pFormatCtx);
+        }
+
+        if (opts != nullptr)
+        {
+          av_dict_free(&opts);
+        }
+
+//fprintf(stderr,"ReadImageFile %s   111 ret = %d \n", pFileName, ret);
+
+     return ret;
+}
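+
+/* Usage sketch of the probe-then-read convention described above
+   (the file name is hypothetical):
+
+   int w = 0, h = 0;
+   if (ImageReader::ReadRgb24Buffer("logo.png", nullptr, 0, &w, &h, 24) == 0)
+   {
+       std::vector<uint8_t> rgb(w * h * 3);  //3 bytes per RGB24 pixel
+       ImageReader::ReadRgb24Buffer("logo.png", rgb.data(), (int)rgb.size(), &w, &h, 24);
+   }
+*/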
+
+int ImageReader::ReadYuv420pBuffer(const char* pFileName, uint8_t* pYuv420pBuffer, int nBufSize, int* pnWidth, int* pnHeight)
+{
+    int ret = -100;
+
+//fprintf(stderr,"ReadImageFile %s \n", pFileName);
+
+    ///ffmpeg variables used to open the input
+     AVFormatContext *pFormatCtx;
+     AVCodecContext *pCodecCtx;
+     AVCodec *pCodec;
+
+     int videoStream;
+
+     ///video decoding objects
+     AVFrame *pFrame, *pFrameRGB_Scaled;
+     uint8_t *out_buffer_rgb_scaled; //the converted data (YUV420P here, despite the rgb naming)
+     struct SwsContext *img_convert_ctx_RGBScaled;  //used to convert and scale the data
+     int numBytes_rgb_scaled; //size of the converted data
+
+     const char *file_path = pFileName;
+
+     ///ffmpeg variables used to open the image file
+     pFormatCtx = nullptr;
+     pCodecCtx  = nullptr;
+     pCodec     = nullptr;
+
+     ///video decoding objects
+     pFrame    = nullptr;
+     pFrameRGB_Scaled = nullptr;
+     out_buffer_rgb_scaled = nullptr; //the converted data
+     img_convert_ctx_RGBScaled = nullptr;  //used to convert and scale the data
+
+     AVPacket packet;
+
+     videoStream = -1;
+
+     //Allocate an AVFormatContext.
+     pFormatCtx = avformat_alloc_context();
+
+     AVDictionary* opts = nullptr;
+     av_dict_set(&opts, "rtsp_transport", "tcp", 0); //设置tcp or udp,默认一般优先tcp再尝试udp
+     av_dict_set(&opts, "stimeout", "3000000", 0);//设置超时3秒
+
+     AVStream *video_st = nullptr;
+
+     ///resolution of the output data
+     int OutPutWidth = 0;
+     int OutPutHeight= 0;
+
+     if (pFormatCtx == nullptr)
+     {
+         fprintf(stderr, "apFormatCtx == nullptr \n");
+         goto end;
+     }
+
+     ///open the input file
+     if ((ret = avformat_open_input(&pFormatCtx, file_path, nullptr, &opts)) != 0)
+     {
+         char buff[128]={0};
+         av_strerror(ret, buff, 128);
+         fprintf(stderr, "avformat_open_input error %s\n", buff);
+         goto end;
+     }
+
+     ///read the stream information
+     if ((ret = avformat_find_stream_info(pFormatCtx, nullptr)) < 0)
+     {
+         char buff[128]={0};
+         av_strerror(ret, buff, 128);
+         fprintf(stderr, "avformat_find_stream_info error %s\n", buff);
+
+         goto end;
+     }
+     else
+     {
+         //find videoindex
+         videoStream = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
+         if (videoStream < 0)
+         {///a negative videoStream means no video stream was found
+             fprintf(stderr, "av_find_best_stream error videoStream=%d\n", videoStream);
+             ret = -1;
+             goto end;
+         }
+     }
+
+     ///open the video decoder
+     pCodecCtx = avcodec_alloc_context3(nullptr);
+     if (pCodecCtx == nullptr)
+     {
+         fprintf(stderr,"could not allocate AVCodecContext.\n");
+         ret = -2;
+         goto end;
+     }
+
+     avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);
+
+     pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+
+     if (pCodec == nullptr)
+     {
+         fprintf(stderr,"Codec not found.\n");
+         ret = -3;
+         goto end;
+     }
+     else
+     {
+//         pCodecCtx->thread_count = 8;
+
+         ///open the decoder
+         if ((ret = avcodec_open2(pCodecCtx, pCodec, NULL)) < 0)
+         {
+             char buff[128]={0};
+             av_strerror(ret, buff, 128);
+             fprintf(stderr, "avcodec_open2 error %s\n", buff);
+             avcodec_close(pCodecCtx);
+             goto end;
+         }
+     }
+
+     if (pCodecCtx->pix_fmt == AV_PIX_FMT_NONE)
+     {
+         fprintf(stderr,"open file failed! pCodecCtx->pix_fmt == AV_PIX_FMT_NONE \n");
+         ret = -4;
+         goto end;
+     }
+
+     video_st = pFormatCtx->streams[videoStream];
+
+     ///resolution of the output data
+     OutPutWidth  = pCodecCtx->width;
+     OutPutHeight = pCodecCtx->height;
+
+//     if (OutPutWidth > 1920)
+//     {
+//         int newWidth = 1920;
+//         int newHeight;
+
+//         newHeight = OutPutHeight * newWidth / OutPutWidth;
+
+//         fprintf(stderr, "%s PixWidth is Scaled w=%d h=%d newW=%d newH=%d \n", __FUNCTION__, OutPutWidth, OutPutHeight, newWidth, newHeight);
+
+//         OutPutWidth  = newWidth;
+//         OutPutHeight = newHeight;
+
+//     }
+
+//     if (OutPutHeight > 1080)
+//     {
+//         int newWidth;
+//         int newHeight = 1080;
+
+//         newWidth = OutPutWidth * newHeight / OutPutHeight;
+
+//         fprintf(stderr, "%s PixHeight is Scaled w=%d h=%d newW=%d newH=%d \n", __FUNCTION__, OutPutWidth, OutPutHeight, newWidth, newHeight);
+
+//         OutPutWidth  = newWidth;
+//         OutPutHeight = newHeight;
+//     }
+
+     if (OutPutWidth % 2 != 0) OutPutWidth++;
+     if (OutPutHeight % 2 != 0) OutPutHeight++;
+
+     ///the image dimensions are returned even when the caller passes a null buffer
+     *pnWidth  = OutPutWidth;
+     *pnHeight = OutPutHeight;
+
+     if (pYuv420pBuffer == nullptr)
+     {
+         ret = 0;
+         goto end;
+     }
+
+     ///allocate storage for the decoded frames
+     {
+         AVPixelFormat tartFormat = AV_PIX_FMT_YUV420P;
+
+         pFrame = av_frame_alloc();  //holds the decoded frame
+         pFrameRGB_Scaled = av_frame_alloc(); //holds the converted YUV420P data
+
+         ///convert the decoded frame to YUV420P and scale it
+         img_convert_ctx_RGBScaled = sws_getContext(pCodecCtx->width, pCodecCtx->height,
+                 pCodecCtx->pix_fmt, OutPutWidth, OutPutHeight,
+                 tartFormat, SWS_BICUBIC, nullptr, nullptr, nullptr);
+
+         numBytes_rgb_scaled = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, OutPutWidth, OutPutHeight, 1);  //align to 1 byte so the computed size is closest to the actual data size
+		 out_buffer_rgb_scaled = static_cast<uint8_t *>(av_malloc(numBytes_rgb_scaled * sizeof(uint8_t)));
+         av_image_fill_arrays(pFrameRGB_Scaled->data, pFrameRGB_Scaled->linesize, out_buffer_rgb_scaled, AV_PIX_FMT_YUV420P, OutPutWidth, OutPutHeight, 1);
+
+		 //numBytes_rgb_scaled = avpicture_get_size(tartFormat, OutPutWidth, OutPutHeight);
+		 //out_buffer_rgb_scaled = (uint8_t *)av_malloc(numBytes_rgb_scaled * sizeof(uint8_t));
+		 //avpicture_fill((AVPicture *)pFrameRGB_Scaled, out_buffer_rgb_scaled, tartFormat,
+			// OutPutWidth, OutPutHeight);
+
+     }
+
+//     ///dump the stream information
+     av_dump_format(pFormatCtx, 0, file_path, 0);
+
+     fprintf(stderr,"open file succeed!\n");
+
+     ret = -200;
+
+     do{
+         if (av_read_frame(pFormatCtx, &packet) < 0)
+         {
+             fprintf(stderr,"read image stream failed!\n");
+             break; //treat this as the end of the stream
+         }
+
+         if (packet.stream_index == videoStream)
+         {
+             if (avcodec_send_packet(pCodecCtx, &packet) != 0)
+             {
+                av_packet_unref(&packet);
+                continue;
+             }
+
+             while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
+             {
+                 ///check whether the decoded frame is a key frame
+                 bool isKeyFrame = false;
+
+                 if(pFrame->key_frame)
+                 {
+                     isKeyFrame = true;
+                 }
+
+ //                ///flip the image vertically, otherwise the rgb24 output is upside down
+ //                pFrame->data[0] += pFrame->linesize[0] * (pCodecCtx->height - 1);
+ //                pFrame->linesize[0] *= -1;
+ //                pFrame->data[1] += pFrame->linesize[1] * (pCodecCtx->height / 2 - 1);
+ //                pFrame->linesize[1] *= -1;
+ //                pFrame->data[2] += pFrame->linesize[2] * (pCodecCtx->height / 2 - 1);
+ //                pFrame->linesize[2] *= -1;
+
+                 ///convert the decoded image to the target format (YUV420P)
+                 sws_scale(img_convert_ctx_RGBScaled,
+                         (uint8_t const * const *) pFrame->data,
+                         pFrame->linesize, 0, pCodecCtx->height, pFrameRGB_Scaled->data,
+                         pFrameRGB_Scaled->linesize);
+
+                 ///hand off the converted data
+
+                 memcpy(pYuv420pBuffer, out_buffer_rgb_scaled, numBytes_rgb_scaled);
+
+                 *pnWidth  = OutPutWidth;
+                 *pnHeight = OutPutHeight;
+
+                 ret = 0;
+
+//                 FILE * fp = fopen("out.rgb24", "wb");
+//                 fwrite(out_buffer_rgb_scaled, 1, numBytes_rgb_scaled, fp);
+//                 fclose(fp);
+
+//                 fprintf(stderr,"open file succeed! %d\n", numBytes_rgb_scaled);
+
+             }
+         }
+
+         av_packet_unref(&packet);
+
+     }while(0);
+
+//     avcodec_close(pCodecCtx);
+//     avformat_close_input(&pFormatCtx);
+
+ end:
+
+     {
+         if (pFrame != nullptr)
+             av_frame_free(&pFrame);
+
+         if (pFrameRGB_Scaled != nullptr)
+             av_frame_free(&pFrameRGB_Scaled);
+
+         if (img_convert_ctx_RGBScaled != nullptr)
+             sws_freeContext(img_convert_ctx_RGBScaled);
+
+         if (out_buffer_rgb_scaled != nullptr)
+             av_free(out_buffer_rgb_scaled);
+
+         pFrame    = nullptr;
+         pFrameRGB_Scaled = nullptr;
+         out_buffer_rgb_scaled = nullptr; //the converted data
+         img_convert_ctx_RGBScaled = nullptr;  //used to convert and scale the data
+
+     }
+
+        if (pCodecCtx != nullptr)
+        {
+         avcodec_close(pCodecCtx);
+         av_free(pCodecCtx);
+        }
+
+        if (pFormatCtx != nullptr)
+        {
+          avformat_close_input(&pFormatCtx);
+          avformat_free_context(pFormatCtx);
+        }
+
+        if (opts != nullptr)
+        {
+          av_dict_free(&opts);
+        }
+
+//fprintf(stderr,"ReadImageFile %s   111 ret = %d \n", pFileName, ret);
+
+     return ret;
+}

+ 22 - 0
module/ScreenRecorder/src/Media/Image/ImageReader.h

@@ -0,0 +1,22 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+#ifndef IMAGEREADER_H
+#define IMAGEREADER_H
+
+#include <stdint.h>
+
+class ImageReader
+{
+public:
+    ImageReader();
+
+    static int ReadRgb24Buffer(const char* pFileName, uint8_t* pRgbBuffer, int nBufSize, int* pnWidth, int* pnHeight, int nDepth);
+    static int ReadYuv420pBuffer(const char* pFileName, uint8_t* pYuv420pBuffer, int nBufSize, int* pnWidth, int* pnHeight);
+
+
+};
+
+#endif // IMAGEREADER_H

+ 265 - 0
module/ScreenRecorder/src/Media/Image/yuv420p.cpp

@@ -0,0 +1,265 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "yuv420p.h"
+
+#include <string.h>
+
+void cod_video_blend_u8 (unsigned char *  d1, int d1_stride,
+	const unsigned char *  s1, int s1_stride, int a, int w, int h)
+{
+	int i;
+	int j;
+	cod_int8 * ptr0;
+	const cod_int8 * ptr4;
+	cod_int8 var34;
+	cod_int8 var35;
+	cod_union16 var36;
+	cod_int8 var37;
+	cod_union16 var38;
+	cod_union16 var39;
+	cod_union16 var40;
+	cod_union16 var41;
+	cod_union16 var42;
+	cod_union16 var43;
+	cod_union16 var44;
+
+	for (j = 0; j < h; j++)
+	{
+		ptr0 = (cod_int8 *)COD_PTR_OFFSET (d1, d1_stride * j);
+		ptr4 = (cod_int8 *)COD_PTR_OFFSET (s1, s1_stride * j);
+
+		/* 5: loadpw */
+		var36.i = a;
+
+		for (i = 0; i < w; i++)
+		{
+			/* 0: loadb */
+			var34 = ptr0[i];
+			/* 1: convubw */
+			var38.i = (cod_uint8) var34;
+			/* 2: loadb */
+			var35 = ptr4[i];
+			/* 3: convubw */
+			var39.i = (cod_uint8) var35;
+			/* 4: subw */
+			var40.i = var39.i - var38.i;
+			/* 6: mullw */
+			var41.i = (var40.i * var36.i) & 0xffff;
+			/* 7: shlw */
+			var42.i = ((cod_uint16) var38.i) << 8;
+			/* 8: addw */
+			var43.i = var42.i + var41.i;
+			/* 9: shruw */
+			var44.i = ((cod_uint16) var43.i) >> 8;
+			/* 10: convsuswb */
+			var37 = COD_CLAMP (var44.i, 0, 255);
+			/* 11: storeb */
+			ptr0[i] = var37;
+		}
+	}
+
+}
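+
+/* The loop above computes, per byte, with a the blend factor in [0,256]:
+
+       dest = clamp((dest*256 + (src - dest)*a) >> 8, 0, 255)
+            = dest + ((src - dest)*a)/256
+
+   e.g. dest = 100, src = 200, a = 128 (50% alpha):
+       100 + ((200 - 100)*128)/256 = 100 + 50 = 150
+*/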
+
+
+/*************************************************************************************/
+/* Y444, Y42B, I420, YV12, Y41B */
+inline void _blend_420p_planar (const unsigned char * src, unsigned char * dest,
+	int src_stride, int dest_stride, int src_width, int src_height, double src_alpha)
+{
+	/* If it's completely transparent... we just return */
+	if (src_alpha == 0.0)
+	{
+		return;
+	}
+
+	/* If it's completely opaque, we do a fast copy */
+	if (src_alpha == 1.0)
+	{
+		for (int i = 0; i < src_height; i++)
+		{
+			memcpy (dest, src, src_width);
+			src += src_stride;
+			dest += dest_stride;
+		}
+		return;
+	}
+	// do alpha blend
+	int b_alpha = COD_CLAMP ((int) (src_alpha * 256), 0, 256);
+	cod_video_blend_u8(dest, dest_stride, src, src_stride, b_alpha, src_width, src_height);
+}
+
+void blend_420p_planar (codImageFrame * srcframe, int xpos, int ypos, double src_alpha, codImageFrame * destframe)
+{
+	const unsigned char *b_src;
+	unsigned char *b_dest;
+	int b_src_width;
+	int b_src_height;
+	int xoffset = 0;
+	int yoffset = 0;
+	int src_comp_rowstride, dest_comp_rowstride;
+	int src_comp_height;
+	int src_comp_width;
+	int comp_ypos, comp_xpos;
+	int comp_yoffset, comp_xoffset;
+
+	int src_width = srcframe->width;
+	int src_height = srcframe->height;
+
+	int dest_width = destframe->width;
+	int dest_height = destframe->height;
+
+	switch(srcframe->pixfmt)
+	{
+	case cod_fmt_yv12:
+	case cod_fmt_i420:
+		xpos = COD_ROUND_UP_2 (xpos);
+		ypos = COD_ROUND_UP_2 (ypos);
+		break;
+
+	case cod_fmt_y444:
+		xpos = COD_ROUND_UP_1 (xpos);
+		ypos = COD_ROUND_UP_1 (ypos);
+		break;
+
+	case cod_fmt_y42b:
+		xpos = COD_ROUND_UP_2 (xpos);
+		ypos = COD_ROUND_UP_1 (ypos);
+		break;
+
+	case cod_fmt_y41b:
+		xpos = COD_ROUND_UP_4 (xpos);
+		ypos = COD_ROUND_UP_2 (ypos);
+		break;
+
+	default:
+		xpos = COD_ROUND_UP_2 (xpos);
+		ypos = COD_ROUND_UP_2 (ypos);
+		break;
+	}
+
+
+	b_src_width = src_width;
+	b_src_height = src_height;
+
+	/* adjust src pointers for negative sizes */
+	if (xpos < 0)
+	{
+		xoffset = -xpos;
+		b_src_width -= -xpos;
+		xpos = 0;
+	}
+	if (ypos < 0)
+	{
+		yoffset += -ypos;
+		b_src_height -= -ypos;
+		ypos = 0;
+	}
+	/* If x or y offset are larger then the source it's outside of the picture */
+	if (xoffset > src_width || yoffset > src_height)
+	{
+		return;
+	}
+
+	/* adjust width/height if the src is bigger than dest */
+	if (xpos + src_width > dest_width)
+	{
+		b_src_width = dest_width - xpos;
+	}
+	if (ypos + src_height > dest_height)
+	{
+		b_src_height = dest_height - ypos;
+	}
+	if (b_src_width < 0 || b_src_height < 0)
+	{
+		return;
+	}
+
+	/* First mix Y, then U, then V */
+	b_src = srcframe->data;
+	b_dest = destframe->data;
+	src_comp_rowstride = src_width;
+	dest_comp_rowstride = dest_width;
+	src_comp_width = b_src_width;
+	src_comp_height = b_src_height;
+	comp_xpos = xpos;
+	comp_ypos = ypos;
+	comp_xoffset = xoffset;
+	comp_yoffset = yoffset;
+	_blend_420p_planar (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride,
+		b_dest + comp_xpos + comp_ypos * dest_comp_rowstride,
+		src_comp_rowstride,
+		dest_comp_rowstride, src_comp_width, src_comp_height,
+		src_alpha);
+
+
+	// Blend V
+	b_src = (unsigned char *)COD_PTR_OFFSET (srcframe->data, src_width * src_height);
+	b_dest = (unsigned char *)COD_PTR_OFFSET (destframe->data, dest_width * dest_height);
+	src_comp_rowstride = src_width >> 1;
+	dest_comp_rowstride = dest_width >> 1;
+	src_comp_width = b_src_width >> 1;
+	src_comp_height = b_src_height >> 1;
+	comp_xpos = xpos >> 1;
+	comp_ypos = ypos >> 1;
+	comp_xoffset = xoffset >> 1;
+	comp_yoffset = yoffset >> 1;
+	_blend_420p_planar (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride,
+		b_dest + comp_xpos + comp_ypos * dest_comp_rowstride,
+		src_comp_rowstride,
+		dest_comp_rowstride, src_comp_width, src_comp_height,
+		src_alpha);
+
+	// Blend U; the chroma parameters are the same as for V
+	b_src = (unsigned char *)COD_PTR_OFFSET (srcframe->data, src_width * (src_height + (src_height >> 2)));
+	b_dest = (unsigned char *)COD_PTR_OFFSET (destframe->data, dest_width * (dest_height + (dest_height >> 2)));
+	_blend_420p_planar (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride,
+		b_dest + comp_xpos + comp_ypos * dest_comp_rowstride,
+		src_comp_rowstride,
+		dest_comp_rowstride, src_comp_width, src_comp_height,
+		src_alpha);
+}
+
+void Yuv420Cut(int x,int y,int desW,int desH,int srcW,int srcH,uint8_t *srcBuffer,uint8_t *desBuffer)
+{
+    int tmpRange;
+    int bufferIndex;
+
+    int yIndex = 0;
+    bufferIndex = 0 + x + y*srcW;
+    tmpRange = srcW * desH;
+    for (int i=0;i<tmpRange;) //copy the Y plane row by row
+    {
+        memcpy(desBuffer+yIndex,srcBuffer+bufferIndex+i,desW);
+        i += srcW;
+        yIndex += desW;
+    }
+
+    int uIndex = desW * desH;
+    int uIndexStep = srcW/2;
+    int uWidthCopy = desW/2;
+    bufferIndex = srcW * srcH+x/2 + y /2 *srcW / 2;
+    tmpRange = srcW * desH / 4;
+    for (int i=0;i<tmpRange;) //copy the U plane row by row
+    {
+        memcpy(desBuffer+uIndex,srcBuffer+bufferIndex+i,uWidthCopy);
+        i += uIndexStep;
+        uIndex += uWidthCopy;
+    }
+
+
+    int vIndex = desW * desH +  desW * desH /4;
+    int vIndexStep = srcW/2;
+    int vWidthCopy = desW/2;
+    bufferIndex = srcW*srcH + srcW*srcH/4 + x/2 + y /2 *srcW / 2;
+    tmpRange = srcW * desH / 4;
+    for (int i=0;i<tmpRange;) //copy the V plane row by row
+    {
+        memcpy(desBuffer+vIndex,srcBuffer+bufferIndex+i,vWidthCopy);
+        i += vIndexStep;
+        vIndex += vWidthCopy;
+    }
+}
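+
+/* Memory layout assumed by Yuv420Cut for a srcW x srcH YUV420p buffer:
+
+       Y plane: srcW*srcH   bytes at offset 0
+       U plane: srcW*srcH/4 bytes at offset srcW*srcH
+       V plane: srcW*srcH/4 bytes at offset srcW*srcH*5/4
+
+   Hypothetical usage, cropping a 640x360 region at (16, 8) out of a 1280x720
+   frame (x, y and the crop size should stay even, since chroma is subsampled 2x2):
+
+   std::vector<uint8_t> src(1280*720*3/2), dst(640*360*3/2);
+   Yuv420Cut(16, 8, 640, 360, 1280, 720, src.data(), dst.data());
+*/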

+ 102 - 0
module/ScreenRecorder/src/Media/Image/yuv420p.h

@@ -0,0 +1,102 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+#include <stdint.h>
+
+/* force a int [b,c] */
+#define COD_CLAMP(a,b,c) \
+	(a) < (b) ? (b) : ((a) > (c) ? (c) : (a))
+
+#define COD_ROUND_UP_1(x) (x)
+#define COD_ROUND_UP_2(x) ((x) & ~1)   //despite the name, this rounds down to a multiple of 2
+#define COD_ROUND_UP_4(x) ((x) & ~3)   //rounds down to a multiple of 4
+
+#define COD_PTR_OFFSET(ptr,offset) ((void *)(((unsigned char *)(ptr)) + (offset)))
+typedef char cod_int8;
+typedef unsigned char cod_uint8;
+typedef short cod_int16;
+typedef unsigned short cod_uint16;
+typedef int cod_int32;
+typedef unsigned int cod_uint32;
+typedef long long cod_int64;
+typedef unsigned long long cod_uint64;
+
+
+typedef union 
+{
+	cod_int16	i;
+	cod_int8	x2[2];
+} cod_union16;
+
+typedef union
+{
+	cod_int32	i; 
+	float		f; 
+	cod_int16	x2[2]; 
+	cod_int8	x4[4];
+} cod_union32;
+
+typedef union
+{
+	cod_int64	i;
+	double		f;
+	cod_int32	x2[2];
+	float		x2f[2];
+	cod_int16	x4[4];
+} cod_union64;
+
+/* define supported pixel format */
+typedef enum
+{
+	cod_fmt_unknown,
+	cod_fmt_rgb,
+	cod_fmt_rgba,
+	cod_fmt_argb,
+	cod_fmt_rgbx,
+	cod_fmt_xrgb,
+
+	cod_fmt_bgr,
+	cod_fmt_bgra,
+	cod_fmt_abgr,
+	cod_fmt_bgrx,
+	cod_fmt_xbgr,
+
+	cod_fmt_yuy2,
+	cod_fmt_i420,
+	cod_fmt_yv12,
+	cod_fmt_ayuv,
+	cod_fmt_nv12,
+	cod_fmt_nv21,
+	cod_fmt_yuyv,
+	cod_fmt_yvyu,
+	cod_fmt_uyvy,
+	cod_fmt_y444,
+	cod_fmt_y42b,
+	cod_fmt_y41b
+}cod_fmt;
+
+
+/* define videoframe */
+typedef struct tag_codImageFrame
+{
+	unsigned char	*data;						// memory pointer
+	int				width;						// width of image
+	int				height;						// height of image
+	long			stride;						// stride of image
+	cod_fmt			pixfmt;						// pixel format of image
+
+	tag_codImageFrame()
+	{
+		data = 0;
+		width = height = stride = 0;
+
+		pixfmt = cod_fmt_unknown;
+	}
+}codImageFrame;
+
+void blend_420p_planar (codImageFrame * srcframe, int xpos, int ypos, double src_alpha, codImageFrame * destframe);
+
+///crop a yuv420 image
+void Yuv420Cut(int x,int y,int desW,int desH,int srcW,int srcH,uint8_t *srcBuffer,uint8_t *desBuffer);

+ 1540 - 0
module/ScreenRecorder/src/Media/MediaManager.cpp

@@ -0,0 +1,1540 @@
+/**
+ * 叶海辉 (Ye Haihui)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "MediaManager.h"
+
+#if defined(WIN32)
+    #include<direct.h>
+#else
+    #include<unistd.h>
+#endif
+
+extern "C"
+{
+    #include "libyuv.h"
+};
+
+#include "Image/yuv420p.h"
+#include "Image/ImageReader.h"
+
+#include "MoudleConfig.h"
+#include "Mix/PcmMix.h"
+
+static bool gIsInited = false;
+
+void doInit()
+{
+    if (!gIsInited)
+    {
+    #ifdef WIN32
+        WSADATA dat;
+        WSAStartup(MAKEWORD(2, 2), &dat);
+    #endif // WIN32
+
+        av_register_all();
+        avformat_network_init();
+        avdevice_register_all();
+
+        gIsInited = true;
+    }
+
+}
+
+MediaManager::MediaManager()
+{
+    doInit();
+
+    mGetCameraVideoThread = nullptr;
+    mGetAudioThread = nullptr;
+    mGetAudioThread_MicroPhone = nullptr;
+
+    mGetCameraVideoThread = new GetVideoThread(); //thread that captures camera frames
+    mGetAudioThread = new GetAudioThread();
+    mGetAudioThread_MicroPhone = new GetAudioThread();
+
+//    mGetAudioThread->setIsNeedReOpenWhenReadFailed(false);
+//    mGetAudioThread_MicroPhone->setIsNeedReOpenWhenReadFailed(true);
+
+    mCond_Audio = new Cond();
+    mCond_Video = new Cond();
+
+//    mVideoEncoder     = new VideoEncoder();
+    mAudioEncoder     = new AudioEncoder();
+//    mVideoEncoder->openEncoder();
+    mAudioEncoder->openEncoder();
+
+    mIsAudioBufferManagerStop = true;
+    mIsAudioBufferManagerThreadRunning = false;
+
+    mIsVideoBufferManagerStop = true;
+    mIsVideoBufferManagerThreadRunning = false;
+
+    mIsCaptureNow = false;
+    mStartCaptureTime = 0;
+    mStartWriteVideoTime = 0;
+
+    mIsMicroPhoneMute   = false;
+    mIsVirtualAudioMute = false;
+
+    mVideoRecorderEventHandle = nullptr;
+
+    mCameraFrameCallBackFunc      = nullptr; //camera frame callback
+    mCameraFrameCallBackFuncParam = nullptr; //user parameter for the callback
+
+    mVideoFileWidth  = 0;
+    mVideoFileHeight = 0;
+    mVideoFileWriter = new VideoFileWriter();
+
+    mLastInputRtmpVideoTime = 0; //time the last video frame was pushed to rtmp
+    mRtmpFrameRate = 10;
+
+    ///track the two audio capture threads, used for mixing
+    {
+        {
+            AudioManagerNode node;
+            node.thread = mGetAudioThread;
+            node.pcmFrameList.clear();
+            node.lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+            mAudioManagerList.push_back(node);
+        }
+
+        {
+            AudioManagerNode node;
+            node.thread = mGetAudioThread_MicroPhone;
+            node.pcmFrameList.clear();
+            node.lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+            mAudioManagerList.push_back(node);
+        }
+    }
+
+    setFrameRate(15);
+
+    startCaptureCamera();
+    startAudioBufferMangerThread();
+
+//    {
+//        addCaptureWindowTask(1, NULL, RECT{0, 0, 1920, 1080},   RECT{0, 0, 1920, 1080},    1.0);
+//        addCaptureWindowTask(2, NULL, RECT{300, 300, 940, 980}, RECT{500, 500, 800, 900}, 0.5);
+
+//        addCaptureCameraTask(11, "USB2.0 PC CAMERA", RECT{1600, 0, 1920, 480}, 1.0);
+//        addCaptureCameraTask(12, "USB2.0 PC CAMERA", RECT{100, 0, 620, 480}, 1.0);
+
+//    }
+}
+
+MediaManager::~MediaManager()
+{
+qDebug()<<__FUNCTION__;
+    stopAll();
+qDebug()<<__FUNCTION__<<"finished!";
+}
+
+void MediaManager::stopAll()
+{
+    stopCapture();
+    stopCaptureCamera();
+    stopAudioBufferMangerThread();
+
+    clearTask();
+}
+
+void MediaManager::setCameraFrameCallBackFunc(std::function<void (VideoRawFramePtr yuvFrame, VideoRawFramePtr rgbFrame, void *param)> func, void *param)
+{
+    mCameraFrameCallBackFunc = func;
+    mCameraFrameCallBackFuncParam = param;
+}
+
+void MediaManager::setFinalVideoFrameCallBackFunc(std::function<void (VideoRawFramePtr yuvFrame, void *param)> func, void *param)
+{
+    mFinalVideoFrameCallBackFunc = func;
+    mFinalVideoFrameCallBackFuncParam = param;
+}
+
+void MediaManager::startAudioBufferMangerThread()
+{
+    if (!mIsAudioBufferManagerThreadRunning)
+    {
+        std::thread([=]
+        {
+            mIsAudioBufferManagerStop = false;
+
+            this->audioBufferManagerThreadFunc();
+
+        }).detach();
+    }
+}
+
+void MediaManager::stopAudioBufferMangerThread()
+{
+    mIsAudioBufferManagerStop = true;
+
+    while(mIsAudioBufferManagerThreadRunning)
+    {
+        MoudleConfig::mSleep(100);
+    }
+}
+
+bool MediaManager::startCapture(const bool &enableVirtualAudio)
+{
+    if (mIsCaptureNow)
+    {
+        return true;
+    }
+
+    auto inputPcmBufferFunc = [&](PCMFramePtr pcmFrame, void *param)
+    {
+        inputPcmFrame(pcmFrame, param);
+    };
+
+    bool isSucceed = false;
+
+    if (enableVirtualAudio)
+    {
+        if (mGetAudioThread->openDevice("virtual-audio-capturer"))
+        {
+            isSucceed = true;
+        }
+    }
+    else
+    {
+        isSucceed = true;
+    }
+
+    if (isSucceed)
+    {
+        mIsCaptureNow = true;
+        mStartCaptureTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+        int64_t currentTime = mVideoFileWriter->getVideoPts();
+        mStartWriteVideoTime = MoudleConfig::getTimeStamp_MilliSecond() - currentTime;
+        qDebug()<<__FUNCTION__<<"currentTime:"<<currentTime;
+
+//        std::thread([=]
+//        {
+//            mIsAudioBufferManagerStop = false;
+
+//            this->audioBufferManagerThreadFunc();
+
+//        }).detach();
+
+        std::thread([=]
+        {
+            mIsVideoBufferManagerStop = false;
+
+            this->videoBufferManagerThreadFunc();
+
+        }).detach();
+
+        if (enableVirtualAudio)
+        {
+            mGetAudioThread->startRecord(mAudioEncoder->getONEFrameSize(), inputPcmBufferFunc, mGetAudioThread);
+        }
+    }
+
+    return isSucceed;
+}
+
+bool MediaManager::stopCapture(const bool &isBlock)
+{
+    stopCaptureMic();
+
+    if (mGetAudioThread != nullptr)
+    {
+        mGetAudioThread->stopRecord(isBlock);
+    }
+
+    {
+//        mIsAudioBufferManagerStop = true;
+        mIsVideoBufferManagerStop = true;
+
+//        while(mIsAudioBufferManagerThreadRunning)
+//        {
+//            MoudleConfig::mSleep(100);
+//        }
+
+        while(mIsVideoBufferManagerThreadRunning)
+        {
+            MoudleConfig::mSleep(100);
+        }
+    }
+
+    mIsCaptureNow = false;
+
+    return true;
+}
+
+bool MediaManager::muteMicroPhone(bool isMute)
+{
+    mIsMicroPhoneMute = isMute;
+    return true;
+}
+
+bool MediaManager::muteVirtualAudio(bool isMute)
+{
+    mIsVirtualAudioMute = isMute;
+    return true;
+}
+
+bool MediaManager::setMicroPhone(const std::string &deviceName)
+{
+    bool ret = startCaptureMic(deviceName);
+    return ret;
+}
+
+bool MediaManager::startCaptureMic(const std::string &deviceName)
+{
+    auto inputPcmBufferFunc = [&](PCMFramePtr pcmFrame, void *param)
+    {
+        inputPcmFrame(pcmFrame, param);
+    };
+qDebug()<<__FUNCTION__<<"000";
+    stopCaptureMic();
+qDebug()<<__FUNCTION__<<"111";
+    bool isSucceed = false;
+
+    if (mGetAudioThread_MicroPhone->openDevice(deviceName))
+    {
+        qDebug()<<__FUNCTION__<<"222";
+        mGetAudioThread_MicroPhone->startRecord(mAudioEncoder->getONEFrameSize(), inputPcmBufferFunc, mGetAudioThread_MicroPhone);
+
+        isSucceed = true;
+    }
+qDebug()<<__FUNCTION__<<"333";
+    return isSucceed;
+}
+
+bool MediaManager::stopCaptureMic(const bool &isBlock)
+{
+    if (mGetAudioThread_MicroPhone != nullptr)
+    {
+        mGetAudioThread_MicroPhone->stopRecord(isBlock);
+    }
+
+    return true;
+}
+
+void MediaManager::startCaptureCamera()
+{
+    auto getCameraVideoFrameFunc = [=](VideoRawFramePtr yuvFramePtr, VideoRawFramePtr rgbFramePtr, void *param)
+    {
+        if (mCameraFrameCallBackFunc != nullptr)
+        {
+            mCameraFrameCallBackFunc(yuvFramePtr, rgbFramePtr, param);
+        }
+
+        inputVideoFrame(yuvFramePtr, param);
+    };
+
+    if (mGetCameraVideoThread != nullptr)
+    {
+        mGetCameraVideoThread->startRecord(getCameraVideoFrameFunc, mGetCameraVideoThread);
+    }
+}
+
+void MediaManager::stopCaptureCamera()
+{
+    if (mGetCameraVideoThread != nullptr)
+    {
+        mGetCameraVideoThread->stopRecord(true);
+    }
+}
+
+bool MediaManager::openCamera(const std::string &deviceName)
+{
+    bool isSucceed = false;
+
+    if (mGetCameraVideoThread->openCamera(deviceName) == GetVideoThread::SUCCEED)
+    {
+        isSucceed = true;
+    }
+
+    return isSucceed;
+}
+
+bool MediaManager::closeCamera()
+{
+    mGetCameraVideoThread->closeCamera();
+
+    return true;
+}
+
+bool MediaManager::openCameraCaptureMode(const std::string &deviceName)
+{
+    mIsCameraOpenCaptureMode = true;
+    bool isSucceed = openCamera(deviceName);
+    return isSucceed;
+}
+
+bool MediaManager::openCameraWindowMode(const std::string &deviceName)
+{
+    mIsCameraOpenWindowMode = true;
+    bool isSucceed = openCamera(deviceName);
+    return isSucceed;
+}
+
+bool MediaManager::closeCameraCaptureMode()
+{
+    mIsCameraOpenCaptureMode = false;
+
+    if (!mIsCameraOpenCaptureMode && !mIsCameraOpenWindowMode)
+    {
+        closeCamera();
+    }
+
+    return true;
+
+}
+
+bool MediaManager::closeCameraWindowMode()
+{
+    mIsCameraOpenWindowMode = false;
+
+    if (!mIsCameraOpenCaptureMode && !mIsCameraOpenWindowMode)
+    {
+        closeCamera();
+    }
+
+    return true;
+}
+
+void MediaManager::setQuality(const int &quality)
+{
+    mVideoFileWriter->setQuality(quality);
+}
+
+void MediaManager::setFrameRate(const int &frameRate)
+{
+    mRtmpFrameRate = frameRate + 5; //feed the writer thread a few extra frames per second, since it applies its own frame-dropping strategy
+    mVideoFileWriter->setVideoFrameRate(frameRate);
+}
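+
+///Worked example of the +5 overshoot: with frameRate = 20 the file writer is
+///configured for 20 fps (one frame every 50 ms), while mRtmpFrameRate becomes 25,
+///so videoBufferManagerThreadFunc produces a frame every 1000.0 / 25 = 40 ms and
+///the writer's own frame-dropping strategy trims the surplus.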
+
+bool MediaManager::isRecording()
+{
+    return mVideoFileWriter->isThreadRunning();
+}
+
+bool MediaManager::openFile(const std::string &filePath)
+{
+    mStartWriteVideoTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+    auto videoEncodedFunc = [=](VideoEncodedFramePtr videoFramePtr, void *param)
+    {
+//        qDebug()<<__FUNCTION__<<videoFramePtr->getNalu()->nalu.h264Nalu.len;
+//        if (param == mVideoEncoder)
+//        {
+//            mRtmpSender->inputVideoFrame(videoFramePtr);
+//        }
+    };
+qDebug()<<__FUNCTION__<<"111";
+    bool ret = true;
+
+    mVideoFileWriter->setFileName(filePath);
+qDebug()<<__FUNCTION__<<"222";
+    mVideoFileWriter->startEncode(videoEncodedFunc, nullptr);
+qDebug()<<__FUNCTION__<<"333";
+qDebug()<<__FUNCTION__<<filePath.c_str();
+    return ret;
+}
+
+bool MediaManager::closeFile()
+{
+    mVideoFileWriter->stopEncode();
+    return true;
+}
+
+std::list<VideoFileInfo> MediaManager::getVideoFileList()
+{
+    return mVideoFileWriter->getVideoFileList();
+}
+
+int64_t MediaManager::getVideoFileCurrentTime()
+{
+    int64_t time = mVideoFileWriter->getVideoPts();
+    return time;
+}
+
+void MediaManager::setVideoSize(const int &width, const int &height)
+{
+    int w = width;
+    int h = height;
+
+    if (w % 2 != 0)
+    {
+        w++;
+    }
+
+    if (h % 2 != 0)
+    {
+        h++;
+    }
+
+    mCond_Video->Lock();
+
+    mVideoFileWidth  = w;
+    mVideoFileHeight = h;
+
+    mVideoFileWriter->setWidth(mVideoFileWidth, mVideoFileHeight);
+
+    mVideoFrameBackGround = std::make_shared<VideoRawFrame>();
+    mVideoFrameBackGround->initBuffer(mVideoFileWidth, mVideoFileHeight, VideoRawFrame::FRAME_TYPE_YUV420P);
+
+    int YSize = mVideoFileWidth * mVideoFileHeight;
+    int USize = mVideoFileWidth * mVideoFileHeight / 4;
+    int VSize = USize;
+
+    ///Black in YUV is (0,128,128): leave Y at 0 and fill the chroma planes with 128.
+    memset(mVideoFrameBackGround->getBuffer() + YSize, 128, USize+VSize);
+
+    mCond_Video->Unlock();
+
+}
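+
+///YUV420P plane layout assumed above, for a W x H frame:
+///    Y: W*H   bytes at offset 0
+///    U: W*H/4 bytes at offset W*H
+///    V: W*H/4 bytes at offset W*H + W*H/4
+///so leaving Y at 0 and memset-ing the U and V planes to 128 produces a black frame.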
+
+//void MediaManager::setCaptureWindowRect(const int &x, const int &y, const int &width, const int &height)
+//{
+//    mGetScreenVideoThread->setRect(x, y, width, height);
+//}
+
+void MediaManager::setVideoRecorderEventHandle(VideoRecorderEventHandle *handle)
+{
+    mVideoRecorderEventHandle = handle;
+//    mVideoFileWriter->setEventHandle(handle);
+}
+
+void MediaManager::addCaptureWindowTask(const int &id, const HWND &hWnd, const RECT &srcRect, const RECT &destRect, const float &alpha)
+{
+qDebug()<<__FUNCTION__<<id<<hWnd<<srcRect.left<<srcRect.top<<srcRect.right<<srcRect.bottom<<destRect.left<<destRect.top<<destRect.right<<destRect.bottom<<alpha;
+
+    VideoManagerNode node;
+    node.id = id;
+//    node.thread = thread;
+    node.videoFrame = nullptr;
+    node.type = TaskType_Window;
+    node.rect.destRect = destRect;
+    node.alpha = alpha;
+    node.lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+    CaptureWindowThread *thread = nullptr;
+qDebug()<<__FUNCTION__<<"000";
+    mCond_Video->Lock();
+qDebug()<<__FUNCTION__<<"111";
+    for (VideoManagerNode tmpNode : mVideoManagerList)
+    {
+        if (tmpNode.id == id)
+        {
+            thread = (CaptureWindowThread*)tmpNode.thread;
+            node.videoFrame = tmpNode.videoFrame;
+            break;
+        }
+    }
+
+    if (thread == nullptr)
+    {
+        thread = new CaptureWindowThread(); //thread that captures the window image
+    }
+
+    node.thread = thread;
+
+    mVideoManagerList.remove(node);
+    mVideoManagerList.push_back(node);
+
+    mCond_Video->Unlock();
+
+    auto getScreenVideoFrameFunc = [=](VideoRawFramePtr videoFramePtr, void *param)
+    {
+        inputVideoFrame(videoFramePtr, param);
+    };
+    int64_t currentTime = mVideoFileWriter->getVideoPts();
+
+    thread->stopRecord(true);
+
+    int x = srcRect.left;
+    int y = srcRect.top;
+    int w = srcRect.right - srcRect.left;
+    int h = srcRect.bottom - srcRect.top;
+qDebug()<<__FUNCTION__<<"222"<<x<<y<<w<<h;
+    thread->setHWND(hWnd);
+    thread->setRect(x, y, w, h);
+
+    thread->startRecord(getScreenVideoFrameFunc, thread, currentTime);
+
+    qDebug()<<__FUNCTION__<<"111"<<id<<hWnd<<srcRect.left<<srcRect.top<<srcRect.right<<srcRect.bottom<<destRect.left<<destRect.top<<destRect.right<<destRect.bottom<<alpha;
+
+}
+
+void MediaManager::addCaptureCameraTask(const int &id, const RECT &destRect, const float &alpha)
+{
+    VideoManagerNode node;
+    node.id = id;
+    node.thread = mGetCameraVideoThread;
+    node.videoFrame = nullptr;
+    node.type = TaskType_Camera;
+    node.rect.destRect = destRect;
+    node.alpha = alpha;
+    node.lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+    mCond_Video->Lock();
+    mVideoManagerList.remove(node);
+    mVideoManagerList.push_back(node);
+    mCond_Video->Unlock();
+}
+
+void MediaManager::addCapturePictureTask(const int &id, const std::string &filePath, const RECT &destRect, const float &alpha)
+{
+    VideoRawFramePtr videoFrame = nullptr;
+
+    {
+        int width  = 0;
+        int height = 0;
+        int ret = ImageReader::ReadYuv420pBuffer(filePath.c_str(), NULL, 0, &width, &height);
+qDebug()<<__FUNCTION__<<filePath.c_str()<<ret<<width<<height<<destRect.left<<destRect.top<<destRect.right<<destRect.bottom;
+        if (ret == 0 && width > 0 && height > 0)
+        {
+            videoFrame = std::make_shared<VideoRawFrame>();
+            videoFrame->initBuffer(width, height, VideoRawFrame::FRAME_TYPE_YUV420P);
+
+            ImageReader::ReadYuv420pBuffer(filePath.c_str(), videoFrame->getBuffer(), videoFrame->getSize(), &width, &height);
+        }
+    }
+
+//    FILE *fp = fopen("out-pic.yuv", "wb");
+//    fwrite(videoFrame->getBuffer(), 1, videoFrame->getSize(), fp);
+//    fclose(fp);
+
+    VideoManagerNode node;
+    node.id = id;
+    node.thread = nullptr;
+    node.videoFrame = videoFrame;
+    node.type = TaskType_Picture;
+    node.rect.destRect = destRect;
+    node.alpha = alpha;
+    node.lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+    mCond_Video->Lock();
+    mVideoManagerList.remove(node);
+    mVideoManagerList.push_back(node);
+    mCond_Video->Unlock();
+}
+
+bool MediaManager::removeTask(const int &id)
+{
+    bool isExist = false;
+
+    std::list<CaptureWindowThread *> needStopThreadList; //threads that still need a blocking stopRecord(true)
+
+    mCond_Video->Lock();
+
+    for (VideoManagerNode node : mVideoManagerList)
+    {
+        if (node.id == id)
+        {
+            isExist = true;
+
+            if (node.type == TaskType_Window)
+            {
+                CaptureWindowThread * thread = (CaptureWindowThread*)node.thread;
+                thread->stopRecord(false);
+                needStopThreadList.push_back(thread);
+                ///Do not block here: while the capture thread delivers data it calls inputVideoFrame,
+                ///which also takes mCond_Video->Lock(), so waiting under the lock would deadlock.
+                ///The blocking wait happens below, after the lock is released.
+//                thread->stopRecord(true);
+//                delete thread;
+            }
+            else if (node.type == TaskType_Camera)
+            {
+                GetVideoThread * thread = (GetVideoThread*)node.thread;
+    //            thread->startRecord(getCameraVideoFrameFunc, thread);
+            }
+            else if (node.type == TaskType_Picture)
+            {
+
+            }
+        }
+    }
+qDebug()<<__FUNCTION__<<id<<isExist;
+    if (isExist)
+    {
+        VideoManagerNode node;
+        node.id = id;
+
+        mVideoManagerList.remove(node);
+    }
+
+    mCond_Video->Unlock();
+
+    for (CaptureWindowThread * thread : needStopThreadList)
+    {
+        thread->stopRecord(true);
+        delete thread;
+    }
+
+    return isExist;
+}
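+
+///The collect-then-stop pattern above generalizes: never join a worker while holding
+///a lock that the worker's callback also takes. Sketch of the same idea:
+///
+///    lock();
+///    for each matching node: node.thread->stopRecord(false); pending.push_back(node.thread);
+///    remove matching nodes from the list;
+///    unlock();
+///    for each thread in pending: thread->stopRecord(true); delete thread; //safe: lock released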
+
+void MediaManager::clearTask()
+{
+    std::list<CaptureWindowThread *> needStopThreadList; //threads that still need a blocking stopRecord(true)
+
+    mCond_Video->Lock();
+
+    for (VideoManagerNode node : mVideoManagerList)
+    {
+        {
+            if (node.type == TaskType_Window)
+            {
+                CaptureWindowThread * thread = (CaptureWindowThread*)node.thread;
+                thread->stopRecord(false);
+                needStopThreadList.push_back(thread);
+                ///Do not block here: while the capture thread delivers data it calls inputVideoFrame,
+                ///which also takes mCond_Video->Lock(), so waiting under the lock would deadlock.
+                ///The blocking wait happens below, after the lock is released.
+//                thread->stopRecord(true);
+//                delete thread;
+            }
+            else if (node.type == TaskType_Camera)
+            {
+                GetVideoThread * thread = (GetVideoThread*)node.thread;
+//                thread->stopRecord(true);
+//                delete thread;
+            }
+            else if (node.type == TaskType_Picture)
+            {
+
+            }
+        }
+    }
+
+    mVideoManagerList.clear();
+
+    mCond_Video->Unlock();
+
+
+    for (CaptureWindowThread * thread : needStopThreadList)
+    {
+        thread->stopRecord(true);
+        delete thread;
+    }
+}
+
+void MediaManager::inputPcmFrame(PCMFramePtr pcmFrame, void *param)
+{
+    mCond_Audio->Lock();
+    ///Queue the frame; a worker thread later pops it, mixes it, and hands it to the encoder.
+    std::list<AudioManagerNode>::iterator iter;
+    for (iter=mAudioManagerList.begin();iter!=mAudioManagerList.end();iter++)
+    {
+        if ((*iter).thread == param)
+        {
+            bool isNeedPush = false;
+            if ((*iter).thread == mGetAudioThread && !mIsVirtualAudioMute)
+            {
+//                fprintf(stderr, ">> %s\n", __FUNCTION__);
+                isNeedPush = true;
+            }
+
+            if ((*iter).thread == mGetAudioThread_MicroPhone && !mIsMicroPhoneMute)
+            {
+//                fprintf(stderr, ".. %s\n", __FUNCTION__);
+                isNeedPush = true;
+            }
+
+            if (isNeedPush)
+            {
+                (*iter).pcmFrameList.push_back(pcmFrame);
+                (*iter).lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+            }
+//            else
+//            {
+//                fprintf(stderr, "none... %s\n", __FUNCTION__);
+//            }
+        }
+    }
+
+    if (param == mGetAudioThread_MicroPhone)
+    {
+        if (!mIsMicroPhoneMute)
+        {
+            ///Rough level meter; currently unused since the volume callback below is
+            ///commented out. The short-based estimate is overwritten by the float read
+            ///further down, as the captured frames here are float PCM.
+            #define PCMTYPE short
+            #define MAXPCMVALUE 32767
+
+            PCMTYPE *buffer = (PCMTYPE *)pcmFrame->getBuffer();
+
+            int volumeL = abs(buffer[0] * 100.0 / MAXPCMVALUE);
+            int volumeR = abs(buffer[1] * 100.0 / MAXPCMVALUE);
+
+//            fprintf(stderr, "%s %d %d %f\n", __FUNCTION__, volumeL, volumeR, *((float*)buffer));
+
+            volumeL = abs(*((float*)buffer) * 100);
+            volumeR = abs(*((float*)buffer) * 100);
+
+            volumeL *= 3;
+
+//            if (mRtmpSenderEventHandle != nullptr)
+//            {
+//                mRtmpSenderEventHandle->OnAudioVolumeUpdated(volumeL, volumeR);
+//            }
+        }
+    }
+
+    mCond_Audio->Unlock();
+    mCond_Audio->Signal();
+}
+
+void MediaManager::inputVideoFrame(VideoRawFramePtr videoFrame, void *param)
+{
+    mCond_Video->Lock();
+    ///Queue the frame; a worker thread later pops it and composites it before encoding.
+    std::list<VideoManagerNode>::iterator iter;
+    for (iter=mVideoManagerList.begin();iter!=mVideoManagerList.end();iter++)
+    {
+        if ((*iter).thread == param)
+        {
+            (*iter).videoFrame = videoFrame;
+            (*iter).lastGetFrameTime = MoudleConfig::getTimeStamp_MilliSecond();
+        }
+    }
+
+    mCond_Video->Unlock();
+    mCond_Video->Signal();
+}
+
+void MediaManager::inputExternalYuvFrame(VideoRawFramePtr videoFrame)
+{
+    int64_t pts = (MoudleConfig::getTimeStamp_MilliSecond() - mStartWriteVideoTime);
+    videoFrame->setPts(pts);
+//qDebug()<<__FUNCTION__<<isRecording()<<destYuvFrame->getWidth()<<destYuvFrame->getHeight()<<pts;
+    if (isRecording())
+    {
+        mVideoFileWriter->inputYuvFrame(videoFrame);
+    }
+}
+
+void MediaManager::inputExternalPcmFrame(PCMFramePtr pcmFrame)
+{
+    inputPcmFrame(pcmFrame, mGetAudioThread);
+}
+
+void MediaManager::audioBufferManagerThreadFunc()
+{
+    mIsAudioBufferManagerThreadRunning = true;
+
+    mAudioEncoder->openEncoder();
+
+    while(!mIsAudioBufferManagerStop)
+    {
+        std::list<PCMFramePtr> waitEncodeFrameList;
+
+        mCond_Audio->Lock();
+
+        do
+        {
+            ///Check whether every source queue has data.
+            bool hasBuffer = true;
+
+//            int i=0;
+
+            for (std::list<AudioManagerNode>::iterator iter = mAudioManagerList.begin(); iter!=mAudioManagerList.end(); iter++)
+            {
+                ///Loopback capture yields no data while the sound card is silent, so check
+                ///whether each capture thread has actually delivered anything recently.
+                if ( ((MoudleConfig::getTimeStamp_MilliSecond() - (*iter).lastGetFrameTime) < 1000)
+                     && ((*iter).pcmFrameList.size() <= 0)) //got data within the last second but the queue is empty: keep waiting
+                {
+                    hasBuffer = false;
+                    break;
+                }
+//qDebug()<<__FUNCTION__<<i++<<((*iter).thread==mGetAudioThread_MicroPhone)<<(*iter).pcmFrameList.size();
+            }
+
+            if (hasBuffer)
+            {
+                waitEncodeFrameList.clear();
+                for (std::list<AudioManagerNode>::iterator iter =mAudioManagerList.begin(); iter!=mAudioManagerList.end(); iter++)
+                {
+                    std::list<PCMFramePtr> &tmpFrameList = (*iter).pcmFrameList;
+                    if (!tmpFrameList.empty())
+                    {
+                        waitEncodeFrameList.push_back(tmpFrameList.front());
+                        tmpFrameList.pop_front();
+                    }
+                }
+//                fprintf(stderr, "%s waitEncodeFrameList size = %d \n",__FUNCTION__, waitEncodeFrameList.size());
+                break;
+            }
+            else
+            {
+                mCond_Audio->Wait(1000);
+            }
+
+            if (mIsAudioBufferManagerStop) break;
+
+        }while(1);
+
+        mCond_Audio->Unlock();
+
+        if (waitEncodeFrameList.size() > 0)
+        {
+            ///The PCM data here is AV_SAMPLE_FMT_FLTP.
+
+            ///Mix the streams together.
+            float *srcData[10] = {0};
+            int number=0;
+            int bufferSize = 0;
+
+            int64_t pts = 0;
+
+            for (PCMFramePtr & pcmFrame : waitEncodeFrameList)
+            {
+                srcData[number++] = (float*)pcmFrame->getBuffer();
+                bufferSize = pcmFrame->getSize(); //capture normalizes frame sizes, so every frame here is the same size
+
+                pts = pcmFrame->getPts();
+            }
+
+            uint8_t * pcmBuffer = (uint8_t*)malloc(bufferSize);
+            PcmMix::NormalizedRemix(srcData, number, bufferSize, (float*)pcmBuffer);
+
+            PCMFramePtr pcmFramePtr = std::make_shared<PCMFrame>();
+            pcmFramePtr->setFrameBuffer(pcmBuffer, bufferSize, pts);
+
+            if (isRecording())
+            {
+                mVideoFileWriter->inputPcmFrame(pcmFramePtr);
+            }
+
+//            #define PCMTYPE short
+//            #define MAXPCMVALUE 32767
+
+//            PCMTYPE *buffer = (PCMTYPE *)pcmBuffer;
+
+//            int volumeL = abs(buffer[0] * 100.0 / MAXPCMVALUE);
+//            int volumeR = abs(buffer[1] * 100.0 / MAXPCMVALUE);
+
+//            AACFramePtr aacFrame = mAudioEncoder->encode(pcmBuffer, bufferSize);
+//            free(pcmBuffer);
+
+//            if (aacFrame != nullptr && aacFrame.get() != nullptr)
+//            {
+//#if 0 ///写入aac文件
+//                static FILE *aacFp = fopen("out.aac", "wb");
+//                fwrite(aacFrame->getBuffer(), 1, aacFrame->getSize(), aacFp);
+//#endif
+//                aacFrame->setPts(pts);
+
+//                mRtmpSender->inputAudioFrame(aacFrame);
+
+//                if (mRtmpSenderEventHandle != nullptr)
+//                {
+//                    mRtmpSenderEventHandle->OnAudioVolumeUpdated(volumeL, volumeR);
+//                }
+//            }
+        }
+    }
+
+    mAudioEncoder->closeEncoder();
+
+    mIsAudioBufferManagerThreadRunning = false;
+}
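+
+///PcmMix::NormalizedRemix is not part of this diff; a minimal sketch of what a
+///normalized mix of AV_SAMPLE_FMT_FLTP data in [-1, 1] can look like (an assumption
+///about its behavior, not its actual implementation):
+///
+///    static void mixFltp(float *src[], int count, int bytes, float *dst)
+///    {
+///        int samples = bytes / (int)sizeof(float);
+///        for (int i = 0; i < samples; i++) {
+///            float sum = 0.0f;
+///            for (int s = 0; s < count; s++)
+///                sum += src[s][i];
+///            dst[i] = sum > 1.0f ? 1.0f : (sum < -1.0f ? -1.0f : sum); //clamp to avoid clipping
+///        }
+///    }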
+
+void scaleI420(uint8_t *src_i420_data, int width, int height, uint8_t *dst_i420_data, int dst_width, int dst_height)
+{
+
+    int src_i420_y_size = width * height;
+    int src_i420_u_size = (width >> 1) * (height >> 1);
+    uint8_t *src_i420_y_data = src_i420_data;
+    uint8_t *src_i420_u_data = src_i420_data + src_i420_y_size;
+    uint8_t *src_i420_v_data = src_i420_data + src_i420_y_size + src_i420_u_size;
+
+    int dst_i420_y_size = dst_width * dst_height;
+    int dst_i420_u_size = (dst_width >> 1) * (dst_height >> 1);
+    uint8_t *dst_i420_y_data = dst_i420_data;
+    uint8_t *dst_i420_u_data = dst_i420_data + dst_i420_y_size;
+    uint8_t *dst_i420_v_data = dst_i420_data + dst_i420_y_size + dst_i420_u_size;
+
+    libyuv::I420Scale((const uint8_t *) src_i420_y_data, width,
+                      (const uint8_t *) src_i420_u_data, width >> 1,
+                      (const uint8_t *) src_i420_v_data, width >> 1,
+                      width, height,
+                      (uint8_t *) dst_i420_y_data, dst_width,
+                      (uint8_t *) dst_i420_u_data, dst_width >> 1,
+                      (uint8_t *) dst_i420_v_data, dst_width >> 1,
+                      dst_width, dst_height,
+                      libyuv::kFilterNone);
+}
+
+void yuvMerge(uint8_t *mainYuv420pBuffer, int mainWidth, int mainHeight,
+              uint8_t *childYuv420pBuffer, int childWidth, int childHeight,
+              int posX, int posY)
+{
+    uint8_t *mainYBuffer = mainYuv420pBuffer;
+    uint8_t *mainUBuffer = mainYBuffer + mainWidth * mainHeight;
+    uint8_t *mainVBuffer = mainUBuffer + (mainWidth * mainHeight / 4);
+
+    uint8_t *childYBuffer = childYuv420pBuffer;
+    uint8_t *childUBuffer = childYBuffer + childWidth * childHeight;
+    uint8_t *childVBuffer = childUBuffer + (childWidth * childHeight / 4);
+
+qDebug()<<__FUNCTION__<<mainWidth<<mainHeight<<childWidth<<childHeight;
+
+    for (int y = 0; y < childHeight; y++)
+    {
+        uint8_t *yBuffer = mainYBuffer + ((y+posY)*mainWidth) + posX;
+        uint8_t *yBufferChild = childYBuffer + (y*childWidth);
+
+        int len = childWidth;
+
+        memcpy(yBuffer, yBufferChild, len);
+    }
+
+    ///The U/V planes are half width and half height, so row counts, strides and
+    ///offsets are all halved; posX/posY are assumed to be even.
+    for (int y = 0; y < (childHeight/2); y++)
+    {
+        uint8_t *uBuffer = mainUBuffer + ((y + posY/2) * (mainWidth/2)) + (posX/2);
+        uint8_t *uBufferChild = childUBuffer + (y * (childWidth/2));
+
+        int len = childWidth/2;
+
+        memcpy(uBuffer, uBufferChild, len);
+    }
+
+    for (int y = 0; y < (childHeight/2); y++)
+    {
+        uint8_t *vBuffer = mainVBuffer + ((y + posY/2) * (mainWidth/2)) + (posX/2);
+        uint8_t *vBufferChild = childVBuffer + (y * (childWidth/2));
+
+        int len = childWidth/2;
+
+        memcpy(vBuffer, vBufferChild, len);
+    }
+}
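+
+///Example: pasting a 320x240 child into a 1280x720 main frame at (100, 60) copies
+///240 luma rows of 320 bytes plus 120 chroma rows of 160 bytes per plane; posX and
+///posY should be even so they land on chroma-sample boundaries:
+///
+///    yuvMerge(mainBuf, 1280, 720, childBuf, 320, 240, 100, 60);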
+
+void MediaManager::videoBufferManagerThreadFunc()
+{
+    mIsVideoBufferManagerThreadRunning = true;
+
+    while(!mIsVideoBufferManagerStop)
+    {
+        int64_t currentSystemTime = MoudleConfig::getTimeStamp_MilliSecond();
+        if ((currentSystemTime - mLastInputRtmpVideoTime) >= (1000.0 / mRtmpFrameRate))
+        {
+            mLastInputRtmpVideoTime = currentSystemTime;
+
+            mCond_Video->Lock();
+
+            if (mVideoFrameBackGround == nullptr)
+            {
+                mCond_Video->Unlock();
+                continue;
+            }
+
+            VideoRawFramePtr destYuvFrame = std::make_shared<VideoRawFrame>();
+            destYuvFrame->initBuffer(mVideoFrameBackGround->getWidth(), mVideoFrameBackGround->getHeight(), VideoRawFrame::FRAME_TYPE_YUV420P);
+            destYuvFrame->setFramebuf(mVideoFrameBackGround->getBuffer());
+
+//            int64_t pts = 0;
+
+            for (VideoManagerNode node : mVideoManagerList)
+            {
+                VideoRawFramePtr videoFrameTmp = node.videoFrame;
+                VideoRawFramePtr videoFrame = videoFrameTmp;
+//qDebug()<<__FUNCTION__<<node.id<<(node.videoFrame==nullptr);
+                if (videoFrame != nullptr && videoFrame->getBuffer() != nullptr)
+                {
+//                    if (videoFrame->getPts() != 0)
+//                    {
+//                        pts = videoFrame->getPts();
+//                    }
+
+                    int destFrameWidth  = node.rect.destRect.right - node.rect.destRect.left;
+                    int destFrameHeight = node.rect.destRect.bottom - node.rect.destRect.top;
+
+                    ///If the captured resolution differs from the target, rescale once (mainly happens with cameras).
+                    if (destFrameWidth != videoFrameTmp->getWidth() || destFrameHeight != videoFrameTmp->getHeight())
+                    {
+                        videoFrame = std::make_shared<VideoRawFrame>();
+                        videoFrame->initBuffer(destFrameWidth, destFrameHeight, VideoRawFrame::FRAME_TYPE_YUV420P, videoFrameTmp->getPts());
+
+                        scaleI420(videoFrameTmp->getBuffer(), videoFrameTmp->getWidth(), videoFrameTmp->getHeight(),
+                                  videoFrame->getBuffer(), destFrameWidth, destFrameHeight);
+////qDebug()<<__FUNCTION__<<videoFrameTmp->getWidth()<<videoFrameTmp->getHeight()<<destFrameWidth<<destFrameHeight<<node.rect.destRect.left<<node.rect.destRect.top;
+//                        FILE *fp = fopen("out-0.yuv", "wb");
+//                        fwrite(videoFrame->getBuffer(), 1, videoFrame->getSize(), fp);
+//                        fclose(fp);
+                    }
+
+                    codImageFrame frame;
+                    frame.data   = videoFrame->getBuffer();				// memory pointer
+                    frame.width  = videoFrame->getWidth();				// width of image
+                    frame.height = videoFrame->getHeight();				// height of image
+                    frame.stride = videoFrame->getWidth();				// stride of image
+                    frame.pixfmt = cod_fmt_i420;						// pixel format of image
+
+                    codImageFrame dstFrame;
+                    dstFrame.data   = destYuvFrame->getBuffer();		// memory pointer
+                    dstFrame.width  = destYuvFrame->getWidth();			// width of image
+                    dstFrame.height = destYuvFrame->getHeight();		// height of image
+                    dstFrame.stride = destYuvFrame->getWidth();			// stride of image
+                    dstFrame.pixfmt = cod_fmt_i420;						// pixel format of image
+
+                    blend_420p_planar (&frame, node.rect.destRect.left, node.rect.destRect.top, node.alpha, &dstFrame);
+
+                }
+            }
+
+            mCond_Video->Unlock();
+
+            int64_t pts = (MoudleConfig::getTimeStamp_MilliSecond() - mStartWriteVideoTime);
+            destYuvFrame->setPts(pts);
+//qDebug()<<__FUNCTION__<<isRecording()<<destYuvFrame->getWidth()<<destYuvFrame->getHeight()<<pts;
+            if (isRecording())
+            {
+                mVideoFileWriter->inputYuvFrame(destYuvFrame);
+            }
+
+            if (mFinalVideoFrameCallBackFunc != nullptr)
+            {
+                mFinalVideoFrameCallBackFunc(destYuvFrame, mFinalVideoFrameCallBackFuncParam);
+            }
+
+//            FILE *fp = fopen("out.yuv", "wb");
+//            fwrite(destYuvFrame->getBuffer(), 1, destYuvFrame->getSize(), fp);
+//            fclose(fp);
+        }
+        else
+        {
+            MoudleConfig::mSleep(5);
+        }
+    }
+
+    mIsVideoBufferManagerThreadRunning = false;
+}
+
+#if 0
+
+#if defined(WIN32)
+std::string UTF8ToGB(const char* str)
+{
+    std::string result;
+    WCHAR *strSrc;
+    LPSTR szRes;
+
+    //query the required buffer size
+    int i = MultiByteToWideChar(CP_UTF8, 0, str, -1, NULL, 0);
+    strSrc = new WCHAR[i + 1];
+    MultiByteToWideChar(CP_UTF8, 0, str, -1, strSrc, i);
+
+    //获得临时变量的大小
+    i = WideCharToMultiByte(CP_ACP, 0, strSrc, -1, NULL, 0, NULL, NULL);
+    szRes = new CHAR[i + 1];
+    WideCharToMultiByte(CP_ACP, 0, strSrc, -1, szRes, i, NULL, NULL);
+
+    result = szRes;
+    delete[]strSrc;
+    delete[]szRes;
+
+    return result;
+}
+#endif
+
+bool MediaManager::getDeviceList(std::list<DeviceNode> &videoDeviceList, std::list<DeviceNode> &audioDeviceList)
+{
+    bool isSucceed = false;
+
+    /// Enumerate audio/video devices by running the ffmpeg command line.
+    /// ffmpeg.exe must be placed in the same directory as the program.
+
+    char dirPath[512] = {0};
+    getcwd(dirPath, sizeof (dirPath));
+
+#ifdef WIN32
+
+    std::string ffmpegPath = std::string(dirPath) + "/ffmpeg.exe";
+    ffmpegPath = MoudleConfig::stringReplaceAll(ffmpegPath, "/","\\\\");
+
+    #if 0
+        std::string cmdStr = AppConfig::stringFormat(" /c \"%s\" -list_devices true -f dshow -i dummy 2>ffmpeg_device_out.txt", ffmpegPath.c_str());
+
+        std::wstring str;
+        {
+            char * c = (char*)cmdStr.c_str();
+            size_t m_encode = CP_ACP;
+            int len = MultiByteToWideChar(m_encode, 0, c, strlen(c), NULL, 0);
+            wchar_t*	m_wchar = new wchar_t[len + 1];
+            MultiByteToWideChar(m_encode, 0, c, strlen(c), m_wchar, len);
+            m_wchar[len] = '\0';
+            str = m_wchar;
+            delete m_wchar;
+        }
+
+        fprintf(stderr, "%s %s \n", __FUNCTION__, str.c_str());
+
+        SHELLEXECUTEINFO ShExecInfo = {0};
+        ShExecInfo.cbSize = sizeof(SHELLEXECUTEINFO);
+        ShExecInfo.fMask = SEE_MASK_FLAG_NO_UI;
+        ShExecInfo.hwnd = NULL;
+        ShExecInfo.lpVerb = NULL;
+        ShExecInfo.lpFile = L"cmd.exe";//调用的程序名
+    //    ShExecInfo.lpParameters = L" /c ffmpeg.exe -list_devices true -f dshow -i dummy 2>D:/a.txt";//调用程序的命令行参数
+        ShExecInfo.lpParameters = str.data();
+        ShExecInfo.lpDirectory = NULL;
+        ShExecInfo.nShow = SW_SHOWMINIMIZED;//窗口状态为隐藏
+        ShExecInfo.hInstApp = NULL;
+        int ret = ShellExecuteEx(&ShExecInfo);
+        WaitForSingleObject(ShExecInfo.hProcess, INFINITE);////等到该进程结束
+    #else
+        std::string cmdStr = MoudleConfig::stringFormat("cmd.exe /c \"%s\" -list_devices true -f dshow -i dummy 2>ffmpeg_device_out.txt", ffmpegPath.c_str());
+
+        int ret = WinExec(cmdStr.c_str(), SW_SHOWMINIMIZED);
+    #endif
+
+#else
+
+//    int ret = system(cmdStr.c_str());
+#endif
+
+    MoudleConfig::mSleep(2000);
+
+    for (int i=0;i<10;i++)
+    {
+        std::string deviceName;
+        std::string deviceID;
+
+        FILE *fp = fopen("ffmpeg_device_out.txt", "r");
+        if (fp != nullptr)
+        {
+            bool isVideoBegin = false;
+            bool isAudioBegin = false;
+
+            while (!feof(fp))
+            {
+                char ch[1024] = {0};
+                char*p = fgets(ch, 1024, fp);
+
+#if defined(WIN32)
+//                std::string str = UTF8ToGB(ch); //the file ffmpeg writes is UTF-8 encoded
+                std::string str = (ch); //the file ffmpeg writes is UTF-8 encoded
+#else
+                std::string str = std::string(ch);
+#endif
+//                fprintf(stderr, "[%s] %s [end]\n", str.c_str(), ch);
+
+                if ((str.find("DirectShow video devices") != std::string::npos) && (str.find("[dshow @") != std::string::npos))
+                {
+                    isVideoBegin = true;
+                    isAudioBegin = false;
+                    continue;
+                }
+
+                if ((str.find("DirectShow audio devices") != std::string::npos) && (str.find("[dshow @") != std::string::npos))
+                {
+                    isAudioBegin = true;
+                    isVideoBegin = false;
+                    continue;
+                }
+
+                if (str.find("[dshow @") != std::string::npos)
+                {
+                    std::string tmpStr = str;
+
+                    int index = str.find("\"");
+                    str = str.erase(0, index);
+
+                    str = MoudleConfig::stringReplaceAll(str, "\"", "");
+                    str = MoudleConfig::stringReplaceAll(str, "\n", "");
+                    str = MoudleConfig::stringReplaceAll(str, "\r", "");
+
+                    if (tmpStr.find("Alternative name") == std::string::npos)
+                    {
+                        ///this line holds the device name
+//                        if (str.find("virtual-audio-capturer") != std::string::npos)
+                        deviceName = str;
+                    }
+                    else
+                    {
+                        deviceID = str;
+
+                        DeviceNode deviceNode{deviceName, deviceID};
+
+                        ///this line holds the device ID
+                        if (isVideoBegin)
+                        {
+    //                        fprintf(stderr, ">>>>>>>video>>>>>>> %s\n", str.c_str());
+                            if (!deviceName.empty())
+                                videoDeviceList.push_back(deviceNode);
+                        }
+                        else if (isAudioBegin)
+                        {
+//                            fprintf(stderr, ">>>>>>>audio>>>>>>> %s\n", str.c_str());
+                            if (!deviceName.empty())
+                                audioDeviceList.push_back(deviceNode);
+                        }
+                    }
+                }
+            }
+            fclose(fp);
+
+            isSucceed = true;
+            break;
+        }
+        else
+        {
+            MoudleConfig::mSleep(1000); //wait a second and try again
+        }
+//        fprintf(stderr, "####=======================###\n");
+    }
+
+    std::thread([]
+    {
+
+        MoudleConfig::mSleep(10000);
+        MoudleConfig::removeFile("ffmpeg_device_out.txt");
+
+    }).detach();
+
+    return isSucceed;
+}
+
+#else
+
+#include <initguid.h>
+#include <WinSock2.h>
+#include <Windows.h>
+#include <dshow.h>
+#include <stdio.h>
+#include <stdarg.h>  //for va_list (variable-argument handling)
+//Strmiids.lib oleaut32.lib
+
+static void wcharTochar(const wchar_t *wchar, char *chr, int length)
+{
+    WideCharToMultiByte( CP_ACP, 0, wchar, -1,chr, length, NULL, NULL );
+}
+
+
+static char* GuidToString(const GUID &guid)
+{
+    int buf_len=64;
+    char *buf =(char *)malloc(buf_len);
+    StringCbPrintfA(
+        buf,
+        buf_len,
+        "{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}",
+        guid.Data1, guid.Data2, guid.Data3,
+        guid.Data4[0], guid.Data4[1],
+        guid.Data4[2], guid.Data4[3],
+        guid.Data4[4], guid.Data4[5],
+        guid.Data4[6], guid.Data4[7]);
+    //printf("%s\n",buf);
+    return buf;
+}
+
+bool MediaManager::getDeviceList(std::list<DeviceNode> &videoDeviceList, std::list<DeviceNode> &audioDeviceList)
+{
+    bool isSucceed = false;
+
+    do
+    {
+        // Init COM
+        HRESULT hr=NULL;
+        hr= CoInitialize(NULL);
+        if (FAILED(hr)){
+            fprintf(stderr,"Error, Can not init COM.");
+            break;
+        }
+        printf("===============Directshow Filters ===============\n");
+        ICreateDevEnum *pSysDevEnum = NULL;
+        hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
+            IID_ICreateDevEnum, (void **)&pSysDevEnum);
+        if (FAILED(hr)){
+            break;
+        }
+
+
+        //    HRESULT status= S_OK;
+
+        //    // create System Device Enumerator
+        //    ICreateDevEnum *pSystemDeviceEnumerator= NULL;
+        //    status= CoCreateInstance(  CLSID_SystemDeviceEnum,
+        //                                NULL,
+        //                                CLSCTX_INPROC,
+        //                                IID_ICreateDevEnum,
+        //                                (void**)&pSystemDeviceEnumerator);
+        //    if( FAILED(status))
+        //    {
+        ////        MessageBoxEx( NULL, "Creating System Device Enumerator failed!", __FUNCTION__, MB_ICONERROR, 0);
+        //        return false;
+        //    }
+
+        //    // create Class Enumerator that lists alls video input devices among the system devices
+        //    IEnumMoniker *pVideoInputDeviceEnumerator= NULL;
+        //    status= pSystemDeviceEnumerator->CreateClassEnumerator( CLSID_VideoInputDeviceCategory,
+        //                                                            &pVideoInputDeviceEnumerator,
+        //                                                            0);
+
+        IEnumMoniker *pEnumCat = NULL;
+        //Category
+        /************************************************************************
+        Friendly Name                         CLSID
+        -------------------------------------------------------------------------
+        Audio Capture Sources                 CLSID_AudioInputDeviceCategory
+        Audio Compressors                     CLSID_AudioCompressorCategory
+        Audio Renderers                       CLSID_AudioRendererCategory
+        Device Control Filters                CLSID_DeviceControlCategory
+        DirectShow Filters                    CLSID_LegacyAmFilterCategory
+        External Renderers                    CLSID_TransmitCategory
+        Midi Renderers                        CLSID_MidiRendererCategory
+        Video Capture Sources                 CLSID_VideoInputDeviceCategory
+        Video Compressors                     CLSID_VideoCompressorCategory
+        WDM Stream Decompression Devices      CLSID_DVDHWDecodersCategory
+        WDM Streaming Capture Devices         AM_KSCATEGORY_CAPTURE
+        WDM Streaming Crossbar Devices        AM_KSCATEGORY_CROSSBAR
+        WDM Streaming Rendering Devices       AM_KSCATEGORY_RENDER
+        WDM Streaming Tee/Splitter Devices    AM_KSCATEGORY_SPLITTER
+        WDM Streaming TV Audio Devices        AM_KSCATEGORY_TVAUDIO
+        WDM Streaming TV Tuner Devices        AM_KSCATEGORY_TVTUNER
+        WDM Streaming VBI Codecs              AM_KSCATEGORY_VBICODEC
+        ************************************************************************/
+    //    hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoCompressorCategory, &pEnumCat, 0);
+        hr = pSysDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, &pEnumCat, 0);
+        //hr = pSysDevEnum->CreateClassEnumerator(CLSID_AudioCompressorCategory, &pEnumCat, 0);
+    //    hr = pSysDevEnum->CreateClassEnumerator(CLSID_AudioInputDeviceCategory, &pEnumCat, 0);
+        //hr = pSysDevEnum->CreateClassEnumerator(CLSID_MediaMultiplexerCategory, &pEnumCat, 0);
+        //hr = pSysDevEnum->CreateClassEnumerator(CLSID_LegacyAmFilterCategory, &pEnumCat, 0);
+
+        if (hr != S_OK) {
+            pSysDevEnum->Release();
+            break;
+        }
+
+        isSucceed = true;
+
+        IMoniker *pMoniker = NULL;
+        ULONG monikerFetched;
+        //Filter
+        while(pEnumCat->Next(1, &pMoniker, &monikerFetched) == S_OK){
+            IPropertyBag *pPropBag;
+            VARIANT varName;
+            IBaseFilter *pFilter;
+            hr = pMoniker->BindToStorage(0, 0, IID_IPropertyBag,(void **)&pPropBag);
+            if (FAILED(hr)){
+                pMoniker->Release();
+                continue;
+            }
+            VariantInit(&varName);
+            hr = pPropBag->Read(L"FriendlyName", &varName, 0);
+            //"FriendlyName": The name of the device.
+            //"Description": A description of the device.
+            //Filter Info================
+    //        printf("[%s]\n",W2A(varName.bstrVal));
+
+            //wchar_t to char
+            char chr[128]={0};
+            wcharTochar(varName.bstrVal, chr, sizeof(chr));
+
+            DeviceNode node;
+            node.deviceName = chr;
+            videoDeviceList.push_back(node);
+
+//            fprintf(stderr,"[%s]\n", chr);
+
+            VariantClear(&varName);
+            //========================
+    #if OUTPUT_PIN
+            hr = pMoniker->BindToObject(NULL, NULL, IID_IBaseFilter,(void**)&pFilter);
+            if (!pFilter){
+                continue;
+            }
+
+            IEnumPins * pinEnum = NULL;
+            IPin * pin = NULL;
+            ULONG pinFetched = 0;
+            if (FAILED(pFilter->EnumPins(&pinEnum))){
+                pinEnum->Release();
+                continue;
+            }
+            pinEnum->Reset();
+            //Pin Info
+            while (SUCCEEDED(pinEnum->Next(1, &pin, &pinFetched)) && pinFetched){
+                if (!pin){
+                    continue;
+                }
+                PIN_INFO pinInfo;
+                if (FAILED(pin->QueryPinInfo(&pinInfo))){
+                    continue;
+                }
+                printf("\t[Pin] ");
+                    switch(pinInfo.dir){
+                    case PINDIR_INPUT:printf("Dir:Input  \t");break;
+                    case PINDIR_OUTPUT:printf("Dir:Output \t");break;
+                    default:printf("Dir:Unknown\n");break;
+                }
+    //            printf("Name:%s\n",W2A(pinInfo.achName));
+                    char     chr[128]={0};
+                    wcharTochar(pinInfo.achName, chr, sizeof(chr));
+                    printf("Name:%s\n", chr);
+
+                //MediaType
+    #if OUTPUT_MEDIATYPE
+                IEnumMediaTypes *mtEnum=NULL;
+                AM_MEDIA_TYPE   *mt=NULL;
+                if( FAILED( pin->EnumMediaTypes( &mtEnum )) )
+                    break;
+                mtEnum->Reset();
+
+                ULONG mtFetched = 0;
+
+                while (SUCCEEDED(mtEnum->Next(1, &mt, &mtFetched)) && mtFetched){
+
+                    printf("\t\t[MediaType]\n");
+                    //Video
+                    char *MEDIATYPE_Video_str=GuidToString(MEDIATYPE_Video);
+                    //Audio
+                    char *MEDIATYPE_Audio_str=GuidToString(MEDIATYPE_Audio);
+                    //Stream
+                    char *MEDIATYPE_Stream_str=GuidToString(MEDIATYPE_Stream);
+                    //Majortype
+                    char *majortype_str=GuidToString(mt->majortype);
+                    //Subtype
+                    char *subtype_str=GuidToString(mt->subtype);
+
+                    printf("\t\t  Majortype:");
+                    if(strcmp(majortype_str,MEDIATYPE_Video_str)==0){
+                        printf("Video\n");
+                    }else if(strcmp(majortype_str,MEDIATYPE_Audio_str)==0){
+                        printf("Audio\n");
+                    }else if(strcmp(majortype_str,MEDIATYPE_Stream_str)==0){
+                        printf("Stream\n");
+                    }else{
+                        printf("Other\n");
+                    }
+                    printf("\t\t  Subtype GUID:%s",subtype_str);
+
+                    free(MEDIATYPE_Video_str);
+                    free(MEDIATYPE_Audio_str);
+                    free(MEDIATYPE_Stream_str);
+                    free(subtype_str);
+                    free(majortype_str);
+                    printf("\n");
+
+                }
+    #endif
+                pin->Release();
+
+            }
+            pinEnum->Release();
+
+            pFilter->Release();
+    #endif
+            pPropBag->Release();
+            pMoniker->Release();
+        }
+        pEnumCat->Release();
+        pSysDevEnum->Release();
+        printf("=================================================\n");
+        CoUninitialize();
+
+    }while(0);
+
+    return isSucceed;
+}
+#endif
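+
+///Example use of the enumerator above (in the DirectShow build only the video list
+///is filled and deviceID is left empty):
+///
+///    std::list<DeviceNode> videoDevs, audioDevs;
+///    if (MediaManager::getDeviceList(videoDevs, audioDevs))
+///        for (const DeviceNode &d : videoDevs)
+///            fprintf(stderr, "video: %s [%s]\n", d.deviceName.c_str(), d.deviceID.c_str());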

+ 232 - 0
module/ScreenRecorder/src/Media/MediaManager.h

@@ -0,0 +1,232 @@
+/**
+ * Ye Haihui (叶海辉)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef MEDIAMANAGER_H
+#define MEDIAMANAGER_H
+
+#include <list>
+#include <string>
+
+#include "Video/CaptureWindowThread.h"
+#include "Video/GetVideoThread.h"
+#include "Audio/GetAudioThread.h"
+
+#include "Audio/AudioEncoder.h"
+#include "Video/VideoFileWriter.h"
+
+#include "EventHandle/VideoRecorderEventHandle.h"
+
+struct DeviceNode
+{
+    std::string deviceName;
+    std::string deviceID;
+};
+
+struct VideoRECT
+{
+    RECT destRect;
+};
+
+enum TaskType
+{
+    TaskType_Window = 0,
+    TaskType_Camera,
+    TaskType_Picture,
+};
+
+class MediaManager
+{
+public:
+    MediaManager();
+    ~MediaManager();
+
+    /**
+     * @brief Add a window-capture task.
+     * @param id       [in] task ID
+     * @param hWnd     [in] window handle
+     * @param srcRect  [in] region of the window to capture
+     * @param destRect [in] region of the main image where the result is pasted
+     * @param alpha    [in] image opacity, 0~1.0
+     */
+    void addCaptureWindowTask(const int &id, const HWND &hWnd,
+                              const RECT &srcRect, const RECT &destRect,
+                              const float &alpha=1.0f);
+
+    void addCaptureCameraTask(const int &id,
+                              const RECT &destRect,
+                              const float &alpha=1.0f);
+
+    void addCapturePictureTask(const int &id, const std::string &filePath,
+                               const RECT &destRect,
+                               const float &alpha=1.0f);
+
+    bool removeTask(const int &id);
+    void clearTask();
+
+    void stopAll(); //call only right before the object is released; calling it earlier breaks normal operation
+
+    /**
+     * @brief Start the capture pipeline.
+     * @param [in] enableVirtualAudio whether to capture the virtual sound card output
+     * @return true on success
+     */
+    bool startCapture(const bool &enableVirtualAudio = true);
+    bool stopCapture(const bool &isBlock = true);
+
+    bool setMicroPhone(const std::string &deviceName);
+    bool startCaptureMic(const std::string &deviceName);
+    bool stopCaptureMic(const bool &isBlock = true);
+
+    bool muteMicroPhone(bool isMute); //mute the microphone
+    bool muteVirtualAudio(bool isMute); //mute the captured sound-card output
+
+    void setVideoRecorderEventHandle(VideoRecorderEventHandle *handle);
+
+    int64_t getVideoFileCurrentTime(); //current recording timestamp in milliseconds
+
+    ///Set the resolution of the output video file (if it differs from the captured region, frames are rescaled before being encoded).
+    void setVideoSize(const int &width, const int &height);
+    void setQuality(const int &quality); //set quality, 0-10
+    void setFrameRate(const int &frameRate);
+
+    ///Camera control
+    bool openCameraCaptureMode(const std::string &deviceName); //camera opened from the live-settings dialog
+    bool openCameraWindowMode(const std::string &deviceName); //camera opened from a window
+    bool closeCameraCaptureMode(); //close the camera opened from the live-settings dialog
+    bool closeCameraWindowMode(); //close the camera opened from a window
+
+    bool isRecording(); //whether recording is in progress
+    bool openFile(const std::string &filePath);
+    bool closeFile();
+    std::list<VideoFileInfo> getVideoFileList(); //get the list of recorded files
+
+    ///Set the video frame callback functions.
+    void setCameraFrameCallBackFunc(std::function<void (VideoRawFramePtr yuvFrame, VideoRawFramePtr rgbFrame, void *param)> func = nullptr, void *param = nullptr);
+    void setFinalVideoFrameCallBackFunc(std::function<void (VideoRawFramePtr yuvFrame, void *param)> func = nullptr, void *param = nullptr);
+
+    ///Feed in external audio/video data.
+    void inputExternalYuvFrame(VideoRawFramePtr videoFrame);
+    void inputExternalPcmFrame(PCMFramePtr pcmFrame);
+
+    /**
+     * @brief Enumerate capture devices.
+     * @param [out] videoDeviceList receives the video device list
+     * @param [out] audioDeviceList receives the audio device list
+     * @return true on success
+     */
+    static  bool getDeviceList(std::list<DeviceNode> &videoDeviceList, std::list<DeviceNode> &audioDeviceList);
+
+protected:
+    bool mIsAudioBufferManagerStop;
+    bool mIsAudioBufferManagerThreadRunning;
+    void audioBufferManagerThreadFunc();
+
+    bool mIsVideoBufferManagerStop;
+    bool mIsVideoBufferManagerThreadRunning;
+    void videoBufferManagerThreadFunc();
+
+private:
+    int64_t mStartWriteVideoTime; //time writing the video file started; used to compute timestamps passed to the writer
+    int64_t mStartCaptureTime; //time capture started; used to compute timestamps passed to the writer
+    bool mIsCaptureNow; //whether the audio/video processing threads are currently running
+
+    GetVideoThread *mGetCameraVideoThread;      //thread that grabs camera frames
+    GetAudioThread *mGetAudioThread;  //captures the system audio output
+    GetAudioThread *mGetAudioThread_MicroPhone; //captures the microphone
+
+    bool mIsMicroPhoneMute; //mute microphone capture
+    bool mIsVirtualAudioMute; //mute sound-card capture
+
+    int mVideoFileWidth;  //resolution of the final video file
+    int mVideoFileHeight; //resolution of the final video file
+    VideoFileWriter *mVideoFileWriter;
+
+    VideoRecorderEventHandle *mVideoRecorderEventHandle;
+
+    ///Camera frame callback
+    std::function<void (VideoRawFramePtr yuvFrame, VideoRawFramePtr rgbFrame, void *param)> mCameraFrameCallBackFunc = nullptr; //camera frame callback
+    void *mCameraFrameCallBackFuncParam = nullptr; //user parameter for the callback
+
+    void startCaptureCamera(); //start the camera capture thread
+    void stopCaptureCamera();  //stop the camera capture thread
+
+    ///Live frame callback
+    std::function<void (VideoRawFramePtr yuvFrame, void *param)> mFinalVideoFrameCallBackFunc = nullptr; //final composited frame callback
+    void *mFinalVideoFrameCallBackFuncParam = nullptr; //user parameter for the callback
+
+    int64_t mLastInputRtmpVideoTime; //time the last video frame was pushed (rtmp-era naming)
+    int mRtmpFrameRate;
+
+    ///Holds captured audio data (for mixing)
+    struct AudioManagerNode
+    {
+        GetAudioThread* thread;
+        std::list<PCMFramePtr> pcmFrameList;
+        int64_t lastGetFrameTime; //time of the most recent frame
+
+        AudioManagerNode()
+        {
+            lastGetFrameTime = 0;
+        }
+    };
+
+    VideoRawFramePtr mVideoFrameBackGround; //background frame
+
+    ///Holds captured video data (for compositing overlays)
+    struct VideoManagerNode
+    {
+        int id;
+        void* thread;
+        TaskType type;
+        VideoRawFramePtr videoFrame;
+        VideoRECT rect;
+        float alpha;
+        int64_t lastGetFrameTime; //time of the most recent frame
+
+        VideoManagerNode()
+        {
+            videoFrame = nullptr;
+            alpha = 1.0f;
+            lastGetFrameTime = 0;
+        }
+
+        bool operator == (const VideoManagerNode & node)//nodes compare equal by task id
+        {
+            bool isSame = false;
+            if (node.id == this->id)
+            {
+                isSame = true;
+            }
+            return isSame;
+        }
+    };
+
+    Cond *mCond_Audio;
+    std::list<AudioManagerNode> mAudioManagerList;
+
+    Cond *mCond_Video;
+    std::list<VideoManagerNode> mVideoManagerList;
+
+    AudioEncoder *mAudioEncoder;
+//    VideoEncoder *mVideoEncoder;
+
+    void inputPcmFrame(PCMFramePtr pcmFrame, void *param);
+    void inputVideoFrame(VideoRawFramePtr videoFrame, void *param);
+
+    void startAudioBufferMangerThread();
+    void stopAudioBufferMangerThread();
+
+    bool mIsCameraOpenCaptureMode = false;
+    bool mIsCameraOpenWindowMode  = false;
+    bool openCamera(const std::string &deviceName);
+    bool closeCamera();
+
+};
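+
+///A minimal usage sketch of this class for recording one window at 1280x720
+///(the id, HWND, rects and file path are illustrative only, and the call order
+///is an assumption based on the API above):
+///
+///    MediaManager mgr;
+///    mgr.setVideoSize(1280, 720);
+///    mgr.setFrameRate(20);
+///    mgr.addCaptureWindowTask(1, hWnd, srcRect, destRect);
+///    mgr.startCapture(true);        //starts capture and the compositor thread
+///    mgr.openFile("out.mp4");       //begin writing the file
+///    // ... record ...
+///    mgr.closeFile();
+///    mgr.stopCapture(true);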
+
+
+#endif // MEDIAMANAGER_H

+ 933 - 0
module/ScreenRecorder/src/Media/Video/CaptureWindowThread.cpp

@@ -0,0 +1,933 @@
+/**
+ * Ye Haihui (叶海辉)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "CaptureWindowThread.h"
+
+#include "MoudleConfig.h"
+#include <QDebug>
+#if defined(WIN32)
+    #include <WinSock2.h>
+    #include <Windows.h>
+#else
+
+#endif
+
+enum window_search_mode {
+    INCLUDE_MINIMIZED,
+    EXCLUDE_MINIMIZED
+};
+
+static bool check_window_valid(HWND window, enum window_search_mode mode)
+{
+    DWORD styles, ex_styles;
+    RECT  rect;
+
+    if (/*!IsWindowVisible(window) ||*/
+        (mode == EXCLUDE_MINIMIZED && IsIconic(window)))
+        return false;
+
+    GetClientRect(window, &rect);
+    styles    = (DWORD)GetWindowLongPtr(window, GWL_STYLE);
+    ex_styles = (DWORD)GetWindowLongPtr(window, GWL_EXSTYLE);
+
+    if (ex_styles & WS_EX_TOOLWINDOW)
+        return false;
+    if (styles & WS_CHILD)
+        return false;
+    if (mode == EXCLUDE_MINIMIZED && (rect.bottom == 0 || rect.right == 0))
+        return false;
+
+    return true;
+}
+
+static inline HWND next_window(HWND window, enum window_search_mode mode)
+{
+    while (true) {
+        window = GetNextWindow(window, GW_HWNDNEXT);
+        if (!window || check_window_valid(window, mode))
+            break;
+    }
+
+    return window;
+}
+
+static inline HWND first_window(enum window_search_mode mode)
+{
+    HWND window = GetWindow(GetDesktopWindow(), GW_CHILD);
+    if (!check_window_valid(window, mode))
+        window = next_window(window, mode);
+    return window;
+}
+
+/////////////
+static BOOL IsMainWindow(HWND handle)
+{
+    return GetWindow(handle, GW_OWNER) == (HWND)0 && IsWindowVisible(handle);
+}
+
+/************************************************************************/
+/* hBitmap    handle of the screen bitmap just captured
+/* lpFileName file name to save the bitmap to
+/************************************************************************/
+int SaveBitmapToFile(HBITMAP hBitmap,LPSTR lpFileName)
+{
+    HDC            hDC; //device context
+    int            iBits;//bits per pixel of the current display mode
+    WORD           wBitCount;//bits per pixel stored in the bitmap
+    DWORD          dwPaletteSize=0;//palette size in bytes
+    DWORD          dwBmBitsSize;//size of the pixel data in bytes
+    DWORD          dwDIBSize;// total size of the bitmap file
+    DWORD          dwWritten;//bytes written to the file
+    BITMAP         Bitmap;//bitmap object
+    BITMAPFILEHEADER   bmfHdr;   //bitmap file header
+    BITMAPINFOHEADER   bi;       //bitmap info header
+    LPBITMAPINFOHEADER lpbi;     //pointer to the bitmap info header
+    HANDLE          fh;//file handle
+    HANDLE            hDib;//DIB memory handle
+    HANDLE            hPal;//palette handle
+    HANDLE          hOldPal=NULL;//previous palette handle
+
+    //determine the bits per pixel of the display
+    hDC = CreateDC(L"DISPLAY",NULL,NULL,NULL);
+    iBits = GetDeviceCaps(hDC, BITSPIXEL) * GetDeviceCaps(hDC, PLANES);
+    DeleteDC(hDC);
+
+    if (iBits <= 1)
+        wBitCount = 1;
+    else if (iBits <= 4)
+        wBitCount = 4;
+    else if (iBits <= 8)
+        wBitCount = 8;
+    else if (iBits <= 24)
+        wBitCount = 24;
+    else //save >=24bpp captures as 24-bit (also covers iBits > 32, which would otherwise leave wBitCount uninitialized)
+        wBitCount = 24;
+
+
+    //compute the palette size
+    if (wBitCount <= 8)
+        dwPaletteSize = (1 << wBitCount) *sizeof(RGBQUAD);
+
+
+
+    //fill in the bitmap info header
+    GetObject(hBitmap, sizeof(BITMAP), (LPSTR)&Bitmap);
+    bi.biSize            = sizeof(BITMAPINFOHEADER);
+    bi.biWidth           = Bitmap.bmWidth;
+    bi.biHeight          = Bitmap.bmHeight;
+    bi.biPlanes          = 1;
+    bi.biBitCount         = wBitCount;
+    bi.biCompression      = BI_RGB;
+    bi.biSizeImage        = 0;
+    bi.biXPelsPerMeter     = 0;
+    bi.biYPelsPerMeter     = 0;
+    bi.biClrUsed         = 0;
+    bi.biClrImportant      = 0;
+    dwBmBitsSize = ((Bitmap.bmWidth *wBitCount+31)/32)* 4*Bitmap.bmHeight ;
+
+    //allocate memory for the bitmap contents
+    hDib  = GlobalAlloc(GHND,dwBmBitsSize+dwPaletteSize+sizeof(BITMAPINFOHEADER));
+    lpbi = (LPBITMAPINFOHEADER)GlobalLock(hDib);
+    if (lpbi==NULL)
+    {
+        return 0;
+    }
+
+    *lpbi = bi;
+    // select the palette
+    hPal = GetStockObject(DEFAULT_PALETTE);
+    if (hPal)
+    {
+        hDC  = GetDC(NULL);
+        hOldPal = ::SelectPalette(hDC, (HPALETTE)hPal, FALSE);
+        RealizePalette(hDC);
+    }
+    // fetch the pixel data under this palette
+    GetDIBits(hDC, hBitmap, 0, (UINT) Bitmap.bmHeight,
+        (LPSTR)lpbi + sizeof(BITMAPINFOHEADER)+dwPaletteSize,
+        (LPBITMAPINFO)lpbi, DIB_RGB_COLORS);
+    //restore the previous palette
+    if (hOldPal)
+    {
+        SelectPalette(hDC, (HPALETTE)hOldPal, TRUE);
+        RealizePalette(hDC);
+        ReleaseDC(NULL, hDC);
+    }
+    //create the bitmap file
+    fh = CreateFileA(lpFileName, GENERIC_WRITE,
+        0, NULL, CREATE_ALWAYS,
+        FILE_ATTRIBUTE_NORMAL | FILE_FLAG_SEQUENTIAL_SCAN, NULL);
+
+    if (fh == INVALID_HANDLE_VALUE)
+        return FALSE;
+
+    // fill in the bitmap file header
+    bmfHdr.bfType = 0x4D42;  // "BM"
+    dwDIBSize    = sizeof(BITMAPFILEHEADER) + sizeof(BITMAPINFOHEADER)+ dwPaletteSize + dwBmBitsSize;
+    bmfHdr.bfSize = dwDIBSize;
+    bmfHdr.bfReserved1 = 0;
+    bmfHdr.bfReserved2 = 0;
+    bmfHdr.bfOffBits = (DWORD)sizeof(BITMAPFILEHEADER) + (DWORD)sizeof(BITMAPINFOHEADER)+ dwPaletteSize;
+
+    // write the bitmap file header
+    WriteFile(fh, (LPSTR)&bmfHdr, sizeof(BITMAPFILEHEADER), &dwWritten, NULL);
+
+    // write the rest of the bitmap file
+    WriteFile(fh, (LPSTR)lpbi, dwDIBSize, &dwWritten, NULL);
+
+    //clean up
+    GlobalUnlock(hDib);
+    GlobalFree(hDib);
+    CloseHandle(fh);
+
+    return 1;
+}
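+
+///Example: persist a previously captured bitmap (the path is illustrative):
+///
+///    SaveBitmapToFile(hBitmap, (LPSTR)"capture.bmp");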
+
+CaptureWindowThread::CaptureWindowThread()
+{
+    pFrameRGB = nullptr;
+    pFrameYUV = nullptr;
+
+    outBufferYUV = nullptr;
+    outBufferRGB = nullptr;
+    img_convert_ctx = nullptr;
+
+    mCallBackFunc      = nullptr;
+    mCallBackFuncParam = nullptr;
+
+    mIsStop = false;
+    mIsThreadRunning = false;
+
+    mIsPause = false;
+    mStartTime = 0; //time the first frame was received
+    mCurrentTime = 0;
+
+    mLastGetVideoTime = 0;
+    mFrameRate = 20;
+
+    mIsFollowMouseMode = false;
+
+    mX = 0;
+    mY = 0;
+    mW = 0;
+    mH = 0;
+}
+
+CaptureWindowThread::~CaptureWindowThread()
+{
+
+}
+
+void CaptureWindowThread::setHWND(const HWND &hWnd)
+{
+    mHWnd = hWnd;
+}
+
+void CaptureWindowThread::setQuantity(const int &value)
+{
+//    mVideoEncoder->setQuantity(value);
+}
+
+void CaptureWindowThread::setFrameRate(const int &frameRate)
+{
+    mFrameRate = frameRate;
+}
+
+void CaptureWindowThread::setRect(const int &x, const int &y, const int &width, const int &height)
+{
+    mX = x;
+    mY = y;
+    mW = width;
+    mH = height;
+}
+
+void CaptureWindowThread::setFollowMouseMode(const bool &value)
+{
+    mIsFollowMouseMode = value;
+}
+
+bool CaptureWindowThread::init(const int &width, const int &height)
+{
+    int y_size = 0;
+    int yuvSize = 0;
+
+    pFrameRGB = av_frame_alloc();
+    pFrameYUV = av_frame_alloc();
+
+    ///Convert the captured data to YUV420P.
+    img_convert_ctx = sws_getContext(width, height, AV_PIX_FMT_BGR24,
+                                     width, height, AV_PIX_FMT_YUV420P,
+                                     SWS_BICUBIC, nullptr, nullptr, nullptr);
+
+    y_size = width * height;
+//        yuvSize = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
+    yuvSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1);  //1-byte alignment gives the size closest to the real data size
+//        image_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 0);  //0-byte alignment returns 0
+//        image_buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 4);   //4-byte alignment yields a slightly larger size
+
+    ///In theory yuvSize = y_size * 3 / 2 here.
+    unsigned int numBytes = static_cast<unsigned int>(yuvSize);
+    outBufferYUV = static_cast<uint8_t *>(av_malloc(numBytes * sizeof(uint8_t)));
+//    avpicture_fill((AVPicture *) pFrameYUV, outBufferYUV, AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height);
+    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, outBufferYUV, AV_PIX_FMT_YUV420P, width, height, 1);
+
+    ///For BGR24 the buffer size is width * height * 3.
+    unsigned int numBytesRgb = av_image_get_buffer_size(AV_PIX_FMT_BGR24, width, height, 1);
+    outBufferRGB = static_cast<uint8_t *>(av_malloc(numBytesRgb * sizeof(uint8_t)));
+    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, outBufferRGB, AV_PIX_FMT_BGR24, width, height, 1);
+
+    {
+        ///Flip the output: the captured bottom-up RGB24 bitmap would otherwise convert upside down.
+        pFrameYUV->data[0] += pFrameYUV->linesize[0] * (height - 1);
+        pFrameYUV->linesize[0] *= -1;
+        pFrameYUV->data[1] += pFrameYUV->linesize[1] * (height / 2 - 1);
+        pFrameYUV->linesize[1] *= -1;
+        pFrameYUV->data[2] += pFrameYUV->linesize[2] * (height / 2 - 1);
+        pFrameYUV->linesize[2] *= -1;
+    }
+
+    return true;
+}
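+
+///The negative-linesize trick above makes sws_scale write the YUV planes bottom-up,
+///which cancels the bottom-up row order of the captured GDI DIB. The idea for a
+///single plane:
+///
+///    uint8_t *plane = buf + stride * (height - 1); //point at the last row
+///    int lstride    = -stride;                     //negative stride walks upwards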
+
+void CaptureWindowThread::deInit()
+{
+    if (pFrameRGB != nullptr)
+    {
+        av_free(pFrameRGB);
+    }
+
+    if (pFrameYUV != nullptr)
+    {
+        av_free(pFrameYUV);
+    }
+
+    if (outBufferYUV != nullptr)
+    {
+        av_free(outBufferYUV);
+    }
+
+    if (outBufferRGB != nullptr)
+    {
+        av_free(outBufferRGB);
+    }
+
+    if (img_convert_ctx != nullptr)
+    {
+        sws_freeContext(img_convert_ctx);
+    }
+
+    pFrameRGB = nullptr;
+    pFrameYUV = nullptr;
+
+    outBufferYUV = nullptr;
+    outBufferRGB = nullptr;
+    img_convert_ctx = nullptr;
+}
+
+void CaptureWindowThread::startRecord(std::function<void (VideoRawFramePtr videoFramePtr, void *param)> func,
+                                      void *param, const int64_t &startPts)
+{
+    mIsStop = false;
+
+    mCallBackFunc      = func;
+    mCallBackFuncParam = param;
+
+    mStartTime = 0;
+
+    if (startPts > 0)
+    {
+        mStartTime = MoudleConfig::getTimeStamp_MilliSecond() - startPts;
+    }
+qDebug()<<__FUNCTION__<<mIsStop<<mIsThreadRunning<<startPts;
+
+    mIsThreadRunning = true;
+
+    //spawn a new thread
+    std::thread([=](CaptureWindowThread *pointer)
+    {
+        pointer->run();
+
+    }, this).detach();
+
+}
+
+void CaptureWindowThread::pauseRecord()
+{
+    mIsPause = true;
+}
+
+void CaptureWindowThread::restoreRecord()
+{
+    mStartTime = MoudleConfig::getTimeStamp_MilliSecond() - mCurrentTime;
+    mIsPause = false;
+}
+
+void CaptureWindowThread::stopRecord(const bool &isBlock)
+{
+    qDebug()<<__FUNCTION__<<mIsStop<<mIsThreadRunning;
+
+    mIsStop = true;
+
+    if (isBlock)
+    {
+        while(mIsThreadRunning)
+        {
+            MoudleConfig::mSleep(10);
+        }
+    }
+
+qDebug()<<__FUNCTION__<<mIsStop<<mIsThreadRunning<<"finished!";
+}
+
+void CaptureWindowThread::run()
+{
+    mIsThreadRunning = true;
+qDebug()<<__FUNCTION__<<"starting...";
+    if (mStartTime <= 0)
+    {
+        mStartTime = MoudleConfig::getTimeStamp_MilliSecond();
+    }
+
+    int VideoWidth  = 0;
+    int VideoHeight = 0;
+
+    while(!mIsStop)
+    {
+        if ((MoudleConfig::getTimeStamp_MilliSecond() - mLastGetVideoTime) <= (1000 / mFrameRate))
+        {
+            MoudleConfig::mSleep(5);
+            continue;
+        }
+
+        if (mIsPause)
+        {
+            MoudleConfig::mSleep(10);
+            continue;
+        }
+
+        HWND hWnd = mHWnd;
+
+        int width = 0;
+        int height = 0;
+
+        if (hWnd != nullptr)
+        {
+            if (IsIconic(hWnd) || IsCoveredByOtherWindow(hWnd))
+            {
+                MoudleConfig::mSleep(100);
+                continue;
+            }
+
+            RECT rect = CaptureWindowThread::getWindowRect(hWnd);
+
+            width  = rect.right - rect.left;
+            height = rect.bottom - rect.top;
+
+            mX = rect.left;
+            mY = rect.top;
+            mW = width;
+            mH = height;
+
+    //fprintf(stderr, "record starting %d %d... \n", width, height);
+        }
+        else
+        {
+            width  = GetSystemMetrics(SM_CXVIRTUALSCREEN); /// total width of the virtual desktop
+            height = GetSystemMetrics(SM_CYVIRTUALSCREEN); /// total height of the virtual desktop
+
+            int xScreen = ::GetSystemMetrics(SM_XVIRTUALSCREEN); // desktop x origin, can be negative
+            int yScreen = ::GetSystemMetrics(SM_YVIRTUALSCREEN); // desktop y origin, can be negative
+//            int nScreenCount = ::GetSystemMetrics(SM_CMONITORS); ///number of monitors
+
+            if (mIsFollowMouseMode)
+            {
+                POINT point;
+                GetCursorPos(&point);            // cursor position in screen coordinates
+                ScreenToClient(hWnd, &point);    // convert it to window coordinates
+
+                int x = point.x - (mW / 2);
+                int y = point.y - (mH / 2);
+
+                if (x < xScreen) x = xScreen;
+                if (y < yScreen) y = yScreen;
+
+                if ((x + mW) > width)
+                {
+                    x = width - mW;
+                }
+
+                if ((y + mH) > height)
+                {
+                    y = height - mH;
+                }
+
+                mX = x;
+                mY = y;
+
+//qDebug()<<__FUNCTION__<<point.x<<point.y<<x<<y<<width<<height;
+            }
+        }
+
+        if (mW > 0)
+        {
+            width = mW;
+        }
+
+        if (mH > 0)
+        {
+            height = mH;
+        }
+
+        /// The width/height handed to the encoder must be even
+
+        ///The bitmap width must also be a multiple of 4, otherwise the rgb24 data read from it is corrupted
+        if ((width % 4) != 0)
+        {
+            width += 4 - (width % 4);
+        }
+
+        if ((height % 2) != 0)
+        {
+            height -= 1;
+        }
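+
+        /// Why these two fixes are needed: GDI pads each DIB scanline to a DWORD
+        /// boundary, so a 24bpp stride is ((width*3 + 3) & ~3); only when
+        /// width % 4 == 0 does stride == width*3, letting the memcpy below treat
+        /// Data as packed BGR24. YUV420P then subsamples chroma 2x2, hence the
+        /// even height as well.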
+
+        if (VideoWidth != width || VideoHeight != height)
+        {
+            deInit();
+
+            init(width, height);
+
+            VideoWidth = width;
+            VideoHeight = height;
+        }
+
+//fprintf(stderr, "record starting %d %d... \n", width, height);
+
+        {
+            HDC     hDC;
+            HDC     MemDC;
+            BYTE*   Data;
+            HBITMAP   hBmp;
+            BITMAPINFO   bi;
+
+            memset(&bi,   0,   sizeof(bi));
+            bi.bmiHeader.biSize     = sizeof(BITMAPINFO);
+            bi.bmiHeader.biWidth    = width;//GetSystemMetrics(SM_CXSCREEN);
+            bi.bmiHeader.biHeight   = height;//GetSystemMetrics(SM_CYSCREEN);
+            bi.bmiHeader.biPlanes   = 1;
+            bi.bmiHeader.biBitCount = 24;
+
+//            hDC   =   GetDC(hWnd);
+            hDC   =   GetDC(NULL); //capture the whole desktop only, then crop
+            MemDC =   CreateCompatibleDC(hDC);
+            hBmp  =   CreateDIBSection(MemDC,   &bi, DIB_RGB_COLORS,   (void**)&Data,   NULL,   0);
+            SelectObject(MemDC,   hBmp);
+            BitBlt(MemDC,   0,   0,   bi.bmiHeader.biWidth,   bi.bmiHeader.biHeight,hDC,   mX,   mY,   SRCCOPY);
+            ReleaseDC(NULL,   hDC);
+            DeleteDC(MemDC);
+
+            {
+
+                POINT point;
+                GetCursorPos(&point);            // cursor position in screen coordinates
+                ScreenToClient(hWnd, &point);    // convert it to window coordinates
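+                // (untested assumption: for a cropped capture region the cursor
+                //  should arguably be drawn at point.x - mX, point.y - mY, since
+                //  the bitmap origin is the crop origin, not the screen origin)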
+
+                HDC hBmpDC=CreateCompatibleDC(NULL); //create a compatible DC
+                HBITMAP hBmpOld = (HBITMAP)SelectObject(hBmpDC, hBmp); //select the captured bitmap into it
+
+                // Get information about the global cursor.
+                CURSORINFO ci;
+                ci.cbSize = sizeof(ci);
+                GetCursorInfo(&ci);
+
+                // Draw the cursor into the canvas.
+                DrawIcon(hBmpDC, point.x, point.y, ci.hCursor);
+
+//                LineTo(hBmpDC,100,100);  //draw a line
+        //        HBITMAP hBmpRet= (HBITMAP)SelectObject(hBmpDC,hBmpOld); //take the bitmap back after drawing
+
+                DeleteObject(hBmpOld);
+                DeleteDC(hBmpDC);
+            }
+
+//            fwrite(Data, 1, width * height * 3, fp);
+
+            mCurrentTime = MoudleConfig::getTimeStamp_MilliSecond() - mStartTime;
+//fprintf(stderr, "mCurrentTime %d... \n", mCurrentTime);
+            {
+                mLastGetVideoTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+                memcpy(outBufferRGB, Data, width*height*3);
+
+                /// Convert to YUV420P
+                /// Decoded data is not necessarily yuv420p (hardware decoding, for instance, yields yuv420sp), so everything is normalized to yuv420p here
+                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrameRGB->data, pFrameRGB->linesize, 0, height, pFrameYUV->data, pFrameYUV->linesize);
+
+                VideoRawFramePtr yuvFrame = std::make_shared<VideoRawFrame>();
+
+                yuvFrame->initBuffer(width, height, VideoRawFrame::FRAME_TYPE_YUV420P, mCurrentTime);
+                yuvFrame->setFramebuf(outBufferYUV);
+
+                if (mCallBackFunc != nullptr)
+                {
+                    mCallBackFunc(yuvFrame, mCallBackFuncParam);
+                }
+
+            }
+
+//            SaveBitmapToFile(hBmp, (char*)"out.bmp");
+//fwrite(Data, 1, width*height*3, fp);
+            DeleteObject(hBmp);
+        }
+    }
+
+    fprintf(stderr, "%s record stopping... \n", __FUNCTION__);
+
+    deInit();
+
+    fprintf(stderr, "%s record finished! \n", __FUNCTION__);
+
+    mIsPause = false;
+
+    mIsThreadRunning = false;
+
+    qDebug()<<__FUNCTION__<<"stopping...";
+}
+
+RECT CaptureWindowThread::getWindowRect(HWND hWnd)
+{
+    RECT outRect = RECT{0, 0, 0, 0};
+
+    RECT rect;
+    if (GetWindowRect(hWnd, &rect))
+    {
+
+        int x = 0;
+        int y = 0;
+        int w = 0;
+        int h = 0;
+
+        RECT rect2;
+        GetClientRect(hWnd, &rect2);
+
+        int width  = rect.right - rect.left;
+        int height = rect.bottom - rect.top;
+
+        int width2  = rect2.right - rect2.left;
+        int height2 = rect2.bottom - rect2.top;
+
+        x = rect.left;
+        y = rect.top;
+        w = rect.right - rect.left;
+        h = rect.bottom - rect.top;
+
+        int value = width - width2;
+
+        w -= value;
+        h -= (value/2);
+        x += (value/2);
+
+//    qDebug()<<__FUNCTION__<<x<<y<<w<<h<<value;
+
+        ///Round the width down to a multiple of 4 here, which simplifies later use
+        if ((w % 4) != 0)
+        {
+//            w += 4 - (w % 4);
+            w -= (w % 4);
+        }
+
+        outRect = RECT{x, y, x+w, y+h};
+    }
+
+    return outRect;
+}
+
+std::list<HWND> CaptureWindowThread::getCaptureWindowList()
+{
+    std::list<HWND> handleList;
+
+    HWND pWnd = first_window(INCLUDE_MINIMIZED); //get the first window handle
+
+    if (IsWindowAvailable(pWnd))
+        handleList.push_back(pWnd);
+
+    while(pWnd != nullptr)
+    {
+        pWnd = next_window(pWnd, INCLUDE_MINIMIZED);//get the next window handle
+
+        if (IsWindowAvailable(pWnd))
+            handleList.push_back(pWnd);
+    }
+
+    return handleList;
+
+}
+
+bool CaptureWindowThread::IsWindowAvailable(HWND hWnd)
+{
+    bool isAvailable = false;
+
+    if (IsMainWindow(hWnd))
+    {
+        char className[512] = {0};
+        GetClassNameA(hWnd, className, 512);
+
+        if (strcmp(className, "Windows.UI.Core.CoreWindow") == 0
+                || strcmp(className, "ApplicationFrameWindow") == 0)
+        {
+
+        }
+        else
+        {
+            isAvailable = true;
+        }
+    }
+
+    return isAvailable;
+}
+
+bool CaptureWindowThread::IsCoveredByOtherWindow(HWND hWnd)
+{
+    std::list<HWND> availableHandleList = getCaptureWindowList();
+
+    RECT rcTarget;
+    ::GetWindowRect(hWnd, &rcTarget);
+
+    bool isChild = (WS_CHILD == (::GetWindowLong(hWnd, GWL_STYLE) & WS_CHILD));
+
+    if (::GetDesktopWindow() == hWnd)
+        hWnd = ::GetWindow(::GetTopWindow(hWnd), GW_HWNDLAST);
+
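+    // Sketch of the walk below: GetNextWindow(..., GW_HWNDPREV) climbs the
+    // z-order toward the top, so every window visited sits above hWnd; the first
+    // capturable one whose rect intersects rcTarget means hWnd is covered.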
+    do{
+        HWND hCurWnd = hWnd;
+
+        while(NULL != (hWnd = ::GetNextWindow(hWnd, GW_HWNDPREV)))
+        {
+            if (std::find(availableHandleList.begin(), availableHandleList.end(), hWnd) == availableHandleList.end())
+            {
+                //not in the capturable-window list, skip it
+                continue;
+            }
+
+            if(IsWindowAvailable(hWnd))
+            {
+                RECT rcWnd;
+                ::GetWindowRect(hWnd, &rcWnd);
+
+                if(!((rcWnd.right < rcTarget.left) || (rcWnd.left > rcTarget.right) ||
+                    (rcWnd.bottom < rcTarget.top) || (rcWnd.top > rcTarget.bottom)))
+                {
+                    return true;
+                }
+            }
+        }
+
+        if(isChild)
+        {
+            hWnd = ::GetParent(hCurWnd);
+            isChild = hWnd ? (WS_CHILD == (::GetWindowLong(hWnd, GWL_STYLE) & WS_CHILD)) : false;
+        }
+        else
+        {
+            break;
+        }
+
+    }while(true);
+
+    return false;
+}

+ 95 - 0
module/ScreenRecorder/src/Media/Video/CaptureWindowThread.h

@@ -0,0 +1,95 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef CaptureWindowThread_H
+#define CaptureWindowThread_H
+
+#include <thread>
+#include <stdint.h>
+#include <functional>
+
+#include "VideoEncoder.h"
+
+extern "C"
+{
+    #include <libavcodec/avcodec.h>
+    #include <libavformat/avformat.h>
+    #include <libswscale/swscale.h>
+    #include <libavdevice/avdevice.h>
+
+    #include <libavutil/imgutils.h>
+}
+
+/**
+ * @brief The CaptureWindowThread class  captures the image of a given window via the Windows API
+ */
+
+class CaptureWindowThread
+{
+
+public:
+    explicit CaptureWindowThread();
+    ~CaptureWindowThread();
+
+    void setHWND(const HWND &hWnd);
+
+    void setQuantity(const int &value);
+    void setFrameRate(const int &frameRate);
+    void setRect(const int &x, const int &y, const int &width, const int &height);
+    void setFollowMouseMode(const bool &value);
+
+    void startRecord(std::function<void (VideoRawFramePtr videoFramePtr, void *param)> func = nullptr,
+                     void *param = nullptr, const int64_t &startPts = 0);
+    void pauseRecord();
+    void restoreRecord();
+    void stopRecord(const bool &isBlock = true);
+
+    static RECT getWindowRect(HWND hWnd);
+    static std::list<HWND> getCaptureWindowList(); //get the list of capturable windows
+    static bool IsWindowAvailable(HWND hWnd);
+    static bool IsCoveredByOtherWindow(HWND hWnd);
+
+protected:
+    void run();
+
+private:
+
+    HWND mHWnd;
+
+    std::list<HWND> mIgnorHandleList; //handles of covering windows to ignore
+
+    ///crop region
+    int mX;
+    int mY;
+    int mW;
+    int mH;
+
+    bool mIsFollowMouseMode; //follow-the-mouse mode
+
+    bool mIsStop;
+    bool mIsThreadRunning;
+
+    bool mIsPause;
+    int64_t mStartTime; //time the first frame was captured
+    int64_t mCurrentTime; //current timestamp
+
+    int64_t mLastGetVideoTime; //time the last frame was handed to the encoder (used to cap the frame rate)
+    int mFrameRate;
+
+    AVFrame	*pFrameRGB, *pFrameYUV;
+    SwsContext *img_convert_ctx;
+    uint8_t *outBufferYUV;
+    uint8_t *outBufferRGB;
+
+    ///Callback invoked when screen data has been captured
+    std::function<void (VideoRawFramePtr videoFramePtr, void *param)> mCallBackFunc; //callback function
+    void *mCallBackFuncParam; //user parameter for the callback
+
+    bool init(const int &width, const int &height);
+    void deInit();
+};
+
+#endif // CaptureWindowThread_H

+ 524 - 0
module/ScreenRecorder/src/Media/Video/GetVideoThread.cpp

@@ -0,0 +1,524 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "MoudleConfig.h"
+#include "GetVideoThread.h"
+
+//'1' Use Dshow
+//'0' Use VFW
+#define USE_DSHOW 0
+
+////Show Dshow Device
+//void show_dshow_device()
+//{
+//    AVFormatContext *pFormatCtx = avformat_alloc_context();
+//    AVDictionary* options = nullptr;
+//    av_dict_set(&options,"list_devices","true",0);
+//    AVInputFormat *iformat = av_find_input_format("dshow");
+//    printf("========Device Info=============\n");
+//    avformat_open_input(&pFormatCtx,"video=dummy",iformat,&options);
+//    printf("================================\n");
+//}
+
+////Show Dshow Device Option
+//void show_dshow_device_option()
+//{
+//    AVFormatContext *pFormatCtx = avformat_alloc_context();
+//    AVDictionary* options = nullptr;
+//    av_dict_set(&options,"list_options","true",0);
+//    AVInputFormat *iformat = av_find_input_format("dshow");
+//    printf("========Device Option Info======\n");
+//    avformat_open_input(&pFormatCtx,"video=Integrated Camera",iformat,&options);
+//    printf("================================\n");
+//}
+
+////Show VFW Device
+//void show_vfw_device()
+//{
+//    AVFormatContext *pFormatCtx = avformat_alloc_context();
+//    AVInputFormat *iformat = av_find_input_format("vfwcap");
+//    printf("========VFW Device Info======\n");
+//    avformat_open_input(&pFormatCtx,"list",iformat,nullptr);
+//    printf("=============================\n");
+//}
+
+////Show AVFoundation Device
+//void show_avfoundation_device()
+//{
+//    AVFormatContext *pFormatCtx = avformat_alloc_context();
+
+//    AVDictionary* options = nullptr;
+//    av_dict_set(&options,"list_devices","true",0);
+//    AVInputFormat *iformat = av_find_input_format("avfoundation");
+//    printf("==AVFoundation Device Info===\n");
+//    avformat_open_input(&pFormatCtx, "",iformat, &options);
+//    printf("=============================\n");
+//}
+
+GetVideoThread::GetVideoThread()
+{
+    mIsCloseCamera = true;
+    mIsStop = false;
+    mIsThreadRuning = false;
+    mIsReadingVideo = false;
+
+    pFormatCtx = nullptr;
+    out_buffer = nullptr;
+    out_buffer_rgb24 = nullptr;
+
+    pFrame = nullptr;
+    pFrameYUV = nullptr;
+    pFrameRGB = nullptr;
+
+    pCodecCtx = nullptr;
+
+    mCallBackFunc = nullptr;
+    mCallBackFuncParam = nullptr;
+
+    m_pause = false;
+
+    mCond = new Cond();
+}
+
+GetVideoThread::~GetVideoThread()
+{
+
+}
+
+GetVideoThread::ErroCode GetVideoThread::openCamera(const std::string &deviceName)
+{
+    mCond->Lock();
+
+    if (mDeviceName != deviceName)
+    {
+        closeCamera(true);
+    }
+
+    mDeviceName = deviceName;
+//    mIsCloseCamera = false;
+    ErroCode code = init(mDeviceName.c_str());
+
+    mIsCloseCamera = false;
+
+    mCond->Unlock();
+
+    return code;
+}
+
+void GetVideoThread::closeCamera(bool isBlock)
+{
+    mIsCloseCamera = true;
+
+    if (isBlock)
+    {
+        while(mIsReadingVideo)
+        {
+            MoudleConfig::mSleep(100);
+        }
+    }
+
+}
+
+GetVideoThread::ErroCode GetVideoThread::init(const std::string deviceName)
+{
+    if (pFormatCtx != nullptr)
+    {
+        return SUCCEED;
+    }
+
+    AVCodec			*pCodec = nullptr;
+
+    pFormatCtx = avformat_alloc_context();
+
+    ///Put the input into non-blocking mode.
+    pFormatCtx->flags |= AVFMT_FLAG_NONBLOCK;
+
+#if defined(WIN32)
+
+//    //Show Dshow Device
+//    show_dshow_device();
+//    //Show Device Options
+//    show_dshow_device_option();
+//    //Show VFW Options
+//    show_vfw_device();
+
+    AVInputFormat *ifmt = av_find_input_format("dshow"); //使用dshow
+
+    char deviceNameStr[512] = {0};
+    sprintf(deviceNameStr, "video=%s", deviceName.c_str());
+
+//    if(avformat_open_input(&pFormatCtx, "video=screen-capture-recorder", ifmt, nullptr)!=0)
+//    if(avformat_open_input(&pFormatCtx, "video=Techshino TCF242", ifmt, nullptr)!=0)
+    if(avformat_open_input(&pFormatCtx, deviceNameStr, ifmt, nullptr)!=0)
+    {
+//        fprintf(stderr, "Couldn't open input stream video.(无法打开输入流)\n");
+
+        avformat_close_input(&pFormatCtx);
+        avformat_free_context(pFormatCtx);
+
+        pFormatCtx = nullptr;
+
+        return VideoOpenFailed;
+    }
+#elif defined __linux
+//Linux
+//    AVInputFormat *ifmt=av_find_input_format("video4linux2");
+//    if(avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)!=0)
+//    {
+//        fprintf(stderr, "Couldn't open input stream.\n");
+//        return -1;
+//    }
+
+    AVDictionary* options = NULL;
+//    av_dict_set(&options,"list_devices","true", 0);
+    /* set frame per second */
+//    av_dict_set( &options,"framerate","30", 0);
+    av_dict_set( &options,"show_region","1", 0);
+//    av_dict_set( &options,"video_size","1240x480", 0);
+//    av_dict_set( &options, "preset", "medium", 0 );
+
+    /*
+    X11 video input device.
+    To enable this input device during configuration you need libxcb installed on your system. It will be automatically detected during configuration.
+    This device allows one to capture a region of an X11 display.
+    refer : https://www.ffmpeg.org/ffmpeg-devices.html#x11grab
+    */
+    AVInputFormat *ifmt = av_find_input_format("x11grab");
+    if(avformat_open_input(&pFormatCtx, ":0.0+10,250", ifmt, &options) != 0)
+//    if(avformat_open_input(&pFormatCtx, ":0.0", ifmt, &options) != 0)
+    {
+        fprintf(stderr, "\nerror in opening input device\n");
+        return VideoOpenFailed;
+    }
+#else
+    show_avfoundation_device();
+    //Mac
+    AVInputFormat *ifmt=av_find_input_format("avfoundation");
+    //Avfoundation
+    //[video]:[audio]
+    if(avformat_open_input(&pFormatCtx,"0",ifmt,NULL)!=0)
+    {
+        fprintf(stderr, "Couldn't open input stream.\n");
+        return VideoOpenFailed;
+    }
+#endif
+
+    videoindex=-1;
+    pCodecCtx = NULL;
+
+    for(i=0; i<pFormatCtx->nb_streams; i++)
+        if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO)
+        {
+            videoindex=i;
+            break;
+        }
+
+    if(videoindex==-1)
+    {
+        printf("Didn't find a video stream.(没有找到视频流)\n");
+        return VideoOpenFailed;
+    }
+
+    pCodecCtx = pFormatCtx->streams[videoindex]->codec;
+    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+
+    if(pCodec == NULL)
+    {
+        fprintf(stderr, "\n###\n video Codec not found. pCodecCtx->codec_id=%d %d \n", pCodecCtx->codec_id, AV_CODEC_ID_MJPEG);
+        return VideoDecoderOpenFailed;
+    }
+
+    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0)
+    {
+        printf("Could not open video codec.\n");
+        return VideoDecoderOpenFailed;
+    }
+
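+    // Some MJPEG capture devices do not report a pixel format until the first
+    // frame is decoded; defaulting to YUV422P (the usual MJPEG chroma layout)
+    // lets the sws contexts be created up front.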
+    if (pCodecCtx->pix_fmt == AV_PIX_FMT_NONE && pCodecCtx->codec_id == AV_CODEC_ID_MJPEG)
+    {
+        pCodecCtx->pix_fmt = AV_PIX_FMT_YUV422P;
+    }
+
+//    fprintf(stderr, "\n###\n video Codec not found. pCodecCtx->codec_id=%d %d \n", pCodecCtx->codec_id, AV_CODEC_ID_MJPEG);
+
+    pFrame = av_frame_alloc();
+    pFrameYUV = av_frame_alloc();
+    pFrameRGB = av_frame_alloc();
+
+    //***************
+//    int Screen_W = GetSystemMetrics(SM_CXSCREEN); //screen width and height
+//    int Screen_H = GetSystemMetrics(SM_CYSCREEN);
+
+    return SUCCEED;
+}
+
+void GetVideoThread::deInit()
+{
+    if (out_buffer)
+    {
+        av_free(out_buffer);
+        out_buffer = NULL;
+    }
+
+    if (out_buffer_rgb24)
+    {
+        av_free(out_buffer_rgb24);
+        out_buffer_rgb24 = NULL;
+    }
+
+    if (pFrame)
+    {
+        av_free(pFrame);
+        pFrame = NULL;
+    }
+
+    if (pFrameYUV)
+    {
+        av_free(pFrameYUV);
+        pFrameYUV = NULL;
+    }
+
+    if (pFrameRGB)
+    {
+        av_free(pFrameRGB);
+        pFrameRGB = NULL;
+    }
+
+    if (pCodecCtx)
+        avcodec_close(pCodecCtx);
+
+    if (pFormatCtx != nullptr)
+    {
+        avformat_close_input(&pFormatCtx);
+        avformat_free_context(pFormatCtx);
+    }
+}
+
+void GetVideoThread::startRecord(std::function<void (VideoRawFramePtr yuvFrame, VideoRawFramePtr rgbFrame, void *param)> func, void *param)
+{
+    mCallBackFunc = func;
+    mCallBackFuncParam = param;
+
+    mIsStop = false;
+    mIsThreadRuning = false;
+
+    //start the worker thread
+    std::thread([&](GetVideoThread *pointer)
+    {
+        pointer->run();
+
+    }, this).detach();
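+
+    // Note: as with CaptureWindowThread, the worker is detached; call
+    // stopRecord(true) before destroying this object so the loop has exited.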
+
+}
+
+void GetVideoThread::pauseRecord()
+{
+    m_pause = true;
+}
+
+void GetVideoThread::restoreRecord()
+{
+    m_getFirst = false;
+    m_pause = false;
+}
+
+void GetVideoThread::stopRecord(const bool &isBlock)
+{
+    mIsStop = true;
+
+    if (isBlock)
+    {
+        while(mIsThreadRuning)
+        {
+            MoudleConfig::mSleep(5);
+        }
+    }
+}
+
+void GetVideoThread::run()
+{
+    mIsThreadRuning = true;
+
+while(!mIsStop)
+{
+    if (mIsCloseCamera)
+    {
+        MoudleConfig::mSleep(1000);
+        continue;
+    }
+
+    mIsReadingVideo = true;
+
+    if (openCamera(mDeviceName) != SUCCEED)
+    {
+        MoudleConfig::mSleep(1000);
+        continue;
+    }
+
+    struct SwsContext *img_convert_ctx = NULL;
+    struct SwsContext *img_convert_ctx_rgb24 = NULL;
+
+    int y_size = 0;
+    int yuvSize = 0;
+
+    if (pCodecCtx)
+    {
+        y_size = pCodecCtx->width * pCodecCtx->height;
+        yuvSize = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
+        ///In theory size == y_size * 3 / 2 here
+
+        int numBytes = yuvSize;
+        out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
+        avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,pCodecCtx->width, pCodecCtx->height);
+
+        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
+                                         pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
+                                         SWS_BICUBIC, NULL, NULL, NULL);
+
+//int mjpegSize = av_image_get_buffer_size(pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height, 1);  //align=1 gives the size closest to the actual packed size
+//fprintf(stderr, "%s mjpegSize=%d \n", __FUNCTION__, mjpegSize);
+
+        int rgb24Size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);  //align=1 gives the size closest to the actual packed size
+    //    int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 0);  //align=0 yields a size of 0
+    //    int yuvSize = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 4);   //align=4 yields a slightly larger size
+
+        out_buffer_rgb24 = static_cast<uint8_t *>(av_malloc(static_cast<unsigned int>(rgb24Size) * sizeof(uint8_t)));
+        av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer_rgb24, AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1);
+
+        img_convert_ctx_rgb24 = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
+                                         pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB24,
+                                         SWS_BICUBIC, NULL, NULL, NULL);
+
+    }
+
+    AVPacket *packet = av_packet_alloc();  //av_malloc would leave the packet fields uninitialized
+
+    int64_t firstTime = MoudleConfig::getTimeStamp_MilliSecond();
+    m_getFirst = false;
+    int64_t timeIndex = 0;
+
+    mLastReadFailedTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+    while(!mIsStop)
+    {
+        if (mIsCloseCamera)
+        {
+            break;
+        }
+
+        if (av_read_frame(pFormatCtx, packet)<0)
+        {
+            ///If no data has been read for a whole second, treat it as a real failure.
+            if ((MoudleConfig::getTimeStamp_MilliSecond() - mLastReadFailedTime) > 1000)
+            {
+                fprintf(stderr, "camera read failed! %s\n", mDeviceName.c_str());
+                break;
+            }
+
+            MoudleConfig::mSleep(10);
+            continue;
+        }
+
+        mLastReadFailedTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+        if (m_pause)
+        {
+            av_packet_unref(packet);
+            MoudleConfig::mSleep(10);
+            continue;
+        }
+
+        if(packet->stream_index==videoindex)
+        {
+            int64_t time = 0;
+
+            if (m_getFirst)
+            {
+                int64_t secondTime = MoudleConfig::getTimeStamp_MilliSecond();
+                time = secondTime - firstTime + timeIndex;
+            }
+            else
+            {
+                firstTime = MoudleConfig::getTimeStamp_MilliSecond();
+                timeIndex = 0;
+                m_getFirst = true;
+            }
+
+            if (avcodec_send_packet(pCodecCtx, packet) != 0)
+            {
+               fprintf(stderr, "input AVPacket to decoder failed!\n");
+               av_packet_unref(packet);
+               continue;
+            }
+
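+            // Drain the decoder: one packet may produce zero or more frames, so
+            // loop until avcodec_receive_frame() stops returning 0 (EAGAIN/EOF).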
+            while (0 == avcodec_receive_frame(pCodecCtx, pFrame))
+            {
+//int64_t t1 = MoudleConfig::getTimeStamp_MilliSecond();
+
+                /// Convert to YUV420P
+                /// Decoded data is not necessarily yuv420p (hardware decoding, for instance, yields yuv420sp), so everything is normalized to yuv420p here
+                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
+
+//int64_t t2 = MoudleConfig::getTimeStamp_MilliSecond();
+
+                /// Convert to RGB24
+                sws_scale(img_convert_ctx_rgb24, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
+
+//int64_t t3 = MoudleConfig::getTimeStamp_MilliSecond();
+
+//fprintf(stderr, "%s %d %d %I64d %I64d \n",__FUNCTION__, pCodecCtx->pix_fmt, AV_PIX_FMT_YUV420P, t2-t1, t3-t2);
+
+                if (mCallBackFunc != nullptr)
+                {
+                    VideoRawFramePtr yuvFrame = std::make_shared<VideoRawFrame>();
+                    VideoRawFramePtr rgbFrame = std::make_shared<VideoRawFrame>();
+
+                    yuvFrame->initBuffer(pCodecCtx->width, pCodecCtx->height, VideoRawFrame::FRAME_TYPE_YUV420P);
+                    yuvFrame->setFramebuf(out_buffer);
+
+                    rgbFrame->initBuffer(pCodecCtx->width, pCodecCtx->height, VideoRawFrame::FRAME_TYPE_RGB24);
+                    rgbFrame->setFramebuf(out_buffer_rgb24);
+//FILE *fp = fopen("out.rgb24", "wb");
+//fwrite(out_buffer_rgb24, 1, av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1), fp);
+//fclose(fp);
+//fprintf(stderr, "%s %d %d %d \n", __FUNCTION__,
+//        av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height, 1),
+//        pCodecCtx->width,pCodecCtx->height);
+                    mCallBackFunc(yuvFrame, rgbFrame, mCallBackFuncParam);
+                }
+
+            }
+        }
+        else
+        {
+            fprintf(stderr, "other %d \n", packet->stream_index);
+        }
+        av_packet_unref(packet);
+
+    }
+
+    sws_freeContext(img_convert_ctx);
+    sws_freeContext(img_convert_ctx_rgb24);
+
+    av_packet_free(&packet);
+
+//    fprintf(stderr, "record stopping... \n");
+
+    m_pause = false;
+
+    deInit();
+
+    if (mIsCloseCamera)
+    {
+        mIsReadingVideo = false;
+    }
+}
+
+    fprintf(stderr, "record finished! \n");
+    mIsReadingVideo = false;
+    mIsThreadRuning = false;
+
+}
+

+ 90 - 0
module/ScreenRecorder/src/Media/Video/GetVideoThread.h

@@ -0,0 +1,90 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+#ifndef GetVideoThread_H
+#define GetVideoThread_H
+
+#include <thread>
+
+#include "VideoFrame/VideoRawFrame.h"
+#include "Mutex/Cond.h"
+
+extern "C"
+{
+#include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+#include <libavdevice/avdevice.h>
+#include <libavutil/imgutils.h>
+}
+
+/**
+ * @brief The GetVideoThread class  mainly responsible for capturing the video stream (screen or camera device)
+ */
+
+class GetVideoThread
+{
+
+public:
+    enum ErroCode
+    {
+        AudioOpenFailed = 0,
+        VideoOpenFailed,
+        AudioDecoderOpenFailed,
+        VideoDecoderOpenFailed,
+        SUCCEED
+    };
+
+    explicit GetVideoThread();
+    ~GetVideoThread();
+
+    GetVideoThread::ErroCode openCamera(const std::string &deviceName);
+    void closeCamera(bool isBlock = false);
+
+    void startRecord(std::function<void (VideoRawFramePtr yuvFrame, VideoRawFramePtr rgbFrame, void *param)> func = nullptr, void *param = nullptr);
+    void pauseRecord();
+    void restoreRecord();
+    void stopRecord(const bool &isBlock);
+
+protected:
+    void run();
+
+private:
+
+    std::function<void (VideoRawFramePtr yuvFrame, VideoRawFramePtr rgbFrame, void *param)> mCallBackFunc; //callback function
+    void *mCallBackFuncParam; //user parameter for the callback
+
+
+    AVFormatContext	*pFormatCtx;
+    int				i, videoindex;
+    AVCodecContext	*pCodecCtx;
+
+    AVFrame	*pFrame,*pFrameYUV,*pFrameRGB;
+    uint8_t *out_buffer;
+    uint8_t *out_buffer_rgb24;
+
+    bool m_pause;
+
+    bool mIsCloseCamera;
+    bool mIsStop;
+    bool mIsThreadRuning;
+    bool mIsReadingVideo;
+
+    bool m_getFirst; //whether the time base has been obtained
+    int64_t mLastReadFailedTime; //time of the last failed video read; the input is non-blocking, so only a sustained period without data counts as a real failure
+
+    Cond *mCond;
+    std::string mDeviceName;
+
+    /**
+     * @brief init  initialize and open the recording device
+     * @param videoDevName
+     * @return
+     */
+    ErroCode init(const std::string deviceName);
+    void deInit();
+
+};
+
+#endif // GetVideoThread_H

+ 529 - 0
module/ScreenRecorder/src/Media/Video/VideoEncoder.cpp

@@ -0,0 +1,529 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "MoudleConfig.h"
+#include "VideoEncoder.h"
+
+//#define ENCODE_H265
+
+VideoEncoder::VideoEncoder()
+{
+    mBitRate = 450000;
+    mQuality = 10;
+    mCurrentQuality = 0;
+
+    mFrameRate = 15;
+
+    pCodecCtx = nullptr;
+    pCodec    = nullptr;
+
+    picture_buf = nullptr;
+    picture     = nullptr;
+
+    mCallBackFunc      = nullptr;
+    mCallBackFuncParam = nullptr;
+
+    mPacket = {0};
+}
+
+VideoEncoder::~VideoEncoder()
+{
+
+}
+
+void VideoEncoder::setQuality(int value)
+{
+    mBitRate = 450000 + (value - 5) * 50000;
+    mQuality = value;
+}
+
+void VideoEncoder::setWidth(int w, int h)
+{
+    mWidth = w;
+    mHeight = h;
+}
+
+std::list<VideoEncodedFramePtr> VideoEncoder::encode(VideoRawFramePtr yuvFramePtr, const int64_t &framePts)
+{
+    std::list<VideoEncodedFramePtr> videoFrameList;
+
+do
+{
+    if (yuvFramePtr != nullptr && yuvFramePtr->getBuffer() != nullptr)
+    {
+        bool isNeedOpenEncoder = false;
+
+        if (pCodecCtx == nullptr || pCodecCtx->width != yuvFramePtr->getWidth() || pCodecCtx->height != yuvFramePtr->getHeight()
+                || (mCurrentQuality != mQuality))
+        {
+            mWidth  = yuvFramePtr->getWidth();
+            mHeight = yuvFramePtr->getHeight();
+            isNeedOpenEncoder = true;
+        }
+
+        if (isNeedOpenEncoder)
+        {
+            closeEncoder();
+            openEncoder();
+        }
+
+//            picture->data[0] = node.buffer;     // 亮度Y
+//            picture->data[1] = node.buffer + y_size;  // U
+//            picture->data[2] = node.buffer + y_size*5/4; // V
+
+        memcpy(picture_buf, yuvFramePtr->getBuffer(), yuvFramePtr->getSize());
+
+        picture->pts = framePts;
+
+        int ret = avcodec_send_frame(pCodecCtx, picture);
+        if (ret != 0)
+        {
+            char buff[128]={0};
+            av_strerror(ret, buff, 128);
+
+            fprintf(stderr, "Error sending a frame for encoding! (%s)\n", buff);
+            break;
+        }
+
+        AVPacket pkt = {0};
+
+        while (0 == avcodec_receive_packet(pCodecCtx, &pkt))
+        {
+            bool isKeyFrame = pkt.flags & AV_PKT_FLAG_KEY; //判断是否关键帧
+//fprintf(stderr, "%s : %d x %d %d %d %d %d\n", __FUNCTION__ , mWidth, mHeight, yuvFramePtr->getWidth(), yuvFramePtr->getHeight(), isKeyFrame, pkt.size);
+            #ifdef ENCODE_H265
+                T_NALU_TYPE naluType = T_NALU_H265;
+            #else
+                T_NALU_TYPE naluType = T_NALU_H264;
+            #endif
+
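+            // If the encoder was opened with AV_CODEC_FLAG_GLOBAL_HEADER, the
+            // SPS/PPS live in extradata rather than in-band; prepending them to
+            // every keyframe below keeps the stream self-contained (with in-band
+            // headers extradata_size is 0 and the prepend is a no-op).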
+            int h264BufferSize = pkt.size;
+            if (isKeyFrame)
+            {
+                h264BufferSize += pCodecCtx->extradata_size;
+            }
+
+            uint8_t *h264Buffer = (uint8_t*)malloc(h264BufferSize);
+            int bufferIndex = 0;
+
+            if (isKeyFrame)
+            {
+                memcpy(h264Buffer + bufferIndex, pCodecCtx->extradata, pCodecCtx->extradata_size);
+                bufferIndex += pCodecCtx->extradata_size;
+            }
+
+            memcpy(h264Buffer + bufferIndex, pkt.data, pkt.size);
+
+            VideoEncodedFramePtr framePtr = std::make_shared<VideoEncodedFrame>();
+            framePtr->setNalu(h264Buffer, h264BufferSize, false, naluType, yuvFramePtr->getPts());
+            framePtr->setIsKeyFrame(isKeyFrame);
+
+            videoFrameList.push_back(framePtr);
+
+            if (mCallBackFunc != nullptr)
+            {
+                mCallBackFunc(framePtr, mCallBackFuncParam);
+            }
+
+#if 0
+    FILE *h264Fp = fopen("out.h264","wb");
+    if (isKeyFrame)
+    {
+        fwrite(c->extradata, 1, c->extradata_size, h264Fp);
+    }
+    fwrite(pkt.data, 1, pkt.size, h264Fp);
+    fclose(h264Fp);
+#endif
+            av_packet_unref(&mPacket);
+            av_packet_move_ref(&mPacket, &pkt);
+        }
+    }
+
+}while(0);
+
+    return videoFrameList;
+
+}
+
+AVDictionary *VideoEncoder::setEncoderParam(const AVCodecID &codec_id)
+{
+    int in_w = mWidth;
+    int in_h = mHeight;//width and height
+
+    pCodecCtx->codec_id = codec_id;
+    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
+    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+    pCodecCtx->width = in_w;
+    pCodecCtx->height = in_h;
+    pCodecCtx->time_base.num = 1;
+    pCodecCtx->time_base.den = mFrameRate;//frame rate (frames per second)
+    pCodecCtx->bit_rate = mBitRate; //bit rate (adjusting this changes the quality of the encoded video)
+    pCodecCtx->gop_size = mFrameRate * 2;
+    //H264 has many more tunable parameters; explore them as needed
+    ////    pCodecCtx->me_range = 16;
+    ////    pCodecCtx->max_qdiff = 4;
+    ////    pCodecCtx->qcompress = 0.6;
+    ////    pCodecCtx->qmin = 10;
+    ////    pCodecCtx->qmax = 51;
+    //    pCodecCtx->me_range = 16;
+    //    pCodecCtx->max_qdiff = 1;
+    //    pCodecCtx->qcompress = 0.6;
+    //    pCodecCtx->qmin = 10;
+    //    pCodecCtx->qmax = 51;
+    //    //Optional Param
+    //    pCodecCtx->max_b_frames=3;
+
+    //    Common video rate-control modes are abr (average bitrate), crf (constant rate factor) and cqp (constant QP).
+    //    ffmpeg's AVCodecContext only exposes the bitrate directly; the other modes have no dedicated field.
+    //    ffmpeg picks the rate-control mode as follows:
+    //    1. if AVCodecContext::bit_rate is set, abr is used;
+    //    2. if bit_rate is not set, crf is used by default, with crf = 23 (like a qp value, it expresses quality);
+    //    3. to choose yourself, set options on the codec's priv_data via av_opt_set. The three modes are shown below:
+
+#if 0
+    ///average bitrate
+    //the target bitrate; obviously, the higher it is, the larger the video
+    pCodecCtx->bit_rate = mBitRate;
+
+#elif 1
+    ///constant rate factor (crf)
+//    crf ranges from 0 to 51: 0 is lossless, 23 is the default and 51 likely the worst; the lower the number, the better the picture. Subjectively 18~28 is a reasonable range, and 18 is often considered visually lossless (output looks practically identical to the input), though technically it is still lossy compression.
+//    Adding 6 to the crf roughly halves the output bitrate; subtracting 6 doubles it. Usually pick the largest crf that still gives acceptable quality: if the output looks good, try a larger value; if it looks bad, try a smaller one.
+    av_opt_set(pCodecCtx->priv_data, "crf", "31.000", AV_OPT_SEARCH_CHILDREN);
+
+#else
+    ///qp works the same way as crf
+//    av_opt_set(pCodecCtx->priv_data, "qp", "31.000",AV_OPT_SEARCH_CHILDREN);
+#endif
+
+#if TEST_OPT
+
+    av_opt_set(pCodecCtx,"b","400000",0);		//bitrate
+    //Another method
+    //av_opt_set_int(pCodecCtx,"b",400000,0);	//bitrate
+
+    av_opt_set(pCodecCtx,"time_base","1/25",0);	//time_base
+    av_opt_set(pCodecCtx,"bf","5",0);			//max b frame
+    av_opt_set(pCodecCtx,"g","25",0);			//gop
+    av_opt_set(pCodecCtx,"qmin","10",0);		//qmin/qmax
+    av_opt_set(pCodecCtx,"qmax","51",0);
+
+#else
+
+//    pCodecCtx->time_base.num = 1;
+//    pCodecCtx->time_base.den = 25;
+//    pCodecCtx->max_b_frames=5;
+//    pCodecCtx->bit_rate = 400000;
+//    pCodecCtx->gop_size=25;
+//    pCodecCtx->qmin = 10;
+//    pCodecCtx->qmax = 51;
+
+    /* time base: this is the fundamental unit of time (in seconds) in terms
+       of which frame timestamps are represented. for fixed-fps content,
+       timebase should be 1/framerate and timestamp increments should be
+       identically 1. */
+//    ost->st->time_base = { 1, m_videoFrameRate };
+//    pCodecCtx->time_base       = ost->st->time_base;
+//    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
+//    pCodecCtx->gop_size = m_videoFrameRate * 2; ///I-frame interval
+
+//    //allowed bitrate tolerance; the larger the value, the smaller the video
+//    c->bit_rate_tolerance = mBitRate;
+
+//    //H264 has many more tunable parameters; explore them as needed
+////    pCodecCtx->me_range = 16;
+////    pCodecCtx->max_qdiff = 1;
+//    c->qcompress = 0.85;
+//    c->qmin = 18;
+
+    pCodecCtx->qmin = 28;
+    pCodecCtx->qmax = 38;
+//    pCodecCtx->qmin = 16+(10-mQuality)*2;
+//    pCodecCtx->qmax = 31+(10-mQuality)*2;
+
+////    //control the bitrate via the qmin/qmax ratio (1 = apply locally, 0 = globally)
+////    c->rc_qsquish = 0;
+
+////    //q floats between qmin and qmax; qblur (range 0.0~1.0) controls how much
+////    //that fluctuation is smoothed, 0 meaning no smoothing at all
+////    c->qblur = 1.0;
+
+//std::cout<<"mBitRate"<<mBitRate<<m_videoFrameRate<<std::endl;
+
+////    ///b_frame_strategy
+////    ///if true, the encoder decides when B-frames are needed, up to the configured maximum;
+////    ///if false, the maximum number of B-frames is always used.
+////    c->b_frame_strategy = 1;
+////    c->max_b_frames = 5;
+
+#endif
+
+    // some formats want stream headers to be separate
+    // (note: AVFMT_GLOBALHEADER is a muxer flag; the usual form tests the output
+    //  format's oformat->flags, which is not available here, so as written this
+    //  check never fires and the encoder keeps in-band headers)
+    if (pCodecCtx->flags & AVFMT_GLOBALHEADER)
+        pCodecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
+
+    //encoder preset
+    AVDictionary *param = 0;
+    if(pCodecCtx->codec_id == AV_CODEC_ID_H264)
+    {
+//        //H.264
+//        //av_dict_set(&param, "preset", "slow", 0);
+//        av_dict_set(&param, "preset", "superfast", 0);
+//        av_dict_set(&param, "tune", "zerolatency", 0);  //for real-time encoding
+
+        ///the settings below also work when the hardware encoders are enabled
+        av_dict_set(&param, "preset", "medium", 0);
+    //        av_dict_set(&param, "preset", "superfast", 0);
+        av_dict_set(&param, "tune", "zerolatency", 0); //for real-time encoding
+        av_dict_set(&param, "profile", "main", 0);
+    }
+    else if(pCodecCtx->codec_id == AV_CODEC_ID_H265)
+    {
+        av_dict_set(&param, "preset", "ultrafast", 0);
+        av_dict_set(&param, "tune", "zerolatency", 0);
+        av_dict_set(&param, "profile", "main", 0);
+    }
+
+    return param;
+
+}
+
+///open the video encoder
+bool VideoEncoder::openVideoEncoder(const AVCodecID &codec_id)
+{
+    bool isSucceed = false;
+    bool isHardWareEncoderOpened = false;
+
+//    bool mIsSupportHardEncoder = true;
+//    if (mIsSupportHardEncoder)
+    {
+        ///try to open the cuvid (NVIDIA) encoder first
+        isHardWareEncoderOpened = openHardEncoder_Cuvid(codec_id);
+
+        ///cuvid failed, so try qsv next
+        if (!isHardWareEncoderOpened)
+        {
+            isHardWareEncoderOpened = openHardEncoder_Qsv(codec_id);
+        }
+    }
+
+    //opening a hardware encoder failed, fall back to software encoding
+    if (!isHardWareEncoderOpened)
+    {
+        isSucceed = openSoftEncoder(codec_id);
+    }
+    else
+    {
+        isSucceed = true;
+    }
+
+    return isSucceed;
+}
+
+///open the hardware encoder (NVIDIA)
+bool VideoEncoder::openHardEncoder_Cuvid(const AVCodecID &codec_id)
+{
+    bool isSucceed = false;
+
+    fprintf(stderr,"open hardware encoder cuvid...\n");
+
+    ///look up the hardware encoder by name
+    char hardWareDecoderName[32] = {0};
+
+    if (AV_CODEC_ID_H264 == codec_id)
+    {
+        sprintf(hardWareDecoderName, "h264_nvenc");
+    }
+    else if (AV_CODEC_ID_HEVC == codec_id)
+    {
+        sprintf(hardWareDecoderName, "hevc_nvenc");
+    }
+
+    if (strlen(hardWareDecoderName) > 0)
+    {
+        pCodec = avcodec_find_encoder_by_name(hardWareDecoderName);
+
+        if (pCodec != nullptr)
+        {
+            pCodecCtx = avcodec_alloc_context3(pCodec);
+
+            AVDictionary *param = setEncoderParam(codec_id);
+
+            ///open the encoder
+            if (avcodec_open2(pCodecCtx, pCodec, &param) < 0)
+            {
+                avcodec_close(pCodecCtx);
+                avcodec_free_context(&pCodecCtx);
+                pCodecCtx = nullptr;
+                isSucceed = false;
+
+                fprintf(stderr,"Could not open codec %s\n",hardWareDecoderName);
+            }
+            else
+            {
+                isSucceed = true;
+                fprintf(stderr,"open codec %s succeed! %d %d\n",hardWareDecoderName,pCodec->id,pCodecCtx->codec_id);
+            }
+        }
+        else
+        {
+            fprintf(stderr,"Codec %s not found.\n",hardWareDecoderName);
+        }
+    }
+
+    return isSucceed;
+}
+
+///open the hardware encoder (Intel QSV)
+bool VideoEncoder::openHardEncoder_Qsv(const AVCodecID &codec_id)
+{
+    bool isSucceed = false;
+
+    fprintf(stderr,"open hardware encoder cuvid...\n");
+
+    ///look up the hardware encoder by name
+    char hardWareDecoderName[32] = {0};
+
+    if (AV_CODEC_ID_H264 == codec_id)
+    {
+        sprintf(hardWareDecoderName, "h264_qsv");
+    }
+    else if (AV_CODEC_ID_HEVC == codec_id)
+    {
+        sprintf(hardWareDecoderName, "hevc_qsv");
+    }
+
+    if (strlen(hardWareDecoderName) > 0)
+    {
+        pCodec = avcodec_find_encoder_by_name(hardWareDecoderName);
+
+        if (pCodec != NULL)
+        {
+            pCodecCtx = avcodec_alloc_context3(pCodec);
+
+            AVDictionary *param = setEncoderParam(codec_id);
+
+            ///open the encoder
+            if (avcodec_open2(pCodecCtx, pCodec, &param) < 0)
+            {
+                avcodec_close(pCodecCtx);
+                avcodec_free_context(&pCodecCtx);
+                pCodecCtx = nullptr;
+                isSucceed = false;
+
+                fprintf(stderr,"Could not open codec %s\n",hardWareDecoderName);
+            }
+            else
+            {
+                isSucceed = true;
+                fprintf(stderr,"open codec %s succeed! %d %d\n",hardWareDecoderName,pCodec->id,pCodecCtx->codec_id);
+            }
+        }
+        else
+        {
+            fprintf(stderr,"Codec %s not found.\n",hardWareDecoderName);
+        }
+    }
+
+    return isSucceed;
+}
+
+///open the software encoder
+bool VideoEncoder::openSoftEncoder(const AVCodecID &codec_id)
+{
+    bool isSucceed = false;
+
+    fprintf(stderr,"open software encoder... \n");
+
+    pCodec = avcodec_find_encoder(codec_id);
+
+    if (pCodec == nullptr)
+    {
+        fprintf(stderr, "Codec not found.\n");
+        isSucceed = false;
+    }
+    else
+    {
+        pCodecCtx = avcodec_alloc_context3(pCodec);
+        pCodecCtx->thread_count = 8;
+
+        AVDictionary *param = setEncoderParam(codec_id);
+
+        ///open the encoder (the original single-line `if (int ret = ... && ret < 0)` read ret before it was assigned)
+        int ret = avcodec_open2(pCodecCtx, pCodec, &param);
+        if (ret < 0)
+        {
+            avcodec_close(pCodecCtx);
+            avcodec_free_context(&pCodecCtx);
+            pCodecCtx = nullptr;
+            isSucceed = false;
+
+            char str[128] = {0};
+            av_strerror(ret, str, 128);
+
+            fprintf(stderr,"Could not open codec. %s\n", str);
+        }
+        else
+        {
+            isSucceed = true;
+        }
+    }
+
+    return isSucceed;
+}
+
+bool VideoEncoder::openEncoder()
+{
+
+#ifdef ENCODE_H265
+    AVCodecID codec_id = AV_CODEC_ID_H265;
+#else
+    AVCodecID codec_id = AV_CODEC_ID_H264;
+#endif
+
+    bool isSucceed = openVideoEncoder(codec_id);
+
+    if (isSucceed)
+    {
+        picture = av_frame_alloc();
+
+        picture->format = pCodecCtx->pix_fmt;
+        picture->width  = pCodecCtx->width;
+        picture->height = pCodecCtx->height;
+
+        int size = avpicture_get_size(pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height); //compute the buffer size needed
+        picture_buf = (uint8_t *)av_malloc(size); //allocate it
+        avpicture_fill((AVPicture *)picture, picture_buf, pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height);
+
+        mCurrentQuality = mQuality;
+
+        fprintf(stderr, " 编码器打开成功!pCodecCtx->pix_fmt=%d %d %d %d %d %d mCurrentQuality=%d\n",
+                pCodecCtx->pix_fmt, AV_PIX_FMT_YUV420P, AV_PIX_FMT_NV12, pCodecCtx->width, pCodecCtx->height, size, mCurrentQuality);
+    }
+
+    return isSucceed;
+}
+
+bool VideoEncoder::closeEncoder()
+{
+    if (picture != nullptr)
+        av_free(picture);
+
+    if (picture_buf != nullptr)
+        av_free(picture_buf);
+
+    if (pCodecCtx != nullptr)
+        avcodec_close(pCodecCtx);
+
+    av_packet_unref(&mPacket);
+
+    pCodecCtx = nullptr;
+    pCodec = nullptr;
+
+    picture_buf = nullptr;
+    picture = nullptr;
+
+    return true;
+}

+ 73 - 0
module/ScreenRecorder/src/Media/Video/VideoEncoder.h

@@ -0,0 +1,73 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef VIDEOENCORDER_H
+#define VIDEOENCORDER_H
+
+#include <list>
+#include <thread>
+#include "Mutex/Cond.h"
+
+#include "VideoFrame/VideoRawFrame.h"
+#include "VideoFrame/VideoEncodedFrame.h"
+
+extern "C"
+{
+    #include "libavcodec/avcodec.h"
+    #include "libavformat/avformat.h"
+    #include "libswscale/swscale.h"
+    #include "libavdevice/avdevice.h"
+}
+
+/// H.264 encoding thread. Encoding is kept separate from capture, in its own thread, because encoding is also time-consuming
+class VideoEncoder
+{
+public:
+    explicit VideoEncoder();
+    ~VideoEncoder();
+
+    void setWidth(int w, int h);//set the encoded image size (must match the input yuv image and must be even)
+    void setFrameRate(int value){mFrameRate = value;}
+    void setQuality(int value);// set the encode quality, 0~10, 10 being the best
+
+    bool openEncoder(); //open the encoder
+    bool closeEncoder(); //close the encoder
+
+    std::list<VideoEncodedFramePtr> encode(VideoRawFramePtr yuvFramePtr, const int64_t &framePts);
+
+    AVPacket *getLastEncodePacket(){return &mPacket;}
+
+private:
+
+    AVCodecContext* pCodecCtx;
+    AVCodec* pCodec;
+
+    uint8_t* picture_buf;
+    AVFrame* picture;
+
+    AVPacket mPacket;
+
+    int mBitRate; //video bitRate
+    int mQuality;
+    int mCurrentQuality; //quality value the currently open encoder was configured with
+
+    int mWidth;
+    int mHeight;
+    int mFrameRate;
+
+    AVDictionary *setEncoderParam(const AVCodecID &codec_id); //set the encoder parameters
+
+    bool openVideoEncoder(const AVCodecID &codec_id); //open the video encoder
+    bool openHardEncoder_Cuvid(const AVCodecID &codec_id); //open the hardware encoder (NVIDIA)
+    bool openHardEncoder_Qsv(const AVCodecID &codec_id);   //open the hardware encoder (Intel)
+    bool openSoftEncoder(const AVCodecID &codec_id);//open the software encoder
+
+    ///Callback (delivers data to the owner once encoding finishes)
+    std::function<void (VideoEncodedFramePtr videoFramePtr, void *param)> mCallBackFunc; //callback function
+    void *mCallBackFuncParam; //user parameter for the callback
+};
+
+#endif // VIDEOENCORDER_H

+ 24 - 0
module/ScreenRecorder/src/Media/Video/VideoFileInfoTypes.h

@@ -0,0 +1,24 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef VideoFileInfoTypes_H
+#define VideoFileInfoTypes_H
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+#include <functional>
+
+struct VideoFileInfo
+{
+    std::string filePath;
+    int64_t length; //duration
+    int width;
+    int height;
+};
+
+#endif // VideoFileInfoTypes_H

+ 375 - 269
src/video/savevideofile.cpp → module/ScreenRecorder/src/Media/Video/VideoFileWriter.cpp

@@ -1,9 +1,22 @@
-#include "savevideofile.h"
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "VideoFileWriter.h"
 #include "AppConfig.h"
 
 #include <QFileInfo>
 #include <QDir>
 
+#include <thread>
+
+#include "MoudleConfig.h"
+
+#include <QString>
+#include "AppConfig.h"
+
 //double videoPts = 0.0;
 //static void log_packet(const AVFormatContext *fmt_ctx, const AVPacket *pkt)
 //{
@@ -49,19 +62,19 @@
 *  AAC data.
 *
 *  Note the packetLen must count in the ADTS header itself !!! .
-*×¢Ò⣬ÕâÀïµÄpacketLen²ÎÊýΪraw aac Packet Len + 7; 7 bytes adts header
+*Note: the packetLen parameter here is the raw AAC packet length + 7, i.e. it includes the 7-byte ADTS header
 **/
-void addADTStoPacket(uint8_t* packet, int packetLen)
+static void addADTStoPacket(uint8_t* packet, int packetLen)
 {
-   int profile = 2;  //AAC LC£¬MediaCodecInfo.CodecProfileLevel.AACObjectLC;
-   int freqIdx = 4;  //32K, ¼ûºóÃæ×¢ÊÍavpriv_mpeg4audio_sample_ratesÖÐ32000¶ÔÓ¦µÄÊý×éϱ꣬À´×ÔffmpegÔ´Âë
-   int chanCfg = 2;  //¼ûºóÃæ×¢ÊÍchannel_configuration£¬SteroË«ÉùµÀÁ¢ÌåÉù
+   int profile = 2;  //AAC LC,MediaCodecInfo.CodecProfileLevel.AACObjectLC;
+   int freqIdx = 4;  //32K; index of 32000 in the avpriv_mpeg4audio_sample_rates array commented below, taken from the ffmpeg source
+   int chanCfg = 2;  //see the channel_configuration comment below: stereo, two channels
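+   /* 7-byte ADTS header written below (standard bit layout, for reference):
+    *   AAAAAAAA AAAABCCD EEFFFFGH HHIJKLMM MMMMMMMM MMMOOOOO OOOOOOPP
+    *   A=syncword 0xFFF, B=MPEG version, C=layer, D=protection absent,
+    *   E=profile-1, F=freqIdx, H=chanCfg, M=frame length incl. header
+    *   (packetLen), O=buffer fullness (0x7FF), P=AAC frames - 1 */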
 
    /*int avpriv_mpeg4audio_sample_rates[] = {
        96000, 88200, 64000, 48000, 44100, 32000,
                24000, 22050, 16000, 12000, 11025, 8000, 7350
    };
-   channel_configuration: ±íʾÉùµÀÊýchanCfg
+   channel_configuration: chanCfg is the channel count
    0: Defined in AOT Specifc Config
    1: 1 channel: front-center
    2: 2 channels: front-left, front-right
@@ -83,210 +96,193 @@ void addADTStoPacket(uint8_t* packet, int packetLen)
    packet[6] = (uint8_t)0xFC;
 }
 
-SaveVideoFileThread::SaveVideoFileThread()
+VideoFileWriter::VideoFileWriter()
 {
-    isStop = false;
+    mIsStop = false;
+    mIsThreadRunning = false;
 
     m_containsVideo = true;
     m_containsAudio = true;
 
-    videoDataQueneHead = NULL;
-    videoDataQueneTail = NULL;
+    mCondAudio = new Cond();
+    mCondVideo = new Cond();
 
-    videoBufferCount = 0;
+    mVideoFrameRate = 15;
 
-    m_videoFrameRate = 15;
+    mLastVideoFrame = nullptr;
 
-    lastVideoNode = NULL;
+    WIDTH  = 0;
+    HEIGHT = 0;
 
     mBitRate = 450000;
+    mQuality = 10;
 
     audio_pts = 0;
     video_pts = 0;
 
+    mLastFileVideoPts = 0;
+
+    mIsUseMuteAudio   = false;
+    mLastGetAudioTime = 0;
+
+    mVideoEncoder = new VideoEncoder();
 }
 
-SaveVideoFileThread::~SaveVideoFileThread()
+VideoFileWriter::~VideoFileWriter()
 {
 
 }
 
-void SaveVideoFileThread::setContainsVideo(bool value)
+void VideoFileWriter::setContainsVideo(bool value)
 {
     m_containsVideo = value;
 }
 
-void SaveVideoFileThread::setContainsAudio(bool value)
+void VideoFileWriter::setContainsAudio(bool value)
 {
     m_containsAudio = value;
 }
 
-void SaveVideoFileThread::setVideoFrameRate(int value)
+void VideoFileWriter::setVideoFrameRate(int value)
 {
-    m_videoFrameRate = value;
+    mVideoFrameRate = value;
 }
 
-void SaveVideoFileThread::setFileName(QString filePath)
+void VideoFileWriter::setFileName(const std::string &filePath)
 {
     mFilePath = filePath;
+//    mFilePath = QString::fromStdString(filePath);
 }
 
-void SaveVideoFileThread::setQuantity(int value)
+std::list<VideoFileInfo> VideoFileWriter::getVideoFileList()
+{
+    std::list<VideoFileInfo> list = mVideoFileList;
+    mVideoFileList.clear();
+    return list;
+}
+
+void VideoFileWriter::setQuality(const int &value)
 {
     mBitRate = 450000 + (value - 5) * 50000;
+    mQuality = value;
+
+    mVideoEncoder->setQuality(value);
 }
 
-void SaveVideoFileThread::videoDataQuene_Input(uint8_t * buffer, int size, int64_t time)
+void VideoFileWriter::inputYuvFrame(VideoRawFramePtr yuvFrame)
 {
-//    qDebug()<<"void SaveVideoFileThread::videoDataQuene_Input(uint8_t * buffer,int size,long time)"<<time;
-    BufferDataNode * node = (BufferDataNode*)malloc(sizeof(BufferDataNode));
-    node->bufferSize = size;
-    node->next = NULL;
-    node->time = time;
+    mCondVideo->Lock();
+    mVideoFrameList.push_back(yuvFrame);
+    mCondVideo->Unlock();
+//qDebug()<<__FUNCTION__<<mVideoFrameList.size()<<mPcmFrameList.size();
+    mCondVideo->Signal();
+}
 
-    node->buffer = buffer;
-//    node->buffer = (uint8_t *)malloc(size);
-//    memcpy(node->buffer,buffer,size);
+VideoRawFramePtr VideoFileWriter::readYuvFrame(const int64_t &time)
+{
+    VideoRawFramePtr yuvFrame = nullptr;
 
-    mVideoMutex.lock();
+    mCondVideo->Lock();
 
-    if (videoDataQueneHead == NULL)
-    {
-        videoDataQueneHead = node;
-    }
-    else
+    if (!mVideoFrameList.empty())
     {
-        videoDataQueneTail->next = node;
-    }
-
-    videoDataQueneTail = node;
+        VideoRawFramePtr tmpFrame = mVideoFrameList.front();
 
-    videoBufferCount++;
+        if (time >= tmpFrame->getPts())
+        {
+            /// The requested time is later than the first frame in the queue:
+            /// walk forward to the closest frame, dropping the frames passed over.
 
-    mVideoMutex.unlock();
-//qDebug()<<__FUNCTION__<<videoBufferCount<<time;
-    if (videoBufferCount >= 30)
-    {
-        QString logStr = QString("!!!!!!!!!! encode too slow! count=%1")
-                    .arg(videoBufferCount);
-        AppConfig::WriteLog(logStr);
-    }
+            while(1)
+            {
+                std::list<VideoRawFramePtr>::iterator iter_fist = mVideoFrameList.begin();
+                if (iter_fist == mVideoFrameList.end()) break;
 
-}
+                VideoRawFramePtr framePtr = *iter_fist;
 
-BufferDataNode *SaveVideoFileThread::videoDataQuene_get(int64_t time)
-{
-    BufferDataNode * node = NULL;
+                std::list<VideoRawFramePtr>::iterator iter_second = ++iter_fist;
 
-    mVideoMutex.lock();
+                /// Fewer than 2 frames left in the queue: bail out and wait until there are at least 2.
+                if (iter_second == mVideoFrameList.end())
+                {
+                    yuvFrame = nullptr;
+                    break;
+                }
 
-    if (videoDataQueneHead != NULL)
-    {
-        node = videoDataQueneHead;
+                VideoRawFramePtr framePtr2 = *iter_second;
 
-//qDebug()<<"111:"<<time<<node->time<<videoBufferCount;
-        if (time >= node->time)
-        {
-//            qDebug()<<"000";
-            if (videoDataQueneHead->next != NULL)
-            {
-//                qDebug()<<"111";
-                while(node != NULL)
+                /// Found it: stop searching.
+                if (time < framePtr2->getPts())
                 {
-//                    qDebug()<<"222";
-                    if (node->next == NULL)
-                    {
-                        videoDataQueneHead = node;
-                        node = NULL;
-                        break;
-                    }
-//qDebug()<<"333"<<time << node->next->time;
-                    if (time < node->next->time)
-                    {
-                        break;
-                    }
-
-                    BufferDataNode * tmp = node;
-//qDebug()<<"222:"<<node->time<<time;
-                    node = node->next;
-                    videoBufferCount--;
-                    free(tmp->buffer);
-                    free(tmp);
+                    yuvFrame = framePtr;
+                    mVideoFrameList.pop_front();
+                    break;
                 }
-            }
-            else
-            {
-                node = NULL;
+
+                /// The first frame does not qualify: drop it and test the next one.
+//                mVideoFrameList.pop_front();
+                mVideoFrameList.erase(iter_fist);
             }
         }
-        else
-        {
-            node = lastVideoNode;
-        }
-
-        if (videoDataQueneTail == node)
+        else if (time == 0)
         {
-            videoDataQueneTail = NULL;
+            yuvFrame = tmpFrame; /// time == 0: just take the first frame in the queue
+            mVideoFrameList.pop_front();
         }
-
-        if (node != NULL && node != lastVideoNode)
+        else
         {
-            videoDataQueneHead = node->next;
-            videoBufferCount--;
+//            qDebug()<<__FUNCTION__<<time<<"use last frame";
+            /// The requested time is earlier than the first frame in the queue: reuse the previous frame.
+            yuvFrame = mLastVideoFrame;
         }
-
     }
-//    qDebug()<<__FUNCTION__<<videoBufferCount<<node;
-    mVideoMutex.unlock();
 
-    return node;
+    mCondVideo->Unlock();
+
+    return yuvFrame;
 }
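A worked example of the selection policy above (the pts values are made up):

    // Queue pts: [0, 66, 133]
    // readYuvFrame(70) -> drops the pts=0 frame, returns the pts=66 frame
    //                     (133 is the first frame later than 70, so 66 is closest from below)
    // readYuvFrame(0)  -> returns the queue head directly
    // a request earlier than the queue head -> mLastVideoFrame is reused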
 
-void SaveVideoFileThread::audioDataQuene_Input(const uint8_t *buffer, const int &size)
+void VideoFileWriter::clearYuvFrame()
 {
-    BufferDataNode  node;
-//    node->buffer = buffer;
-    node.bufferSize = size;
-    node.next = NULL;
-
-    node.buffer = (uint8_t*)buffer;
-//    node->buffer = (uint8_t *)malloc(size);
-//    memcpy(node->buffer,buffer,size);
-
-    mAudioMutex.lock();
-
-    mAudioDataList.append(node);
-
-//qDebug()<<__FUNCTION__<<audioBufferCount<<size;
-    mAudioMutex.unlock();
-
+    mCondVideo->Lock();
+    mVideoFrameList.clear();
+    mCondVideo->Unlock();
 }
 
-bool SaveVideoFileThread::audioDataQuene_get(BufferDataNode &node)
+void VideoFileWriter::inputPcmFrame(PCMFramePtr pcmFrame)
 {
-    bool isSucceed = false;
+    mCondAudio->Lock();
+    mPcmFrameList.push_back(pcmFrame);
+    mCondAudio->Unlock();
+    mCondAudio->Signal();
+}
 
-    mAudioMutex.lock();
+PCMFramePtr VideoFileWriter::readPcmFrame()
+{
+    PCMFramePtr frame = nullptr;
 
-    if (!mAudioDataList.isEmpty())
+    mCondAudio->Lock();
+    if (!mPcmFrameList.empty())
     {
-        node = mAudioDataList.takeFirst();
-
-        isSucceed = true;
-
-//    qDebug()<<__FUNCTION__<<mAudioDataList.size();
+        frame = mPcmFrameList.front();
+        mPcmFrameList.pop_front();
     }
+    mCondAudio->Unlock();
 
-    mAudioMutex.unlock();
-
-    return isSucceed;
+    return frame;
 }
 
+void VideoFileWriter::clearPcmFrame()
+{
+    mCondAudio->Lock();
+    mPcmFrameList.clear();
+    mCondAudio->Unlock();
+}
 
 /*
  * add an audio output stream
  */
-void SaveVideoFileThread::add_audio_stream(OutputStream *ost, AVFormatContext *oc,
+void VideoFileWriter::add_audio_stream(OutputStream *ost, AVFormatContext *oc,
                                                 AVCodec **codec,
                                                 enum AVCodecID codec_id)
 {
@@ -317,8 +313,8 @@ void SaveVideoFileThread::add_audio_stream(OutputStream *ost, AVFormatContext *o
         exit(1);
     }
 
+    /// First query the sample formats the AAC encoder supports;
+    /// on this build it turned out to be AV_SAMPLE_FMT_FLTP.
     const enum AVSampleFormat *p = aCodec->sample_fmts;
     fprintf(stderr, "aac encoder sample format is: %s \n",av_get_sample_fmt_name(*p));
 
@@ -333,7 +329,7 @@ void SaveVideoFileThread::add_audio_stream(OutputStream *ost, AVFormatContext *o
 //    aCodecCtx->channels       = av_get_channel_layout_nb_channels(aCodecCtx->channel_layout);
 //    aCodecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
 
+//    aCodecCtx->profile=FF_PROFILE_AAC_LOW; // (see an AAC format overview)
 //    aCodecCtx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
 
 //    aCodecCtx->bit_rate = 64000;
@@ -367,7 +363,7 @@ void SaveVideoFileThread::add_audio_stream(OutputStream *ost, AVFormatContext *o
 
 }
 
-void SaveVideoFileThread::open_audio(AVFormatContext *oc, AVCodec *codec, OutputStream *ost)
+void VideoFileWriter::open_audio(AVFormatContext *oc, AVCodec *codec, OutputStream *ost)
 {
     AVCodecContext *aCodecCtx = ost->enc;
 
@@ -384,12 +380,12 @@ void SaveVideoFileThread::open_audio(AVFormatContext *oc, AVCodec *codec, Output
     ost->frameBuffer     = (uint8_t *)av_malloc(mONEFrameSize);
     ost->frameBufferSize = mONEFrameSize;
 
+    /// This is required: it sets the number of samples carried by this frame.
+    int oneChannelBufferSize = mONEFrameSize / aCodecCtx->channels; // bytes of data per channel
+    int nb_samplesize = oneChannelBufferSize / av_get_bytes_per_sample(aCodecCtx->sample_fmt); // samples per channel
     ost->frame->nb_samples = nb_samplesize;
 
+    /// Either of these two calls works:
 //    avcodec_fill_audio_frame(ost->frame, aCodecCtx->channels, aCodecCtx->sample_fmt,(const uint8_t*)ost->frameBuffer, mONEFrameSize, 0);
     av_samples_fill_arrays(ost->frame->data, ost->frame->linesize, ost->frameBuffer, aCodecCtx->channels, ost->frame->nb_samples, aCodecCtx->sample_fmt, 0);
 
@@ -410,7 +406,7 @@ void SaveVideoFileThread::open_audio(AVFormatContext *oc, AVCodec *codec, Output
  * return 1 when encoding is finished, 0 otherwise
  */
 //static int write_audio_frame(AVFormatContext *oc, OutputStream *ost)
-bool SaveVideoFileThread::write_audio_frame(AVFormatContext *oc, OutputStream *ost)
+bool VideoFileWriter::write_audio_frame(AVFormatContext *oc, OutputStream *ost)
 {
     AVCodecContext *aCodecCtx = ost->enc;
 
@@ -419,24 +415,37 @@ bool SaveVideoFileThread::write_audio_frame(AVFormatContext *oc, OutputStream *o
 
     AVPacket *packet = &pkt;
 
-    AVFrame *aFrame;
+    AVFrame *aFrame = nullptr;
 
-    BufferDataNode node;
 
-    if (audioDataQuene_get(node))
-    {
-        aFrame = ost->frame;
-
-        memcpy(ost->frameBuffer, node.buffer, node.bufferSize);
+    PCMFramePtr pcmFrame = readPcmFrame();
 
-        free(node.buffer);
+    if (pcmFrame != nullptr)
+    {
+        memset(ost->frameBuffer, 0x0, ost->frameBufferSize);
+        memcpy(ost->frameBuffer, pcmFrame->getBuffer(), pcmFrame->getSize());
+
+        aFrame = ost->frame;
         aFrame->pts = ost->next_pts;
         ost->next_pts  += aFrame->nb_samples;
+
+        mLastGetAudioTime = av_gettime();
     }
     else
     {
-        return false;
+        /// No input audio for over 3 seconds: write silence to the file instead.
+        if ((av_gettime() - mLastGetAudioTime) >= 3000000)
+        {
+            memset(ost->frameBuffer, 0x0, ost->frameBufferSize);
+
+            aFrame = ost->frame;
+            aFrame->pts = ost->next_pts;
+            ost->next_pts += aFrame->nb_samples;
+        }
+        else
+        {
+            return false;
+        }
     }
 
     if (aFrame)
@@ -473,7 +482,7 @@ bool SaveVideoFileThread::write_audio_frame(AVFormatContext *oc, OutputStream *o
             return false;
         }
 
+#if 0 /// dump to a raw .aac file
         uint8_t * aac_buf = (uint8_t *)malloc(packet->size+7);
         addADTStoPacket(aac_buf, 7+packet->size);
         memcpy(aac_buf+7, packet->data, packet->size);
@@ -488,8 +497,8 @@ bool SaveVideoFileThread::write_audio_frame(AVFormatContext *oc, OutputStream *o
 
 //        audio_pts = pkt.pts;
 
+        /// The MP4 time base is not 1/1000, so convert the pts to milliseconds for display and arithmetic.
+        /// The converted pts is for display only; the pts written to the file is unchanged.
         audio_pts = av_rescale_q(pkt.pts, ost->st->time_base, {1, 1000});
 
         /* Write the compressed frame to the media file. */
@@ -512,7 +521,7 @@ bool SaveVideoFileThread::write_audio_frame(AVFormatContext *oc, OutputStream *o
 
 }
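av_gettime() returns microseconds, so the silence fallback above amounts to (a sketch; the variable names are illustrative):

    int64_t idle_us = av_gettime() - mLastGetAudioTime;
    bool useSilence = (idle_us >= 3000000);  // 3 s without input audio
    // Silent frames keep the audio pts advancing so audio and video stay muxed in step.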
 
-void SaveVideoFileThread::close_audio(AVFormatContext *oc, OutputStream *ost)
+void VideoFileWriter::close_audio(AVFormatContext *oc, OutputStream *ost)
 {
     avcodec_free_context(&ost->enc);
     av_frame_free(&ost->frame);
@@ -529,7 +538,7 @@ void SaveVideoFileThread::close_audio(AVFormatContext *oc, OutputStream *ost)
 
 
 /* add a video output stream */
-void SaveVideoFileThread::add_video_stream(OutputStream *ost, AVFormatContext *oc,
+void VideoFileWriter::add_video_stream(OutputStream *ost, AVFormatContext *oc,
                        AVCodec **codec,
                        enum AVCodecID codec_id)
 {
@@ -571,31 +580,31 @@ qDebug()<<__FUNCTION__<<c<<c->codec<<c->codec_id<<codec_id;
 //    c->time_base.den = m_videoFrameRate;
 //    c->time_base.num = 1;
 //    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
-    c->gop_size = m_videoFrameRate;
+    c->gop_size = mVideoFrameRate;
     c->pix_fmt = AV_PIX_FMT_YUV420P;
 
+//    Common rate-control modes for video encoders are ABR (average bitrate), CRF (constant rate factor) and CQP (constant quantizer).
+//    ffmpeg's AVCodecContext exposes the bitrate explicitly but provides no direct knobs for the other modes.
+//    Rate control in ffmpeg breaks down as follows:
+//    1. If AVCodecContext::bit_rate is set, ABR is used;
+//    2. If bit_rate is not set, CRF is used with a default of 23 (the value is similar to a qp value and likewise reflects video quality);
+//    3. To pick the mode yourself, set the AVCodecContext priv_data options via av_opt_set. The three modes are shown below:
 
     c->bit_rate = mBitRate;
 
 #if 0
-    ///ƽ¾ùÂëÂÊ
-    //Ä¿±êµÄÂëÂÊ£¬¼´²ÉÑùµÄÂëÂÊ£»ÏÔÈ»£¬²ÉÑùÂëÂÊÔ½´ó£¬ÊÓÆµ´óСԽ´ó
+    ///平��率
+    //目标的�率,�采样的�率;显然,采样�率越大,视频大�越大
     c->bit_rate = mBitRate;
 
 #elif 1
+    /// Constant rate factor (CRF)
+//    The quantizer scale runs 0~51: 0 is lossless, 23 is the default, 51 is the worst. Lower numbers mean better quality. Subjectively, 18~28 is a sensible range; 18 is often considered visually lossless, its output nearly indistinguishable from the input, though technically still lossy.
+//    Adding 6 to the CRF roughly halves the bitrate; subtracting 6 doubles it. Usually you pick the largest CRF that still looks acceptable: if the output looks good, try a larger value; if it looks bad, go lower.
 //    av_opt_set(c->priv_data, "crf", "31.000", AV_OPT_SEARCH_CHILDREN);
 
 #else
+    /// qp behaves like crf
 //    av_opt_set(c->priv_data, "qp", "31.000",AV_OPT_SEARCH_CHILDREN);
 #endif
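Put together, the three modes discussed above look like this (a sketch; the CRF/QP strings are illustrative):

    // 1. ABR: set a target average bitrate.
    c->bit_rate = 450000;
    // 2. CRF: leave bit_rate unset and choose a rate factor (x264 private option).
    av_opt_set(c->priv_data, "crf", "23", AV_OPT_SEARCH_CHILDREN);
    // 3. CQP: pin the quantizer instead.
    av_opt_set(c->priv_data, "qp", "23", AV_OPT_SEARCH_CHILDREN);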
 
@@ -625,33 +634,35 @@ qDebug()<<__FUNCTION__<<c<<c->codec<<c->codec_id<<codec_id;
        of which frame timestamps are represented. for fixed-fps content,
        timebase should be 1/framerate and timestamp increments should be
        identically 1. */
-    ost->st->time_base = { 1, m_videoFrameRate };
+    ost->st->time_base = { 1, mVideoFrameRate };
     c->time_base       = ost->st->time_base;
 //    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
-    c->gop_size = m_videoFrameRate * 2; /// I-frame interval
+    c->gop_size = mVideoFrameRate * 2; /// I-frame interval
 
+//    // Allowed bitrate tolerance; the larger the value, the smaller the video
 //    c->bit_rate_tolerance = mBitRate;
 
+//    // H264 exposes many more tunables; explore as needed
 ////    pCodecCtx->me_range = 16;
 ////    pCodecCtx->max_qdiff = 1;
 //    c->qcompress = 0.85;
-    c->qmin = 16;
-    c->qmax = 31;
 
+
+    c->qmin = 16+(10-mQuality)*2;
+    c->qmax = 31+(10-mQuality)*2;
+
+////    // Control the bitrate via the qmin/qmax ratio (1 = apply locally, 0 = globally)
 ////    c->rc_qsquish = 0;
 
-////    //ÒòΪÎÒÃǵÄÁ¿»¯ÏµÊýqÊÇÔÚqminºÍqmaxÖ®¼ä¸¡¶¯µÄ£¬
-////    //qblur±íʾÕâÖÖ¸¡¶¯±ä»¯µÄ±ä»¯³Ì¶È£¬È¡Öµ·¶Î§0.0¡«1.0£¬È¡0±íʾ²»Ï÷¼õ
+////    //因为我们的�化系数q是在qmin和qmax之间浮动的,
+////    //qblur表示这�浮动�化的�化程度,�值范围0.0~1.0,�0表示�削�
 ////    c->qblur = 1.0;
 
 //std::cout<<"mBitRate"<<mBitRate<<m_videoFrameRate;
 
 ////    ///b_frame_strategy
-////    ///Èç¹ûΪtrue£¬Ôò×Ô¶¯¾ö¶¨Ê²Ã´Ê±ºòÐèÒª²åÈëBÖ¡£¬×î¸ß´ïµ½ÉèÖõÄ×î´óBÖ¡Êý¡£
-////    ///Èç¹ûÉèÖÃΪfalse,ÄÇô×î´óµÄBÖ¡Êý±»Ê¹Óá£
+////    ///如果为true,则自动决定什么时候需��入B帧,最高达到设置的最大B帧数。
+////    ///如果设置为false,那么最大的B帧数被使用。
 ////    c->b_frame_strategy = 1;
 ////    c->max_b_frames = 5;
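A quick check on the qmin/qmax mapping introduced above (assuming mQuality runs 1..10):

    // mQuality = 10 -> qmin = 16, qmax = 31 (finest quantization)
    // mQuality =  5 -> qmin = 26, qmax = 41
    // mQuality =  1 -> qmin = 34, qmax = 49 (coarsest)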
 
@@ -694,7 +705,7 @@ static AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
     return picture;
 }
 
-void SaveVideoFileThread::open_video(AVFormatContext *oc, AVCodec *codec, OutputStream *ost)
+void VideoFileWriter::open_video(AVFormatContext *oc, AVCodec *codec, OutputStream *ost)
 {
     AVCodecContext *c = ost->enc;
 
@@ -703,15 +714,18 @@ void SaveVideoFileThread::open_video(AVFormatContext *oc, AVCodec *codec, Output
     //H.264
     //av_dict_set(&param, "preset", "slow", 0);
     av_dict_set(&param, "preset", "superfast", 0);
+    av_dict_set(&param, "tune", "zerolatency", 0);  // real-time (zero-latency) encoding
 
     c->thread_count = 16;
     c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
 
 qDebug()<<__FUNCTION__<<"333";
     int ret = 0;
-    if (ret = avcodec_open2(c, codec,&param) < 0){
-      qDebug()<<("Failed to open video encoder!\n")<<ret;
+    if ((ret = avcodec_open2(c, codec, &param)) < 0)
+    {
+        char errstr[AV_ERROR_MAX_STRING_SIZE] = {0};
+        av_make_error_string(errstr, AV_ERROR_MAX_STRING_SIZE, ret);
+        fprintf(stderr, "Failed to open video encoder!: %s \n", errstr);
       exit(1);
     }
 qDebug()<<__FUNCTION__<<"333"<<c->pix_fmt<<AV_PIX_FMT_YUV420P;
@@ -769,72 +783,60 @@ qDebug()<<__FUNCTION__<<"333"<<c->pix_fmt<<AV_PIX_FMT_YUV420P;
 
 }
 
-bool SaveVideoFileThread::write_video_frame(AVFormatContext *oc, OutputStream *ost, double time)
+bool VideoFileWriter::write_video_frame(AVFormatContext *oc, OutputStream *ost, int64_t time, bool &isNeedNewFile)
 {
-    int out_size, ret = 0;
+    isNeedNewFile = false;
+
+    int ret = 0;
     AVCodecContext *c;
-    int got_packet = 0;
 
     c = ost->enc;
-//qDebug()<<__FUNCTION__<<"0000"<<time;
-    BufferDataNode *node = videoDataQuene_get(time);
 
-    if (node == NULL)
+    VideoRawFramePtr yuvFrame = readYuvFrame(time);
+
+    if (yuvFrame == nullptr)
     {
-//        qDebug()<<__FUNCTION__<<"0000 000"<<time;
         return false;
     }
     else
     {
-        if (node != lastVideoNode)
-        {
-            if (lastVideoNode != NULL)
-            {
-                free(lastVideoNode->buffer);
-                free(lastVideoNode);
-            }
-
-            lastVideoNode = node;
-        }
-
-        memcpy(ost->frameBuffer, node->buffer, node->bufferSize);
-
-// Not sure why, but neither of the two approaches below works
-//        int y_size = c->width * c->height;
-//        memcpy(ost->frame->data[0], node->buffer, y_size * 3 / 2);
-
-//        memcpy(ost->frame->data[0], node->buffer, y_size);
-//        memcpy(ost->frame->data[1], node->buffer+ y_size, y_size*5/4);
-//        memcpy(ost->frame->data[2], node->buffer+ y_size*5/4, y_size*5/4);
-//qDebug()<<__FUNCTION__<<"000 1111";
+        mLastVideoFrame = yuvFrame;
 
         ost->frame->pts = ost->next_pts++;
+    }
 
+    /// When the resolution changes, start a new video file.
+    if (yuvFrame->getWidth() != WIDTH || yuvFrame->getHeight() != HEIGHT)
+    {
+qDebug()<<__FUNCTION__<<"new file:"<<time<<yuvFrame->getWidth()<<yuvFrame->getHeight()<<WIDTH<<HEIGHT;
+//        this->setWidth(yuvFrame->getWidth(), yuvFrame->getHeight());
+        WIDTH = yuvFrame->getWidth();
+        HEIGHT = yuvFrame->getHeight();
+        isNeedNewFile = true;
+        return false;
     }
-//qDebug()<<__FUNCTION__<<"1111";
 
-    AVPacket pkt = { 0 };
-//    av_init_packet(&pkt);
+    std::list<VideoEncodedFramePtr> encodedVideoFrameList = mVideoEncoder->encode(mLastVideoFrame, ost->frame->pts);
 
-    /* encode the image */
-    out_size = avcodec_encode_video2(c, &pkt, ost->frame, &got_packet);
+    if (encodedVideoFrameList.size() > 1)
+        qDebug()<<__FUNCTION__<<"1111"<<time<<encodedVideoFrameList.size();
 
-    if (got_packet)
+    if (encodedVideoFrameList.size() > 0)
     {
-//qDebug()<<__FUNCTION__<<"111"<<ost->frame->pts<<pkt.pts<<c->time_base.num<<c->time_base.den<<ost->st->time_base.den<<ost->st->time_base.num;
+        AVPacket *pkt = mVideoEncoder->getLastEncodePacket();
+
         /* rescale output packet timestamp values from codec to stream timebase */
-        av_packet_rescale_ts(&pkt, c->time_base, ost->st->time_base);
-        pkt.stream_index = ost->st->index;
-//qDebug()<<__FUNCTION__<<"222"<<ost->frame->pts<<pkt.pts<<time<<node->time;
+        av_packet_rescale_ts(pkt, c->time_base, ost->st->time_base);
+        pkt->stream_index = ost->st->index;
 
-//        video_pts = pkt.pts;
+        video_pts = pkt->pts;
 
-        video_pts = av_rescale_q(pkt.pts, ost->st->time_base, {1, 1000});
+        /// The MP4 time base is not 1/1000, so convert the pts to milliseconds for display and arithmetic.
+        /// The converted pts is for display only; the pts written to the file is unchanged.
+        video_pts = av_rescale_q(pkt->pts, ost->st->time_base, {1, 1000});
 
         /* Write the compressed frame to the media file. */
-        ret = av_interleaved_write_frame(oc, &pkt);
+        ret = av_interleaved_write_frame(oc, pkt);
         if (ret < 0)
         {
             char errstr[AV_ERROR_MAX_STRING_SIZE] = {0};
@@ -845,14 +847,22 @@ bool SaveVideoFileThread::write_video_frame(AVFormatContext *oc, OutputStream *o
             AppConfig::WriteLog(logStr);
         }
 
-        av_packet_unref(&pkt);
+//        av_packet_unref(&pkt);
+    }
+
+    for (VideoEncodedFramePtr framePtr : encodedVideoFrameList)
+    {
+        if (mCallBackFunc != nullptr)
+        {
+            mCallBackFunc(framePtr, mCallBackFuncParam);
+        }
     }
 
     return true;
 
 }
 
-void SaveVideoFileThread::close_video(AVFormatContext *oc, OutputStream *ost)
+void VideoFileWriter::close_video(AVFormatContext *oc, OutputStream *ost)
 {
     avcodec_free_context(&ost->enc);
     av_frame_free(&ost->frame);
@@ -868,54 +878,86 @@ void SaveVideoFileThread::close_video(AVFormatContext *oc, OutputStream *ost)
 
 }
 
-int64_t SaveVideoFileThread::getVideoPts()
+int64_t VideoFileWriter::getVideoPts()
 {
-    return video_pts;
+    return video_pts + mLastFileVideoPts;
 }
 
-int64_t SaveVideoFileThread::getAudioPts()
+int64_t VideoFileWriter::getAudioPts()
 {
     return audio_pts;
 }
 
-void SaveVideoFileThread::run()
+void VideoFileWriter::sig_StartWriteFile(const std::string & filePath)
 {
-    int writeFileIndex = 1;
+    if (AppConfig::gIsDebugMode)
+    {
+        AppConfig::WriteLog(QString("%1 startWriteFile %2...\n").arg(__FUNCTION__).arg(QString::fromStdString(filePath)));
+    }
+}
+
+void VideoFileWriter::sig_StopWriteFile(const std::string & filePath)
+{
+    if (AppConfig::gIsDebugMode)
+    {
+        AppConfig::WriteLog(QString("%1 stopWriteFile %2...\n").arg(__FUNCTION__).arg(QString::fromStdString(filePath)));
+    }
+}
+
+void VideoFileWriter::run()
+{
+    mIsThreadRunning = true;
+
+    mLastFileVideoPts = 0;
+    mLastVideoFrame = nullptr;
+
+    int writeFileIndex = 0;
+
+    mVideoFileList.clear();
 
 while(1)
 {
-    if (isStop)
+    if (mIsStop)
     {
         break;
     }
 
     if (WIDTH <= 0 || HEIGHT <= 0)
     {
-        msleep(100);
+        MoudleConfig::mSleep(100);
         continue;
     }
 
-    QString filePath;
+    int currentVideoWidth = WIDTH;
+    int currentVideoHeight = HEIGHT;
+
+    std::string filePath;
 
-    if (writeFileIndex > 1)
+    if (writeFileIndex > 0)
     {
-        QFileInfo fileInfo(mFilePath);
+        QFileInfo fileInfo(QString::fromStdString(mFilePath));
+
+//        filePath = QString("%1/%2_%3.%4")
+//                .arg(fileInfo.absoluteDir().path())
+//                .arg(fileInfo.baseName())
+//                .arg(writeFileIndex)
+//                .arg(fileInfo.suffix());
 
         filePath = QString("%1/%2_%3.%4")
                 .arg(fileInfo.absoluteDir().path())
-                .arg(fileInfo.baseName())
+                .arg(fileInfo.fileName())
                 .arg(writeFileIndex)
-                .arg(fileInfo.suffix());
+                .arg(fileInfo.suffix()).toStdString();
+
     }
     else
     {
         filePath = mFilePath;
     }
 
-    emit sig_StartWriteFile(filePath);
+    sig_StartWriteFile(filePath);
 
-    char filename[1280] = {0};
-    strcpy(filename, filePath.toLocal8Bit());
+    const char *filename = filePath.c_str();
     writeFileIndex++;
 
     OutputStream video_st = { 0 }, audio_st = { 0 };
@@ -938,10 +980,10 @@ while(1)
     {
         fprintf(stderr,"Could not deduce output format from file extension: using MPEG.\n");
 
-        QString logStr = QString("!!!!!!!!!! Could not deduce output format from file extension ... %1").arg(filePath);
-        AppConfig::WriteLog(logStr);
+//        QString logStr = QString("!!!!!!!!!! Could not deduce output format from file extension ... %1").arg(filePath);
+//        AppConfig::WriteLog(logStr);
 
-        msleep(1000);
+        MoudleConfig::mSleep(1000);
 
         continue;
     }
@@ -950,7 +992,7 @@ while(1)
 
     if (m_containsVideo)
     {
-        qDebug()<<fmt->video_codec;
+//        qDebug()<<__FUNCTION__<<"fmt->video_codec:"<<fmt->video_codec<<AV_CODEC_ID_H264<<WIDTH<<HEIGHT;
         if (fmt->video_codec != AV_CODEC_ID_NONE)
         {
             add_video_stream(&video_st, oc, &video_codec, AV_CODEC_ID_H264);
@@ -986,10 +1028,10 @@ while(1)
             qDebug()<<"Could not open "<<filename;
 //            return;
 
-            QString logStr = QString("!!!!!!!!!! Could not open %1").arg(filePath);
-            AppConfig::WriteLog(logStr);
+//            QString logStr = QString("!!!!!!!!!! Could not open %1").arg(filePath);
+//            AppConfig::WriteLog(logStr);
 
-            msleep(1000);
+            MoudleConfig::mSleep(1000);
 
             continue;
 
@@ -1000,6 +1042,8 @@ while(1)
 //    av_write_header(oc);
     avformat_write_header(oc, NULL);
 
+    mLastGetAudioTime = av_gettime();
+
     video_pts = 0;
     audio_pts = 0;
 
@@ -1010,37 +1054,51 @@ while(1)
         /* select the stream to encode */
         if (!have_audio || (av_compare_ts(video_st.next_pts, video_st.enc->time_base, audio_st.next_pts, audio_st.enc->time_base) <= 0))
         {
-            if (!write_video_frame(oc, &video_st, video_pts))
+            bool isNeedNewFile = false;
+
+//            if (!write_video_frame(oc, &video_st, video_pts+mLastFileVideoPts, isNeedNewFile))
+//            {
+//                if (mIsStop || isNeedNewFile)
+//                {
+//                    if (isNeedNewFile)
+//                    {
+//                        mLastFileVideoPts += video_pts;
+//                    }
+//                    break;
+//                }
+//                MoudleConfig::mSleep(1);
+//            }
+            if (!write_video_frame(oc, &video_st, video_pts+mLastFileVideoPts, isNeedNewFile))
             {
-                if (isStop)
+                if (mIsStop)
                 {
                     break;
                 }
-                msleep(1);
+                MoudleConfig::mSleep(1);
             }
         }
         else
         {
             if (!write_audio_frame(oc, &audio_st))
             {
-                if (isStop)
+                if (mIsStop)
                 {
                     break;
                 }
-                msleep(1);
+                MoudleConfig::mSleep(1);
             }
         }
     }
 
-    QString logStr = QString("!!!!!!!!!! av_write_trailer ... %1").arg(filePath);
-    AppConfig::WriteLog(logStr);
+//    QString logStr = QString("!!!!!!!!!! av_write_trailer ... %1").arg(filePath);
+//    AppConfig::WriteLog(logStr);
 
     av_write_trailer(oc);
 
-    logStr = QString("!!!!!!!!!! av_write_trailer finised! %1").arg(filePath);
-    AppConfig::WriteLog(logStr);
+//    logStr = QString("!!!!!!!!!! av_write_trailer finised! %1").arg(filePath);
+//    AppConfig::WriteLog(logStr);
 
-    emit sig_StopWriteFile(filePath);
+    sig_StopWriteFile(filePath);
 
     qDebug()<<"VideoFileWriter::run() finished!";
 
@@ -1063,28 +1121,76 @@ while(1)
 
     /* free the stream */
     avformat_free_context(oc);
+
+    VideoFileInfo fileInfo;
+    fileInfo.filePath = filePath;
+    fileInfo.length   = video_pts;
+    fileInfo.width = currentVideoWidth;
+    fileInfo.height = currentVideoHeight;
+
+    mVideoFileList.push_back(fileInfo);
+
+    mVideoEncoder->closeEncoder();
+
+    video_pts = 0;
+    audio_pts = 0;
+
+//    mLastVideoFrame = nullptr;
+
+//    clearYuvFrame();
+//    clearPcmFrame();
+
 }
 
+    mLastFileVideoPts = 0;
+    mLastVideoFrame = nullptr;
+
+    clearYuvFrame();
+    clearPcmFrame();
+
     qDebug()<<"VideoFileWriter::run() finished! 222";
+
+    mIsThreadRunning = false;
+}
+
+void VideoFileWriter::setWidth(int width,int height)
+{
+    if (!mIsThreadRunning)
+    {
+        WIDTH = width;
+        HEIGHT = height;
+    }
 }
 
-void SaveVideoFileThread::setWidth(int width,int height)
+bool VideoFileWriter::isThreadRunning()
 {
-    WIDTH = width;
-    HEIGHT = height;
+    return mIsThreadRunning;
 }
 
-bool SaveVideoFileThread::startEncode()
+bool VideoFileWriter::startEncode(std::function<void (VideoEncodedFramePtr videoFramePtr, void *param)> func, void *param)
 {
-    isStop = false;
-    start();
+    mIsStop = false;
+
+    mCallBackFunc      = func;
+    mCallBackFuncParam = param;
+
+    std::thread([=]
+    {
+        this->run();
+
+    }).detach();
 
     return true;
 }
 
-bool SaveVideoFileThread::stopEncode()
+bool VideoFileWriter::stopEncode()
 {
-    isStop = true;
+    mIsStop = true;
+
+    while(mIsThreadRunning)
+    {
+        AppConfig::mSleep(100);
+    }
 
     return true;
 }
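Taken together, a minimal usage sketch of the renamed class (the path, settings and callback body are placeholders; the API and the VideoFileInfo fields match the header below):

    VideoFileWriter writer;
    writer.setFileName("record.mp4");      // placeholder path
    writer.setQuality(8);
    writer.setVideoFrameRate(15);
    writer.setWidth(1920, 1080);           // only effective while the thread is stopped
    writer.startEncode([](VideoEncodedFramePtr frame, void *param)
    {
        // encoded H.264 frames also arrive here (e.g. for live streaming)
    }, nullptr);
    // ... capture threads call writer.inputYuvFrame(...) / writer.inputPcmFrame(...) ...
    writer.stopEncode();                   // blocks until the writer thread exits
    for (VideoFileInfo info : writer.getVideoFileList())
    {
        // info.filePath, info.length (ms), info.width, info.height
    }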

+ 159 - 0
module/ScreenRecorder/src/Media/Video/VideoFileWriter.h

@@ -0,0 +1,159 @@
+/**
+ * Ye Haihui (叶海辉)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef VIDEOFILEWRITER_H
+#define VIDEOFILEWRITER_H
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+#include <functional>
+
+#include <QMutex>
+#include <QDebug>
+
+extern"C"
+{
+    #include <libavutil/avassert.h>
+    #include <libavutil/channel_layout.h>
+    #include <libavutil/opt.h>
+    #include <libavutil/mathematics.h>
+    #include <libavutil/time.h>
+//    #include <libavutil/timestamp.h>
+    #include <libavformat/avformat.h>
+    #include <libswscale/swscale.h>
+    #include <libswresample/swresample.h>
+    #include <libavutil/imgutils.h>
+}
+
+#include "Mutex/Cond.h"
+#include "VideoEncoder.h"
+#include "VideoFrame/VideoRawFrame.h"
+#include "VideoFrame/VideoEncodedFrame.h"
+#include "AudioFrame/PCMFrame.h"
+
+#include "Media/Video/VideoFileInfoTypes.h"
+
+#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio
+
+// a wrapper around a single output AVStream
+typedef struct OutputStream
+{
+    AVStream *st;
+    AVCodecContext *enc;
+
+    /* pts of the next frame that will be generated */
+    int64_t next_pts;
+    int samples_count;
+
+    AVFrame *frame;
+    AVFrame *tmp_frame;
+
+    /// For video this holds yuv420p data;
+    /// for audio it buffers pcm data so that exactly one frame can be handed to the encoder.
+    uint8_t *frameBuffer;
+    int frameBufferSize;
+
+} OutputStream;
+
+class VideoFileWriter
+{
+public:
+    VideoFileWriter();
+    ~VideoFileWriter();
+
+    void setFileName(const std::string &filePath);
+    std::list<VideoFileInfo> getVideoFileList(); // list of generated video files
+
+    void setQuality(const int &value);
+    void setWidth(int width,int height);
+    bool startEncode(std::function<void (VideoEncodedFramePtr videoFramePtr, void *param)> func, void *param = nullptr);
+    bool stopEncode();
+
+    bool isThreadRunning();
+
+    void inputYuvFrame(VideoRawFramePtr yuvFrame);
+    VideoRawFramePtr readYuvFrame(const int64_t &time); // time is in milliseconds
+    void clearYuvFrame();
+
+    void inputPcmFrame(PCMFramePtr pcmFrame);
+    PCMFramePtr readPcmFrame();
+    void clearPcmFrame();
+
+    int getONEFrameSize(){return mONEFrameSize;}
+
+    void setContainsVideo(bool);
+    void setContainsAudio(bool);
+
+    void setVideoFrameRate(int value);
+
+    /// timestamps in milliseconds
+    int64_t getVideoPts();
+    int64_t getAudioPts();
+
+    void sig_StartWriteFile(const std::string & filePath);
+    void sig_StopWriteFile(const std::string & filePath);
+
+protected:
+    void run();
+
+private:
+    std::string mFilePath;
+
+    int mVideoFrameRate;
+
+    bool mIsStop;
+    bool mIsThreadRunning;
+
+    int64_t audio_pts, video_pts; // position within the current file (milliseconds)
+    int64_t mLastFileVideoPts;    // timestamp at which the previous file ended
+    std::list<VideoFileInfo> mVideoFileList; // list of the video files produced
+    bool mIsNewVideoFile = false; // whether to start a new video file when the input resolution changes
+
+    int mBitRate; //video bitRate
+    int mQuality;
+
+    int mONEFrameSize;
+
+    int WIDTH;
+    int HEIGHT;
+
+    bool m_containsVideo;
+    bool m_containsAudio;
+
+    int64_t mLastGetAudioTime; // last time audio data was received
+    bool mIsUseMuteAudio;      // whether to encode silence (feed all-zero buffers to the encoder)
+
+    VideoEncoder *mVideoEncoder;
+
+    Cond *mCondAudio;
+    std::list<PCMFramePtr> mPcmFrameList;
+
+    Cond *mCondVideo;
+    std::list<VideoRawFramePtr> mVideoFrameList;
+    VideoRawFramePtr mLastVideoFrame; // previous frame (reused to pad when frames run short)
+
+    void open_audio(AVFormatContext *oc, AVCodec *codec, OutputStream *ost);
+    void close_audio(AVFormatContext *oc, OutputStream *ost);
+
+    void open_video(AVFormatContext *oc, AVCodec *codec, OutputStream *ost);
+    void close_video(AVFormatContext *oc, OutputStream *ost);
+
+    void add_video_stream(OutputStream *ost, AVFormatContext *oc, AVCodec **codec, AVCodecID codec_id);
+    void add_audio_stream(OutputStream *ost, AVFormatContext *oc, AVCodec **codec, AVCodecID codec_id);
+
+    bool write_audio_frame(AVFormatContext *oc, OutputStream *ost);
+    bool write_video_frame(AVFormatContext *oc, OutputStream *ost, int64_t time, bool &isNeedNewFile);
+
+
+    /// Callback used to hand encoded frames back to the owner
+    std::function<void (VideoEncodedFramePtr videoFramePtr, void *param)> mCallBackFunc; // callback function
+    void *mCallBackFuncParam; // user parameter passed to the callback
+
+};
+
+#endif // VIDEOFILEWRITER_H

+ 33 - 0
module/common/common.pri

@@ -0,0 +1,33 @@
+INCLUDEPATH += $$PWD/src
+INCLUDEPATH += $$PWD/src/Audio
+INCLUDEPATH += $$PWD/src/Video
+
+SOURCES += \
+        $$PWD/src/Audio/Mix/PcmMix.cpp \
+        $$PWD/src/MoudleConfig.cpp \
+        $$PWD/src/Audio/AudioFrame/AACFrame.cpp \
+        $$PWD/src/Audio/AudioFrame/PCMFrame.cpp \
+        $$PWD/src/Mutex/Cond.cpp \
+        $$PWD/src/Mutex/Mutex.cpp \
+        $$PWD/src/NALU/nalu.cpp \
+        $$PWD/src/Video/VideoFrame/VideoRawFrame.cpp \
+        $$PWD/src/Video/VideoFrame/VideoEncodedFrame.cpp \
+        $$PWD/src/LogWriter/LogWriter.cpp
+
+HEADERS += \
+        $$PWD/src/Audio/Mix/PcmMix.h \
+        $$PWD/src/MoudleConfig.h \
+        $$PWD/src/Audio/AudioFrame/AACFrame.h \
+        $$PWD/src/Audio/AudioFrame/PCMFrame.h \
+        $$PWD/src/Mutex/Cond.h \
+        $$PWD/src/Mutex/Mutex.h \
+        $$PWD/src/NALU/h264.h \
+        $$PWD/src/NALU/h265.h \
+        $$PWD/src/NALU/nalu.h \
+        $$PWD/src/Video/VideoFrame/VideoRawFrame.h \
+        $$PWD/src/Video/VideoFrame/VideoEncodedFrame.h \
+        $$PWD/src/LogWriter/LogWriter.h
+
+#### lib ### Begin
+#    include($$PWD/../lib/lib.pri)
+#### lib ### End

+ 48 - 0
module/common/src/Audio/AudioFrame/AACFrame.cpp

@@ -0,0 +1,48 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "AACFrame.h"
+
+AACFrame::AACFrame()
+{
+    mFrameBuffer = nullptr;
+    mFrameBufferSize = 0;
+
+    mPts = 0;
+
+}
+
+AACFrame::~AACFrame()
+{
+    if (mFrameBuffer != nullptr)
+    {
+        free(mFrameBuffer);
+
+        mFrameBuffer = nullptr;
+        mFrameBufferSize = 0;
+    }
+}
+
+void AACFrame::setAdtsHeader(const ADTS_HEADER &adts)
+{
+    mAdtsHeader = adts;
+}
+
+void AACFrame::setFrameBuffer(const uint8_t * const buffer, const unsigned int &size)
+{
+    if (mFrameBufferSize < size)
+    {
+        if (mFrameBuffer != nullptr)
+        {
+            free(mFrameBuffer);
+        }
+
+        mFrameBuffer = static_cast<uint8_t*>(malloc(size));
+    }
+
+    memcpy(mFrameBuffer, buffer, size);
+    mFrameBufferSize = size;
+}

+ 87 - 0
module/common/src/Audio/AudioFrame/AACFrame.h

@@ -0,0 +1,87 @@
+/**
+ * Ye Haihui (叶海辉)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef AACFRAME_H
+#define AACFRAME_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <memory>
+
+/*
+sampling_frequency_index sampling frequeny [Hz]
+0x0                           96000
+0x1                           88200
+0x2                           64000
+0x3                           48000
+0x4                           44100
+0x5                           32000
+0x6                           24000
+0x7                           22050
+0x8                           16000
+0x9                           12000
+0xa                           11025
+0xb                           8000
+0xc                           reserved
+0xd                           reserved
+0xe                           reserved
+0xf                           reserved
+*/
+typedef struct
+{
+    unsigned int syncword;  // 12 bits, bslbf: sync word, the bit string '1111 1111 1111'; marks the start of an ADTS frame
+    unsigned int id;        // 1 bit, bslbf: MPEG identifier, set to 1
+    unsigned int layer;     // 2 bits, uimsbf: indicates which layer is used; set to '00'
+    unsigned int protection_absent;  // 1 bit, bslbf: whether CRC protection is absent
+    unsigned int profile;            // 2 bits, uimsbf: AAC profile, e.g. 01 = Low Complexity (AAC-LC)
+    unsigned int sf_index;           // 4 bits, uimsbf: sampling-frequency index
+    unsigned int private_bit;        // 1 bit, bslbf
+    unsigned int channel_configuration;  // 3 bits, uimsbf: number of channels
+    unsigned int original;               // 1 bit, bslbf
+    unsigned int home;                   // 1 bit, bslbf
+    /* the fields below change from frame to frame */
+    unsigned int copyright_identification_bit;   // 1 bit, bslbf
+    unsigned int copyright_identification_start; // 1 bit, bslbf
+    unsigned int aac_frame_length;               // 13 bits, bslbf: length of the ADTS frame, header plus raw data block
+    unsigned int adts_buffer_fullness;           // 11 bits, bslbf: 0x7FF signals a variable-bitrate stream
+
+    /* The ADTS frame carries no_raw_data_blocks_in_frame + 1 raw AAC frames,
+    so a value of 0 means one AAC data block, not none.
+    (One raw AAC frame holds 1024 samples and the associated data.)
+    */
+    unsigned int no_raw_data_blocks_in_frame;    // 2 bits, uimsfb
+} ADTS_HEADER;
+
+
+#define AACFramePtr std::shared_ptr<AACFrame>
+
+class AACFrame
+{
+public:
+    AACFrame();
+    ~AACFrame();
+
+    void setAdtsHeader(const ADTS_HEADER &adts);
+    void setFrameBuffer(const uint8_t * const buffer, const unsigned int &size);
+
+    uint8_t *getBuffer(){return mFrameBuffer;}
+    unsigned int getSize(){return  mFrameBufferSize;}
+
+    void setPts(const int64_t &time){mPts = time;}
+    int64_t getPts(){return mPts;}
+
+private:
+    ADTS_HEADER mAdtsHeader;
+
+    uint8_t *mFrameBuffer; // aac data (including the adts header)
+    unsigned int mFrameBufferSize; // length of the aac data (including the adts header)
+
+    int64_t mPts;
+};
+
+#endif // AACFRAME_H
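A short sketch of how this container is meant to be filled (buf, adtsLen and ptsMs are illustrative names; the ADTS header is expected to be prepended already):

    AACFramePtr frame = std::make_shared<AACFrame>();
    frame->setFrameBuffer(buf, adtsLen);  // aac data including the 7-byte adts header
    frame->setPts(ptsMs);                 // timestamp in milliseconds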

+ 41 - 0
module/common/src/Audio/AudioFrame/PCMFrame.cpp

@@ -0,0 +1,41 @@
+/**
+ * Ye Haihui (叶海辉)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "PCMFrame.h"
+
+PCMFrame::PCMFrame()
+{
+    mFrameBuffer = nullptr;
+    mFrameBufferSize = 0;
+}
+
+PCMFrame::~PCMFrame()
+{
+    if (mFrameBuffer != nullptr)
+    {
+        free(mFrameBuffer);
+
+        mFrameBuffer = nullptr;
+        mFrameBufferSize = 0;
+    }
+}
+
+void PCMFrame::setFrameBuffer(const uint8_t * const buffer, const unsigned int &size, const int64_t &time)
+{
+    if (mFrameBufferSize < size)
+    {
+        if (mFrameBuffer != nullptr)
+        {
+            free(mFrameBuffer);
+        }
+
+        mFrameBuffer = static_cast<uint8_t*>(malloc(size));
+    }
+
+    memcpy(mFrameBuffer, buffer, size);
+    mFrameBufferSize = size;
+    mPts = time;
+}

+ 50 - 0
module/common/src/Audio/AudioFrame/PCMFrame.h

@@ -0,0 +1,50 @@
+/**
+ * Ye Haihui (叶海辉)
+ * QQ group 121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef PCMFRAME_H
+#define PCMFRAME_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <memory>
+
+//extern "C"
+//{
+//    #include <libavutil/samplefmt.h>
+//}
+
+#define PCMFramePtr std::shared_ptr<PCMFrame>
+
+/// This program always uses AV_SAMPLE_FMT_S16, 44100 Hz, stereo
+class PCMFrame
+{
+public:
+    PCMFrame();
+    ~PCMFrame();
+
+    void setFrameBuffer(const uint8_t * const buffer, const unsigned int &size, const int64_t  &time);
+
+    uint8_t *getBuffer(){return mFrameBuffer;}
+    unsigned int getSize(){return  mFrameBufferSize;}
+
+    void setPts(const int64_t &time){mPts = time;}
+    int64_t getPts(){return mPts;}
+
+private:
+    uint8_t *mFrameBuffer; // pcm data
+    unsigned int mFrameBufferSize; // length of the pcm data
+
+    int64_t mPts;
+
+//    enum AVSampleFormat mSampleFmt; // output sample format
+//    int mSampleRate; // sample rate
+//    int mChannels;   // channel count
+
+};
+
+#endif // PCMFRAME_H

+ 70 - 0
module/common/src/Audio/Mix/PcmMix.cpp

@@ -0,0 +1,70 @@
+#include "PcmMix.h"
+
+PcmMix::PcmMix()
+{
+
+}
+
+/**
+ * Normalized mixing.
+ * All input tracks must share the same audio parameters.
+ */
+void PcmMix::NormalizedRemix(float **src_data, const int &number, const int &buffer_size, float *out_data)
+{
+    // normalized mixing
+    int i = 0,j = 0;
+
+    for (i=0; i < (buffer_size / sizeof (float)); i++)
+    {
+        // sum the values from every track
+        float temp = 0;
+        for (j = 0; j < number; j++)
+        {
+            temp += *(float*)(src_data[j] + i);
+        }
+
+        *(float*)(out_data + i) = temp;
+    }
+}
+
+/**
+ * Normalized mixing.
+ * All input tracks must share the same audio parameters (16-bit samples only).
+ */
+void PcmMix::NormalizedRemix(short **src_data, const int &number, const int &buffer_size, short *out_data)
+{
+    // normalized mixing
+    int const MAX=32767;
+    int const MIN=-32768;
+
+    double f=1;
+    int output;
+    int i = 0,j = 0;
+
+    for (i=0; i < (buffer_size / sizeof (short)); i++)
+    {
+        // sum the values from every track
+        int temp = 0;
+        for (j = 0; j < number; j++)
+        {
+            temp += *(short*)(src_data[j] + i);
+        }
+        output=(int)(temp*f);
+        if (output > MAX)
+        {
+            f = (double)MAX / (double)(output);
+            output = MAX;
+        }
+        if (output < MIN)
+        {
+            f = (double)MIN / (double)(output);
+            output = MIN;
+        }
+        if (f < 1)
+        {
+            f += ((double)1 - f) / (double)32;
+        }
+
+        *(short*)(out_data + i) = (short)output;
+    }
+}
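A minimal usage sketch for the 16-bit overload (track contents are illustrative):

    short trackA[4096] = {0}, trackB[4096] = {0}, mixed[4096];
    // ... fill trackA / trackB with pcm samples sharing the same format ...
    short *tracks[2] = { trackA, trackB };
    PcmMix::NormalizedRemix(tracks, 2, sizeof(trackA), mixed);  // buffer_size is in bytes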

+ 29 - 0
module/common/src/Audio/Mix/PcmMix.h

@@ -0,0 +1,29 @@
+#ifndef PCMMIX_H
+#define PCMMIX_H
+
+#include <list>
+
+class PcmMix
+{
+public:
+    PcmMix();
+
+
+    /**
+     * Normalized mixing.
+     * All input tracks must share the same audio parameters (the 16-bit overload only handles 16-bit audio).
+     *
+     * @param src_data    [in]  source tracks to mix
+     * @param number      [in]  number of input tracks
+     * @param buffer_size [in]  byte length of each track's buffer
+     * @param out_data    [out] mixed output
+     */
+    /// float pcm input
+    static void NormalizedRemix(float **src_data, const int &number, const int &buffer_size, float *out_data);
+    /// 16-bit pcm input
+    static void NormalizedRemix(short **src_data, const int &number, const int &buffer_size, short *out_data);
+
+};
+
+#endif // PCMMIX_H

+ 243 - 0
module/common/src/LogWriter/LogWriter.cpp

@@ -0,0 +1,243 @@
+#include "LogWriter.h"
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <direct.h>
+#include <io.h>                      //C (Windows)    access
+#define R_OK 0
+#else
+#include <unistd.h>                  //C (Linux)      access
+#endif
+
+#include "MoudleConfig.h"
+
+#if defined(WIN32)
+    #include <WinSock2.h>
+    #include <Windows.h>
+static DWORD WINAPI thread_Func(LPVOID pM)
+#else
+    #include <sys/time.h>
+    #include <stdio.h>
+    #include <time.h>
+    #include <stdlib.h>
+    #include <unistd.h>
+static void *thread_Func(void *pM)
+#endif
+{
+    LogWriter *pointer = (LogWriter*)pM;
+    pointer->run();
+
+    return 0;
+}
+
+#define TMPBUFFERLEN (1024 * 1024 * 3)
+
+LogWriter::LogWriter()
+{
+    mCondition = new Cond;
+
+    mTmpBuffer = new char[TMPBUFFERLEN];
+
+#if defined(WIN32)
+     HANDLE handle = CreateThread(NULL, 0, thread_Func, this, 0, NULL);
+#else
+    pthread_t thread1;
+    pthread_create(&thread1,NULL,thread_Func,this);
+#endif
+}
+
+LogWriter::~LogWriter()
+{
+    if (mTmpBuffer != NULL)
+    {
+        delete [] mTmpBuffer;  // allocated with new[]
+        mTmpBuffer = NULL;
+    }
+
+    if (mCondition != NULL)
+    {
+        delete mCondition;
+        mCondition = NULL;
+    }
+}
+
+void LogWriter::addLogNode(const LogInfoNode &node)
+{
+    mCondition->Lock();
+    mLogNodeList.push_back(node);
+    mCondition->Signal();
+    mCondition->Unlock();
+}
+
+void LogWriter::writeLog(int cameraId, const std::string &str)
+{
+
+    LogInfoNode node;
+    node.cameraId = cameraId;
+    node.mCreateTime =  MoudleConfig::getTimeStamp_MilliSecond();
+
+#if defined(WIN32)
+    SYSTEMTIME sys;
+    GetLocalTime( &sys );
+
+    memset(mTmpBuffer, 0x0, TMPBUFFERLEN);
+
+    sprintf(mTmpBuffer,"[%d-%02d-%02d %02d:%02d:%02d.%03d] %s\n",
+            sys.wYear, sys.wMonth, sys.wDay, sys.wHour, sys.wMinute, sys.wSecond, sys.wMilliseconds, str.c_str());
+
+    node.logStr = mTmpBuffer;
+
+#else
+    struct timeval    tv;
+    struct timezone tz;
+
+    struct tm         *p;
+
+    gettimeofday(&tv, &tz);
+    p = localtime(&tv.tv_sec);
+
+
+    memset(mTmpBuffer, 0x0, TMPBUFFERLEN);
+
+//    memset(node.time,0x0,32);
+//    sprintf(node.time,"%d-%02d-%02d %02d:%02d:%02d.%03d",1900+p->tm_year, 1+p->tm_mon, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec, tv.tv_usec);
+
+    sprintf(mTmpBuffer,"[%d-%02d-%02d %02d:%02d:%02d.%03d] %s\n",
+            1900+p->tm_year, 1+p->tm_mon, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec, tv.tv_usec, str.c_str());
+
+    node.logStr = mTmpBuffer;
+
+#endif
+
+    addLogNode(node);
+
+//    if (cameraId == WRITE_LOG_ID_MAIN)
+//    {
+//        fprintf(stderr, "******:");
+//    }
+
+//    if (cameraId == WRITE_LOG_ID_MAIN)
+    {
+
+#if defined(WIN32)
+        fprintf(stderr,"[%d-%02d-%02d %02d:%02d:%02d.%03d] %s\n",
+                sys.wYear, sys.wMonth, sys.wDay, sys.wHour, sys.wMinute, sys.wSecond, sys.wMilliseconds, str.c_str());
+
+#else
+        fprintf(stderr,"[%d-%02d-%02d %02d:%02d:%02d.%03d] %s",
+                1900+p->tm_year, 1+p->tm_mon, p->tm_mday, p->tm_hour, p->tm_min, p->tm_sec, tv.tv_usec, str.c_str());
+#endif
+    }
+
+}
+
+void LogWriter::run()
+{
+
+    while(1)
+    {
+        mCondition->Lock();
+
+        if (mLogNodeList.empty())
+        {
+            mCondition->Wait();
+        }
+
+        bool isNeedWriteToFile = false;
+        if (mLogNodeList.size() >= 10) // flush once more than 10 entries are queued
+        {
+            isNeedWriteToFile = true;
+        }
+        else
+        {
+            uint64_t startTime = mLogNodeList.front().mCreateTime;
+            uint64_t currentTime = MoudleConfig::getTimeStamp_MilliSecond();
+
+            if ((currentTime - startTime) > (10000)) // entries are flushed after at most 10 seconds
+            {
+                isNeedWriteToFile = true;
+            }
+        }
+
+        if(isNeedWriteToFile)
+        {
+            std::list<LogInfoNode> LogNodeList = mLogNodeList;
+            mLogNodeList.clear();
+
+            mCondition->Unlock();
+
+            while(!LogNodeList.empty())
+            {
+                LogInfoNode node = LogNodeList.front();
+                LogNodeList.pop_front();
+
+#ifdef WIN32
+                char logDirName[20] = {0};
+                sprintf(logDirName,"log\\%d",node.cameraId);
+                /// create the log directory if it does not exist
+                if (access(logDirName, R_OK)!=0)
+                {
+//                        _mkdir(logDirName);
+                    char cmd[32] = {0};
+                    sprintf(cmd,"mkdir %s",logDirName);
+                    system(cmd);
+                }
+#else
+                char logDirName[20] = {0};
+                sprintf(logDirName,"log/%d",node.cameraId);
+                /// create the log directory if it does not exist
+                if (access(logDirName,R_OK)!=0)
+                {
+                    char cmd[32] = {0};
+                    sprintf(cmd,"mkdir %s -p",logDirName);
+                    system(cmd);
+                }
+#endif
+
+                /// each file holds at most 5 MB; beyond that, move on to the next file
+                int index = 0;
+                char fileName[36];
+                while(1)
+                {
+                    memset(fileName,0x0,36);
+                    sprintf(fileName,"log/%d/logfile_%d",node.cameraId,index++);
+                    if (access(fileName, R_OK)==0)
+                    {
+                        FILE * fp = fopen(fileName, "r");
+                        fseek(fp, 0L, SEEK_END);
+                        int size = ftell(fp);
+                        fclose(fp);
+                        if (size < 5*1024*1024) // under 5 MB: still writable
+                        {
+                            break;
+                        }
+                    }
+                    else
+                    {
+                        break;
+                    }
+                }
+
+                FILE * fp = fopen(fileName, "at+");
+                if (fp == NULL)
+                {
+                    fprintf(stderr,"Failed to write the log file; make sure you have write permission!\n");
+                }
+                else
+                {
+                    fwrite(node.logStr.c_str(),1,node.logStr.size(),fp);
+                    fclose(fp);
+                }
+            }
+        }
+        else
+        {
+            mCondition->Unlock();
+            MoudleConfig::mSleep(5000);
+            continue;
+        }
+    }
+}
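Usage is a single call; batching and rotation happen on the internal thread (the camera id 0 and message are illustrative):

    LogWriter logger;                       // spawns the writer thread in the constructor
    logger.writeLog(0, "encoder started");  // queued, echoed to stderr, flushed in batches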

+ 58 - 0
module/common/src/LogWriter/LogWriter.h

@@ -0,0 +1,58 @@
+#ifndef LOGWRITER_H
+#define LOGWRITER_H
+
+#include <time.h>
+#include <string.h>
+#include <list>
+#include <stdlib.h>
+#include <stdio.h>
+#include <stdint.h>
+#include <string>
+
+#include "Mutex/Cond.h"
+
+#define LOGSTR_MAX_LENGTH 512
+
+/**
+ * @brief The LogWriter class
+ * Log writer: periodically flushes queued entries to disk and manages the log files.
+ */
+
+class LogWriter
+{
+public:
+    struct LogInfoNode
+    {
+        int cameraId;
+        uint64_t mCreateTime; // creation time (used to tell how long the entry has waited)
+        std::string logStr;
+
+        LogInfoNode()
+        {
+            cameraId = 0;
+    //        memset(time, 0x0, 32);
+    //        memset(logStr, 0x0, LOGSTR_MAX_LENGTH);
+        }
+
+    };
+
+    LogWriter();
+    ~LogWriter();
+
+    void writeLog(int cameraId, const std::string &str);
+
+    void run();
+
+private:
+    char fileName[20];
+
+    char *mTmpBuffer;
+
+    void addLogNode(const LogInfoNode &node);
+
+    std::list<LogInfoNode> mLogNodeList; // pending log entries
+    Cond *mCondition;
+
+};
+
+#endif // LOGWRITER_H

+ 271 - 0
module/common/src/MoudleConfig.cpp

@@ -0,0 +1,271 @@
+#include "MoudleConfig.h"
+
+#include <time.h>
+#include <vector> // needed by stringFormat below
+#if defined(WIN32)
+#include <WinSock2.h>
+#include <Windows.h>
+#include <direct.h>
+#include <io.h> //C (Windows)    access
+#else
+#include <sys/time.h>
+#include <stdio.h>
+#include <unistd.h>
+
+void Sleep(long mSeconds)
+{
+    usleep(mSeconds * 1000);
+}
+
+#endif
+
+int MoudleConfig::VERSION = 1;
+char MoudleConfig::VERSION_NAME[32] = "1.0.0";
+
+MoudleConfig::MoudleConfig()
+{
+
+}
+
+void MoudleConfig::mkdir(char *dirName)
+{
+#if defined(WIN32)
+    /// create the directory if it does not exist
+    if (access(dirName, 0)!=0)
+    {
+        _mkdir(dirName);
+    }
+#else
+    /// create the directory if it does not exist
+    if (access(dirName, R_OK)!=0)
+    {
+        char cmd[128];
+        sprintf(cmd,"mkdir %s", dirName);
+        system(cmd);
+    }
+#endif
+}
+
+void MoudleConfig::mkpath(char *path)
+{
+#if defined(WIN32)
+        /// Windows' mkdir needs backslashes, so convert the separators first
+        char dirPath[128] = {0};
+        strcpy(dirPath, path);
+
+        MoudleConfig::replaceChar(dirPath, '/', '\\');
+
+        /// create the directory if it does not exist
+        if (access(dirPath, 0)!=0)
+        {
+    //        _mkdir(dirPath);
+            char cmd[128];
+            sprintf(cmd,"mkdir %s", dirPath);
+            system(cmd);
+        }
+
+#else
+    /// create the directory if it does not exist
+    if (access(path,R_OK)!=0)
+    {
+        char cmd[128];
+        sprintf(cmd,"mkdir %s -p",path);
+        system(cmd);
+    }
+#endif
+}
+
+void MoudleConfig::removeDir(char *dirName)
+{
+    if (strlen(dirName) <= 0) return;
+
+    if (access(dirName, 0) != 0 ) // directory does not exist
+    {
+        return;
+    }
+
+#if defined(WIN32)
+
+    /// delete the local directory
+    char cmd[128];
+    sprintf(cmd,"rd /s /q \"%s\"", dirName);
+    system(cmd);
+
+#else
+
+    char cmd[128];
+    sprintf(cmd,"rm -rf \"%s\"",dirName);
+    system(cmd);
+
+#endif
+}
+
+void MoudleConfig::removeFile(const char *filePath)
+{
+    if (filePath == NULL || strlen(filePath) <= 0) return;
+
+#if defined(WIN32)
+
+        /// delete the local file
+        remove(filePath);
+
+#else
+        /// delete the local file
+        char cmd[128] = {0};
+        sprintf(cmd,"rm -rf \"%s\"",filePath);
+        system(cmd);
+#endif
+}
+
+void MoudleConfig::copyFile(const char *srcFile, const char *destFile)
+{
+
+#if defined(WIN32)
+        CopyFileA(srcFile, destFile, FALSE);
+#else
+
+        /// copy the file to the destination
+        char copyfilecmd[512];
+        sprintf(copyfilecmd,"cp \"%s\" \"%s\"", srcFile, destFile);
+        system(copyfilecmd);
+
+#endif
+}
+
+
+std::string MoudleConfig::stringFormat(const char * fmt, ...)
+{
+#if defined(WIN32)
+    std::string _str;
+    va_list marker = NULL;
+    va_start(marker, fmt);
+
+    size_t num_of_chars = _vscprintf(fmt, marker);
+    _str.resize(num_of_chars);
+    vsprintf_s((char *)_str.c_str(), num_of_chars + 1, fmt, marker);
+    va_end(marker);
+
+    return _str;
+#else
+    std::string strResult="";
+    if (NULL != fmt)
+    {
+        va_list marker;
+        va_start(marker, fmt);                         // initialize the variadic arguments
+        size_t nLength = vsnprintf(NULL, 0, fmt, marker) + 1; // measure the formatted length (vprintf would also print it as a side effect)
+        va_end(marker);
+        std::vector<char> vBuffer(nLength, '\0');      // buffer for the formatted string
+        va_start(marker, fmt);                         // restart: a va_list cannot be reused after consumption
+        int nWritten = vsnprintf(&vBuffer[0], vBuffer.size(), fmt, marker);
+        if (nWritten>0)
+        {
+            strResult = &vBuffer[0];
+        }
+        va_end(marker);                                // clean up the variadic arguments
+    }
+    return strResult;
+#endif
+}
+
+std::string MoudleConfig::stringReplaceAll(std::string &str, const std::string &old_value, const std::string &new_value)
+{
+    for(std::string::size_type pos(0); pos!=std::string::npos; pos+=new_value.length())
+    {
+        if((pos=str.find(old_value,pos))!=std::string::npos)
+            str.replace(pos,old_value.length(),new_value);
+        else
+            break;
+    }
+    return   str;
+}
+
+void MoudleConfig::replaceChar(char *string, char oldChar, char newChar)
+{
+    int len = strlen(string);
+    int i;
+    for (i = 0; i < len; i++){
+        if (string[i] == oldChar){
+            string[i] = newChar;
+        }
+    }
+}
+
+std::string MoudleConfig::removeFirstAndLastSpace(std::string &s)
+{
+    if (s.empty())
+    {
+        return s;
+    }
+    s.erase(0,s.find_first_not_of(" "));
+    s.erase(s.find_last_not_of(" ") + 1);
+    return s;
+}
+
+std::string MoudleConfig::getIpFromRtspUrl(std::string rtspUrl)
+{
+    std::string strIP;
+    std::string strUrl = rtspUrl;
+    if(strUrl.find("@") == std::string::npos)
+    {
+        long nPos1 = strUrl.find("//");
+        long nPos2 = strUrl.rfind(":");
+        if(nPos1 != std::string::npos && nPos2 != std::string::npos)
+        {
+            long nOffset = nPos2 - nPos1 - strlen("//");
+            strIP = strUrl.substr(nPos1 + strlen("//"), nOffset);
+        }
+    }
+    else
+    {
+        long nPos1 = strUrl.find("@");
+        long nPos2 = strUrl.rfind(":");
+        if(nPos1 != std::string::npos && nPos2 != std::string::npos)
+        {
+            long nOffset = nPos2 - nPos1 - strlen("@");
+            strIP = strUrl.substr(nPos1 + strlen("@"), nOffset);
+
+            int index = strIP.find("/");
+            strIP = strIP.substr(0, index);
+        }
+    }
+    return strIP;
+}
+
+void MoudleConfig::mSleep(int mSecond)
+{
+#if defined(WIN32)
+    Sleep(mSecond);
+#else
+    usleep(mSecond * 1000);
+#endif
+}
+
+int64_t MoudleConfig::getTimeStamp_MilliSecond()
+{
+    int64_t mSecond = 0; // millisecond part of the current time
+
+#if defined(WIN32)
+
+    SYSTEMTIME sys;
+    GetLocalTime( &sys );
+
+    mSecond = sys.wMilliseconds;
+
+#else
+
+    struct timeval    tv;
+    struct timezone tz;
+
+    struct tm         *p;
+
+    gettimeofday(&tv, &tz);
+    p = localtime(&tv.tv_sec);
+
+    mSecond = tv.tv_usec / 1000;
+
+
+#endif
+
+    int64_t timeStamp = (int64_t)(time(NULL) * 1000) + mSecond;
+
+    return timeStamp;
+
+}
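The result is whole seconds scaled to milliseconds plus the sub-second part, e.g.:

    // time(NULL) == 1620000000 and mSecond == 123  ->  1620000000123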

+ 56 - 0
module/common/src/MoudleConfig.h

@@ -0,0 +1,56 @@
+#ifndef MOUDLECONFIG_H
+#define MOUDLECONFIG_H
+
+#include <stdio.h>
+#include <string.h>
+#include <stdint.h>
+#include <string>
+
+#if defined(WIN32)
+
+#define PRINT_INT64_FORMAT "%I64d"
+
+#else
+#include <sys/time.h>
+#include <stdio.h>
+#include <unistd.h>
+
+void Sleep(long mSeconds);
+
+#define PRINT_INT64_FORMAT "%lld"
+
+#endif
+
+typedef unsigned char uchar;
+
+class MoudleConfig
+{
+public:
+    MoudleConfig();
+
+    static int VERSION;
+    static char VERSION_NAME[32];
+
+    static void mkdir(char *dirName); //create a directory
+    static void mkpath(char *path);   //create nested directories
+
+    static void removeDir(char *dirName);
+    static void removeFile(const char *filePath);
+
+    static void copyFile(const char *srcFile, const char *destFile);
+
+    ///string helpers
+    static std::string stringFormat(const char *fmt, ...);//printf-style formatting into a std::string
+    static std::string stringReplaceAll(std::string &str, const std::string &old_value, const std::string &new_value); //replace every occurrence of a substring
+
+    static void replaceChar(char *string, char oldChar, char newChar); //replace a character throughout a C string
+    static std::string removeFirstAndLastSpace(std::string &s); //trim leading and trailing spaces
+    static std::string getIpFromRtspUrl(std::string rtspUrl); //extract the IP address from an rtsp URL
+
+    static void mSleep(int mSecond);
+
+    static int64_t getTimeStamp_MilliSecond(); //current timestamp in milliseconds
+
+};
+
+#endif // MOUDLECONFIG_H
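
A minimal usage sketch for the helpers above (names come from MoudleConfig.h; the URL, timings, and include path are illustrative):

    #include "MoudleConfig.h"
    #include <cstdio>

    int main()
    {
        //printf-style formatting into a std::string
        std::string url = MoudleConfig::stringFormat("rtsp://%s:%d/stream", "192.168.1.10", 554);

        //in-place substring replacement
        MoudleConfig::stringReplaceAll(url, "stream", "live");

        //pull the host part back out of the URL
        std::string ip = MoudleConfig::getIpFromRtspUrl(url); //"192.168.1.10"
        printf("%s -> %s\n", url.c_str(), ip.c_str());

        int64_t t0 = MoudleConfig::getTimeStamp_MilliSecond();
        MoudleConfig::mSleep(100); //sleep roughly 100 ms
        int64_t t1 = MoudleConfig::getTimeStamp_MilliSecond();
        printf("slept " PRINT_INT64_FORMAT " ms\n", t1 - t0);

        return 0;
    }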

+ 102 - 0
module/common/src/Mutex/Cond.cpp

@@ -0,0 +1,102 @@
+#include "Cond.h"
+
+Cond::Cond()
+{
+#if defined(WIN32) && !defined(MINGW)
+    InitializeCriticalSection(&m_mutex);
+    InitializeConditionVariable(&m_cond);
+#else
+    pthread_mutex_init(&m_mutex, NULL);
+    pthread_cond_init(&m_cond, NULL);
+#endif
+
+}
+
+Cond::~Cond()
+{
+#if defined(WIN32) && !defined(MINGW)
+    DeleteCriticalSection(&m_mutex);
+#else
+    pthread_mutex_destroy(&m_mutex);
+    pthread_cond_destroy(&m_cond);
+#endif
+
+}
+
+//lock the internal mutex
+int Cond::Lock()
+{
+#if defined(WIN32) && !defined(MINGW)
+    EnterCriticalSection(&m_mutex);
+    return 0;
+#else
+    return pthread_mutex_lock(&m_mutex);
+#endif
+
+}
+
+//unlock the internal mutex
+int Cond::Unlock()
+{
+#if defined(WIN32) && !defined(MINGW)
+    LeaveCriticalSection(&m_mutex);
+    return 0;
+#else
+    return pthread_mutex_unlock(&m_mutex);
+#endif
+}
+
+//wait for a signal; timeOut is in milliseconds, -1 waits forever
+int Cond::Wait(int timeOut)
+{
+#if defined(WIN32) && !defined(MINGW)
+    if (timeOut < 0)
+    {
+        timeOut = INFINITE;
+    }
+    //SleepConditionVariableCS returns nonzero on success;
+    //normalize to the pthread convention of 0 on success
+    BOOL ok = SleepConditionVariableCS((PCONDITION_VARIABLE)&m_cond, &m_mutex, timeOut);
+    int ret = ok ? 0 : -1;
+#else
+    int ret = pthread_cond_wait(&m_cond, &m_mutex);
+#endif
+
+    return ret;
+
+}
+
+//wait a fixed number of seconds
+int Cond::TimedWait(int second)
+{
+#if defined(WIN32) && !defined(MINGW)
+    SleepConditionVariableCS((PCONDITION_VARIABLE)&m_cond, &m_mutex, second*1000);
+    return 0;
+#else
+    struct timespec abstime;
+    //take the current time, add the wait interval, and use the sum as the absolute wake-up deadline
+    clock_gettime(CLOCK_REALTIME, &abstime);
+    abstime.tv_sec += second;
+    return pthread_cond_timedwait(&m_cond, &m_mutex, &abstime);
+#endif
+
+}
+
+//wake one waiting thread
+int Cond::Signal()
+{
+#if defined(WIN32) && !defined(MINGW)
+    int ret = 0;
+    WakeConditionVariable((PCONDITION_VARIABLE)&m_cond);
+#else
+    int ret = pthread_cond_signal(&m_cond);
+#endif
+    return ret;
+}
+
+//wake all sleeping threads
+int Cond::Broadcast()
+{
+#if defined(WIN32) && !defined(MINGW)
+    WakeAllConditionVariable((PCONDITION_VARIABLE)&m_cond);
+    return 0;
+#else
+    return pthread_cond_broadcast(&m_cond);
+#endif
+
+}
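
TimedWait above arms pthread_cond_timedwait with CLOCK_REALTIME, so a wall-clock adjustment (NTP step, manual change) can stretch or cut short the wait. A sketch of the monotonic variant on the pthread side, assuming pthread_condattr_setclock is available on the target:

    #include <pthread.h>
    #include <time.h>

    pthread_cond_t  cond;
    pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;

    void initMonotonicCond()
    {
        pthread_condattr_t attr;
        pthread_condattr_init(&attr);
        pthread_condattr_setclock(&attr, CLOCK_MONOTONIC); //deadline measured on the monotonic clock
        pthread_cond_init(&cond, &attr);
        pthread_condattr_destroy(&attr);
    }

    int timedWaitMonotonic(int second)
    {
        struct timespec abstime;
        clock_gettime(CLOCK_MONOTONIC, &abstime); //same clock as the condattr above
        abstime.tv_sec += second;
        return pthread_cond_timedwait(&cond, &mutex, &abstime); //0 or ETIMEDOUT
    }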

+ 55 - 0
module/common/src/Mutex/Cond.h

@@ -0,0 +1,55 @@
+#ifndef COND_H
+#define COND_H
+
+/// Note: MinGW builds use the Linux pthread API.
+/// When _MSC_VER is not defined we assume the MinGW compiler.
+
+#ifndef _MSC_VER
+#define MINGW
+#endif
+
+#if defined(WIN32) && !defined(MINGW)
+    #include <WinSock2.h>
+    #include <Windows.h>
+#else
+    #include <pthread.h>
+    #include <time.h>
+#endif
+
+class Cond
+{
+public:
+    Cond();
+    ~Cond();
+
+    //lock the internal mutex
+    int Lock();
+
+    //unlock the internal mutex
+    int Unlock();
+
+    //wait for a signal; timeOut is in milliseconds, -1 waits forever
+    int Wait(int timeOut = -1);
+
+    //wait a fixed number of seconds
+    int TimedWait(int second);
+
+    //wake one waiting thread
+    int Signal();
+
+    //wake all sleeping threads
+    int Broadcast();
+
+private:
+
+#if defined(WIN32) && !defined(MINGW)
+    CRITICAL_SECTION m_mutex;
+    RTL_CONDITION_VARIABLE m_cond;
+#else
+    pthread_mutex_t m_mutex;
+    pthread_cond_t m_cond;
+#endif
+
+};
+
+#endif // COND_H
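
Both SleepConditionVariableCS and pthread_cond_wait may wake spuriously, so Wait belongs inside a predicate loop. A minimal producer/consumer sketch on top of this class (the include path is illustrative):

    #include <queue>
    #include "Mutex/Cond.h"

    std::queue<int> gQueue;
    Cond gCond;

    void produce(int value)
    {
        gCond.Lock();
        gQueue.push(value);
        gCond.Signal(); //wake one consumer
        gCond.Unlock();
    }

    int consume()
    {
        gCond.Lock();
        while (gQueue.empty()) //loop: Wait can return on a spurious wakeup
        {
            gCond.Wait();
        }
        int value = gQueue.front();
        gQueue.pop();
        gCond.Unlock();
        return value;
    }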

+ 44 - 0
module/common/src/Mutex/Mutex.cpp

@@ -0,0 +1,44 @@
+#include "Mutex.h"
+
+Mutex::Mutex()
+{
+#if defined(WIN32)
+    m_mutex = ::CreateMutex(NULL, FALSE, NULL);
+#else
+    pthread_mutex_init(&mutex, NULL);
+#endif
+
+}
+
+Mutex::~Mutex()
+{
+#if defined(WIN32)
+    ::CloseHandle(m_mutex);
+#else
+    pthread_mutex_destroy(&mutex);
+#endif
+
+}
+
+int Mutex::Lock() const
+{
+#if defined(WIN32)
+    DWORD ret = WaitForSingleObject(m_mutex, INFINITE);
+#else
+    int ret = pthread_mutex_lock((pthread_mutex_t*)&mutex);
+#endif
+
+    return ret;
+
+}
+
+int Mutex::Unlock() const
+{
+#if defined(WIN32)
+    bool ret = ::ReleaseMutex(m_mutex);
+#else
+    int ret = pthread_mutex_unlock((pthread_mutex_t*)&mutex);
+#endif
+    return ret;
+}
+

+ 35 - 0
module/common/src/Mutex/Mutex.h

@@ -0,0 +1,35 @@
+#ifndef MUTEX_H
+#define MUTEX_H
+
+
+#if defined(WIN32)
+    #include <WinSock2.h>
+    #include <Windows.h>
+//#elif defined(Q_OS_LINUX)
+#else
+    #include <pthread.h>
+#endif
+
+class Mutex
+{
+public:
+    Mutex();
+    ~Mutex();
+
+    //gives the owning thread exclusive access to the protected resource
+    int Lock() const;
+
+    //releases the mutex held by the current thread so other threads can acquire it and reach the protected resource
+    int Unlock() const;
+
+private:
+
+#if defined(WIN32)
+    HANDLE m_mutex;
+#else
+    pthread_mutex_t mutex;
+#endif
+
+};
+
+#endif // MUTEX_H
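
Manual Lock/Unlock pairs are easy to leak on early returns; a small RAII guard keeps them balanced. A sketch (MutexGuard is not part of this commit, and the include path is illustrative):

    #include "Mutex/Mutex.h"

    //unlocks automatically when the guard goes out of scope
    class MutexGuard
    {
    public:
        explicit MutexGuard(const Mutex &m) : mMutex(m) { mMutex.Lock(); }
        ~MutexGuard() { mMutex.Unlock(); }

    private:
        const Mutex &mMutex;

        MutexGuard(const MutexGuard&);            //non-copyable
        MutexGuard& operator=(const MutexGuard&);
    };

    int gCounter = 0;
    Mutex gMutex;

    void increment()
    {
        MutexGuard guard(gMutex); //locked here
        ++gCounter;
    }                             //unlocked here, on every exit path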

+ 34 - 0
module/common/src/NALU/h264.h

@@ -0,0 +1,34 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef H264_H
+#define H264_H
+
+#include <stdlib.h>
+
+typedef struct
+{
+  int startcodeprefix_len;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+  unsigned len;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
+  unsigned max_size;            //! Nal Unit Buffer size
+  int forbidden_bit;            //! should be always FALSE
+  int nal_reference_idc;        //! NALU_PRIORITY_xxxx
+  int nal_unit_type;            //! NALU_TYPE_xxxx
+  unsigned char *buf;                    //! contains the first byte followed by the EBSP
+  unsigned short lost_packets;  //! true, if packet loss is detected
+} T_H264_NALU;
+
+#pragma pack (1)
+typedef struct {
+    //byte 0
+    unsigned char TYPE:5;
+    unsigned char NRI:2;
+    unsigned char F:1;
+
+} T_H264_NALU_HEADER; /* 1 byte */
+#pragma pack ()
+
+#endif // H264_H
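
Bit-field layout in T_H264_NALU_HEADER is compiler-dependent (the declaration above assumes LSB-first packing on a little-endian target). The portable way to split the one-byte header is explicit masking, as in this sketch:

    #include <stdio.h>

    //decode the one-byte H.264 NAL header by hand:
    //F(1) | NRI(2) | TYPE(5), most significant bit first
    void printH264NalHeader(unsigned char b)
    {
        int forbidden = (b >> 7) & 0x01; //must be 0 in a valid stream
        int nri       = (b >> 5) & 0x03; //priority hint
        int type      =  b       & 0x1F; //e.g. 5 = IDR slice, 7 = SPS, 8 = PPS
        printf("F=%d NRI=%d TYPE=%d\n", forbidden, nri, type);
    }

    //0x67 -> F=0 NRI=3 TYPE=7 (SPS); 0x65 -> F=0 NRI=3 TYPE=5 (IDR slice)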

+ 76 - 0
module/common/src/NALU/h265.h

@@ -0,0 +1,76 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef H265_H
+#define H265_H
+
+#include <stdlib.h>
+
+typedef enum e_hevc_nalu_type
+{
+    HEVC_NAL_TRAIL_N    = 0,
+    HEVC_NAL_TRAIL_R    = 1,
+    HEVC_NAL_TSA_N      = 2,
+    HEVC_NAL_TSA_R      = 3,
+    HEVC_NAL_STSA_N     = 4,
+    HEVC_NAL_STSA_R     = 5,
+    HEVC_NAL_RADL_N     = 6,
+    HEVC_NAL_RADL_R     = 7,
+    HEVC_NAL_RASL_N     = 8,
+    HEVC_NAL_RASL_R     = 9,
+    HEVC_NAL_VCL_N10    = 10,
+    HEVC_NAL_VCL_R11    = 11,
+    HEVC_NAL_VCL_N12    = 12,
+    HEVC_NAL_VCL_R13    = 13,
+    HEVC_NAL_VCL_N14    = 14,
+    HEVC_NAL_VCL_R15    = 15,
+    HEVC_NAL_BLA_W_LP   = 16,
+    HEVC_NAL_BLA_W_RADL = 17,
+    HEVC_NAL_BLA_N_LP   = 18,
+    HEVC_NAL_IDR_W_RADL = 19,
+    HEVC_NAL_IDR_N_LP   = 20,
+    HEVC_NAL_CRA_NUT    = 21,
+    HEVC_NAL_IRAP_VCL22 = 22,
+    HEVC_NAL_IRAP_VCL23 = 23,
+    HEVC_NAL_RSV_VCL24  = 24,
+    HEVC_NAL_RSV_VCL25  = 25,
+    HEVC_NAL_RSV_VCL26  = 26,
+    HEVC_NAL_RSV_VCL27  = 27,
+    HEVC_NAL_RSV_VCL28  = 28,
+    HEVC_NAL_RSV_VCL29  = 29,
+    HEVC_NAL_RSV_VCL30  = 30,
+    HEVC_NAL_RSV_VCL31  = 31,
+    HEVC_NAL_VPS        = 32,
+    HEVC_NAL_SPS        = 33,
+    HEVC_NAL_PPS        = 34,
+    HEVC_NAL_AUD        = 35,
+    HEVC_NAL_EOS_NUT    = 36,
+    HEVC_NAL_EOB_NUT    = 37,
+    HEVC_NAL_FD_NUT     = 38,
+    HEVC_NAL_SEI_PREFIX = 39,
+    HEVC_NAL_SEI_SUFFIX = 40
+} E_HEVC_NALU_TYPE;
+
+#pragma pack (1)
+typedef struct t_h265_nalu_header
+{
+    unsigned char forbidden_zero_bit:1;
+    unsigned char nal_unit_type:6;
+    unsigned char nuh_layer_id:6;
+    unsigned char nuh_temporal_id_plus1:3;
+} T_H265_NALU_HEADER;
+
+typedef struct t_h265_nalu
+{
+    int startCodeLen;
+    T_H265_NALU_HEADER h265NaluHeader;
+    unsigned int len;
+    unsigned char *buf;
+} T_H265_NALU;
+
+#pragma pack ()
+
+#endif // H265_H
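
The H.265 header is two bytes, and the bit-fields in T_H265_NALU_HEADER straddle a byte boundary, which makes the layout even less portable. A sketch of the explicit extraction:

    //decode the two-byte H.265 NAL header:
    //forbidden(1) | nal_unit_type(6) | nuh_layer_id(6) | nuh_temporal_id_plus1(3)
    void parseH265NalHeader(const unsigned char *p,
                            int *type, int *layerId, int *temporalIdPlus1)
    {
        *type            = (p[0] >> 1) & 0x3F; //e.g. 32 = VPS, 33 = SPS, 34 = PPS
        *layerId         = ((p[0] & 0x01) << 5) | ((p[1] >> 3) & 0x1F);
        *temporalIdPlus1 =  p[1] & 0x07;
    }

    //the two bytes after a VPS start code are 0x40 0x01 -> nal_unit_type 32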

+ 210 - 0
module/common/src/NALU/nalu.cpp

@@ -0,0 +1,210 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "nalu.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+NALUParsing::NALUParsing()
+{
+    ///allocate a 10 MB scratch buffer that temporarily holds the incoming h264 data
+    mH264Buffer = (uint8_t*)malloc(1024*1024*10);
+    mBufferSize = 0;
+}
+
+int NALUParsing::inputH264Data(uint8_t *buf, const int &len)
+{
+    //guard against overrunning the 10 MB scratch buffer
+    if (mBufferSize + len > 1024 * 1024 * 10)
+    {
+        return -1;
+    }
+
+    memcpy(mH264Buffer + mBufferSize, buf, len);
+    mBufferSize += len;
+
+    return mBufferSize;
+}
+
+T_NALU *NALUParsing::getNextFrame()
+{
+
+    /* Scan byte by byte, as the structure of an h264 stream dictates,
+       until a start code is found; the data in between is one complete frame. */
+
+///    Start codes come in two forms: the 3-byte 0x000001 and the 4-byte 0x00000001.
+///    The 3-byte form appears in only one situation: when a complete frame is encoded
+///    as multiple slices, the NALUs carrying those slices use the 3-byte code.
+///    Everything else uses the 4-byte form, so to find whole frames it is
+///    enough to search for the 4-byte start code.
+
+    ///first, locate the first start code
+
+    int pos = 0; //offset of the data currently being examined
+    int StartCode = 0;
+
+    while(1)
+    {
+        unsigned char* Buf = mH264Buffer + pos;
+        int length = mBufferSize - pos; //bytes not yet examined
+        if (length <= 4)
+        {
+            return NULL;
+        }
+
+        ///check whether Buf points at the start code (0x00000001)
+        if(Buf[0]==0 && Buf[1]==0 && Buf[2]==0 && Buf[3]==1)
+        {
+            StartCode = 4;
+            break;
+        }
+        else
+        {
+            //otherwise advance one byte and keep searching
+            pos++;
+        }
+    }
+
+
+    ///then locate the next start code
+
+    int pos_2 = pos + StartCode; //offset of the data currently being examined
+    int StartCode_2 = 0;
+
+    while(1)
+    {
+        unsigned char* Buf = mH264Buffer + pos_2;
+        int length = mBufferSize - pos_2; //bytes not yet examined
+        if (length <= 4)
+        {
+            return NULL;
+        }
+
+        ///check whether Buf points at the start code (0x00000001)
+        if(Buf[0]==0 && Buf[1]==0 && Buf[2]==0 && Buf[3]==1)
+        {
+            StartCode_2 = 4;
+            break;
+        }
+        else
+        {
+            //otherwise advance one byte and keep searching
+            pos_2++;
+        }
+    }
+
+    /// the bytes between pos and pos_2 now form one complete frame;
+    /// extract them here
+
+    ///ffmpeg expects the start code when decoding, so the NALU keeps it
+    unsigned char* Buf = mH264Buffer + pos; //first byte of this frame (start code included)
+    int naluSize = pos_2 - pos; //NALU size, start code included
+
+    T_NALU * nalu = AllocNALU(naluSize, mVideoType); //allocate the NALU
+
+    if (mVideoType == T_NALU_H264)
+    {
+        T_H264_NALU_HEADER *nalu_header = (T_H264_NALU_HEADER *)(Buf + StartCode);
+
+        nalu->nalu.h264Nalu.startcodeprefix_len = StartCode;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+        nalu->nalu.h264Nalu.len = naluSize;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
+        nalu->nalu.h264Nalu.forbidden_bit = 0;            //! should be always FALSE
+        nalu->nalu.h264Nalu.nal_reference_idc = nalu_header->NRI;        //! NALU_PRIORITY_xxxx
+        nalu->nalu.h264Nalu.nal_unit_type = nalu_header->TYPE;            //! NALU_TYPE_xxxx
+        nalu->nalu.h264Nalu.lost_packets = false;  //! true, if packet loss is detected
+        memcpy(nalu->nalu.h264Nalu.buf, Buf, naluSize);  //! contains the first byte followed by the EBSP
+
+//        {
+//            char *bufTmp = (char*)(Buf + StartCode);
+//            char s[10];
+//            itoa(bufTmp[0], s, 2);
+//            fprintf(stderr, "%s %08s %x %d\n", __FUNCTION__, s, bufTmp[0] , nalu_header->TYPE);
+//        }
+    }
+    else
+    {
+        T_H265_NALU_HEADER *nalu_header = (T_H265_NALU_HEADER *)(Buf + StartCode);
+
+        nalu->nalu.h265Nalu.startCodeLen = StartCode;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+        nalu->nalu.h265Nalu.len = naluSize;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
+        nalu->nalu.h265Nalu.h265NaluHeader = *nalu_header;
+        memcpy(nalu->nalu.h265Nalu.buf, Buf, naluSize);  //! contains the first byte followed by the EBSP
+
+        {
+            char *bufTmp = (char*)(Buf);
+            fprintf(stderr, "%s %02x%02x%02x%02x%02x%02x %d %d\n", __FUNCTION__, bufTmp[0], bufTmp[1], bufTmp[2], bufTmp[3], bufTmp[4], bufTmp[5], nalu->nalu.h265Nalu.h265NaluHeader.nal_unit_type, nalu_header->nal_unit_type);
+        }
+    }
+
+    /// drop this frame from the buffer by moving
+    /// the remaining data up to the front
+    int leftSize = mBufferSize - pos_2;
+    memmove(mH264Buffer, mH264Buffer + pos_2, leftSize);
+    mBufferSize = leftSize;
+
+    return nalu;
+}
+
+T_NALU *NALUParsing::AllocNALU(const int &buffersize, const T_NALU_TYPE &type, const bool &isAllocBuffer)
+{
+    T_NALU *n = nullptr;
+
+    n = (T_NALU*)malloc (sizeof(T_NALU));
+
+    n->type = type;
+
+    if (type == T_NALU_H264)
+    {
+        if (isAllocBuffer)
+        {
+            n->nalu.h264Nalu.max_size = buffersize;	//Assign buffer size
+            n->nalu.h264Nalu.buf = (unsigned char*)malloc (buffersize);
+            n->nalu.h264Nalu.len = buffersize;
+        }
+        else
+        {
+            n->nalu.h264Nalu.max_size = 0;	//Assign buffer size
+            n->nalu.h264Nalu.buf      = nullptr;
+            n->nalu.h264Nalu.len      = 0;
+        }
+    }
+    else
+    {
+        if (isAllocBuffer)
+        {
+            n->nalu.h265Nalu.buf = (unsigned char*)malloc (buffersize);
+            n->nalu.h265Nalu.len  = buffersize;
+        }
+        else
+        {
+            n->nalu.h265Nalu.buf = nullptr;
+            n->nalu.h265Nalu.len = 0;
+        }
+    }
+
+    return n;
+}
+
+void NALUParsing::FreeNALU(T_NALU *n)
+{
+    if (n == nullptr) return;
+
+    if (n->type == T_NALU_H264)
+    {
+        if (n->nalu.h264Nalu.buf != nullptr)
+        {
+            free(n->nalu.h264Nalu.buf);
+        }
+    }
+    else
+    {
+        if (n->nalu.h265Nalu.buf != nullptr)
+        {
+            free(n->nalu.h265Nalu.buf);
+        }
+    }
+
+    free(n);
+}

+ 63 - 0
module/common/src/NALU/nalu.h

@@ -0,0 +1,63 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef NALU_H
+#define NALU_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#include "h264.h"
+#include "h265.h"
+
+enum T_NALU_TYPE
+{
+    T_NALU_H264 = 0,
+    T_NALU_H265,
+};
+
+typedef struct
+{
+    T_NALU_TYPE type;
+
+    union
+    {
+        T_H264_NALU h264Nalu;
+        T_H265_NALU h265Nalu;
+    }nalu;
+
+} T_NALU;
+
+///parses NALUs out of a continuous h264/h265 byte stream
+class NALUParsing
+{
+public:
+    NALUParsing();
+
+    void setVideoType(const T_NALU_TYPE &type){mVideoType = type;}
+
+    int inputH264Data(uint8_t *buf, const int &len); //feed h264 data into the internal buffer
+
+    ///extract one complete frame from the buffered data
+    T_NALU* getNextFrame();
+
+private:
+    uint8_t *mH264Buffer;
+    int mBufferSize;
+
+    T_NALU_TYPE mVideoType; //distinguishes h264 from h265
+
+public:
+    ///allocate memory for a T_NALU structure
+    static T_NALU *AllocNALU(const int &buffersize, const T_NALU_TYPE &type, const bool &isAllocBuffer = true);
+
+    ///free a T_NALU allocated by AllocNALU
+    static void FreeNALU(T_NALU *n);
+
+};
+
+
+#endif // NALU_H
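
How the parser is meant to be driven, sketched below (assuming setVideoType was called once during setup and that the include path matches the .pri layout): append raw stream bytes, then pull frames until getNextFrame returns NULL, which means it needs more data.

    #include "NALU/nalu.h"

    void demuxChunk(NALUParsing *parser, uint8_t *data, int len)
    {
        parser->inputH264Data(data, len); //append the newly received bytes

        while (T_NALU *nalu = parser->getNextFrame()) //NULL = wait for more data
        {
            //nalu->nalu.h264Nalu.buf holds start code + NAL unit,
            //nalu->nalu.h264Nalu.len its total size
            //...hand the frame to the decoder here...

            NALUParsing::FreeNALU(nalu); //ownership is with the caller
        }
    }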

+ 79 - 0
module/common/src/Video/VideoFrame/VideoEncodedFrame.cpp

@@ -0,0 +1,79 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#include "VideoEncodedFrame.h"
+
+VideoEncodedFrame::VideoEncodedFrame()
+{
+    mNalu = nullptr;
+    mPts = 0;
+    mIsKeyFrame = false;
+}
+
+VideoEncodedFrame::~VideoEncodedFrame()
+{
+    NALUParsing::FreeNALU(mNalu); //free the NALU
+    mNalu = nullptr;
+}
+
+void VideoEncodedFrame::setNalu(uint8_t *buffer, const int &len, const bool &isAllocBuffer, const T_NALU_TYPE &type, const int64_t &time)
+{
+    T_NALU *nalu = NALUParsing::AllocNALU(len, type, isAllocBuffer);
+
+    nalu->type = type;
+
+    if (type == T_NALU_H264)
+    {
+        T_H264_NALU_HEADER *nalu_header = (T_H264_NALU_HEADER *)(buffer);
+
+        nalu->nalu.h264Nalu.startcodeprefix_len = 4;      //! 4 for parameter sets and first slice in picture, 3 for everything else (suggested)
+//        nalu->nalu.h264Nalu.len = len;                 //! Length of the NAL unit (Excluding the start code, which does not belong to the NALU)
+        nalu->nalu.h264Nalu.forbidden_bit = 0;            //! should be always FALSE
+        nalu->nalu.h264Nalu.nal_reference_idc = nalu_header->NRI;        //! NALU_PRIORITY_xxxx
+        nalu->nalu.h264Nalu.nal_unit_type = nalu_header->TYPE;            //! NALU_TYPE_xxxx
+        nalu->nalu.h264Nalu.lost_packets = false;  //! true, if packet loss is detected
+
+        if (isAllocBuffer)
+        {
+            memcpy(nalu->nalu.h264Nalu.buf, buffer, len);  //! contains the first byte followed by the EBSP
+        }
+        else
+        {
+            nalu->nalu.h264Nalu.buf = buffer;
+        }
+        nalu->nalu.h264Nalu.max_size = len;
+        nalu->nalu.h264Nalu.len = len;
+
+//        {
+//            char *bufTmp = (char*)(Buf + StartCode);
+//            char s[10];
+//            itoa(bufTmp[0], s, 2);
+//            fprintf(stderr, "%s %08s %x %d\n", __FUNCTION__, s, bufTmp[0] , nalu_header->TYPE);
+//        }
+    }
+    else
+    {
+        //mirrors the h264 branch above: read the two-byte h265 header
+        //from the start of the buffer and copy (or alias) the payload
+        T_H265_NALU_HEADER *nalu_header = (T_H265_NALU_HEADER *)(buffer);
+
+        nalu->nalu.h265Nalu.startCodeLen = 4;
+        nalu->nalu.h265Nalu.h265NaluHeader = *nalu_header;
+
+        if (isAllocBuffer)
+        {
+            memcpy(nalu->nalu.h265Nalu.buf, buffer, len);
+        }
+        else
+        {
+            nalu->nalu.h265Nalu.buf = buffer;
+        }
+        nalu->nalu.h265Nalu.len = len;
+    }
+
+    if (mNalu != nullptr)
+    {
+        NALUParsing::FreeNALU(mNalu); //free the previous NALU
+    }
+
+    mNalu = nalu;
+    mPts  = time;
+}

+ 42 - 0
module/common/src/Video/VideoFrame/VideoEncodedFrame.h

@@ -0,0 +1,42 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef VIDEOENCODEDFRAME_H
+#define VIDEOENCODEDFRAME_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <memory>
+
+#include "NALU/nalu.h"
+
+#define VideoEncodedFramePtr std::shared_ptr<VideoEncodedFrame>
+
+class VideoEncodedFrame
+{
+public:
+    VideoEncodedFrame();
+    ~VideoEncodedFrame();
+
+    void setNalu(uint8_t *buffer, const int &len, const bool & isAllocBuffer, const T_NALU_TYPE &type, const int64_t &time = 0);
+
+    void setIsKeyFrame(const bool &isKeyFrame){mIsKeyFrame = isKeyFrame;}
+
+    T_NALU *getNalu(){return mNalu;}
+    bool getIsKeyFrame(){return mIsKeyFrame;}
+    int64_t getPts(){return mPts;}
+
+private:
+    T_NALU *mNalu;
+
+    bool mIsKeyFrame;
+
+    int64_t mPts;
+};
+
+#endif // VIDEOENCODEDFRAME_H
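
A sketch of wrapping one encoder output buffer; wrapPacket and its parameters are illustrative names, and the zero-copy path (isAllocBuffer = false) would require pkt to outlive the frame:

    #include "Video/VideoFrame/VideoEncodedFrame.h"

    VideoEncodedFramePtr wrapPacket(uint8_t *pkt, int len, int64_t ptsMs, bool isKey)
    {
        VideoEncodedFramePtr frame(new VideoEncodedFrame());

        //true copies the packet into the frame;
        //false would alias pkt without copying
        frame->setNalu(pkt, len, true, T_NALU_H264, ptsMs);
        frame->setIsKeyFrame(isKey);

        return frame;
    }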

+ 86 - 0
module/common/src/Video/VideoFrame/VideoRawFrame.cpp

@@ -0,0 +1,86 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+extern "C"
+{
+#include <libavformat/avformat.h>
+#include <libswscale/swscale.h>
+#include <libavdevice/avdevice.h>
+#include <libavutil/imgutils.h>
+}
+
+#include "VideoRawFrame.h"
+
+VideoRawFrame::VideoRawFrame()
+{
+    mFrameBuffer = nullptr;
+    mFrameBufferSize = 0;
+    mPts = 0;
+}
+
+VideoRawFrame::~VideoRawFrame()
+{
+    if (mFrameBuffer != nullptr)
+    {
+        free(mFrameBuffer);
+        mFrameBuffer = nullptr;
+        mFrameBufferSize = 0;
+    }
+}
+
+void VideoRawFrame::initBuffer(const int &width, const int &height, const FrameType &type, int64_t time)
+{
+    if (mFrameBuffer != nullptr)
+    {
+        free(mFrameBuffer);
+        mFrameBuffer = nullptr;
+    }
+
+    mWidth  = width;
+    mHeight = height;
+
+    mPts = time;
+
+    mType = type;
+
+    int size = 0;
+    if (type == FRAME_TYPE_YUV420P)
+    {
+        //size = width * height * 3 / 2;
+        size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, width, height, 1); //1-byte alignment keeps the computed size closest to the actual frame size
+    }
+    else if (type == FRAME_TYPE_RGB24)
+    {
+        size = width * height * 3;
+    }
+
+    mFrameBuffer = (uint8_t*)malloc(size);
+    mFrameBufferSize = size;
+}
+
+void VideoRawFrame::setFramebuf(const uint8_t *buf)
+{
+    memcpy(mFrameBuffer, buf, mFrameBufferSize);
+}
+
+void VideoRawFrame::setYbuf(const uint8_t *buf)
+{
+    int Ysize = mWidth * mHeight;
+    memcpy(mFrameBuffer, buf, Ysize);
+}
+
+void VideoRawFrame::setUbuf(const uint8_t *buf)
+{
+    int Ysize = mWidth * mHeight;
+    memcpy(mFrameBuffer + Ysize, buf, Ysize / 4);
+}
+
+void VideoRawFrame::setVbuf(const uint8_t *buf)
+{
+    int Ysize = mWidth * mHeight;
+    memcpy(mFrameBuffer + Ysize + Ysize / 4, buf, Ysize / 4);
+}
+

+ 58 - 0
module/common/src/Video/VideoFrame/VideoRawFrame.h

@@ -0,0 +1,58 @@
+/**
+ * 叶海辉
+ * QQ群121376426
+ * http://blog.yundiantech.com/
+ */
+
+#ifndef VIDEORAWFRAME_H
+#define VIDEORAWFRAME_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <string.h>
+#include <memory>
+
+#define VideoRawFramePtr std::shared_ptr<VideoRawFrame>
+
+
+class VideoRawFrame
+{
+public:
+    enum FrameType
+    {
+        FRAME_TYPE_NONE = -1,
+        FRAME_TYPE_YUV420P,   ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
+        FRAME_TYPE_RGB24,     ///< packed RGB 8:8:8, 24bpp, RGBRGB...
+    };
+
+    VideoRawFrame();
+    ~VideoRawFrame();
+
+    void initBuffer(const int &width, const int &height, const FrameType &type, int64_t time = 0);
+
+    void setFramebuf(const uint8_t *buf);
+    void setYbuf(const uint8_t *buf);
+    void setUbuf(const uint8_t *buf);
+    void setVbuf(const uint8_t *buf);
+
+    uint8_t * getBuffer(){return mFrameBuffer;}
+    int getWidth(){return mWidth;}
+    int getHeight(){return mHeight;}
+    int getSize(){return mFrameBufferSize;}
+
+    void setPts(const int64_t &pts){mPts=pts;}
+    int64_t getPts(){return mPts;}
+
+protected:
+    FrameType mType;
+
+    uint8_t *mFrameBuffer;
+    int mFrameBufferSize;
+
+    int mWidth;
+    int mHeight;
+
+    int64_t mPts;
+};
+
+#endif // VIDEORAWFRAME_H
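
A sketch of filling a YUV420P frame from three tightly packed planes (no stride padding); makeYuvFrame is an illustrative name:

    #include "Video/VideoFrame/VideoRawFrame.h"

    VideoRawFramePtr makeYuvFrame(const uint8_t *y, const uint8_t *u, const uint8_t *v,
                                  int width, int height, int64_t ptsMs)
    {
        VideoRawFramePtr frame(new VideoRawFrame());

        //allocates width*height*3/2 bytes (tightly packed 4:2:0)
        frame->initBuffer(width, height, VideoRawFrame::FRAME_TYPE_YUV420P, ptsMs);

        frame->setYbuf(y); //width*height bytes
        frame->setUbuf(u); //width*height/4 bytes
        frame->setVbuf(v); //width*height/4 bytes

        return frame;
    }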

+ 56 - 0
module/lib/common/RtAudio/RtAudio.pri

@@ -0,0 +1,56 @@
+
+INCLUDEPATH += $$PWD/src
+
+INCLUDEPATH += $$PWD/include
+
+win32{
+
+HEADERS += \
+    $$PWD/include/asio.h \
+    $$PWD/include/asiodrivers.h \
+    $$PWD/include/asiodrvr.h \
+    $$PWD/include/asiolist.h \
+    $$PWD/include/asiosys.h \
+    $$PWD/include/dsound.h \
+    $$PWD/include/functiondiscoverykeys_devpkey.h \
+    $$PWD/include/ginclude.h \
+    $$PWD/include/iasiodrv.h \
+    $$PWD/include/iasiothiscallresolver.h \
+    $$PWD/include/soundcard.h \
+    $$PWD/src/RtAudio.h \
+    $$PWD/src/rtaudio_c.h
+
+SOURCES += \
+    $$PWD/include/asio.cpp \
+    $$PWD/include/asiodrivers.cpp \
+    $$PWD/include/asiolist.cpp \
+    $$PWD/include/iasiothiscallresolver.cpp \
+    $$PWD/src/RtAudio.cpp \
+    $$PWD/src/rtaudio_c.cpp
+
+    LIBS += -lAdvapi32 -luser32 -lole32 -ldsound
+}
+
+unix{
+
+HEADERS += \
+    $$PWD/include/asio.h \
+    $$PWD/include/asiodrivers.h \
+    $$PWD/include/asiodrvr.h \
+    $$PWD/include/asiolist.h \
+    $$PWD/include/asiosys.h \
+    $$PWD/include/dsound.h \
+    $$PWD/include/functiondiscoverykeys_devpkey.h \
+    $$PWD/include/ginclude.h \
+    $$PWD/include/soundcard.h \
+    $$PWD/src/RtAudio.h \
+    $$PWD/src/rtaudio_c.h
+
+SOURCES += \
+    $$PWD/src/RtAudio.cpp \
+    $$PWD/src/rtaudio_c.cpp
+
+}
+
+DISTFILES += \
+    $$PWD/src/rtaudio.pc.in

+ 258 - 0
module/lib/common/RtAudio/include/asio.cpp

@@ -0,0 +1,258 @@
+/*
+	Steinberg Audio Stream I/O API
+	(c) 1996, Steinberg Soft- und Hardware GmbH
+
+	asio.cpp
+	
+	asio functions entries which translate the
+	asio interface to the asiodrvr class methods
+*/ 
+	
+#include <string.h>
+#include "asiosys.h"		// platform definition
+#include "asio.h"
+
+#if MAC
+#include "asiodrvr.h"
+
+#pragma export on
+
+AsioDriver *theAsioDriver = 0;
+
+extern "C"
+{
+
+long main()
+{
+	return 'ASIO';
+}
+
+#elif WINDOWS
+
+#include <WinSock2.h>
+#include <Windows.h>
+#include "iasiodrv.h"
+#include "asiodrivers.h"
+
+IASIO *theAsioDriver = 0;
+extern AsioDrivers *asioDrivers;
+
+#elif SGI || SUN || BEOS || LINUX
+#include "asiodrvr.h"
+static AsioDriver *theAsioDriver = 0;
+#endif
+
+//-----------------------------------------------------------------------------------------------------
+ASIOError ASIOInit(ASIODriverInfo *info)
+{
+#if MAC || SGI || SUN || BEOS || LINUX
+	if(theAsioDriver)
+	{
+		delete theAsioDriver;
+		theAsioDriver = 0;
+	}		
+	info->driverVersion = 0;
+	strcpy(info->name, "No ASIO Driver");
+	theAsioDriver = getDriver();
+	if(!theAsioDriver)
+	{
+		strcpy(info->errorMessage, "Not enough memory for the ASIO driver!"); 
+		return ASE_NotPresent;
+	}
+	if(!theAsioDriver->init(info->sysRef))
+	{
+		theAsioDriver->getErrorMessage(info->errorMessage);
+		delete theAsioDriver;
+		theAsioDriver = 0;
+		return ASE_NotPresent;
+	}
+	strcpy(info->errorMessage, "No ASIO Driver Error");
+	theAsioDriver->getDriverName(info->name);
+	info->driverVersion = theAsioDriver->getDriverVersion();
+	return ASE_OK;
+
+#else
+
+	info->driverVersion = 0;
+	strcpy(info->name, "No ASIO Driver");
+	if(theAsioDriver)	// must be loaded!
+	{
+		if(!theAsioDriver->init(info->sysRef))
+		{
+			theAsioDriver->getErrorMessage(info->errorMessage);
+			theAsioDriver = 0;
+			return ASE_NotPresent;
+		}		
+
+		strcpy(info->errorMessage, "No ASIO Driver Error");
+		theAsioDriver->getDriverName(info->name);
+		info->driverVersion = theAsioDriver->getDriverVersion();
+		return ASE_OK;
+	}
+	return ASE_NotPresent;
+
+#endif	// !MAC
+}
+
+ASIOError ASIOExit(void)
+{
+	if(theAsioDriver)
+	{
+#if WINDOWS
+		asioDrivers->removeCurrentDriver();
+#else
+		delete theAsioDriver;
+#endif
+	}		
+	theAsioDriver = 0;
+	return ASE_OK;
+}
+
+ASIOError ASIOStart(void)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->start();
+}
+
+ASIOError ASIOStop(void)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->stop();
+}
+
+ASIOError ASIOGetChannels(long *numInputChannels, long *numOutputChannels)
+{
+	if(!theAsioDriver)
+	{
+		*numInputChannels = *numOutputChannels = 0;
+		return ASE_NotPresent;
+	}
+	return theAsioDriver->getChannels(numInputChannels, numOutputChannels);
+}
+
+ASIOError ASIOGetLatencies(long *inputLatency, long *outputLatency)
+{
+	if(!theAsioDriver)
+	{
+		*inputLatency = *outputLatency = 0;
+		return ASE_NotPresent;
+	}
+	return theAsioDriver->getLatencies(inputLatency, outputLatency);
+}
+
+ASIOError ASIOGetBufferSize(long *minSize, long *maxSize, long *preferredSize, long *granularity)
+{
+	if(!theAsioDriver)
+	{
+		*minSize = *maxSize = *preferredSize = *granularity = 0;
+		return ASE_NotPresent;
+	}
+	return theAsioDriver->getBufferSize(minSize, maxSize, preferredSize, granularity);
+}
+
+ASIOError ASIOCanSampleRate(ASIOSampleRate sampleRate)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->canSampleRate(sampleRate);
+}
+
+ASIOError ASIOGetSampleRate(ASIOSampleRate *currentRate)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->getSampleRate(currentRate);
+}
+
+ASIOError ASIOSetSampleRate(ASIOSampleRate sampleRate)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->setSampleRate(sampleRate);
+}
+
+ASIOError ASIOGetClockSources(ASIOClockSource *clocks, long *numSources)
+{
+	if(!theAsioDriver)
+	{
+		*numSources = 0;
+		return ASE_NotPresent;
+	}
+	return theAsioDriver->getClockSources(clocks, numSources);
+}
+
+ASIOError ASIOSetClockSource(long reference)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->setClockSource(reference);
+}
+
+ASIOError ASIOGetSamplePosition(ASIOSamples *sPos, ASIOTimeStamp *tStamp)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->getSamplePosition(sPos, tStamp);
+}
+
+ASIOError ASIOGetChannelInfo(ASIOChannelInfo *info)
+{
+	if(!theAsioDriver)
+	{
+		info->channelGroup = -1;
+		info->type = ASIOSTInt16MSB;
+		strcpy(info->name, "None");
+		return ASE_NotPresent;
+	}
+	return theAsioDriver->getChannelInfo(info);
+}
+
+ASIOError ASIOCreateBuffers(ASIOBufferInfo *bufferInfos, long numChannels,
+	long bufferSize, ASIOCallbacks *callbacks)
+{
+	if(!theAsioDriver)
+	{
+		ASIOBufferInfo *info = bufferInfos;
+		for(long i = 0; i < numChannels; i++, info++)
+			info->buffers[0] = info->buffers[1] = 0;
+		return ASE_NotPresent;
+	}
+	return theAsioDriver->createBuffers(bufferInfos, numChannels, bufferSize, callbacks);
+}
+
+ASIOError ASIODisposeBuffers(void)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->disposeBuffers();
+}
+
+ASIOError ASIOControlPanel(void)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->controlPanel();
+}
+
+ASIOError ASIOFuture(long selector, void *opt)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->future(selector, opt);
+}
+
+ASIOError ASIOOutputReady(void)
+{
+	if(!theAsioDriver)
+		return ASE_NotPresent;
+	return theAsioDriver->outputReady();
+}
+
+#if MAC
+}	// extern "C"
+#pragma export off
+#endif
+
+

Bu fark içinde çok fazla dosya değişikliği olduğu için bazı dosyalar gösterilmiyor